diff --git a/TESTING.asciidoc b/TESTING.asciidoc index a76fd86e9e0..d9fb3daac98 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -354,7 +354,7 @@ These are the linux flavors the Vagrantfile currently supports: * oel-6 aka Oracle Enterprise Linux 6 * oel-7 aka Oracle Enterprise Linux 7 * sles-12 -* opensuse-13 +* opensuse-42 aka Leap We're missing the following from the support matrix because there aren't high quality boxes available in vagrant atlas: diff --git a/Vagrantfile b/Vagrantfile index 00cc9bd638f..a4dc935f15d 100644 --- a/Vagrantfile +++ b/Vagrantfile @@ -60,8 +60,8 @@ Vagrant.configure(2) do |config| config.vm.box = "elastic/fedora-25-x86_64" dnf_common config end - config.vm.define "opensuse-13" do |config| - config.vm.box = "elastic/opensuse-13-x86_64" + config.vm.define "opensuse-42" do |config| + config.vm.box = "elastic/opensuse-42-x86_64" opensuse_common config end config.vm.define "sles-12" do |config| diff --git a/build.gradle b/build.gradle index 7f95e69538f..00d1730a26c 100644 --- a/build.gradle +++ b/build.gradle @@ -123,42 +123,39 @@ allprojects { } } -task('verifyVersions') { - description 'Verifies that all released versions that are indexed compatible are listed in Version.java.' - group 'Verification' - enabled = false == gradle.startParameter.isOffline() +task verifyVersions { doLast { + if (gradle.startParameter.isOffline()) { + throw new GradleException("Must run in online mode to verify versions") + } // Read the list from maven central Node xml new URL('https://repo1.maven.org/maven2/org/elasticsearch/elasticsearch/maven-metadata.xml').openStream().withStream { s -> xml = new XmlParser().parse(s) } - Set knownVersions = new TreeSet<>(xml.versioning.versions.version.collect { it.text() }.findAll { it ==~ /\d\.\d\.\d/ }) + Set knownVersions = new TreeSet<>(xml.versioning.versions.version.collect { it.text() }.findAll { it ==~ /\d\.\d\.\d/ }.collect { Version.fromString(it) }) - // Limit the known versions to those that should be index compatible - knownVersions = knownVersions.findAll { Integer.parseInt(it.split('\\.')[0]) >= prevMajor } + // Limit the known versions to those that should be index compatible, and are not future versions + knownVersions = knownVersions.findAll { it.major >= prevMajor && it.before(VersionProperties.elasticsearch) } /* Limit the listed versions to those that have been marked as released. * Versions not marked as released don't get the same testing and we want * to make sure that we flip all unreleased versions to released as soon * as possible after release. */ - Set actualVersions = new TreeSet<>( - indexCompatVersions - .findAll { false == it.snapshot } - .collect { it.toString() }) - - // TODO this is almost certainly going to fail on 5.4 when we release 5.5.0 + Set actualVersions = new TreeSet<>(indexCompatVersions.findAll { false == it.snapshot }) // Finally, compare! - if (!knownVersions.equals(actualVersions)) { - throw new GradleException("out-of-date versions\nActual :" + - actualVersions + "\nExpected:" + knownVersions + - "; update Version.java") + if (knownVersions.equals(actualVersions) == false) { + throw new GradleException("out-of-date released versions\nActual :" + actualVersions + "\nExpected:" + knownVersions + + "\nUpdate Version.java. Note that Version.CURRENT doesn't count because it is not released.") } } } -task('precommit') { - dependsOn(verifyVersions) + +task branchConsistency { + description 'Ensures this branch is internally consistent. 
For example, that versions constants match released versions.' + group 'Verification' + dependsOn verifyVersions } subprojects { diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy index e2230b116c7..d3d07db0d20 100644 --- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy +++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy @@ -12,10 +12,38 @@ import org.gradle.api.tasks.testing.Test class RandomizedTestingPlugin implements Plugin { void apply(Project project) { + setupSeed(project) replaceTestTask(project.tasks) configureAnt(project.ant) } + /** + * Pins the test seed at configuration time so it isn't different on every + * {@link RandomizedTestingTask} execution. This is useful if random + * decisions in one run of {@linkplain RandomizedTestingTask} influence the + * outcome of subsequent runs. Pinning the seed up front like this makes + * the reproduction line from one run be useful on another run. + */ + static void setupSeed(Project project) { + if (project.rootProject.ext.has('testSeed')) { + /* Skip this if we've already pinned the testSeed. It is important + * that this checks the rootProject so that we know we've only ever + * initialized one time. */ + return + } + String testSeed = System.getProperty('tests.seed') + if (testSeed == null) { + long seed = new Random(System.currentTimeMillis()).nextLong() + testSeed = Long.toUnsignedString(seed, 16).toUpperCase(Locale.ROOT) + } + /* Set the testSeed on the root project first so other projects can use + * it during initialization. */ + project.rootProject.ext.testSeed = testSeed + project.rootProject.subprojects { + project.ext.testSeed = testSeed + } + } + static void replaceTestTask(TaskContainer tasks) { Test oldTestTask = tasks.findByPath('test') if (oldTestTask == null) { diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy index e24c226837d..1817ea57e7a 100644 --- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy +++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy @@ -9,6 +9,7 @@ import org.apache.tools.ant.DefaultLogger import org.apache.tools.ant.RuntimeConfigurable import org.apache.tools.ant.UnknownElement import org.gradle.api.DefaultTask +import org.gradle.api.InvalidUserDataException import org.gradle.api.file.FileCollection import org.gradle.api.file.FileTreeElement import org.gradle.api.internal.tasks.options.Option @@ -259,8 +260,13 @@ class RandomizedTestingTask extends DefaultTask { } } for (Map.Entry prop : systemProperties) { + if (prop.getKey().equals('tests.seed')) { + throw new InvalidUserDataException('Seed should be ' + + 'set on the project instead of a system property') + } sysproperty key: prop.getKey(), value: prop.getValue().toString() } + systemProperty 'tests.seed', project.testSeed for (Map.Entry envvar : environmentVariables) { env key: envvar.getKey(), value: envvar.getValue().toString() } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 87d5ec9ae53..af7716804bf 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ 
b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -120,6 +120,7 @@ class BuildPlugin implements Plugin { println " JDK Version : ${gradleJavaVersionDetails}" println " JAVA_HOME : ${gradleJavaHome}" } + println " Random Testing Seed : ${project.testSeed}" // enforce gradle version GradleVersion minGradle = GradleVersion.version('3.3') @@ -525,7 +526,12 @@ class BuildPlugin implements Plugin { systemProperty 'tests.logger.level', 'WARN' for (Map.Entry property : System.properties.entrySet()) { if (property.getKey().startsWith('tests.') || - property.getKey().startsWith('es.')) { + property.getKey().startsWith('es.')) { + if (property.getKey().equals('tests.seed')) { + /* The seed is already set on the project so we + * shouldn't attempt to override it. */ + continue; + } systemProperty property.getKey(), property.getValue() } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/Version.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/Version.groovy index 1c236c6c44c..b59f26381f2 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/Version.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/Version.groovy @@ -19,9 +19,12 @@ package org.elasticsearch.gradle +import groovy.transform.Sortable + /** * Encapsulates comparison and printing logic for an x.y.z version. */ +@Sortable(includes=['id']) public class Version { final int major @@ -57,10 +60,6 @@ public class Version { return "${major}.${minor}.${bugfix}${snapshotStr}" } - public boolean equals(Version compareTo) { - return id == compareTo.id - } - public boolean before(String compareTo) { return id < fromString(compareTo).id } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy index afb8b621829..ab618a0fdc7 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy @@ -76,6 +76,14 @@ class ClusterConfiguration { " " + "-Xmx" + System.getProperty('tests.heap.size', '512m') + " " + System.getProperty('tests.jvm.argline', '') + /** + * Should the shared environment be cleaned on cluster startup? Defaults + * to {@code true} so we run with a clean cluster, but some tests wish to + * preserve snapshots between clusters, so they set this to false. + */ + @Input + boolean cleanShared = true + /** + * A closure to call which returns the unicast host to connect to for cluster formation. + * diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index e58a87238c5..4dbf3efe595 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -54,14 +54,24 @@ class ClusterFormationTasks { */ static List setup(Project project, String prefix, Task runner, ClusterConfiguration config) { File sharedDir = new File(project.buildDir, "cluster/shared") - // first we remove everything in the shared cluster directory to ensure there are no leftovers in repos or anything - // in theory this should not be necessary but repositories are only deleted in the cluster-state and not on-disk - // such that snapshots survive failures / test runs and there is no simple way today to fix that.
- Task cleanup = project.tasks.create(name: "${prefix}#prepareCluster.cleanShared", type: Delete, dependsOn: config.dependencies) { - delete sharedDir - doLast { - sharedDir.mkdirs() - } + Object startDependencies = config.dependencies + /* First, if we want a clean environment, we remove everything in the + * shared cluster directory to ensure there are no leftovers in repos + * or anything. In theory this should not be necessary, but repositories + * are only deleted in the cluster-state and not on-disk, such that + * snapshots survive failures / test runs, and there is no simple way + * today to fix that. */ + if (config.cleanShared) { + Task cleanup = project.tasks.create( + name: "${prefix}#prepareCluster.cleanShared", + type: Delete, + dependsOn: startDependencies) { + delete sharedDir + doLast { + sharedDir.mkdirs() + } + } + startDependencies = cleanup } List startTasks = [] List nodes = [] @@ -103,7 +113,7 @@ class ClusterFormationTasks { } NodeInfo node = new NodeInfo(config, i, project, prefix, elasticsearchVersion, sharedDir) nodes.add(node) - Task dependsOn = startTasks.empty ? cleanup : startTasks.get(0) + Object dependsOn = startTasks.empty ? startDependencies : startTasks.get(0) startTasks.add(configureNode(project, prefix, runner, dependsOn, node, config, distro, nodes.get(0))) } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantPropertiesExtension.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantPropertiesExtension.groovy index f16913d5be6..e6e7fca62f9 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantPropertiesExtension.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantPropertiesExtension.groovy @@ -25,12 +25,6 @@ class VagrantPropertiesExtension { @Input List boxes - @Input - Long testSeed - - @Input - String formattedTestSeed - @Input String upgradeFromVersion diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantSupportPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantSupportPlugin.groovy new file mode 100644 index 00000000000..d3b7e3aa880 --- /dev/null +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantSupportPlugin.groovy @@ -0,0 +1,127 @@ +package org.elasticsearch.gradle.vagrant + +import org.gradle.api.GradleException +import org.gradle.api.InvalidUserDataException +import org.gradle.api.Plugin +import org.gradle.api.Project +import org.gradle.process.ExecResult +import org.gradle.process.internal.ExecException + +/** + * Global configuration for whether Vagrant tasks are supported in this + * build environment. + */ +class VagrantSupportPlugin implements Plugin { + + @Override + void apply(Project project) { + if (project.rootProject.ext.has('vagrantEnvChecksDone') == false) { + Map vagrantInstallation = getVagrantInstallation(project) + Map virtualBoxInstallation = getVirtualBoxInstallation(project) + + project.rootProject.ext.vagrantInstallation = vagrantInstallation + project.rootProject.ext.virtualBoxInstallation = virtualBoxInstallation + project.rootProject.ext.vagrantSupported = vagrantInstallation.supported && virtualBoxInstallation.supported + project.rootProject.ext.vagrantEnvChecksDone = true + + // Finding that HOME needs to be set when performing vagrant updates + String homeLocation = System.getenv("HOME") + if (project.rootProject.ext.vagrantSupported && homeLocation == null) { + throw new GradleException("Could not locate \$HOME environment variable. 
Vagrant is enabled " + + "and requires \$HOME to be set to function properly.") + } + } + + addVerifyInstallationTasks(project) + } + + private Map getVagrantInstallation(Project project) { + try { + ByteArrayOutputStream pipe = new ByteArrayOutputStream() + ExecResult runResult = project.exec { + commandLine 'vagrant', '--version' + standardOutput pipe + ignoreExitValue true + } + String version = pipe.toString().trim() + if (runResult.exitValue == 0) { + if (version ==~ /Vagrant 1\.(8\.[6-9]|9\.[0-9])+/) { + return [ 'supported' : true ] + } else { + return [ 'supported' : false, + 'info' : "Illegal version of vagrant [${version}]. Need [Vagrant 1.8.6+]" ] + } + } else { + return [ 'supported' : false, + 'info' : "Could not read installed vagrant version:\n" + version ] + } + } catch (ExecException e) { + // Exec still throws this if it cannot find the command, regardless if ignoreExitValue is set. + // Swallow error. Vagrant isn't installed. Don't halt the build here. + return [ 'supported' : false, 'info' : "Could not find vagrant: " + e.message ] + } + } + + private Map getVirtualBoxInstallation(Project project) { + try { + ByteArrayOutputStream pipe = new ByteArrayOutputStream() + ExecResult runResult = project.exec { + commandLine 'vboxmanage', '--version' + standardOutput = pipe + ignoreExitValue true + } + String version = pipe.toString().trim() + if (runResult.exitValue == 0) { + try { + String[] versions = version.split('\\.') + int major = Integer.parseInt(versions[0]) + int minor = Integer.parseInt(versions[1]) + if ((major < 5) || (major == 5 && minor < 1)) { + return [ 'supported' : false, + 'info' : "Illegal version of virtualbox [${version}]. Need [5.1+]" ] + } else { + return [ 'supported' : true ] + } + } catch (NumberFormatException | ArrayIndexOutOfBoundsException e) { + return [ 'supported' : false, + 'info' : "Unable to parse version of virtualbox [${version}]. Required [5.1+]" ] + } + } else { + return [ 'supported': false, 'info': "Could not read installed virtualbox version:\n" + version ] + } + } catch (ExecException e) { + // Exec still throws this if it cannot find the command, regardless if ignoreExitValue is set. + // Swallow error. VirtualBox isn't installed. Don't halt the build here. 
+ return [ 'supported' : false, 'info' : "Could not find virtualbox: " + e.message ] + } + } + + private void addVerifyInstallationTasks(Project project) { + createCheckVagrantVersionTask(project) + createCheckVirtualBoxVersionTask(project) + } + + private void createCheckVagrantVersionTask(Project project) { + project.tasks.create('vagrantCheckVersion') { + description 'Check the Vagrant version' + group 'Verification' + doLast { + if (project.rootProject.vagrantInstallation.supported == false) { + throw new InvalidUserDataException(project.rootProject.vagrantInstallation.info) + } + } + } + } + + private void createCheckVirtualBoxVersionTask(Project project) { + project.tasks.create('virtualboxCheckVersion') { + description 'Check the Virtualbox version' + group 'Verification' + doLast { + if (project.rootProject.virtualBoxInstallation.supported == false) { + throw new InvalidUserDataException(project.rootProject.virtualBoxInstallation.info) + } + } + } + } +} diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy index 399b816280b..8c429e94e46 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy @@ -1,5 +1,6 @@ package org.elasticsearch.gradle.vagrant +import com.carrotsearch.gradle.junit4.RandomizedTestingPlugin import org.elasticsearch.gradle.FileContentsTask import org.gradle.api.* import org.gradle.api.artifacts.dsl.RepositoryHandler @@ -20,7 +21,7 @@ class VagrantTestPlugin implements Plugin { 'fedora-25', 'oel-6', 'oel-7', - 'opensuse-13', + 'opensuse-42', 'sles-12', 'ubuntu-1404', 'ubuntu-1604' @@ -100,23 +101,10 @@ class VagrantTestPlugin implements Plugin { private static void createBatsConfiguration(Project project) { project.configurations.create(BATS) - final long seed - final String formattedSeed - String maybeTestsSeed = System.getProperty("tests.seed") - if (maybeTestsSeed != null) { - if (maybeTestsSeed.trim().isEmpty()) { - throw new GradleException("explicit tests.seed cannot be empty") - } - String masterSeed = maybeTestsSeed.tokenize(':').get(0) - seed = new BigInteger(masterSeed, 16).longValue() - formattedSeed = maybeTestsSeed - } else { - seed = new Random().nextLong() - formattedSeed = String.format("%016X", seed) - } - String upgradeFromVersion = System.getProperty("tests.packaging.upgradeVersion"); if (upgradeFromVersion == null) { + String firstPartOfSeed = project.rootProject.testSeed.tokenize(':').get(0) + final long seed = Long.parseUnsignedLong(firstPartOfSeed, 16) upgradeFromVersion = project.indexCompatVersions[new Random(seed).nextInt(project.indexCompatVersions.size())] } @@ -130,8 +118,6 @@ class VagrantTestPlugin implements Plugin { project.dependencies.add(BATS, "org.elasticsearch.distribution.${it}:elasticsearch:${upgradeFromVersion}@${it}") } - project.extensions.esvagrant.testSeed = seed - project.extensions.esvagrant.formattedTestSeed = formattedSeed project.extensions.esvagrant.upgradeFromVersion = upgradeFromVersion } @@ -227,43 +213,6 @@ class VagrantTestPlugin implements Plugin { vagrantSetUpTask.dependsOn copyBatsTests, copyBatsUtils, copyBatsArchives, createVersionFile, createUpgradeFromFile } - private static void createCheckVagrantVersionTask(Project project) { - project.tasks.create('vagrantCheckVersion', Exec) { - description 'Check the Vagrant version' - group 'Verification' - 
commandLine 'vagrant', '--version' - standardOutput = new ByteArrayOutputStream() - doLast { - String version = standardOutput.toString().trim() - if ((version ==~ /Vagrant 1\.(8\.[6-9]|9\.[0-9])+/) == false) { - throw new InvalidUserDataException("Illegal version of vagrant [${version}]. Need [Vagrant 1.8.6+]") - } - } - } - } - - private static void createCheckVirtualBoxVersionTask(Project project) { - project.tasks.create('virtualboxCheckVersion', Exec) { - description 'Check the Virtualbox version' - group 'Verification' - commandLine 'vboxmanage', '--version' - standardOutput = new ByteArrayOutputStream() - doLast { - String version = standardOutput.toString().trim() - try { - String[] versions = version.split('\\.') - int major = Integer.parseInt(versions[0]) - int minor = Integer.parseInt(versions[1]) - if ((major < 5) || (major == 5 && minor < 1)) { - throw new InvalidUserDataException("Illegal version of virtualbox [${version}]. Need [5.1+]") - } - } catch (NumberFormatException | ArrayIndexOutOfBoundsException e) { - throw new InvalidUserDataException("Unable to parse version of virtualbox [${version}]. Required [5.1+]", e) - } - } - } - } - private static void createPackagingTestTask(Project project) { project.tasks.create('packagingTest') { group 'Verification' @@ -291,8 +240,6 @@ class VagrantTestPlugin implements Plugin { createCleanTask(project) createStopTask(project) createSmokeTestTask(project) - createCheckVagrantVersionTask(project) - createCheckVirtualBoxVersionTask(project) createPrepareVagrantTestEnvTask(project) createPackagingTestTask(project) createPlatformTestTask(project) @@ -395,7 +342,7 @@ class VagrantTestPlugin implements Plugin { void afterExecute(Task task, TaskState state) { if (state.failure != null) { println "REPRODUCE WITH: gradle ${packaging.path} " + - "-Dtests.seed=${project.extensions.esvagrant.formattedTestSeed} " + "-Dtests.seed=${project.testSeed} " } } } @@ -415,14 +362,14 @@ class VagrantTestPlugin implements Plugin { environmentVars vagrantEnvVars dependsOn up finalizedBy halt - args '--command', PLATFORM_TEST_COMMAND + " -Dtests.seed=${-> project.extensions.esvagrant.formattedTestSeed}" + args '--command', PLATFORM_TEST_COMMAND + " -Dtests.seed=${-> project.testSeed}" } TaskExecutionAdapter platformReproListener = new TaskExecutionAdapter() { @Override void afterExecute(Task task, TaskState state) { if (state.failure != null) { println "REPRODUCE WITH: gradle ${platform.path} " + - "-Dtests.seed=${project.extensions.esvagrant.formattedTestSeed} " + "-Dtests.seed=${project.testSeed} " } } } diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.vagrantsupport.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.vagrantsupport.properties new file mode 100644 index 00000000000..73a3f412349 --- /dev/null +++ b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.vagrantsupport.properties @@ -0,0 +1 @@ +implementation-class=org.elasticsearch.gradle.vagrant.VagrantSupportPlugin \ No newline at end of file diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index ccd8fea0120..678155c6561 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -16,7 +16,6 @@ - @@ -428,7 +427,6 @@ - diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 94f3c1c73af..e7243b9dad9 100644 --- a/buildSrc/version.properties +++ 
b/buildSrc/version.properties @@ -1,5 +1,5 @@ # When updating elasticsearch, please update 'rest' version in core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy -elasticsearch = 6.0.0-alpha2 +elasticsearch = 6.0.0-alpha3 lucene = 7.0.0-snapshot-a0aef2f # optional dependencies diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java index d8a5e4fa0ea..ca5f3220567 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java @@ -57,6 +57,11 @@ public class RestNoopBulkAction extends BaseRestHandler { controller.registerHandler(PUT, "/{index}/{type}/_noop_bulk", this); } + @Override + public String getName() { + return "noop_bulk_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { BulkRequest bulkRequest = Requests.bulkRequest(); @@ -73,8 +78,8 @@ public class RestNoopBulkAction extends BaseRestHandler { } bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT)); bulkRequest.setRefreshPolicy(request.param("refresh")); - bulkRequest.add(request.content(), defaultIndex, defaultType, defaultRouting, defaultFields, null, defaultPipeline, null, true, - request.getXContentType()); + bulkRequest.add(request.requiredContent(), defaultIndex, defaultType, defaultRouting, defaultFields, + null, defaultPipeline, null, true, request.getXContentType()); // short circuit the call to the transport layer return channel -> { diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/RestNoopSearchAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/RestNoopSearchAction.java index 48a453c3725..39c9510b8a2 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/RestNoopSearchAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/RestNoopSearchAction.java @@ -42,6 +42,11 @@ public class RestNoopSearchAction extends BaseRestHandler { controller.registerHandler(POST, "/{index}/{type}/_noop_search", this); } + @Override + public String getName() { + return "noop_search_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { SearchRequest searchRequest = new SearchRequest(); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java index d62f47c0e31..9e881cf7b9a 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java @@ -33,7 +33,9 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.SearchRequest; +import 
org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.WriteRequest; @@ -63,7 +65,7 @@ import java.util.StringJoiner; final class Request { - private static final XContentType REQUEST_BODY_CONTENT_TYPE = XContentType.JSON; + static final XContentType REQUEST_BODY_CONTENT_TYPE = XContentType.JSON; final String method; final String endpoint; @@ -338,6 +340,16 @@ final class Request { return new Request(HttpGet.METHOD_NAME, endpoint, params.getParams(), entity); } + static Request searchScroll(SearchScrollRequest searchScrollRequest) throws IOException { + HttpEntity entity = createEntity(searchScrollRequest, REQUEST_BODY_CONTENT_TYPE); + return new Request("GET", "/_search/scroll", Collections.emptyMap(), entity); + } + + static Request clearScroll(ClearScrollRequest clearScrollRequest) throws IOException { + HttpEntity entity = createEntity(clearScrollRequest, REQUEST_BODY_CONTENT_TYPE); + return new Request("DELETE", "/_search/scroll", Collections.emptyMap(), entity); + } + private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException { BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef(); return new ByteArrayEntity(source.bytes, source.offset, source.length, ContentType.create(xContentType.mediaType())); @@ -483,7 +495,7 @@ final class Request { return this; } - Params withIndicesOptions (IndicesOptions indicesOptions) { + Params withIndicesOptions(IndicesOptions indicesOptions) { putParam("ignore_unavailable", Boolean.toString(indicesOptions.ignoreUnavailable())); putParam("allow_no_indices", Boolean.toString(indicesOptions.allowNoIndices())); String expandWildcards; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 47645817c84..a354bdfb7ba 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -36,8 +36,11 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.main.MainRequest; import org.elasticsearch.action.main.MainResponse; +import org.elasticsearch.action.search.ClearScrollRequest; +import org.elasticsearch.action.search.ClearScrollResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.common.CheckedFunction; @@ -325,14 +328,57 @@ public class RestHighLevelClient { performRequestAsyncAndParseEntity(searchRequest, Request::search, SearchResponse::fromXContent, listener, emptySet(), headers); } - private Resp performRequestAndParseEntity(Req request, + /** + * Executes a search using the Search Scroll api + * + * See Search Scroll + * API on elastic.co + */ + public SearchResponse searchScroll(SearchScrollRequest searchScrollRequest, Header... 
headers) throws IOException { + return performRequestAndParseEntity(searchScrollRequest, Request::searchScroll, SearchResponse::fromXContent, emptySet(), headers); + } + + /** + * Asynchronously executes a search using the Search Scroll api + * + * See Search Scroll + * API on elastic.co + */ + public void searchScrollAsync(SearchScrollRequest searchScrollRequest, ActionListener listener, Header... headers) { + performRequestAsyncAndParseEntity(searchScrollRequest, Request::searchScroll, SearchResponse::fromXContent, + listener, emptySet(), headers); + } + + /** + * Clears one or more scroll ids using the Clear Scroll api + * + * See + * Clear Scroll API on elastic.co + */ + public ClearScrollResponse clearScroll(ClearScrollRequest clearScrollRequest, Header... headers) throws IOException { + return performRequestAndParseEntity(clearScrollRequest, Request::clearScroll, ClearScrollResponse::fromXContent, + emptySet(), headers); + } + + /** + * Asynchronously clears one or more scroll ids using the Clear Scroll api + * + * See + * Clear Scroll API on elastic.co + */ + public void clearScrollAsync(ClearScrollRequest clearScrollRequest, ActionListener listener, Header... headers) { + performRequestAsyncAndParseEntity(clearScrollRequest, Request::clearScroll, ClearScrollResponse::fromXContent, + listener, emptySet(), headers); + } + + protected Resp performRequestAndParseEntity(Req request, CheckedFunction requestConverter, CheckedFunction entityParser, Set ignores, Header... headers) throws IOException { return performRequest(request, requestConverter, (response) -> parseEntity(response.getEntity(), entityParser), ignores, headers); } - Resp performRequest(Req request, + protected Resp performRequest(Req request, CheckedFunction requestConverter, CheckedFunction responseConverter, Set ignores, Header... headers) throws IOException { @@ -354,6 +400,7 @@ public class RestHighLevelClient { } throw parseResponseException(e); } + try { return responseConverter.apply(response); } catch(Exception e) { @@ -361,7 +408,7 @@ public class RestHighLevelClient { } } - private void performRequestAsyncAndParseEntity(Req request, + protected void performRequestAsyncAndParseEntity(Req request, CheckedFunction requestConverter, CheckedFunction entityParser, ActionListener listener, Set ignores, Header... headers) { @@ -369,7 +416,7 @@ public class RestHighLevelClient { listener, ignores, headers); } - void performRequestAsync(Req request, + protected void performRequestAsync(Req request, CheckedFunction requestConverter, CheckedFunction responseConverter, ActionListener listener, Set ignores, Header... 
headers) { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java index 4abfd713bcd..b078a983357 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java @@ -580,7 +580,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase { BulkResponse bulkResponse = execute(bulkRequest, highLevelClient()::bulk, highLevelClient()::bulkAsync); assertEquals(RestStatus.OK, bulkResponse.status()); - assertTrue(bulkResponse.getTookInMillis() > 0); + assertTrue(bulkResponse.getTook().getMillis() > 0); assertEquals(nbItems, bulkResponse.getItems().length); validateBulkResponses(nbItems, errors, bulkResponse, bulkRequest); @@ -671,7 +671,7 @@ public class CrudIT extends ESRestHighLevelClientTestCase { BulkRequest bulkRequest = requestRef.get(); assertEquals(RestStatus.OK, bulkResponse.status()); - assertTrue(bulkResponse.getTookInMillis() > 0); + assertTrue(bulkResponse.getTook().getMillis() > 0); assertEquals(nbItems, bulkResponse.getItems().length); assertNull(error.get()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java new file mode 100644 index 00000000000..8ad42c22320 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java @@ -0,0 +1,181 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client; + +import org.apache.http.Header; +import org.apache.http.HttpEntity; +import org.apache.http.HttpHost; +import org.apache.http.HttpResponse; +import org.apache.http.ProtocolVersion; +import org.apache.http.RequestLine; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.entity.ContentType; +import org.apache.http.message.BasicHeader; +import org.apache.http.message.BasicHttpResponse; +import org.apache.http.message.BasicRequestLine; +import org.apache.http.message.BasicStatusLine; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Build; +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.main.MainRequest; +import org.elasticsearch.action.main.MainResponse; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.io.IOException; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; + +import static java.util.Collections.emptyMap; +import static java.util.Collections.emptySet; +import static org.elasticsearch.client.ESRestHighLevelClientTestCase.execute; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyMapOf; +import static org.mockito.Matchers.anyObject; +import static org.mockito.Matchers.anyVararg; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; + +/** + * Tests and demonstrates how {@link RestHighLevelClient} can be extended to support custom endpoints. + */ +public class CustomRestHighLevelClientTests extends ESTestCase { + + private static final String ENDPOINT = "/_custom"; + + private CustomRestClient restHighLevelClient; + + @Before + @SuppressWarnings("unchecked") + public void initClients() throws IOException { + if (restHighLevelClient == null) { + final RestClient restClient = mock(RestClient.class); + restHighLevelClient = new CustomRestClient(restClient); + + doAnswer(mock -> mockPerformRequest((Header) mock.getArguments()[4])) + .when(restClient) + .performRequest(eq(HttpGet.METHOD_NAME), eq(ENDPOINT), anyMapOf(String.class, String.class), anyObject(), anyVararg()); + + doAnswer(mock -> mockPerformRequestAsync((Header) mock.getArguments()[5], (ResponseListener) mock.getArguments()[4])) + .when(restClient) + .performRequestAsync(eq(HttpGet.METHOD_NAME), eq(ENDPOINT), anyMapOf(String.class, String.class), + any(HttpEntity.class), any(ResponseListener.class), anyVararg()); + } + } + + public void testCustomEndpoint() throws IOException { + final MainRequest request = new MainRequest(); + final Header header = new BasicHeader("node_name", randomAlphaOfLengthBetween(1, 10)); + + MainResponse response = execute(request, restHighLevelClient::custom, restHighLevelClient::customAsync, header); + assertEquals(header.getValue(), response.getNodeName()); + + response = execute(request, restHighLevelClient::customAndParse, restHighLevelClient::customAndParseAsync, header); + assertEquals(header.getValue(), response.getNodeName()); + } + + /** + * The {@link RestHighLevelClient} must declare the following execution methods using the protected modifier + * so that they can be used by subclasses to implement custom logic. 
+ */ + @SuppressForbidden(reason = "We're forced to use Class#getDeclaredMethods() here because this test checks protected methods") + public void testMethodsVisibility() throws ClassNotFoundException { + String[] methodNames = new String[]{"performRequest", "performRequestAndParseEntity", "performRequestAsync", + "performRequestAsyncAndParseEntity"}; + for (String methodName : methodNames) { + boolean found = false; + for (Method method : RestHighLevelClient.class.getDeclaredMethods()) { + if (method.getName().equals(methodName)) { + assertTrue("Method " + methodName + " must be protected", Modifier.isProtected(method.getModifiers())); + found = true; + } + } + assertTrue("Failed to find method " + methodName, found); + } + } + + /** + * Mocks the asynchronous request execution by calling the {@link #mockPerformRequest(Header)} method. + */ + private Void mockPerformRequestAsync(Header httpHeader, ResponseListener responseListener) { + try { + responseListener.onSuccess(mockPerformRequest(httpHeader)); + } catch (IOException e) { + responseListener.onFailure(e); + } + return null; + } + + /** + * Mocks the synchronous request execution as if it were executed by Elasticsearch. + */ + private Response mockPerformRequest(Header httpHeader) throws IOException { + ProtocolVersion protocol = new ProtocolVersion("HTTP", 1, 1); + HttpResponse httpResponse = new BasicHttpResponse(new BasicStatusLine(protocol, 200, "OK")); + + MainResponse response = new MainResponse(httpHeader.getValue(), Version.CURRENT, ClusterName.DEFAULT, "_na", Build.CURRENT, true); + BytesRef bytesRef = XContentHelper.toXContent(response, XContentType.JSON, false).toBytesRef(); + httpResponse.setEntity(new ByteArrayEntity(bytesRef.bytes, ContentType.APPLICATION_JSON)); + + RequestLine requestLine = new BasicRequestLine(HttpGet.METHOD_NAME, ENDPOINT, protocol); + return new Response(requestLine, new HttpHost("localhost", 9200), httpResponse); + } + + /** + * A custom high level client that provides custom methods to execute a request and get its associated response back. + */ + static class CustomRestClient extends RestHighLevelClient { + + private CustomRestClient(RestClient restClient) { + super(restClient); + } + + MainResponse custom(MainRequest mainRequest, Header... headers) throws IOException { + return performRequest(mainRequest, this::toRequest, this::toResponse, emptySet(), headers); + } + + MainResponse customAndParse(MainRequest mainRequest, Header... headers) throws IOException { + return performRequestAndParseEntity(mainRequest, this::toRequest, MainResponse::fromXContent, emptySet(), headers); + } + + void customAsync(MainRequest mainRequest, ActionListener listener, Header... headers) { + performRequestAsync(mainRequest, this::toRequest, this::toResponse, listener, emptySet(), headers); + } + + void customAndParseAsync(MainRequest mainRequest, ActionListener listener, Header... 
headers) { + performRequestAsyncAndParseEntity(mainRequest, this::toRequest, MainResponse::fromXContent, listener, emptySet(), headers); + } + + Request toRequest(MainRequest mainRequest) throws IOException { + return new Request(HttpGet.METHOD_NAME, ENDPOINT, emptyMap(), null); + } + + MainResponse toResponse(Response response) throws IOException { + return parseEntity(response.getEntity(), MainResponse::fromXContent); + } + } +} \ No newline at end of file diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java index 12f0d991e7f..f18e348adce 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java @@ -28,7 +28,9 @@ import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.WriteRequest; @@ -40,6 +42,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; @@ -714,12 +717,44 @@ public class RequestTests extends ESTestCase { if (searchSourceBuilder == null) { assertNull(request.entity); } else { - BytesReference expectedBytes = XContentHelper.toXContent(searchSourceBuilder, XContentType.JSON, false); - assertEquals(XContentType.JSON.mediaType(), request.entity.getContentType().getValue()); - assertEquals(expectedBytes, new BytesArray(EntityUtils.toByteArray(request.entity))); + assertToXContentBody(searchSourceBuilder, request.entity); } } + public void testSearchScroll() throws IOException { + SearchScrollRequest searchScrollRequest = new SearchScrollRequest(); + searchScrollRequest.scrollId(randomAlphaOfLengthBetween(5, 10)); + if (randomBoolean()) { + searchScrollRequest.scroll(randomPositiveTimeValue()); + } + Request request = Request.searchScroll(searchScrollRequest); + assertEquals("GET", request.method); + assertEquals("/_search/scroll", request.endpoint); + assertEquals(0, request.params.size()); + assertToXContentBody(searchScrollRequest, request.entity); + assertEquals(Request.REQUEST_BODY_CONTENT_TYPE.mediaType(), request.entity.getContentType().getValue()); + } + + public void testClearScroll() throws IOException { + ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); + int numScrolls = randomIntBetween(1, 10); + for (int i = 0; i < numScrolls; i++) { + clearScrollRequest.addScrollId(randomAlphaOfLengthBetween(5, 10)); + } + Request request = Request.clearScroll(clearScrollRequest); + assertEquals("DELETE", request.method); + assertEquals("/_search/scroll", request.endpoint); + assertEquals(0, request.params.size()); + assertToXContentBody(clearScrollRequest, request.entity); + 
assertEquals(Request.REQUEST_BODY_CONTENT_TYPE.mediaType(), request.entity.getContentType().getValue()); + } + + private static void assertToXContentBody(ToXContent expectedBody, HttpEntity actualEntity) throws IOException { + BytesReference expectedBytes = XContentHelper.toXContent(expectedBody, Request.REQUEST_BODY_CONTENT_TYPE, false); + assertEquals(XContentType.JSON.mediaType(), actualEntity.getContentType().getValue()); + assertEquals(expectedBytes, new BytesArray(EntityUtils.toByteArray(actualEntity))); + } + public void testParams() { final int nbParams = randomIntBetween(0, 10); Request.Params params = Request.Params.builder(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index 05883a066a5..7fc0733a7f0 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -33,6 +33,7 @@ import org.apache.http.entity.StringEntity; import org.apache.http.message.BasicHttpResponse; import org.apache.http.message.BasicRequestLine; import org.apache.http.message.BasicStatusLine; +import org.apache.http.nio.entity.NStringEntity; import org.elasticsearch.Build; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; @@ -41,21 +42,28 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.main.MainRequest; import org.elasticsearch.action.main.MainResponse; +import org.elasticsearch.action.search.ClearScrollRequest; +import org.elasticsearch.action.search.ClearScrollResponse; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchResponseSections; +import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.cbor.CborXContent; import org.elasticsearch.common.xcontent.smile.SmileXContent; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.test.ESTestCase; import org.junit.Before; import org.mockito.ArgumentMatcher; -import org.mockito.Matchers; import org.mockito.internal.matchers.ArrayEquals; import org.mockito.internal.matchers.VarargMatcher; @@ -68,6 +76,7 @@ import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import static org.elasticsearch.client.RestClientTestUtil.randomHeaders; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static org.hamcrest.CoreMatchers.instanceOf; import static org.mockito.Matchers.anyMapOf; @@ -76,6 +85,8 @@ import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.anyVararg; 
import static org.mockito.Matchers.argThat; import static org.mockito.Matchers.eq; +import static org.mockito.Matchers.isNotNull; +import static org.mockito.Matchers.isNull; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -95,49 +106,83 @@ public class RestHighLevelClientTests extends ESTestCase { } public void testPingSuccessful() throws IOException { - Header[] headers = RestClientTestUtil.randomHeaders(random(), "Header"); + Header[] headers = randomHeaders(random(), "Header"); Response response = mock(Response.class); when(response.getStatusLine()).thenReturn(newStatusLine(RestStatus.OK)); when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class), anyObject(), anyVararg())).thenReturn(response); assertTrue(restHighLevelClient.ping(headers)); verify(restClient).performRequest(eq("HEAD"), eq("/"), eq(Collections.emptyMap()), - Matchers.isNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers))); + isNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers))); } public void testPing404NotFound() throws IOException { - Header[] headers = RestClientTestUtil.randomHeaders(random(), "Header"); + Header[] headers = randomHeaders(random(), "Header"); Response response = mock(Response.class); when(response.getStatusLine()).thenReturn(newStatusLine(RestStatus.NOT_FOUND)); when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class), anyObject(), anyVararg())).thenReturn(response); assertFalse(restHighLevelClient.ping(headers)); verify(restClient).performRequest(eq("HEAD"), eq("/"), eq(Collections.emptyMap()), - Matchers.isNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers))); + isNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers))); } public void testPingSocketTimeout() throws IOException { - Header[] headers = RestClientTestUtil.randomHeaders(random(), "Header"); + Header[] headers = randomHeaders(random(), "Header"); when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class), anyObject(), anyVararg())).thenThrow(new SocketTimeoutException()); expectThrows(SocketTimeoutException.class, () -> restHighLevelClient.ping(headers)); verify(restClient).performRequest(eq("HEAD"), eq("/"), eq(Collections.emptyMap()), - Matchers.isNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers))); + isNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers))); } public void testInfo() throws IOException { - Header[] headers = RestClientTestUtil.randomHeaders(random(), "Header"); - Response response = mock(Response.class); + Header[] headers = randomHeaders(random(), "Header"); MainResponse testInfo = new MainResponse("nodeName", Version.CURRENT, new ClusterName("clusterName"), "clusterUuid", Build.CURRENT, true); - when(response.getEntity()).thenReturn( - new StringEntity(toXContent(testInfo, XContentType.JSON, false).utf8ToString(), ContentType.APPLICATION_JSON)); - when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class), - anyObject(), anyVararg())).thenReturn(response); + mockResponse(testInfo); MainResponse receivedInfo = restHighLevelClient.info(headers); assertEquals(testInfo, receivedInfo); verify(restClient).performRequest(eq("GET"), eq("/"), eq(Collections.emptyMap()), - Matchers.isNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers))); + isNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers))); + } 
+ + public void testSearchScroll() throws IOException { + Header[] headers = randomHeaders(random(), "Header"); + SearchResponse mockSearchResponse = new SearchResponse(new SearchResponseSections(SearchHits.empty(), InternalAggregations.EMPTY, + null, false, false, null, 1), randomAlphaOfLengthBetween(5, 10), 5, 5, 100, new ShardSearchFailure[0]); + mockResponse(mockSearchResponse); + SearchResponse searchResponse = restHighLevelClient.searchScroll(new SearchScrollRequest(randomAlphaOfLengthBetween(5, 10)), + headers); + assertEquals(mockSearchResponse.getScrollId(), searchResponse.getScrollId()); + assertEquals(0, searchResponse.getHits().totalHits); + assertEquals(5, searchResponse.getTotalShards()); + assertEquals(5, searchResponse.getSuccessfulShards()); + assertEquals(100, searchResponse.getTook().getMillis()); + verify(restClient).performRequest(eq("GET"), eq("/_search/scroll"), eq(Collections.emptyMap()), + isNotNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers))); + } + + public void testClearScroll() throws IOException { + Header[] headers = randomHeaders(random(), "Header"); + ClearScrollResponse mockClearScrollResponse = new ClearScrollResponse(randomBoolean(), randomIntBetween(0, Integer.MAX_VALUE)); + mockResponse(mockClearScrollResponse); + ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); + clearScrollRequest.addScrollId(randomAlphaOfLengthBetween(5, 10)); + ClearScrollResponse clearScrollResponse = restHighLevelClient.clearScroll(clearScrollRequest, headers); + assertEquals(mockClearScrollResponse.isSucceeded(), clearScrollResponse.isSucceeded()); + assertEquals(mockClearScrollResponse.getNumFreed(), clearScrollResponse.getNumFreed()); + verify(restClient).performRequest(eq("DELETE"), eq("/_search/scroll"), eq(Collections.emptyMap()), + isNotNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers))); + } + + private void mockResponse(ToXContent toXContent) throws IOException { + Response response = mock(Response.class); + ContentType contentType = ContentType.parse(Request.REQUEST_BODY_CONTENT_TYPE.mediaType()); + String requestBody = toXContent(toXContent, Request.REQUEST_BODY_CONTENT_TYPE, false).utf8ToString(); + when(response.getEntity()).thenReturn(new NStringEntity(requestBody, contentType)); + when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class), + anyObject(), anyVararg())).thenReturn(response); } public void testRequestValidation() { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java index 7aeee5bf47f..328f2ee32f5 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java @@ -19,11 +19,19 @@ package org.elasticsearch.client; +import org.apache.http.HttpEntity; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; +import org.apache.http.nio.entity.NStringEntity; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.search.ClearScrollRequest; +import org.elasticsearch.action.search.ClearScrollResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.common.unit.TimeValue; +import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.join.aggregations.Children; import org.elasticsearch.join.aggregations.ChildrenAggregationBuilder; @@ -37,6 +45,7 @@ import org.elasticsearch.search.aggregations.matrix.stats.MatrixStats; import org.elasticsearch.search.aggregations.matrix.stats.MatrixStatsAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; @@ -46,10 +55,14 @@ import java.io.IOException; import java.util.Arrays; import java.util.Collections; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.both; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.lessThan; public class SearchIT extends ESRestHighLevelClientTestCase { @@ -161,7 +174,6 @@ public class SearchIT extends ESRestHighLevelClientTestCase { assertSearchHeader(searchResponse); assertNull(searchResponse.getSuggest()); assertEquals(Collections.emptyMap(), searchResponse.getProfileResults()); - assertThat(searchResponse.getTook().nanos(), greaterThan(0L)); assertEquals(5, searchResponse.getHits().totalHits); assertEquals(0, searchResponse.getHits().getHits().length); assertEquals(0f, searchResponse.getHits().getMaxScore(), 0f); @@ -244,7 +256,6 @@ public class SearchIT extends ESRestHighLevelClientTestCase { assertSearchHeader(searchResponse); assertNull(searchResponse.getSuggest()); assertEquals(Collections.emptyMap(), searchResponse.getProfileResults()); - assertThat(searchResponse.getTook().nanos(), greaterThan(0L)); assertEquals(5, searchResponse.getHits().totalHits); assertEquals(0, searchResponse.getHits().getHits().length); assertEquals(0f, searchResponse.getHits().getMaxScore(), 0f); @@ -324,7 +335,6 @@ public class SearchIT extends ESRestHighLevelClientTestCase { assertSearchHeader(searchResponse); assertNull(searchResponse.getSuggest()); assertEquals(Collections.emptyMap(), searchResponse.getProfileResults()); - assertThat(searchResponse.getTook().nanos(), greaterThan(0L)); assertEquals(3, searchResponse.getHits().totalHits); assertEquals(0, searchResponse.getHits().getHits().length); assertEquals(0f, searchResponse.getHits().getMaxScore(), 0f); @@ -385,8 +395,67 @@ public class SearchIT extends ESRestHighLevelClientTestCase { } } + public void testSearchScroll() throws Exception { + + for (int i = 0; i < 100; i++) { + XContentBuilder builder = jsonBuilder().startObject().field("field", i).endObject(); + HttpEntity entity = new NStringEntity(builder.string(), ContentType.APPLICATION_JSON); + client().performRequest("PUT", "test/type1/" + Integer.toString(i), Collections.emptyMap(), entity); + } + client().performRequest("POST", "/test/_refresh"); + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(35).sort("field", SortOrder.ASC); + SearchRequest searchRequest = new 
SearchRequest("test").scroll(TimeValue.timeValueMinutes(2)).source(searchSourceBuilder); + SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync); + + try { + long counter = 0; + assertSearchHeader(searchResponse); + assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse.getHits()) { + assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); + } + + searchResponse = execute(new SearchScrollRequest(searchResponse.getScrollId()).scroll(TimeValue.timeValueMinutes(2)), + highLevelClient()::searchScroll, highLevelClient()::searchScrollAsync); + + assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(35)); + for (SearchHit hit : searchResponse.getHits()) { + assertEquals(counter++, ((Number) hit.getSortValues()[0]).longValue()); + } + + searchResponse = execute(new SearchScrollRequest(searchResponse.getScrollId()).scroll(TimeValue.timeValueMinutes(2)), + highLevelClient()::searchScroll, highLevelClient()::searchScrollAsync); + + assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L)); + assertThat(searchResponse.getHits().getHits().length, equalTo(30)); + for (SearchHit hit : searchResponse.getHits()) { + assertEquals(counter++, ((Number) hit.getSortValues()[0]).longValue()); + } + } finally { + ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); + clearScrollRequest.addScrollId(searchResponse.getScrollId()); + ClearScrollResponse clearScrollResponse = execute(clearScrollRequest, + // Not using a method reference to work around https://bugs.eclipse.org/bugs/show_bug.cgi?id=517951 + (request, headers) -> highLevelClient().clearScroll(request, headers), + (request, listener, headers) -> highLevelClient().clearScrollAsync(request, listener, headers)); + assertThat(clearScrollResponse.getNumFreed(), greaterThan(0)); + assertTrue(clearScrollResponse.isSucceeded()); + + SearchScrollRequest scrollRequest = new SearchScrollRequest(searchResponse.getScrollId()).scroll(TimeValue.timeValueMinutes(2)); + ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> execute(scrollRequest, + highLevelClient()::searchScroll, highLevelClient()::searchScrollAsync)); + assertEquals(RestStatus.NOT_FOUND, exception.status()); + assertThat(exception.getRootCause(), instanceOf(ElasticsearchException.class)); + ElasticsearchException rootCause = (ElasticsearchException) exception.getRootCause(); + assertThat(rootCause.getMessage(), containsString("No search context found for")); + } + } + private static void assertSearchHeader(SearchResponse searchResponse) { - assertThat(searchResponse.getTook().nanos(), greaterThan(0L)); + assertThat(searchResponse.getTook().nanos(), greaterThanOrEqualTo(0L)); assertEquals(0, searchResponse.getFailedShards()); assertThat(searchResponse.getTotalShards(), greaterThan(0)); assertEquals(searchResponse.getTotalShards(), searchResponse.getSuccessfulShards()); diff --git a/core/build.gradle b/core/build.gradle index 141c647f488..2e2a7fc2fde 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -81,7 +81,7 @@ dependencies { compile "com.vividsolutions:jts:${versions.jts}", optional // logging - compile "org.apache.logging.log4j:log4j-api:${versions.log4j}", optional + compile "org.apache.logging.log4j:log4j-api:${versions.log4j}" compile 
"org.apache.logging.log4j:log4j-core:${versions.log4j}", optional // to bridge dependencies that are still on Log4j 1 to Log4j 2 compile "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}", optional @@ -118,7 +118,6 @@ compileTestJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-tr forbiddenPatterns { exclude '**/*.json' exclude '**/*.jmx' - exclude '**/org/elasticsearch/cluster/routing/shard_routes.txt' } task generateModulesList { diff --git a/core/src/main/java/org/apache/lucene/analysis/PrefixAnalyzer.java b/core/src/main/java/org/apache/lucene/analysis/PrefixAnalyzer.java deleted file mode 100644 index 0b7c433da7f..00000000000 --- a/core/src/main/java/org/apache/lucene/analysis/PrefixAnalyzer.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.lucene.analysis; - -import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; -import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; - -import java.io.IOException; -import java.util.Collections; -import java.util.Iterator; - -/** - * This {@link Analyzer} wraps another analyzer and adds a set of prefixes to the - * underlying TokenStream. While these prefixes are iterated the position attribute - * will not be incremented. Also each prefix will be separated from the other tokens - * by a separator character. - * NOTE: The sequence of prefixes needs to be not empty - */ -public class PrefixAnalyzer extends Analyzer { - - private final char separator; - private final Iterable prefix; - private final Analyzer analyzer; - - /** - * Create a new {@link PrefixAnalyzer}. The separator will be set to the DEFAULT_SEPARATOR. - * - * @param analyzer {@link Analyzer} to wrap - * @param prefix Single prefix - */ - public PrefixAnalyzer(Analyzer analyzer, char separator, CharSequence prefix) { - this(analyzer, separator, Collections.singleton(prefix)); - } - - /** - * Create a new {@link PrefixAnalyzer}. The separator will be set to the DEFAULT_SEPARATOR. 
- * - * @param analyzer {@link Analyzer} to wrap - * @param prefix {@link Iterable} of {@link CharSequence} which keeps all prefixes - */ - public PrefixAnalyzer(Analyzer analyzer, char separator, Iterable prefix) { - super(); - this.analyzer = analyzer; - this.prefix = prefix; - this.separator = separator; - } - - @Override - protected TokenStreamComponents createComponents(String fieldName) { - TokenStreamComponents createComponents = analyzer.createComponents(fieldName); - TokenStream stream = new PrefixTokenFilter(createComponents.getTokenStream(), separator, prefix); - TokenStreamComponents tsc = new TokenStreamComponents(createComponents.getTokenizer(), stream); - return tsc; - } - - /** - * The {@link PrefixTokenFilter} wraps a {@link TokenStream} and adds a set - * prefixes ahead. The position attribute will not be incremented for the prefixes. - */ - public static final class PrefixTokenFilter extends TokenFilter { - - private final char separator; - private final CharTermAttribute termAttr = addAttribute(CharTermAttribute.class); - private final PositionIncrementAttribute posAttr = addAttribute(PositionIncrementAttribute.class); - private final Iterable prefixes; - - private Iterator currentPrefix; - - /** - * Create a new {@link PrefixTokenFilter}. The separator will be set to the DEFAULT_SEPARATOR. - * - * @param input {@link TokenStream} to wrap - * @param separator Character used separate prefixes from other tokens - * @param prefixes {@link Iterable} of {@link CharSequence} which keeps all prefixes - */ - public PrefixTokenFilter(TokenStream input, char separator, Iterable prefixes) { - super(input); - this.prefixes = prefixes; - this.currentPrefix = null; - this.separator = separator; - if (prefixes == null || !prefixes.iterator().hasNext()) { - throw new IllegalArgumentException("one or more prefixes needed"); - } - } - - @Override - public boolean incrementToken() throws IOException { - if (currentPrefix != null) { - if (!currentPrefix.hasNext()) { - return input.incrementToken(); - } else { - posAttr.setPositionIncrement(0); - } - } else { - currentPrefix = prefixes.iterator(); - termAttr.setEmpty(); - posAttr.setPositionIncrement(1); - assert (currentPrefix.hasNext()) : "one or more prefixes needed"; - } - termAttr.setEmpty(); - termAttr.append(currentPrefix.next()); - termAttr.append(separator); - return true; - } - - @Override - public void reset() throws IOException { - super.reset(); - currentPrefix = null; - } - } -} diff --git a/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java b/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java index dbad7e0bf72..cd5da674b8e 100644 --- a/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java +++ b/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java @@ -296,27 +296,6 @@ public abstract class BlendedTermQuery extends Query { return Objects.hash(classHash(), Arrays.hashCode(equalsTerms())); } - public static BlendedTermQuery booleanBlendedQuery(Term[] terms) { - return booleanBlendedQuery(terms, null); - } - - public static BlendedTermQuery booleanBlendedQuery(Term[] terms, final float[] boosts) { - return new BlendedTermQuery(terms, boosts) { - @Override - protected Query topLevelQuery(Term[] terms, TermContext[] ctx, int[] docFreqs, int maxDoc) { - BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder(); - for (int i = 0; i < terms.length; i++) { - Query query = new TermQuery(terms[i], ctx[i]); - if (boosts != null && boosts[i] != 1f) { - query = new 
BoostQuery(query, boosts[i]); - } - booleanQueryBuilder.add(query, BooleanClause.Occur.SHOULD); - } - return booleanQueryBuilder.build(); - } - }; - } - public static BlendedTermQuery commonTermsBlendedQuery(Term[] terms, final float[] boosts, final float maxTermFrequency) { return new BlendedTermQuery(terms, boosts) { @Override diff --git a/core/src/main/java/org/apache/lucene/queries/MinDocQuery.java b/core/src/main/java/org/apache/lucene/queries/MinDocQuery.java index d4f9ab72973..65c5c0f707c 100644 --- a/core/src/main/java/org/apache/lucene/queries/MinDocQuery.java +++ b/core/src/main/java/org/apache/lucene/queries/MinDocQuery.java @@ -66,46 +66,54 @@ public final class MinDocQuery extends Query { return null; } final int segmentMinDoc = Math.max(0, minDoc - context.docBase); - final DocIdSetIterator disi = new DocIdSetIterator() { - - int doc = -1; - - @Override - public int docID() { - return doc; - } - - @Override - public int nextDoc() throws IOException { - return advance(doc + 1); - } - - @Override - public int advance(int target) throws IOException { - assert target > doc; - if (doc == -1) { - // skip directly to minDoc - doc = Math.max(target, segmentMinDoc); - } else { - doc = target; - } - if (doc >= maxDoc) { - doc = NO_MORE_DOCS; - } - return doc; - } - - @Override - public long cost() { - return maxDoc - segmentMinDoc; - } - - }; + final DocIdSetIterator disi = new MinDocIterator(segmentMinDoc, maxDoc); return new ConstantScoreScorer(this, score(), disi); } }; } + static class MinDocIterator extends DocIdSetIterator { + final int segmentMinDoc; + final int maxDoc; + int doc = -1; + + MinDocIterator(int segmentMinDoc, int maxDoc) { + this.segmentMinDoc = segmentMinDoc; + this.maxDoc = maxDoc; + } + + @Override + public int docID() { + return doc; + } + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int advance(int target) throws IOException { + assert target > doc; + if (doc == -1) { + // skip directly to minDoc + doc = Math.max(target, segmentMinDoc); + } else { + doc = target; + } + if (doc >= maxDoc) { + doc = NO_MORE_DOCS; + } + return doc; + } + + @Override + public long cost() { + return maxDoc - segmentMinDoc; + } + } + + @Override public String toString(String field) { return "MinDocQuery(minDoc=" + minDoc + ")"; diff --git a/core/src/main/java/org/apache/lucene/queries/SearchAfterSortedDocQuery.java b/core/src/main/java/org/apache/lucene/queries/SearchAfterSortedDocQuery.java new file mode 100644 index 00000000000..b9ed2290350 --- /dev/null +++ b/core/src/main/java/org/apache/lucene/queries/SearchAfterSortedDocQuery.java @@ -0,0 +1,165 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
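The MinDocQuery change above only lifts the anonymous iterator into the named MinDocIterator class so it can be reused (notably by the new SearchAfterSortedDocQuery below); behaviour is unchanged. As a rough illustration of the contract it implements, assuming access from code in the same org.apache.lucene.queries package since the class is package-private:

import org.apache.lucene.queries.MinDocQuery;
import org.apache.lucene.search.DocIdSetIterator;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

class MinDocIteratorSketch {
    // With segmentMinDoc = 5 and maxDoc = 20 the iterator visits doc ids 5..19 and
    // then reports NO_MORE_DOCS, following the DocIdSetIterator advance/nextDoc contract.
    static List<Integer> visitAll() throws IOException {
        DocIdSetIterator it = new MinDocQuery.MinDocIterator(5, 20);
        List<Integer> docs = new ArrayList<>();
        for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
            docs.add(doc);
        }
        return docs; // [5, 6, ..., 19]
    }
}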
+ */ + +package org.apache.lucene.queries; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.ConstantScoreScorer; +import org.apache.lucene.search.ConstantScoreWeight; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.EarlyTerminatingSortingCollector; +import org.apache.lucene.search.FieldComparator; +import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.SortField; +import org.apache.lucene.search.Weight; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +/** + * A {@link Query} that only matches documents that are greater than the provided {@link FieldDoc}. + * This works only if the index is sorted according to the given search {@link Sort}. + */ +public class SearchAfterSortedDocQuery extends Query { + private final Sort sort; + private final FieldDoc after; + private final FieldComparator[] fieldComparators; + private final int[] reverseMuls; + + public SearchAfterSortedDocQuery(Sort sort, FieldDoc after) { + if (sort.getSort().length != after.fields.length) { + throw new IllegalArgumentException("after doc has " + after.fields.length + " value(s) but sort has " + + sort.getSort().length + "."); + } + this.sort = sort; + this.after = after; + int numFields = sort.getSort().length; + this.fieldComparators = new FieldComparator[numFields]; + this.reverseMuls = new int[numFields]; + for (int i = 0; i < numFields; i++) { + SortField sortField = sort.getSort()[i]; + FieldComparator fieldComparator = sortField.getComparator(1, i); + @SuppressWarnings("unchecked") + FieldComparator comparator = (FieldComparator) fieldComparator; + comparator.setTopValue(after.fields[i]); + fieldComparators[i] = fieldComparator; + reverseMuls[i] = sortField.getReverse() ? 
-1 : 1; + } + } + + @Override + public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException { + return new ConstantScoreWeight(this, 1.0f) { + @Override + public Scorer scorer(LeafReaderContext context) throws IOException { + Sort segmentSort = context.reader().getMetaData().getSort(); + if (EarlyTerminatingSortingCollector.canEarlyTerminate(sort, segmentSort) == false) { + throw new IOException("search sort :[" + sort.getSort() + "] does not match the index sort:[" + segmentSort + "]"); + } + final int afterDoc = after.doc - context.docBase; + TopComparator comparator= getTopComparator(fieldComparators, reverseMuls, context, afterDoc); + final int maxDoc = context.reader().maxDoc(); + final int firstDoc = searchAfterDoc(comparator, 0, context.reader().maxDoc()); + if (firstDoc >= maxDoc) { + return null; + } + final DocIdSetIterator disi = new MinDocQuery.MinDocIterator(firstDoc, maxDoc); + return new ConstantScoreScorer(this, score(), disi); + } + }; + } + + @Override + public String toString(String field) { + return "SearchAfterSortedDocQuery(sort=" + sort + ", afterDoc=" + after.toString() + ")"; + } + + @Override + public boolean equals(Object other) { + return sameClassAs(other) && + equalsTo(getClass().cast(other)); + } + + private boolean equalsTo(SearchAfterSortedDocQuery other) { + return sort.equals(other.sort) && + after.doc == other.after.doc && + Double.compare(after.score, other.after.score) == 0 && + Arrays.equals(after.fields, other.after.fields); + } + + @Override + public int hashCode() { + return Objects.hash(classHash(), sort, after.doc, after.score, Arrays.hashCode(after.fields)); + } + + interface TopComparator { + boolean lessThanTop(int doc) throws IOException; + } + + static TopComparator getTopComparator(FieldComparator[] fieldComparators, + int[] reverseMuls, + LeafReaderContext leafReaderContext, + int topDoc) { + return doc -> { + // DVs use forward iterators so we recreate the iterator for each sort field + // every time we need to compare a document with the after doc. + // We could reuse the iterators when the comparison goes forward but + // this should only be called a few time per segment (binary search). + for (int i = 0; i < fieldComparators.length; i++) { + LeafFieldComparator comparator = fieldComparators[i].getLeafComparator(leafReaderContext); + int value = reverseMuls[i] * comparator.compareTop(doc); + if (value != 0) { + return value < 0; + } + } + + if (topDoc <= doc) { + return false; + } + return true; + }; + } + + /** + * Returns the first doc id greater than the provided after doc. 
+ */ + static int searchAfterDoc(TopComparator comparator, int from, int to) throws IOException { + int low = from; + int high = to - 1; + + while (low <= high) { + int mid = (low + high) >>> 1; + if (comparator.lessThanTop(mid)) { + high = mid - 1; + } else { + low = mid + 1; + } + } + return low; + } + +} diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java index 947c7cf3ccd..07f646a89d1 100644 --- a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java +++ b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java @@ -26,6 +26,7 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.FuzzyQuery; @@ -155,31 +156,20 @@ public class MapperQueryParser extends QueryParser { // if there is no match in the mappings. return new MatchNoDocsQuery("empty fields"); } - if (settings.useDisMax()) { - List queries = new ArrayList<>(); - boolean added = false; - for (String mField : fields) { - Query q = getFieldQuerySingle(mField, queryText, quoted); - if (q != null) { - added = true; - queries.add(applyBoost(mField, q)); - } + float tiebreaker = settings.useDisMax() ? settings.tieBreaker() : 1.0f; + List queries = new ArrayList<>(); + boolean added = false; + for (String mField : fields) { + Query q = getFieldQuerySingle(mField, queryText, quoted); + if (q != null) { + added = true; + queries.add(applyBoost(mField, q)); } - if (!added) { - return null; - } - return new DisjunctionMaxQuery(queries, settings.tieBreaker()); - } else { - List clauses = new ArrayList<>(); - for (String mField : fields) { - Query q = getFieldQuerySingle(mField, queryText, quoted); - if (q != null) { - clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD)); - } - } - if (clauses.isEmpty()) return null; // happens for stopwords - return getBooleanQuery(clauses); } + if (!added) { + return null; + } + return new DisjunctionMaxQuery(queries, tiebreaker); } else { return getFieldQuerySingle(field, queryText, quoted); } @@ -255,33 +245,21 @@ public class MapperQueryParser extends QueryParser { protected Query getFieldQuery(String field, String queryText, int slop) throws ParseException { Collection fields = extractMultiFields(field); if (fields != null) { - if (settings.useDisMax()) { - List queries = new ArrayList<>(); - boolean added = false; - for (String mField : fields) { - Query q = super.getFieldQuery(mField, queryText, slop); - if (q != null) { - added = true; - q = applySlop(q, slop); - queries.add(applyBoost(mField, q)); - } + float tiebreaker = settings.useDisMax() ? 
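The searchAfterDoc helper above is a lower-bound binary search: assuming the comparator is false on a prefix of the doc-id range and true on the rest, it returns the first doc id for which it is true, or the upper bound when it never becomes true. A standalone sketch of the same logic over plain ints, with an IntPredicate standing in for the TopComparator:

import java.util.function.IntPredicate;

class SearchAfterBinarySearchSketch {
    // Same shape as searchAfterDoc: return the first index in [from, to) where pred is
    // true, or `to` if pred never becomes true. pred must be false then true (monotone).
    static int firstTrue(IntPredicate pred, int from, int to) {
        int low = from;
        int high = to - 1;
        while (low <= high) {
            int mid = (low + high) >>> 1;
            if (pred.test(mid)) {
                high = mid - 1;
            } else {
                low = mid + 1;
            }
        }
        return low;
    }

    public static void main(String[] args) {
        // In this toy example the "after" document sorts just before doc 42.
        assert firstTrue(i -> i >= 42, 0, 100) == 42;
        assert firstTrue(i -> true, 0, 100) == 0;    // every doc sorts after the after doc
        assert firstTrue(i -> false, 0, 100) == 100; // none does: the caller sees firstDoc >= maxDoc
    }
}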
settings.tieBreaker() : 1.0f; + List queries = new ArrayList<>(); + boolean added = false; + for (String mField : fields) { + Query q = super.getFieldQuery(mField, queryText, slop); + if (q != null) { + added = true; + q = applySlop(q, slop); + queries.add(applyBoost(mField, q)); } - if (!added) { - return null; - } - return new DisjunctionMaxQuery(queries, settings.tieBreaker()); - } else { - List clauses = new ArrayList<>(); - for (String mField : fields) { - Query q = super.getFieldQuery(mField, queryText, slop); - if (q != null) { - q = applySlop(q, slop); - clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD)); - } - } - if (clauses.isEmpty()) return null; // happens for stopwords - return getBooleanQuery(clauses); } + if (!added) { + return null; + } + return new DisjunctionMaxQuery(queries, tiebreaker); } else { return super.getFieldQuery(field, queryText, slop); } @@ -308,31 +286,20 @@ public class MapperQueryParser extends QueryParser { return getRangeQuerySingle(fields.iterator().next(), part1, part2, startInclusive, endInclusive, context); } - if (settings.useDisMax()) { - List queries = new ArrayList<>(); - boolean added = false; - for (String mField : fields) { - Query q = getRangeQuerySingle(mField, part1, part2, startInclusive, endInclusive, context); - if (q != null) { - added = true; - queries.add(applyBoost(mField, q)); - } + float tiebreaker = settings.useDisMax() ? settings.tieBreaker() : 1.0f; + List queries = new ArrayList<>(); + boolean added = false; + for (String mField : fields) { + Query q = getRangeQuerySingle(mField, part1, part2, startInclusive, endInclusive, context); + if (q != null) { + added = true; + queries.add(applyBoost(mField, q)); } - if (!added) { - return null; - } - return new DisjunctionMaxQuery(queries, settings.tieBreaker()); - } else { - List clauses = new ArrayList<>(); - for (String mField : fields) { - Query q = getRangeQuerySingle(mField, part1, part2, startInclusive, endInclusive, context); - if (q != null) { - clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD)); - } - } - if (clauses.isEmpty()) return null; // happens for stopwords - return getBooleanQuery(clauses); } + if (!added) { + return null; + } + return new DisjunctionMaxQuery(queries, tiebreaker); } private Query getRangeQuerySingle(String field, String part1, String part2, @@ -367,30 +334,20 @@ public class MapperQueryParser extends QueryParser { if (fields.size() == 1) { return getFuzzyQuerySingle(fields.iterator().next(), termStr, minSimilarity); } - if (settings.useDisMax()) { - List queries = new ArrayList<>(); - boolean added = false; - for (String mField : fields) { - Query q = getFuzzyQuerySingle(mField, termStr, minSimilarity); - if (q != null) { - added = true; - queries.add(applyBoost(mField, q)); - } + float tiebreaker = settings.useDisMax() ? 
settings.tieBreaker() : 1.0f; + List queries = new ArrayList<>(); + boolean added = false; + for (String mField : fields) { + Query q = getFuzzyQuerySingle(mField, termStr, minSimilarity); + if (q != null) { + added = true; + queries.add(applyBoost(mField, q)); } - if (!added) { - return null; - } - return new DisjunctionMaxQuery(queries, settings.tieBreaker()); - } else { - List clauses = new ArrayList<>(); - for (String mField : fields) { - Query q = getFuzzyQuerySingle(mField, termStr, minSimilarity); - if (q != null) { - clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD)); - } - } - return getBooleanQuery(clauses); } + if (!added) { + return null; + } + return new DisjunctionMaxQuery(queries, tiebreaker); } else { return getFuzzyQuerySingle(field, termStr, minSimilarity); } @@ -430,31 +387,20 @@ public class MapperQueryParser extends QueryParser { if (fields.size() == 1) { return getPrefixQuerySingle(fields.iterator().next(), termStr); } - if (settings.useDisMax()) { - List queries = new ArrayList<>(); - boolean added = false; - for (String mField : fields) { - Query q = getPrefixQuerySingle(mField, termStr); - if (q != null) { - added = true; - queries.add(applyBoost(mField, q)); - } + float tiebreaker = settings.useDisMax() ? settings.tieBreaker() : 1.0f; + List queries = new ArrayList<>(); + boolean added = false; + for (String mField : fields) { + Query q = getPrefixQuerySingle(mField, termStr); + if (q != null) { + added = true; + queries.add(applyBoost(mField, q)); } - if (!added) { - return null; - } - return new DisjunctionMaxQuery(queries, settings.tieBreaker()); - } else { - List clauses = new ArrayList<>(); - for (String mField : fields) { - Query q = getPrefixQuerySingle(mField, termStr); - if (q != null) { - clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD)); - } - } - if (clauses.isEmpty()) return null; // happens for stopwords - return getBooleanQuery(clauses); } + if (!added) { + return null; + } + return new DisjunctionMaxQuery(queries, tiebreaker); } else { return getPrefixQuerySingle(field, termStr); } @@ -592,31 +538,20 @@ public class MapperQueryParser extends QueryParser { if (fields.size() == 1) { return getWildcardQuerySingle(fields.iterator().next(), termStr); } - if (settings.useDisMax()) { - List queries = new ArrayList<>(); - boolean added = false; - for (String mField : fields) { - Query q = getWildcardQuerySingle(mField, termStr); - if (q != null) { - added = true; - queries.add(applyBoost(mField, q)); - } + float tiebreaker = settings.useDisMax() ? 
settings.tieBreaker() : 1.0f; + List queries = new ArrayList<>(); + boolean added = false; + for (String mField : fields) { + Query q = getWildcardQuerySingle(mField, termStr); + if (q != null) { + added = true; + queries.add(applyBoost(mField, q)); } - if (!added) { - return null; - } - return new DisjunctionMaxQuery(queries, settings.tieBreaker()); - } else { - List clauses = new ArrayList<>(); - for (String mField : fields) { - Query q = getWildcardQuerySingle(mField, termStr); - if (q != null) { - clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD)); - } - } - if (clauses.isEmpty()) return null; // happens for stopwords - return getBooleanQuery(clauses); } + if (!added) { + return null; + } + return new DisjunctionMaxQuery(queries, tiebreaker); } else { return getWildcardQuerySingle(field, termStr); } @@ -656,31 +591,20 @@ public class MapperQueryParser extends QueryParser { if (fields.size() == 1) { return getRegexpQuerySingle(fields.iterator().next(), termStr); } - if (settings.useDisMax()) { - List queries = new ArrayList<>(); - boolean added = false; - for (String mField : fields) { - Query q = getRegexpQuerySingle(mField, termStr); - if (q != null) { - added = true; - queries.add(applyBoost(mField, q)); - } + float tiebreaker = settings.useDisMax() ? settings.tieBreaker() : 1.0f; + List queries = new ArrayList<>(); + boolean added = false; + for (String mField : fields) { + Query q = getRegexpQuerySingle(mField, termStr); + if (q != null) { + added = true; + queries.add(applyBoost(mField, q)); } - if (!added) { - return null; - } - return new DisjunctionMaxQuery(queries, settings.tieBreaker()); - } else { - List clauses = new ArrayList<>(); - for (String mField : fields) { - Query q = getRegexpQuerySingle(mField, termStr); - if (q != null) { - clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD)); - } - } - if (clauses.isEmpty()) return null; // happens for stopwords - return getBooleanQuery(clauses); } + if (!added) { + return null; + } + return new DisjunctionMaxQuery(queries, tiebreaker); } else { return getRegexpQuerySingle(field, termStr); } diff --git a/core/src/main/java/org/apache/lucene/search/grouping/CollapsingDocValuesSource.java b/core/src/main/java/org/apache/lucene/search/grouping/CollapsingDocValuesSource.java index e4877338902..cbcd1e3a411 100644 --- a/core/src/main/java/org/apache/lucene/search/grouping/CollapsingDocValuesSource.java +++ b/core/src/main/java/org/apache/lucene/search/grouping/CollapsingDocValuesSource.java @@ -40,7 +40,7 @@ import java.util.Collection; abstract class CollapsingDocValuesSource extends GroupSelector { protected final String field; - CollapsingDocValuesSource(String field) throws IOException { + CollapsingDocValuesSource(String field) { this.field = field; } @@ -58,7 +58,7 @@ abstract class CollapsingDocValuesSource extends GroupSelector { private long value; private boolean hasValue; - Numeric(String field) throws IOException { + Numeric(String field) { super(field); } @@ -148,7 +148,7 @@ abstract class CollapsingDocValuesSource extends GroupSelector { private SortedDocValues values; private int ord; - Keyword(String field) throws IOException { + Keyword(String field) { super(field); } diff --git a/core/src/main/java/org/apache/lucene/search/grouping/CollapsingTopDocsCollector.java b/core/src/main/java/org/apache/lucene/search/grouping/CollapsingTopDocsCollector.java index b5cb02bcd65..fedda3ead59 100644 --- 
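All of these per-field branches now collapse into a single DisjunctionMaxQuery, with the tie-breaker as the only difference between the dis_max and boolean paths. A rough sketch of why a tie-breaker of 1.0 can stand in for the old SHOULD-clause query (field and term names are placeholders; with a tie-breaker of 1.0 a dismax scores each document as the sum of its matching clauses, which is what the boolean query produced):

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

import java.util.Arrays;
import java.util.List;

class DisMaxTieBreakerSketch {
    static void build() {
        List<Query> perField = Arrays.asList(
            new TermQuery(new Term("title", "quick")),
            new TermQuery(new Term("body", "quick")));

        // useDisMax == true: the best-scoring field dominates (plus tieBreaker * the others).
        Query disMaxPath = new DisjunctionMaxQuery(perField, 0.0f);

        // useDisMax == false: tie-breaker 1.0 lets every matching clause contribute fully,
        // mirroring the BooleanQuery of SHOULD clauses this parser used to build:
        Query booleanStylePath = new DisjunctionMaxQuery(perField, 1.0f);

        BooleanQuery.Builder shoulds = new BooleanQuery.Builder();
        for (Query q : perField) {
            shoulds.add(q, BooleanClause.Occur.SHOULD);
        }
        Query oldBooleanPath = shoulds.build(); // matches the same documents as booleanStylePath
    }
}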
a/core/src/main/java/org/apache/lucene/search/grouping/CollapsingTopDocsCollector.java +++ b/core/src/main/java/org/apache/lucene/search/grouping/CollapsingTopDocsCollector.java @@ -46,7 +46,7 @@ public final class CollapsingTopDocsCollector extends FirstPassGroupingCollec private final boolean trackMaxScore; CollapsingTopDocsCollector(GroupSelector groupSelector, String collapseField, Sort sort, - int topN, boolean trackMaxScore) throws IOException { + int topN, boolean trackMaxScore) { super(groupSelector, sort, topN); this.collapseField = collapseField; this.trackMaxScore = trackMaxScore; @@ -60,7 +60,7 @@ public final class CollapsingTopDocsCollector extends FirstPassGroupingCollec /** * Transform {@link FirstPassGroupingCollector#getTopGroups(int, boolean)} output in - * {@link CollapseTopFieldDocs}. The collapsing needs only one pass so we can create the final top docs at the end + * {@link CollapseTopFieldDocs}. The collapsing needs only one pass so we can get the final top docs at the end * of the first pass. */ public CollapseTopFieldDocs getTopDocs() throws IOException { @@ -132,10 +132,9 @@ public final class CollapsingTopDocsCollector extends FirstPassGroupingCollec * This must be non-null, ie, if you want to groupSort by relevance * use Sort.RELEVANCE. * @param topN How many top groups to keep. - * @throws IOException When I/O related errors occur */ public static CollapsingTopDocsCollector createNumeric(String collapseField, Sort sort, - int topN, boolean trackMaxScore) throws IOException { + int topN, boolean trackMaxScore) { return new CollapsingTopDocsCollector<>(new CollapsingDocValuesSource.Numeric(collapseField), collapseField, sort, topN, trackMaxScore); } @@ -152,12 +151,10 @@ public final class CollapsingTopDocsCollector extends FirstPassGroupingCollec * document per collapsed key. * This must be non-null, ie, if you want to groupSort by relevance use Sort.RELEVANCE. * @param topN How many top groups to keep. - * @throws IOException When I/O related errors occur */ public static CollapsingTopDocsCollector createKeyword(String collapseField, Sort sort, - int topN, boolean trackMaxScore) throws IOException { + int topN, boolean trackMaxScore) { return new CollapsingTopDocsCollector<>(new CollapsingDocValuesSource.Keyword(collapseField), collapseField, sort, topN, trackMaxScore); } } - diff --git a/core/src/main/java/org/apache/lucene/search/postingshighlight/CustomPassageFormatter.java b/core/src/main/java/org/apache/lucene/search/postingshighlight/CustomPassageFormatter.java deleted file mode 100644 index a33bf16dee4..00000000000 --- a/core/src/main/java/org/apache/lucene/search/postingshighlight/CustomPassageFormatter.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.lucene.search.postingshighlight; - -import org.apache.lucene.search.highlight.Snippet; -import org.apache.lucene.search.highlight.Encoder; -import org.elasticsearch.search.fetch.subphase.highlight.HighlightUtils; - -/** -Custom passage formatter that allows us to: -1) extract different snippets (instead of a single big string) together with their scores ({@link Snippet}) -2) use the {@link Encoder} implementations that are already used with the other highlighters - */ -public class CustomPassageFormatter extends PassageFormatter { - - private final String preTag; - private final String postTag; - private final Encoder encoder; - - public CustomPassageFormatter(String preTag, String postTag, Encoder encoder) { - this.preTag = preTag; - this.postTag = postTag; - this.encoder = encoder; - } - - @Override - public Snippet[] format(Passage[] passages, String content) { - Snippet[] snippets = new Snippet[passages.length]; - int pos; - for (int j = 0; j < passages.length; j++) { - Passage passage = passages[j]; - StringBuilder sb = new StringBuilder(); - pos = passage.getStartOffset(); - for (int i = 0; i < passage.getNumMatches(); i++) { - int start = passage.getMatchStarts()[i]; - int end = passage.getMatchEnds()[i]; - // its possible to have overlapping terms - if (start > pos) { - append(sb, content, pos, start); - } - if (end > pos) { - sb.append(preTag); - append(sb, content, Math.max(pos, start), end); - sb.append(postTag); - pos = end; - } - } - // its possible a "term" from the analyzer could span a sentence boundary. - append(sb, content, pos, Math.max(pos, passage.getEndOffset())); - //we remove the paragraph separator if present at the end of the snippet (we used it as separator between values) - if (sb.charAt(sb.length() - 1) == HighlightUtils.PARAGRAPH_SEPARATOR) { - sb.deleteCharAt(sb.length() - 1); - } else if (sb.charAt(sb.length() - 1) == HighlightUtils.NULL_SEPARATOR) { - sb.deleteCharAt(sb.length() - 1); - } - //and we trim the snippets too - snippets[j] = new Snippet(sb.toString().trim(), passage.getScore(), passage.getNumMatches() > 0); - } - return snippets; - } - - protected void append(StringBuilder dest, String content, int start, int end) { - dest.append(encoder.encodeText(content.substring(start, end))); - } -} diff --git a/core/src/main/java/org/apache/lucene/search/postingshighlight/CustomPostingsHighlighter.java b/core/src/main/java/org/apache/lucene/search/postingshighlight/CustomPostingsHighlighter.java deleted file mode 100644 index ac90a3e57ae..00000000000 --- a/core/src/main/java/org/apache/lucene/search/postingshighlight/CustomPostingsHighlighter.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.lucene.search.postingshighlight; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.highlight.Snippet; - -import java.io.IOException; -import java.text.BreakIterator; -import java.util.Map; - -/** - * Subclass of the {@link PostingsHighlighter} that works for a single field in a single document. - * Uses a custom {@link PassageFormatter}. Accepts field content as a constructor argument, given that loading - * is custom and can be done reading from _source field. Supports using different {@link BreakIterator} to break - * the text into fragments. Considers every distinct field value as a discrete passage for highlighting (unless - * the whole content needs to be highlighted). Supports both returning empty snippets and non highlighted snippets - * when no highlighting can be performed. - * - * The use that we make of the postings highlighter is not optimal. It would be much better to highlight - * multiple docs in a single call, as we actually lose its sequential IO. That would require to - * refactor the elasticsearch highlight api which currently works per hit. - */ -public final class CustomPostingsHighlighter extends PostingsHighlighter { - - private static final Snippet[] EMPTY_SNIPPET = new Snippet[0]; - private static final Passage[] EMPTY_PASSAGE = new Passage[0]; - - private final Analyzer analyzer; - private final CustomPassageFormatter passageFormatter; - private final BreakIterator breakIterator; - private final boolean returnNonHighlightedSnippets; - private final String fieldValue; - - /** - * Creates a new instance of {@link CustomPostingsHighlighter} - * - * @param analyzer the analyzer used for the field at index time, used for multi term queries internally - * @param passageFormatter our own {@link PassageFormatter} which generates snippets in forms of {@link Snippet} objects - * @param fieldValue the original field values as constructor argument, loaded from te _source field or the relevant stored field. - * @param returnNonHighlightedSnippets whether non highlighted snippets should be returned rather than empty snippets when - * no highlighting can be performed - */ - public CustomPostingsHighlighter(Analyzer analyzer, CustomPassageFormatter passageFormatter, String fieldValue, boolean returnNonHighlightedSnippets) { - this(analyzer, passageFormatter, null, fieldValue, returnNonHighlightedSnippets); - } - - /** - * Creates a new instance of {@link CustomPostingsHighlighter} - * - * @param analyzer the analyzer used for the field at index time, used for multi term queries internally - * @param passageFormatter our own {@link PassageFormatter} which generates snippets in forms of {@link Snippet} objects - * @param breakIterator an instance {@link BreakIterator} selected depending on the highlighting options - * @param fieldValue the original field values as constructor argument, loaded from te _source field or the relevant stored field. 
- * @param returnNonHighlightedSnippets whether non highlighted snippets should be returned rather than empty snippets when - * no highlighting can be performed - */ - public CustomPostingsHighlighter(Analyzer analyzer, CustomPassageFormatter passageFormatter, BreakIterator breakIterator, String fieldValue, boolean returnNonHighlightedSnippets) { - this.analyzer = analyzer; - this.passageFormatter = passageFormatter; - this.breakIterator = breakIterator; - this.returnNonHighlightedSnippets = returnNonHighlightedSnippets; - this.fieldValue = fieldValue; - } - - /** - * Highlights terms extracted from the provided query within the content of the provided field name - */ - public Snippet[] highlightField(String field, Query query, IndexSearcher searcher, int docId, int maxPassages) throws IOException { - Map fieldsAsObjects = super.highlightFieldsAsObjects(new String[]{field}, query, searcher, new int[]{docId}, new int[]{maxPassages}); - Object[] snippetObjects = fieldsAsObjects.get(field); - if (snippetObjects != null) { - //one single document at a time - assert snippetObjects.length == 1; - Object snippetObject = snippetObjects[0]; - if (snippetObject != null && snippetObject instanceof Snippet[]) { - return (Snippet[]) snippetObject; - } - } - return EMPTY_SNIPPET; - } - - @Override - protected PassageFormatter getFormatter(String field) { - return passageFormatter; - } - - @Override - protected BreakIterator getBreakIterator(String field) { - if (breakIterator == null) { - return super.getBreakIterator(field); - } - return breakIterator; - } - - /* - By default the postings highlighter returns non highlighted snippet when there are no matches. - We want to return no snippets by default, unless no_match_size is greater than 0 - */ - @Override - protected Passage[] getEmptyHighlight(String fieldName, BreakIterator bi, int maxPassages) { - if (returnNonHighlightedSnippets) { - //we want to return the first sentence of the first snippet only - return super.getEmptyHighlight(fieldName, bi, 1); - } - return EMPTY_PASSAGE; - } - - @Override - protected Analyzer getIndexAnalyzer(String field) { - return analyzer; - } - - @Override - protected String[][] loadFieldValues(IndexSearcher searcher, String[] fields, int[] docids, int maxLength) throws IOException { - //we only highlight one field, one document at a time - return new String[][]{new String[]{fieldValue}}; - } -} diff --git a/core/src/main/java/org/apache/lucene/search/uhighlight/CustomPassageFormatter.java b/core/src/main/java/org/apache/lucene/search/uhighlight/CustomPassageFormatter.java index 7a34a805db6..52eee559c68 100644 --- a/core/src/main/java/org/apache/lucene/search/uhighlight/CustomPassageFormatter.java +++ b/core/src/main/java/org/apache/lucene/search/uhighlight/CustomPassageFormatter.java @@ -20,7 +20,6 @@ package org.apache.lucene.search.uhighlight; import org.apache.lucene.search.highlight.Encoder; -import org.apache.lucene.search.highlight.Snippet; import org.elasticsearch.search.fetch.subphase.highlight.HighlightUtils; /** diff --git a/core/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java b/core/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java index 4a20fb0478f..ebc13298202 100644 --- a/core/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java +++ b/core/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java @@ -27,7 +27,6 @@ import org.apache.lucene.search.IndexSearcher; import 
org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.highlight.Snippet; import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; import org.apache.lucene.search.spans.SpanNearQuery; import org.apache.lucene.search.spans.SpanOrQuery; @@ -182,13 +181,16 @@ public class CustomUnifiedHighlighter extends UnifiedHighlighter { positionSpanQueries[i] = innerQueries[0]; } } + + if (positionSpanQueries.length == 1) { + return Collections.singletonList(positionSpanQueries[0]); + } // sum position increments beyond 1 int positionGaps = 0; if (positions.length >= 2) { // positions are in increasing order. max(0,...) is just a safeguard. positionGaps = Math.max(0, positions[positions.length - 1] - positions[0] - positions.length + 1); } - //if original slop is 0 then require inOrder boolean inorder = (mpq.getSlop() == 0); return Collections.singletonList(new SpanNearQuery(positionSpanQueries, diff --git a/core/src/main/java/org/apache/lucene/search/highlight/Snippet.java b/core/src/main/java/org/apache/lucene/search/uhighlight/Snippet.java similarity index 90% rename from core/src/main/java/org/apache/lucene/search/highlight/Snippet.java rename to core/src/main/java/org/apache/lucene/search/uhighlight/Snippet.java index 81a3d406ea3..b7490c55fef 100644 --- a/core/src/main/java/org/apache/lucene/search/highlight/Snippet.java +++ b/core/src/main/java/org/apache/lucene/search/uhighlight/Snippet.java @@ -17,11 +17,11 @@ * under the License. */ -package org.apache.lucene.search.highlight; +package org.apache.lucene.search.uhighlight; /** * Represents a scored highlighted snippet. - * It's our own arbitrary object that we get back from the postings highlighter when highlighting a document. + * It's our own arbitrary object that we get back from the unified highlighter when highlighting a document. * Every snippet contains its formatted text and its score. * The score is needed in case we want to sort snippets by score, they get sorted by position in the text by default. 
*/ diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java b/core/src/main/java/org/elasticsearch/ElasticsearchException.java index b020dc6ea1f..7c20ed7d2c4 100644 --- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/core/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -765,8 +765,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte org.elasticsearch.search.SearchContextMissingException::new, 24, UNKNOWN_VERSION_ADDED), GENERAL_SCRIPT_EXCEPTION(org.elasticsearch.script.GeneralScriptException.class, org.elasticsearch.script.GeneralScriptException::new, 25, UNKNOWN_VERSION_ADDED), - BATCH_OPERATION_EXCEPTION(org.elasticsearch.index.shard.TranslogRecoveryPerformer.BatchOperationException.class, - org.elasticsearch.index.shard.TranslogRecoveryPerformer.BatchOperationException::new, 26, UNKNOWN_VERSION_ADDED), + // 26 was BatchOperationException SNAPSHOT_CREATION_EXCEPTION(org.elasticsearch.snapshots.SnapshotCreationException.class, org.elasticsearch.snapshots.SnapshotCreationException::new, 27, UNKNOWN_VERSION_ADDED), DELETE_FAILED_ENGINE_EXCEPTION(org.elasticsearch.index.engine.DeleteFailedEngineException.class, // deprecated in 6.0, remove in 7.0 @@ -830,8 +829,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte org.elasticsearch.transport.SendRequestTransportException::new, 58, UNKNOWN_VERSION_ADDED), ES_REJECTED_EXECUTION_EXCEPTION(org.elasticsearch.common.util.concurrent.EsRejectedExecutionException.class, org.elasticsearch.common.util.concurrent.EsRejectedExecutionException::new, 59, UNKNOWN_VERSION_ADDED), - EARLY_TERMINATION_EXCEPTION(org.elasticsearch.common.lucene.Lucene.EarlyTerminationException.class, - org.elasticsearch.common.lucene.Lucene.EarlyTerminationException::new, 60, UNKNOWN_VERSION_ADDED), + // 60 used to be for EarlyTerminationException // 61 used to be for RoutingValidationException NOT_SERIALIZABLE_EXCEPTION_WRAPPER(org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper.class, org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper::new, 62, UNKNOWN_VERSION_ADDED), diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index c6939756816..288a52a0a1f 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -72,19 +72,26 @@ public class Version implements Comparable { public static final Version V_5_3_1 = new Version(V_5_3_1_ID, org.apache.lucene.util.Version.LUCENE_6_4_2); public static final int V_5_3_2_ID = 5030299; public static final Version V_5_3_2 = new Version(V_5_3_2_ID, org.apache.lucene.util.Version.LUCENE_6_4_2); + public static final int V_5_3_3_ID = 5030399; + public static final Version V_5_3_3 = new Version(V_5_3_3_ID, org.apache.lucene.util.Version.LUCENE_6_4_2); public static final int V_5_4_0_ID = 5040099; public static final Version V_5_4_0 = new Version(V_5_4_0_ID, org.apache.lucene.util.Version.LUCENE_6_5_0); public static final int V_5_4_1_ID = 5040199; public static final Version V_5_4_1 = new Version(V_5_4_1_ID, org.apache.lucene.util.Version.LUCENE_6_5_1); public static final int V_5_5_0_ID = 5050099; - public static final Version V_5_5_0 = new Version(V_5_5_0_ID, org.apache.lucene.util.Version.LUCENE_6_5_1); + public static final Version V_5_5_0 = new Version(V_5_5_0_ID, org.apache.lucene.util.Version.LUCENE_6_6_0); + public static final int V_5_6_0_ID = 5060099; + public 
static final Version V_5_6_0 = new Version(V_5_6_0_ID, org.apache.lucene.util.Version.LUCENE_6_6_0); public static final int V_6_0_0_alpha1_ID = 6000001; public static final Version V_6_0_0_alpha1 = new Version(V_6_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_7_0_0); public static final int V_6_0_0_alpha2_ID = 6000002; public static final Version V_6_0_0_alpha2 = new Version(V_6_0_0_alpha2_ID, org.apache.lucene.util.Version.LUCENE_7_0_0); - public static final Version CURRENT = V_6_0_0_alpha2; + public static final int V_6_0_0_alpha3_ID = 6000003; + public static final Version V_6_0_0_alpha3 = + new Version(V_6_0_0_alpha3_ID, org.apache.lucene.util.Version.LUCENE_7_0_0); + public static final Version CURRENT = V_6_0_0_alpha3; // unreleased versions must be added to the above list with the suffix _UNRELEASED (with the exception of CURRENT) @@ -99,16 +106,22 @@ public class Version implements Comparable { public static Version fromId(int id) { switch (id) { + case V_6_0_0_alpha3_ID: + return V_6_0_0_alpha3; case V_6_0_0_alpha2_ID: return V_6_0_0_alpha2; case V_6_0_0_alpha1_ID: return V_6_0_0_alpha1; + case V_5_6_0_ID: + return V_5_6_0; case V_5_5_0_ID: return V_5_5_0; case V_5_4_1_ID: return V_5_4_1; case V_5_4_0_ID: return V_5_4_0; + case V_5_3_3_ID: + return V_5_3_3; case V_5_3_2_ID: return V_5_3_2; case V_5_3_1_ID: diff --git a/core/src/main/java/org/elasticsearch/action/ActionModule.java b/core/src/main/java/org/elasticsearch/action/ActionModule.java index d52175c9eb4..302c387cc13 100644 --- a/core/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/core/src/main/java/org/elasticsearch/action/ActionModule.java @@ -37,6 +37,8 @@ import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction; import org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; +import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageAction; +import org.elasticsearch.action.admin.cluster.node.usage.TransportNodesUsageAction; import org.elasticsearch.action.admin.cluster.remote.RemoteInfoAction; import org.elasticsearch.action.admin.cluster.remote.TransportRemoteInfoAction; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryAction; @@ -234,6 +236,7 @@ import org.elasticsearch.rest.action.admin.cluster.RestListTasksAction; import org.elasticsearch.rest.action.admin.cluster.RestNodesHotThreadsAction; import org.elasticsearch.rest.action.admin.cluster.RestNodesInfoAction; import org.elasticsearch.rest.action.admin.cluster.RestNodesStatsAction; +import org.elasticsearch.rest.action.admin.cluster.RestNodesUsageAction; import org.elasticsearch.rest.action.admin.cluster.RestPendingClusterTasksAction; import org.elasticsearch.rest.action.admin.cluster.RestPutRepositoryAction; import org.elasticsearch.rest.action.admin.cluster.RestPutStoredScriptAction; @@ -250,6 +253,9 @@ import org.elasticsearch.rest.action.admin.indices.RestDeleteIndexTemplateAction import org.elasticsearch.rest.action.admin.indices.RestFlushAction; import org.elasticsearch.rest.action.admin.indices.RestForceMergeAction; import org.elasticsearch.rest.action.admin.indices.RestGetAliasesAction; +import org.elasticsearch.rest.action.admin.indices.RestGetAllAliasesAction; +import org.elasticsearch.rest.action.admin.indices.RestGetAllMappingsAction; +import 
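The new version constants follow the numeric id scheme already visible in the surrounding entries: two decimal digits per component, with the trailing pair distinguishing releases (99) from pre-releases (alpha1 = 01, alpha2 = 02, alpha3 = 03). A small illustrative decoder, not part of the production Version parsing:

class VersionIdSketch {
    // id = major * 1_000_000 + minor * 10_000 + revision * 100 + build
    static String describe(int id) {
        int major = id / 1_000_000;
        int minor = (id / 10_000) % 100;
        int revision = (id / 100) % 100;
        int build = id % 100;
        return major + "." + minor + "." + revision + " (build " + build + ")";
    }

    public static void main(String[] args) {
        System.out.println(describe(5030399)); // 5.3.3 (build 99) -> the new V_5_3_3
        System.out.println(describe(5060099)); // 5.6.0 (build 99) -> the new V_5_6_0
        System.out.println(describe(6000003)); // 6.0.0 (build 3)  -> 6.0.0-alpha3, the new CURRENT
    }
}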
org.elasticsearch.rest.action.admin.indices.RestGetAllSettingsAction; import org.elasticsearch.rest.action.admin.indices.RestGetFieldMappingAction; import org.elasticsearch.rest.action.admin.indices.RestGetIndexTemplateAction; import org.elasticsearch.rest.action.admin.indices.RestGetIndicesAction; @@ -269,7 +275,6 @@ import org.elasticsearch.rest.action.admin.indices.RestRefreshAction; import org.elasticsearch.rest.action.admin.indices.RestRolloverIndexAction; import org.elasticsearch.rest.action.admin.indices.RestShrinkIndexAction; import org.elasticsearch.rest.action.admin.indices.RestSyncedFlushAction; -import org.elasticsearch.rest.action.admin.indices.RestTypesExistsAction; import org.elasticsearch.rest.action.admin.indices.RestUpdateSettingsAction; import org.elasticsearch.rest.action.admin.indices.RestUpgradeAction; import org.elasticsearch.rest.action.admin.indices.RestValidateQueryAction; @@ -310,6 +315,7 @@ import org.elasticsearch.rest.action.search.RestMultiSearchAction; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.rest.action.search.RestSearchScrollAction; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.usage.UsageService; import java.util.ArrayList; import java.util.List; @@ -346,7 +352,7 @@ public class ActionModule extends AbstractModule { public ActionModule(boolean transportClient, Settings settings, IndexNameExpressionResolver indexNameExpressionResolver, IndexScopedSettings indexScopedSettings, ClusterSettings clusterSettings, SettingsFilter settingsFilter, ThreadPool threadPool, List actionPlugins, NodeClient nodeClient, - CircuitBreakerService circuitBreakerService) { + CircuitBreakerService circuitBreakerService, UsageService usageService) { this.transportClient = transportClient; this.settings = settings; this.indexNameExpressionResolver = indexNameExpressionResolver; @@ -373,7 +379,7 @@ public class ActionModule extends AbstractModule { if (transportClient) { restController = null; } else { - restController = new RestController(settings, headers, restWrapper, nodeClient, circuitBreakerService); + restController = new RestController(settings, headers, restWrapper, nodeClient, circuitBreakerService, usageService); } } @@ -405,6 +411,7 @@ public class ActionModule extends AbstractModule { actions.register(NodesInfoAction.INSTANCE, TransportNodesInfoAction.class); actions.register(RemoteInfoAction.INSTANCE, TransportRemoteInfoAction.class); actions.register(NodesStatsAction.INSTANCE, TransportNodesStatsAction.class); + actions.register(NodesUsageAction.INSTANCE, TransportNodesUsageAction.class); actions.register(NodesHotThreadsAction.INSTANCE, TransportNodesHotThreadsAction.class); actions.register(ListTasksAction.INSTANCE, TransportListTasksAction.class); actions.register(GetTaskAction.INSTANCE, TransportGetTaskAction.class); @@ -515,6 +522,7 @@ public class ActionModule extends AbstractModule { registerHandler.accept(new RestNodesInfoAction(settings, restController, settingsFilter)); registerHandler.accept(new RestRemoteClusterInfoAction(settings, restController)); registerHandler.accept(new RestNodesStatsAction(settings, restController)); + registerHandler.accept(new RestNodesUsageAction(settings, restController)); registerHandler.accept(new RestNodesHotThreadsAction(settings, restController)); registerHandler.accept(new RestClusterAllocationExplainAction(settings, restController)); registerHandler.accept(new RestClusterStatsAction(settings, restController)); @@ -535,7 +543,9 @@ public class 
ActionModule extends AbstractModule { registerHandler.accept(new RestDeleteSnapshotAction(settings, restController)); registerHandler.accept(new RestSnapshotsStatusAction(settings, restController)); - registerHandler.accept(new RestTypesExistsAction(settings, restController)); + registerHandler.accept(new RestGetAllAliasesAction(settings, restController)); + registerHandler.accept(new RestGetAllMappingsAction(settings, restController)); + registerHandler.accept(new RestGetAllSettingsAction(settings, restController, indexScopedSettings, settingsFilter)); registerHandler.accept(new RestGetIndicesAction(settings, restController, indexScopedSettings, settingsFilter)); registerHandler.accept(new RestIndicesStatsAction(settings, restController)); registerHandler.accept(new RestIndicesSegmentsAction(settings, restController)); diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java index da45a3e4027..7b43d1c259b 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/TransportNodesHotThreadsAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.nodes.BaseNodeRequest; import org.elasticsearch.action.support.nodes.TransportNodesAction; -import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; @@ -82,11 +81,6 @@ public class TransportNodesHotThreadsAction extends TransportNodesAction listener) { sendSetBanRequest(nodes, BanParentTaskRequest.createSetBanParentTaskRequest(new TaskId(clusterService.localNode().getId(), task.getId()), reason), diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java index 889628e373a..eb8a6ad4ca5 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java @@ -90,8 +90,4 @@ public class TransportListTasksAction extends TransportTasksAction restUsage; + + NodeUsage() { + } + + public static NodeUsage readNodeStats(StreamInput in) throws IOException { + NodeUsage nodeInfo = new NodeUsage(); + nodeInfo.readFrom(in); + return nodeInfo; + } + + /** + * @param node + * the node these statistics were collected from + * @param timestamp + * the timestamp for when these statistics were collected + * @param sinceTime + * the timestamp for when the collection of these statistics + * started + * @param restUsage + * a map containing the counts of the number of times each REST + * endpoint has been called + */ + public NodeUsage(DiscoveryNode node, long timestamp, long sinceTime, Map restUsage) { + super(node); + this.timestamp = timestamp; + this.sinceTime = sinceTime; + this.restUsage = restUsage; + } + + /** + * @return the timestamp for when these statistics were collected + */ + public long getTimestamp() { + return timestamp; + } + + /** + * @return 
the timestamp for when the collection of these statistics started + */ + public long getSinceTime() { + return sinceTime; + } + + /** + * @return a map containing the counts of the number of times each REST + * endpoint has been called + */ + public Map getRestUsage() { + return restUsage; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field("since", sinceTime); + if (restUsage != null) { + builder.field("rest_actions"); + builder.map(restUsage); + } + return builder; + } + + @SuppressWarnings("unchecked") + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + timestamp = in.readLong(); + sinceTime = in.readLong(); + restUsage = (Map) in.readGenericValue(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeLong(timestamp); + out.writeLong(sinceTime); + out.writeGenericValue(restUsage); + } + +} diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/DummyCharFilterFactory.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageAction.java similarity index 54% rename from core/src/test/java/org/elasticsearch/indices/analysis/DummyCharFilterFactory.java rename to core/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageAction.java index 8c5896e59ec..358659e5f61 100644 --- a/core/src/test/java/org/elasticsearch/indices/analysis/DummyCharFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageAction.java @@ -17,20 +17,28 @@ * under the License. */ -package org.elasticsearch.indices.analysis; +package org.elasticsearch.action.admin.cluster.node.usage; -import org.elasticsearch.index.analysis.CharFilterFactory; +import org.elasticsearch.action.Action; +import org.elasticsearch.client.ElasticsearchClient; -import java.io.Reader; +public class NodesUsageAction extends Action { -public class DummyCharFilterFactory implements CharFilterFactory { - @Override - public String name() { - return "dummy_char_filter"; + public static final NodesUsageAction INSTANCE = new NodesUsageAction(); + public static final String NAME = "cluster:monitor/nodes/usage"; + + protected NodesUsageAction() { + super(NAME); } @Override - public Reader create(Reader reader) { - return null; + public NodesUsageRequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new NodesUsageRequestBuilder(client, this); } -} + + @Override + public NodesUsageResponse newResponse() { + return new NodesUsageResponse(); + } + +} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageRequest.java new file mode 100644 index 00000000000..c4e80494aed --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageRequest.java @@ -0,0 +1,86 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.node.usage; + +import org.elasticsearch.action.support.nodes.BaseNodesRequest; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +public class NodesUsageRequest extends BaseNodesRequest { + + private boolean restActions; + + public NodesUsageRequest() { + super(); + } + + /** + * Get usage from nodes based on the nodes ids specified. If none are + * passed, usage for all nodes will be returned. + */ + public NodesUsageRequest(String... nodesIds) { + super(nodesIds); + } + + /** + * Sets all the request flags. + */ + public NodesUsageRequest all() { + this.restActions = true; + return this; + } + + /** + * Clears all the request flags. + */ + public NodesUsageRequest clear() { + this.restActions = false; + return this; + } + + /** + * Should the node rest actions usage statistics be returned. + */ + public boolean restActions() { + return this.restActions; + } + + /** + * Should the node rest actions usage statistics be returned. + */ + public NodesUsageRequest restActions(boolean restActions) { + this.restActions = restActions; + return this; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + this.restActions = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeBoolean(restActions); + } +} \ No newline at end of file diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalyzerProvider.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageRequestBuilder.java similarity index 57% rename from core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalyzerProvider.java rename to core/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageRequestBuilder.java index 68beb817d70..76d14556b9c 100644 --- a/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalyzerProvider.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageRequestBuilder.java @@ -17,24 +17,18 @@ * under the License. 
*/ -package org.elasticsearch.indices.analysis; +package org.elasticsearch.action.admin.cluster.node.usage; -import org.elasticsearch.index.analysis.AnalyzerProvider; -import org.elasticsearch.index.analysis.AnalyzerScope; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.support.nodes.NodesOperationRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; -public class DummyAnalyzerProvider implements AnalyzerProvider { - @Override - public String name() { - return "dummy"; +public class NodesUsageRequestBuilder + extends NodesOperationRequestBuilder { + + public NodesUsageRequestBuilder(ElasticsearchClient client, + Action action) { + super(client, action, new NodesUsageRequest()); } - @Override - public AnalyzerScope scope() { - return AnalyzerScope.INDICES; - } - - @Override - public DummyAnalyzer get() { - return new DummyAnalyzer(); - } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageResponse.java new file mode 100644 index 00000000000..ff88145021c --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageResponse.java @@ -0,0 +1,85 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.node.usage; + +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; + +import java.io.IOException; +import java.util.List; + +/** + * The response for the nodes usage api which contains the individual usage + * statistics for all nodes queried. 
+ */ +public class NodesUsageResponse extends BaseNodesResponse implements ToXContent { + + NodesUsageResponse() { + } + + public NodesUsageResponse(ClusterName clusterName, List nodes, List failures) { + super(clusterName, nodes, failures); + } + + @Override + protected List readNodesFrom(StreamInput in) throws IOException { + return in.readList(NodeUsage::readNodeStats); + } + + @Override + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + out.writeStreamableList(nodes); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject("nodes"); + for (NodeUsage nodeUsage : getNodes()) { + builder.startObject(nodeUsage.getNode().getId()); + builder.field("timestamp", nodeUsage.getTimestamp()); + nodeUsage.toXContent(builder, params); + + builder.endObject(); + } + builder.endObject(); + + return builder; + } + + @Override + public String toString() { + try { + XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); + builder.startObject(); + toXContent(builder, EMPTY_PARAMS); + builder.endObject(); + return builder.string(); + } catch (IOException e) { + return "{ \"error\" : \"" + e.getMessage() + "\"}"; + } + } + +} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/TransportNodesUsageAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/TransportNodesUsageAction.java new file mode 100644 index 00000000000..c87e0b9942d --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/TransportNodesUsageAction.java @@ -0,0 +1,99 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.cluster.node.usage; + +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.nodes.BaseNodeRequest; +import org.elasticsearch.action.support.nodes.TransportNodesAction; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.usage.UsageService; + +import java.io.IOException; +import java.util.List; + +public class TransportNodesUsageAction + extends TransportNodesAction { + + private UsageService usageService; + + @Inject + public TransportNodesUsageAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, + TransportService transportService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, UsageService usageService) { + super(settings, NodesUsageAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, + NodesUsageRequest::new, NodeUsageRequest::new, ThreadPool.Names.MANAGEMENT, NodeUsage.class); + this.usageService = usageService; + } + + @Override + protected NodesUsageResponse newResponse(NodesUsageRequest request, List responses, List failures) { + return new NodesUsageResponse(clusterService.getClusterName(), responses, failures); + } + + @Override + protected NodeUsageRequest newNodeRequest(String nodeId, NodesUsageRequest request) { + return new NodeUsageRequest(nodeId, request); + } + + @Override + protected NodeUsage newNodeResponse() { + return new NodeUsage(); + } + + @Override + protected NodeUsage nodeOperation(NodeUsageRequest nodeUsageRequest) { + NodesUsageRequest request = nodeUsageRequest.request; + return usageService.getUsageStats(clusterService.localNode(), request.restActions()); + } + + public static class NodeUsageRequest extends BaseNodeRequest { + + NodesUsageRequest request; + + public NodeUsageRequest() { + } + + NodeUsageRequest(String nodeId, NodesUsageRequest request) { + super(nodeId); + this.request = request; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + request = new NodesUsageRequest(); + request.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + request.writeTo(out); + } + } +} diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java index 71a709f0b5b..872793f6ef2 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/TransportNodesSnapshotsStatus.java @@ -122,11 +122,6 @@ public class TransportNodesSnapshotsStatus extends TransportNodesAction { private Snapshot[] snapshots; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java 
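For illustration, a minimal sketch of how the nodes usage API added above might be invoked from Java, assuming the transport action is wired up in ActionModule (not shown in this hunk) and an ElasticsearchClient instance is available; the class and method names are the ones introduced in this diff, everything else is hypothetical glue:

import org.elasticsearch.action.admin.cluster.node.usage.NodeUsage;
import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageAction;
import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageRequest;
import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageResponse;
import org.elasticsearch.client.ElasticsearchClient;

public final class NodesUsageExample {
    /** Prints the per-node REST endpoint call counts; an empty node id list means "all nodes". */
    static void printRestUsage(ElasticsearchClient client) {
        NodesUsageRequest request = new NodesUsageRequest().restActions(true); // ask for the REST counters
        NodesUsageResponse response = client.execute(NodesUsageAction.INSTANCE, request).actionGet();
        for (NodeUsage usage : response.getNodes()) {
            System.out.println(usage.getNode().getId() + " -> " + usage.getRestUsage());
        }
    }
}

The same request could be narrowed to specific nodes with new NodesUsageRequest("node-1", "node-2"), since that constructor forwards the node ids to BaseNodesRequest.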
b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java index d77bc599258..57eeb2d5eb4 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java @@ -118,11 +118,6 @@ public class TransportClusterStatsAction extends TransportNodesAction { private List allAliasActions = new ArrayList<>(); - //indices options that require every specified index to exist, expand wildcards only to open indices and - //don't allow that no indices are resolved from wildcard expressions - private static final IndicesOptions INDICES_OPTIONS = IndicesOptions.fromOptions(false, false, true, false); + // indices options that require every specified index to exist, expand wildcards only to open + // indices, don't allow that no indices are resolved from wildcard expressions and resolve the + // expressions only against indices + private static final IndicesOptions INDICES_OPTIONS = IndicesOptions.fromOptions(false, false, true, false, true, false, true); public IndicesAliasesRequest() { diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index d7e299b1cf1..11566378085 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -39,6 +39,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.ShardsIterator; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.settings.Settings; @@ -179,7 +180,8 @@ public class TransportAnalyzeAction extends TransportSingleShardAction tokenizerFactory = parseTokenizerFactory(request, indexAnalyzers, + analysisRegistry, environment); TokenFilterFactory[] tokenFilterFactories = new TokenFilterFactory[0]; tokenFilterFactories = getTokenFilterFactories(request, indexSettings, analysisRegistry, environment, tokenFilterFactories); @@ -187,7 +189,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction parseTokenizerFactory(AnalyzeRequest request, IndexAnalyzers indexAnalzyers, AnalysisRegistry analysisRegistry, Environment environment) throws IOException { + String name; TokenizerFactory tokenizerFactory; final AnalyzeRequest.NameOrDefinition tokenizer = request.tokenizer(); // parse anonymous settings @@ -568,6 +572,7 @@ public class TransportAnalyzeAction extends TransportSingleShardAction tokenizerFactoryFactory; @@ -576,18 +581,20 @@ public class TransportAnalyzeAction extends TransportSingleShardAction(name, tokenizerFactory); } private static IndexSettings getNaIndexSettings(Settings settings) { diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java index 35dd53276cd..7d948e7137e 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java +++ 
b/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.indices.create; +import org.elasticsearch.Version; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -32,14 +33,16 @@ import java.io.IOException; public class CreateIndexResponse extends AcknowledgedResponse { private boolean shardsAcked; + private String index; protected CreateIndexResponse() { } - protected CreateIndexResponse(boolean acknowledged, boolean shardsAcked) { + protected CreateIndexResponse(boolean acknowledged, boolean shardsAcked, String index) { super(acknowledged); assert acknowledged || shardsAcked == false; // if its not acknowledged, then shards acked should be false too this.shardsAcked = shardsAcked; + this.index = index; } @Override @@ -47,6 +50,9 @@ public class CreateIndexResponse extends AcknowledgedResponse { super.readFrom(in); readAcknowledged(in); shardsAcked = in.readBoolean(); + if (in.getVersion().onOrAfter(Version.V_5_6_0)) { + index = in.readString(); + } } @Override @@ -54,6 +60,9 @@ public class CreateIndexResponse extends AcknowledgedResponse { super.writeTo(out); writeAcknowledged(out); out.writeBoolean(shardsAcked); + if (out.getVersion().onOrAfter(Version.V_5_6_0)) { + out.writeString(index); + } } /** @@ -65,7 +74,12 @@ public class CreateIndexResponse extends AcknowledgedResponse { return shardsAcked; } + public String index() { + return index; + } + public void addCustomFields(XContentBuilder builder) throws IOException { builder.field("shards_acknowledged", isShardsAcked()); + builder.field("index", index()); } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java index 354dcf23873..0ac8d02f977 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexAction.java @@ -79,7 +79,7 @@ public class TransportCreateIndexAction extends TransportMasterNodeAction - listener.onResponse(new CreateIndexResponse(response.isAcknowledged(), response.isShardsAcked())), + listener.onResponse(new CreateIndexResponse(response.isAcknowledged(), response.isShardsAcked(), indexName)), listener::onFailure)); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java index 3796529b859..2abe0dad74e 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java @@ -119,7 +119,7 @@ public class TransportRolloverAction extends TransportMasterNodeAction conditionResults = evaluateConditions(rolloverRequest.getConditions(), - statsResponse.getTotal().getDocs(), metaData.index(sourceIndexName)); + metaData.index(sourceIndexName), statsResponse); if (rolloverRequest.isDryRun()) { listener.onResponse( @@ -201,6 +201,11 @@ public class TransportRolloverAction extends TransportMasterNodeAction evaluateConditions(final Set conditions, final IndexMetaData metaData, + final IndicesStatsResponse statsResponse) { + return 
evaluateConditions(conditions, statsResponse.getPrimaries().getDocs(), metaData); + } + static void validate(MetaData metaData, RolloverRequest request) { final AliasOrIndex aliasOrIndex = metaData.getAliasAndIndexLookup().get(request.getAlias()); if (aliasOrIndex == null) { diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkResponse.java index e7ad0afe3aa..0c5149f6bf3 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkResponse.java @@ -25,7 +25,7 @@ public final class ShrinkResponse extends CreateIndexResponse { ShrinkResponse() { } - ShrinkResponse(boolean acknowledged, boolean shardsAcked) { - super(acknowledged, shardsAcked); + ShrinkResponse(boolean acknowledged, boolean shardsAcked, String index) { + super(acknowledged, shardsAcked, index); } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportShrinkAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportShrinkAction.java index 8c482eac10c..2555299709c 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportShrinkAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportShrinkAction.java @@ -91,8 +91,13 @@ public class TransportShrinkAction extends TransportMasterNodeAction - listener.onResponse(new ShrinkResponse(response.isAcknowledged(), response.isShardsAcked())), listener::onFailure)); + createIndexService.createIndex( + updateRequest, + ActionListener.wrap(response -> + listener.onResponse(new ShrinkResponse(response.isAcknowledged(), response.isShardsAcked(), updateRequest.index())), + listener::onFailure + ) + ); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index b60728b9d45..5836da3b8c4 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -41,7 +41,6 @@ import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContent; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; @@ -300,10 +299,16 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques if (token == null) { continue; } - assert token == XContentParser.Token.START_OBJECT; + if (token != XContentParser.Token.START_OBJECT) { + throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected " + + XContentParser.Token.START_OBJECT + " but found [" + token + "]"); + } // Move to FIELD_NAME, that's the action token = parser.nextToken(); - assert token == XContentParser.Token.FIELD_NAME; + if (token != XContentParser.Token.FIELD_NAME) { + throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected " + + XContentParser.Token.FIELD_NAME + " but found [" + token + "]"); + } String action = parser.currentName(); String index = defaultIndex; diff --git 
a/core/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java index 8e0b48143dc..30bf2dc1477 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkResponse.java @@ -76,13 +76,6 @@ public class BulkResponse extends ActionResponse implements Iterable wrapActionListenerIfNeeded(long ingestTookInMillis, ActionListener actionListener) { if (itemResponses.isEmpty()) { return ActionListener.wrap( - response -> actionListener.onResponse( - new BulkResponse(response.getItems(), response.getTookInMillis(), ingestTookInMillis)), - actionListener::onFailure); + response -> actionListener.onResponse(new BulkResponse(response.getItems(), + response.getTook().getMillis(), ingestTookInMillis)), + actionListener::onFailure); } else { return new IngestBulkResponseListener(ingestTookInMillis, originalSlots, itemResponses, actionListener); } @@ -610,7 +610,9 @@ public class TransportBulkAction extends HandledTransportAction impleme public String toString() { return "delete {[" + index + "][" + type + "][" + id + "]}"; } + + /** + * Override this method from ReplicationAction, this is where we are storing our state in the request object (which we really shouldn't + * do). Once the transport client goes away we can move away from making this available, but in the meantime this is dangerous to set or + * use because the DeleteRequest object will always be wrapped in a bulk request envelope, which is where this *should* be set. + */ + @Override + public long primaryTerm() { + throw new UnsupportedOperationException("primary term should never be set on DeleteRequest"); + } + + /** + * Override this method from ReplicationAction, this is where we are storing our state in the request object (which we really shouldn't + * do). Once the transport client goes away we can move away from making this available, but in the meantime this is dangerous to set or + * use because the DeleteRequest object will always be wrapped in a bulk request envelope, which is where this *should* be set. + */ + @Override + public void primaryTerm(long term) { + throw new UnsupportedOperationException("primary term should never be set on DeleteRequest"); + } + + /** + * Override this method from ReplicationAction, this is where we are storing our state in the request object (which we really shouldn't + * do). Once the transport client goes away we can move away from making this available, but in the meantime this is dangerous to set or + * use because the DeleteRequest object will always be wrapped in a bulk request envelope, which is where this *should* be set. 
+ */ + @Override + public DeleteRequest setShardId(ShardId shardId) { + throw new UnsupportedOperationException("shard id should never be set on DeleteRequest"); + } } diff --git a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java index 552780e7395..5667bf5f9d5 100644 --- a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -44,6 +44,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; +import org.elasticsearch.index.shard.ShardId; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -608,4 +609,35 @@ public class IndexRequest extends ReplicatedWriteRequest implement public long getAutoGeneratedTimestamp() { return autoGeneratedTimestamp; } + + /** + * Override this method from ReplicationAction, this is where we are storing our state in the request object (which we really shouldn't + * do). Once the transport client goes away we can move away from making this available, but in the meantime this is dangerous to set or + * use because the IndexRequest object will always be wrapped in a bulk request envelope, which is where this *should* be set. + */ + @Override + public long primaryTerm() { + throw new UnsupportedOperationException("primary term should never be set on IndexRequest"); + } + + /** + * Override this method from ReplicationAction, this is where we are storing our state in the request object (which we really shouldn't + * do). Once the transport client goes away we can move away from making this available, but in the meantime this is dangerous to set or + * use because the IndexRequest object will always be wrapped in a bulk request envelope, which is where this *should* be set. + */ + @Override + public void primaryTerm(long term) { + throw new UnsupportedOperationException("primary term should never be set on IndexRequest"); + } + + /** + * Override this method from ReplicationAction, this is where we are storing our state in the request object (which we really shouldn't + * do). Once the transport client goes away we can move away from making this available, but in the meantime this is dangerous to set or + * use because the IndexRequest object will always be wrapped in a bulk request envelope, which is where this *should* be set. + */ + @Override + public IndexRequest setShardId(ShardId shardId) { + throw new UnsupportedOperationException("shard id should never be set on IndexRequest"); + } + } diff --git a/core/src/main/java/org/elasticsearch/action/search/AbstractAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/AbstractAsyncAction.java deleted file mode 100644 index 96db19d5472..00000000000 --- a/core/src/main/java/org/elasticsearch/action/search/AbstractAsyncAction.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.search; - -/** - * Base implementation for an async action. - */ -abstract class AbstractAsyncAction { - - private final long startTime; - - protected AbstractAsyncAction() { this(System.currentTimeMillis());} - - protected AbstractAsyncAction(long startTime) { - this.startTime = startTime; - } - - /** - * Return the time when the action started. - */ - protected final long startTime() { - return startTime; - } - - /** - * Builds how long it took to execute the search. - */ - protected final long buildTookInMillis() { - // protect ourselves against time going backwards - // negative values don't make sense and we want to be able to serialize that thing as a vLong - return Math.max(1, System.currentTimeMillis() - startTime); - } - - abstract void start(); -} diff --git a/core/src/main/java/org/elasticsearch/action/search/ClearScrollController.java b/core/src/main/java/org/elasticsearch/action/search/ClearScrollController.java new file mode 100644 index 00000000000..ac708d9b6b0 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/search/ClearScrollController.java @@ -0,0 +1,143 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.action.search; + +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.logging.log4j.util.Supplier; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportResponse; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.elasticsearch.action.search.TransportSearchHelper.parseScrollId; + +final class ClearScrollController implements Runnable { + private final DiscoveryNodes nodes; + private final SearchTransportService searchTransportService; + private final CountDown expectedOps; + private final ActionListener listener; + private final AtomicBoolean hasFailed = new AtomicBoolean(false); + private final AtomicInteger freedSearchContexts = new AtomicInteger(0); + private final Logger logger; + private final Runnable runner; + + ClearScrollController(ClearScrollRequest request, ActionListener listener, DiscoveryNodes nodes, Logger logger, + SearchTransportService searchTransportService) { + this.nodes = nodes; + this.logger = logger; + this.searchTransportService = searchTransportService; + this.listener = listener; + List scrollIds = request.getScrollIds(); + final int expectedOps; + if (scrollIds.size() == 1 && "_all".equals(scrollIds.get(0))) { + expectedOps = nodes.getSize(); + runner = this::cleanAllScrolls; + } else { + List parsedScrollIds = new ArrayList<>(); + for (String parsedScrollId : request.getScrollIds()) { + ScrollIdForNode[] context = parseScrollId(parsedScrollId).getContext(); + for (ScrollIdForNode id : context) { + parsedScrollIds.add(id); + } + } + if (parsedScrollIds.isEmpty()) { + expectedOps = 0; + runner = () -> listener.onResponse(new ClearScrollResponse(true, 0)); + } else { + expectedOps = parsedScrollIds.size(); + runner = () -> cleanScrollIds(parsedScrollIds); + } + } + this.expectedOps = new CountDown(expectedOps); + + } + + @Override + public void run() { + runner.run(); + } + + void cleanAllScrolls() { + for (final DiscoveryNode node : nodes) { + try { + Transport.Connection connection = searchTransportService.getConnection(null, node); + searchTransportService.sendClearAllScrollContexts(connection, new ActionListener() { + @Override + public void onResponse(TransportResponse response) { + onFreedContext(true); + } + + @Override + public void onFailure(Exception e) { + onFailedFreedContext(e, node); + } + }); + } catch (Exception e) { + onFailedFreedContext(e, node); + } + } + } + + void cleanScrollIds(List parsedScrollIds) { + SearchScrollAsyncAction.collectNodesAndRun(parsedScrollIds, nodes, searchTransportService, ActionListener.wrap( + lookup -> { + for (ScrollIdForNode target : parsedScrollIds) { + final DiscoveryNode node = lookup.apply(target.getClusterAlias(), target.getNode()); + if (node == null) { + onFreedContext(false); + } else { + try { + Transport.Connection connection = searchTransportService.getConnection(target.getClusterAlias(), node); + searchTransportService.sendFreeContext(connection, target.getScrollId(), + ActionListener.wrap(freed -> onFreedContext(freed.isFreed()), e -> onFailedFreedContext(e, node))); + } catch (Exception e) { + onFailedFreedContext(e, node); + } 
+ } + } + }, listener::onFailure)); + } + + private void onFreedContext(boolean freed) { + if (freed) { + freedSearchContexts.incrementAndGet(); + } + if (expectedOps.countDown()) { + boolean succeeded = hasFailed.get() == false; + listener.onResponse(new ClearScrollResponse(succeeded, freedSearchContexts.get())); + } + } + + private void onFailedFreedContext(Throwable e, DiscoveryNode node) { + logger.warn((Supplier) () -> new ParameterizedMessage("Clear SC failed on node[{}]", node), e); + if (expectedOps.countDown()) { + listener.onResponse(new ClearScrollResponse(false, freedSearchContexts.get())); + } else { + hasFailed.set(true); + } + } +} diff --git a/core/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java b/core/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java index 078bd6e0b4e..bc673644a06 100644 --- a/core/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java +++ b/core/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java @@ -28,6 +28,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.collapse.CollapseBuilder; +import org.elasticsearch.search.internal.InternalSearchResponse; import java.io.IOException; import java.util.HashMap; @@ -42,11 +43,11 @@ import java.util.function.Function; */ final class ExpandSearchPhase extends SearchPhase { private final SearchPhaseContext context; - private final SearchResponse searchResponse; - private final Function nextPhaseFactory; + private final InternalSearchResponse searchResponse; + private final Function nextPhaseFactory; - ExpandSearchPhase(SearchPhaseContext context, SearchResponse searchResponse, - Function nextPhaseFactory) { + ExpandSearchPhase(SearchPhaseContext context, InternalSearchResponse searchResponse, + Function nextPhaseFactory) { super("expand"); this.context = context; this.searchResponse = searchResponse; @@ -65,7 +66,7 @@ final class ExpandSearchPhase extends SearchPhase { @Override public void run() throws IOException { - if (isCollapseRequest() && searchResponse.getHits().getHits().length > 0) { + if (isCollapseRequest() && searchResponse.hits().getHits().length > 0) { SearchRequest searchRequest = context.getRequest(); CollapseBuilder collapseBuilder = searchRequest.source().collapse(); final List innerHitBuilders = collapseBuilder.getInnerHits(); @@ -73,7 +74,7 @@ final class ExpandSearchPhase extends SearchPhase { if (collapseBuilder.getMaxConcurrentGroupRequests() > 0) { multiRequest.maxConcurrentSearchRequests(collapseBuilder.getMaxConcurrentGroupRequests()); } - for (SearchHit hit : searchResponse.getHits()) { + for (SearchHit hit : searchResponse.hits().getHits()) { BoolQueryBuilder groupQuery = new BoolQueryBuilder(); Object collapseValue = hit.field(collapseBuilder.getField()).getValue(); if (collapseValue != null) { @@ -97,7 +98,7 @@ final class ExpandSearchPhase extends SearchPhase { context.getSearchTransport().sendExecuteMultiSearch(multiRequest, context.getTask(), ActionListener.wrap(response -> { Iterator it = response.iterator(); - for (SearchHit hit : searchResponse.getHits()) { + for (SearchHit hit : searchResponse.hits.getHits()) { for (InnerHitBuilder innerHitBuilder : innerHitBuilders) { MultiSearchResponse.Item item = it.next(); if (item.isFailure()) { diff --git a/core/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java 
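As a usage sketch of the path that ends up in the ClearScrollController above (assumptions: a Client instance is available; the request and response types are the pre-existing clear-scroll API, not part of this diff): passing the single id "_all" frees contexts on every node, while concrete scroll ids are parsed and routed per shard.

import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.action.search.ClearScrollResponse;
import org.elasticsearch.client.Client;

public final class ClearScrollExample {
    /** Frees every open scroll context in the cluster and reports how many were freed. */
    static void clearAllScrolls(Client client) {
        ClearScrollRequest request = new ClearScrollRequest();
        request.addScrollId("_all"); // or add one or more concrete scroll ids instead
        ClearScrollResponse response = client.clearScroll(request).actionGet();
        System.out.println("succeeded=" + response.isSucceeded() + ", freed=" + response.getNumFreed());
    }
}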
b/core/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java index b824a46c50f..c26fc63421d 100644 --- a/core/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java +++ b/core/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java @@ -36,7 +36,7 @@ import org.elasticsearch.transport.Transport; import java.io.IOException; import java.util.List; -import java.util.function.Function; +import java.util.function.BiFunction; /** * This search phase merges the query results from the previous phase together and calculates the topN hits for this search. @@ -46,7 +46,7 @@ final class FetchSearchPhase extends SearchPhase { private final AtomicArray fetchResults; private final SearchPhaseController searchPhaseController; private final AtomicArray queryResults; - private final Function nextPhaseFactory; + private final BiFunction nextPhaseFactory; private final SearchPhaseContext context; private final Logger logger; private final InitialSearchPhase.SearchPhaseResults resultConsumer; @@ -55,13 +55,13 @@ final class FetchSearchPhase extends SearchPhase { SearchPhaseController searchPhaseController, SearchPhaseContext context) { this(resultConsumer, searchPhaseController, context, - (response) -> new ExpandSearchPhase(context, response, // collapse only happens if the request has inner hits - (finalResponse) -> sendResponsePhase(finalResponse, context))); + (response, scrollId) -> new ExpandSearchPhase(context, response, // collapse only happens if the request has inner hits + (finalResponse) -> sendResponsePhase(finalResponse, scrollId, context))); } FetchSearchPhase(InitialSearchPhase.SearchPhaseResults resultConsumer, SearchPhaseController searchPhaseController, - SearchPhaseContext context, Function nextPhaseFactory) { + SearchPhaseContext context, BiFunction nextPhaseFactory) { super("fetch"); if (context.getNumShards() != resultConsumer.getNumShards()) { throw new IllegalStateException("number of shards must match the length of the query results but doesn't:" @@ -205,14 +205,14 @@ final class FetchSearchPhase extends SearchPhase { AtomicArray fetchResultsArr) { final InternalSearchResponse internalResponse = searchPhaseController.merge(context.getRequest().scroll() != null, reducedQueryPhase, fetchResultsArr.asList(), fetchResultsArr::get); - context.executeNextPhase(this, nextPhaseFactory.apply(context.buildSearchResponse(internalResponse, scrollId))); + context.executeNextPhase(this, nextPhaseFactory.apply(internalResponse, scrollId)); } - private static SearchPhase sendResponsePhase(SearchResponse response, SearchPhaseContext context) { + private static SearchPhase sendResponsePhase(InternalSearchResponse response, String scrollId, SearchPhaseContext context) { return new SearchPhase("response") { @Override public void run() throws IOException { - context.onResponse(response); + context.onResponse(context.buildSearchResponse(response, scrollId)); } }; } diff --git a/core/src/main/java/org/elasticsearch/action/search/ScrollIdForNode.java b/core/src/main/java/org/elasticsearch/action/search/ScrollIdForNode.java index 76d4ac11413..59e1a331067 100644 --- a/core/src/main/java/org/elasticsearch/action/search/ScrollIdForNode.java +++ b/core/src/main/java/org/elasticsearch/action/search/ScrollIdForNode.java @@ -19,12 +19,16 @@ package org.elasticsearch.action.search; +import org.elasticsearch.common.inject.internal.Nullable; + class ScrollIdForNode { private final String node; private final long scrollId; + private final String clusterAlias; - 
ScrollIdForNode(String node, long scrollId) { + ScrollIdForNode(@Nullable String clusterAlias, String node, long scrollId) { this.node = node; + this.clusterAlias = clusterAlias; this.scrollId = scrollId; } @@ -32,7 +36,20 @@ class ScrollIdForNode { return node; } + public String getClusterAlias() { + return clusterAlias; + } + public long getScrollId() { return scrollId; } + + @Override + public String toString() { + return "ScrollIdForNode{" + + "node='" + node + '\'' + + ", scrollId=" + scrollId + + ", clusterAlias='" + clusterAlias + '\'' + + '}'; + } } diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/core/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index e1e0205e7e5..879607d059e 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -405,9 +405,18 @@ public final class SearchPhaseController extends AbstractComponent { * @param queryResults a list of non-null query shard results */ public ReducedQueryPhase reducedQueryPhase(Collection queryResults, boolean isScrollRequest) { - return reducedQueryPhase(queryResults, null, new ArrayList<>(), new TopDocsStats(), 0, isScrollRequest); + return reducedQueryPhase(queryResults, isScrollRequest, true); } + /** + * Reduces the given query results and consumes all aggregations and profile results. + * @param queryResults a list of non-null query shard results + */ + public ReducedQueryPhase reducedQueryPhase(Collection queryResults, boolean isScrollRequest, boolean trackTotalHits) { + return reducedQueryPhase(queryResults, null, new ArrayList<>(), new TopDocsStats(trackTotalHits), 0, isScrollRequest); + } + + /** * Reduces the given query results and consumes all aggregations and profile results. * @param queryResults a list of non-null query shard results @@ -711,6 +720,7 @@ public final class SearchPhaseController extends AbstractComponent { boolean isScrollRequest = request.scroll() != null; final boolean hasAggs = source != null && source.aggregations() != null; final boolean hasTopDocs = source == null || source.size() != 0; + final boolean trackTotalHits = source == null || source.trackTotalHits(); if (isScrollRequest == false && (hasAggs || hasTopDocs)) { // no incremental reduce if scroll is used - we only hit a single shard or sometimes more... @@ -722,18 +732,30 @@ public final class SearchPhaseController extends AbstractComponent { return new InitialSearchPhase.SearchPhaseResults(numShards) { @Override public ReducedQueryPhase reduce() { - return reducedQueryPhase(results.asList(), isScrollRequest); + return reducedQueryPhase(results.asList(), isScrollRequest, trackTotalHits); } }; } static final class TopDocsStats { + final boolean trackTotalHits; long totalHits; long fetchHits; float maxScore = Float.NEGATIVE_INFINITY; + TopDocsStats() { + this(true); + } + + TopDocsStats(boolean trackTotalHits) { + this.trackTotalHits = trackTotalHits; + this.totalHits = trackTotalHits ? 
0 : -1; + } + void add(TopDocs topDocs) { - totalHits += topDocs.totalHits; + if (trackTotalHits) { + totalHits += topDocs.totalHits; + } fetchHits += topDocs.scoreDocs.length; if (!Float.isNaN(topDocs.getMaxScore())) { maxScore = Math.max(maxScore, topDocs.getMaxScore()); diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java index 9e35cca05b9..01a3e94620a 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -39,6 +39,8 @@ import java.util.Arrays; import java.util.Collections; import java.util.Objects; +import static org.elasticsearch.action.ValidateActions.addValidationError; + /** * A request to execute search against one or more indices (or all). Best created using * {@link org.elasticsearch.client.Requests#searchRequest(String...)}. @@ -102,7 +104,12 @@ public final class SearchRequest extends ActionRequest implements IndicesRequest @Override public ActionRequestValidationException validate() { - return null; + ActionRequestValidationException validationException = null; + if (source != null && source.trackTotalHits() == false && scroll() != null) { + validationException = + addValidationError("disabling [track_total_hits] is not allowed in a scroll context", validationException); + } + return validationException; } /** diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index ffe2c1b20c5..0333092b917 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -363,14 +363,21 @@ public class SearchRequestBuilder extends ActionRequestBuilderfalse. + * Applies when sorting, and controls if scores will be tracked as well. Defaults to false. */ public SearchRequestBuilder setTrackScores(boolean trackScores) { sourceBuilder().trackScores(trackScores); return this; } + /** + * Indicates if the total hit count for the query should be tracked. Defaults to true + */ + public SearchRequestBuilder setTrackTotalHits(boolean trackTotalHits) { + sourceBuilder().trackTotalHits(trackTotalHits); + return this; + } + /** * Adds stored fields to load and return (note, it must be stored) as part of the search request. * To disable the stored fields entirely (source and metadata fields) use {@code storedField("_none_")}. diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/core/src/main/java/org/elasticsearch/action/search/SearchResponse.java index 4720a502e93..3aa5e3a2adb 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -45,8 +45,6 @@ import java.util.Map; import static org.elasticsearch.action.search.ShardSearchFailure.readShardSearchFailure; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField; -import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken; /** @@ -135,13 +133,6 @@ public class SearchResponse extends ActionResponse implements StatusToXContentOb return new TimeValue(tookInMillis); } - /** - * How long the search took in milliseconds. 
- */ - public long getTookInMillis() { - return tookInMillis; - } - /** * The total number of shards the search was executed on. */ @@ -252,7 +243,7 @@ public class SearchResponse extends ActionResponse implements StatusToXContentOb } else if (NUM_REDUCE_PHASES.match(currentFieldName)) { numReducePhases = parser.intValue(); } else { - throwUnknownField(currentFieldName, parser.getTokenLocation()); + parser.skipChildren(); } } else if (token == XContentParser.Token.START_OBJECT) { if (SearchHits.Fields.HITS.equals(currentFieldName)) { @@ -275,7 +266,7 @@ public class SearchResponse extends ActionResponse implements StatusToXContentOb } else if (RestActions.TOTAL_FIELD.match(currentFieldName)) { totalShards = parser.intValue(); } else { - throwUnknownField(currentFieldName, parser.getTokenLocation()); + parser.skipChildren(); } } else if (token == XContentParser.Token.START_ARRAY) { if (RestActions.FAILURES_FIELD.match(currentFieldName)) { @@ -283,14 +274,14 @@ public class SearchResponse extends ActionResponse implements StatusToXContentOb failures.add(ShardSearchFailure.fromXContent(parser)); } } else { - throwUnknownField(currentFieldName, parser.getTokenLocation()); + parser.skipChildren(); } } else { - throwUnknownToken(token, parser.getTokenLocation()); + parser.skipChildren(); } } } else { - throwUnknownField(currentFieldName, parser.getTokenLocation()); + parser.skipChildren(); } } } diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java new file mode 100644 index 00000000000..aa757a039b8 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java @@ -0,0 +1,286 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.search; + +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.logging.log4j.util.Supplier; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.search.SearchPhaseResult; +import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.internal.InternalScrollSearchRequest; +import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.transport.RemoteClusterService; +import org.elasticsearch.transport.Transport; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.BiFunction; + +import static org.elasticsearch.action.search.TransportSearchHelper.internalScrollSearchRequest; + +/** + * Abstract base class for scroll execution modes. This class encapsulates the basic logic to + * fan out to nodes and execute the query part of the scroll request. Subclasses can for instance + * run separate fetch phases etc. + */ +abstract class SearchScrollAsyncAction implements Runnable { + /* + * Some random TODO: + * Today we still have a dedicated execution mode for scrolls while we could simplify this by implementing + * scroll-like functionality (mainly syntactic sugar) as an ordinary search with search_after. We could even go further and + * make the scroll entirely stateless and encode the state per shard in the scroll ID. + * + * Today we also hold a context per shard but maybe + * we want the context per coordinating node such that we route the scroll to the same coordinator all the time and hold the context + * here? This would have the advantage that if we lose that node the entire scroll is dead, not just one shard. + * + * Additionally there is the possibility to associate the scroll with a seq. id. such that we can talk to any replica as long as + * the shard's engine hasn't advanced that seq. id yet. Such a resume is possible and best effort, it could even be a safety net since + * if you rely on indices being read-only things can change in-between without notification or it's hard to detect if there were any + * changes while scrolling.
These are all options to improve the current situation which we can look into down the road + */ + protected final Logger logger; + protected final ActionListener listener; + protected final ParsedScrollId scrollId; + protected final DiscoveryNodes nodes; + protected final SearchPhaseController searchPhaseController; + protected final SearchScrollRequest request; + protected final SearchTransportService searchTransportService; + private final long startTime; + private final List shardFailures = new ArrayList<>(); + private final AtomicInteger successfulOps; + + protected SearchScrollAsyncAction(ParsedScrollId scrollId, Logger logger, DiscoveryNodes nodes, + ActionListener listener, SearchPhaseController searchPhaseController, + SearchScrollRequest request, + SearchTransportService searchTransportService) { + this.startTime = System.currentTimeMillis(); + this.scrollId = scrollId; + this.successfulOps = new AtomicInteger(scrollId.getContext().length); + this.logger = logger; + this.listener = listener; + this.nodes = nodes; + this.searchPhaseController = searchPhaseController; + this.request = request; + this.searchTransportService = searchTransportService; + } + + /** + * Builds how long it took to execute the search. + */ + private long buildTookInMillis() { + // protect ourselves against time going backwards + // negative values don't make sense and we want to be able to serialize that thing as a vLong + return Math.max(1, System.currentTimeMillis() - startTime); + } + + public final void run() { + final ScrollIdForNode[] context = scrollId.getContext(); + if (context.length == 0) { + listener.onFailure(new SearchPhaseExecutionException("query", "no nodes to search on", ShardSearchFailure.EMPTY_ARRAY)); + } else { + collectNodesAndRun(Arrays.asList(context), nodes, searchTransportService, ActionListener.wrap(lookup -> run(lookup, context), + listener::onFailure)); + } + } + + /** + * This method collects nodes from the remote clusters asynchronously if any of the scroll IDs references a remote cluster. + * Otherwise the action listener will be invoked immediately with a function based on the given discovery nodes. 
+ */ + static void collectNodesAndRun(final Iterable scrollIds, DiscoveryNodes nodes, + SearchTransportService searchTransportService, + ActionListener> listener) { + Set clusters = new HashSet<>(); + for (ScrollIdForNode target : scrollIds) { + if (target.getClusterAlias() != null) { + clusters.add(target.getClusterAlias()); + } + } + if (clusters.isEmpty()) { // no remote clusters + listener.onResponse((cluster, node) -> nodes.get(node)); + } else { + RemoteClusterService remoteClusterService = searchTransportService.getRemoteClusterService(); + remoteClusterService.collectNodes(clusters, ActionListener.wrap(nodeFunction -> { + final BiFunction clusterNodeLookup = (clusterAlias, node) -> { + if (clusterAlias == null) { + return nodes.get(node); + } else { + return nodeFunction.apply(clusterAlias, node); + } + }; + listener.onResponse(clusterNodeLookup); + }, listener::onFailure)); + } + } + + private void run(BiFunction clusterNodeLookup, final ScrollIdForNode[] context) { + final CountDown counter = new CountDown(scrollId.getContext().length); + for (int i = 0; i < context.length; i++) { + ScrollIdForNode target = context[i]; + final int shardIndex = i; + final Transport.Connection connection; + try { + DiscoveryNode node = clusterNodeLookup.apply(target.getClusterAlias(), target.getNode()); + if (node == null) { + throw new IllegalStateException("node [" + target.getNode() + "] is not available"); + } + connection = getConnection(target.getClusterAlias(), node); + } catch (Exception ex) { + onShardFailure("query", counter, target.getScrollId(), + ex, null, () -> SearchScrollAsyncAction.this.moveToNextPhase(clusterNodeLookup)); + continue; + } + final InternalScrollSearchRequest internalRequest = internalScrollSearchRequest(target.getScrollId(), request); + // we can't create a SearchShardTarget here since we don't know the index and shard ID we are talking to + // we only know the node and the search context ID. Yet, the response will contain the SearchShardTarget + // from the target node instead...that's why we pass null here + SearchActionListener searchActionListener = new SearchActionListener(null, shardIndex) { + + @Override + protected void setSearchShardTarget(T response) { + // don't do this - it's part of the response... + assert response.getSearchShardTarget() != null : "search shard target must not be null"; + if (target.getClusterAlias() != null) { + // re-create the search target and add the cluster alias if there is any, + // we need this down the road for subseq. 
phases + SearchShardTarget searchShardTarget = response.getSearchShardTarget(); + response.setSearchShardTarget(new SearchShardTarget(searchShardTarget.getNodeId(), searchShardTarget.getShardId(), + target.getClusterAlias(), null)); + } + } + + @Override + protected void innerOnResponse(T result) { + assert shardIndex == result.getShardIndex() : "shard index mismatch: " + shardIndex + " but got: " + + result.getShardIndex(); + onFirstPhaseResult(shardIndex, result); + if (counter.countDown()) { + SearchPhase phase = moveToNextPhase(clusterNodeLookup); + try { + phase.run(); + } catch (Exception e) { + // we need to fail the entire request here - the entire phase just blew up + // don't call onShardFailure or onFailure here since otherwise we'd countDown the counter + // again which would result in an exception + listener.onFailure(new SearchPhaseExecutionException(phase.getName(), "Phase failed", e, + ShardSearchFailure.EMPTY_ARRAY)); + } + } + } + + @Override + public void onFailure(Exception t) { + onShardFailure("query", counter, target.getScrollId(), t, null, + () -> SearchScrollAsyncAction.this.moveToNextPhase(clusterNodeLookup)); + } + }; + executeInitialPhase(connection, internalRequest, searchActionListener); + } + } + + synchronized ShardSearchFailure[] buildShardFailures() { // pkg private for testing + if (shardFailures.isEmpty()) { + return ShardSearchFailure.EMPTY_ARRAY; + } + return shardFailures.toArray(new ShardSearchFailure[shardFailures.size()]); + } + + // we do our best to return the shard failures, but its ok if its not fully concurrently safe + // we simply try and return as much as possible + private synchronized void addShardFailure(ShardSearchFailure failure) { + shardFailures.add(failure); + } + + protected abstract void executeInitialPhase(Transport.Connection connection, InternalScrollSearchRequest internalRequest, + SearchActionListener searchActionListener); + + protected abstract SearchPhase moveToNextPhase(BiFunction clusterNodeLookup); + + protected abstract void onFirstPhaseResult(int shardId, T result); + + protected SearchPhase sendResponsePhase(SearchPhaseController.ReducedQueryPhase queryPhase, + final AtomicArray fetchResults) { + return new SearchPhase("fetch") { + @Override + public void run() throws IOException { + sendResponse(queryPhase, fetchResults); + } + }; + } + + protected final void sendResponse(SearchPhaseController.ReducedQueryPhase queryPhase, + final AtomicArray fetchResults) { + try { + final InternalSearchResponse internalResponse = searchPhaseController.merge(true, queryPhase, fetchResults.asList(), + fetchResults::get); + // the scroll ID never changes we always return the same ID. This ID contains all the shards and their context ids + // such that we can talk to them abgain in the next roundtrip. 
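+            // only attach a scroll ID to the response if the request asked for the scroll context to be kept alive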
+ String scrollId = null; + if (request.scroll() != null) { + scrollId = request.scrollId(); + } + listener.onResponse(new SearchResponse(internalResponse, scrollId, this.scrollId.getContext().length, successfulOps.get(), + buildTookInMillis(), buildShardFailures())); + } catch (Exception e) { + listener.onFailure(new ReduceSearchPhaseException("fetch", "inner finish failed", e, buildShardFailures())); + } + } + + protected void onShardFailure(String phaseName, final CountDown counter, final long searchId, Exception failure, + @Nullable SearchShardTarget searchShardTarget, + Supplier nextPhaseSupplier) { + if (logger.isDebugEnabled()) { + logger.debug((Supplier) () -> new ParameterizedMessage("[{}] Failed to execute {} phase", searchId, phaseName), failure); + } + addShardFailure(new ShardSearchFailure(failure, searchShardTarget)); + int successfulOperations = successfulOps.decrementAndGet(); + assert successfulOperations >= 0 : "successfulOperations must be >= 0 but was: " + successfulOperations; + if (counter.countDown()) { + if (successfulOps.get() == 0) { + listener.onFailure(new SearchPhaseExecutionException(phaseName, "all shards failed", failure, buildShardFailures())); + } else { + SearchPhase phase = nextPhaseSupplier.get(); + try { + phase.run(); + } catch (Exception e) { + e.addSuppressed(failure); + listener.onFailure(new SearchPhaseExecutionException(phase.getName(), "Phase failed", e, + ShardSearchFailure.EMPTY_ARRAY)); + } + } + } + } + + protected Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return searchTransportService.getConnection(clusterAlias, node); + } +} diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryAndFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryAndFetchAsyncAction.java index b3ebaed3cb6..7f36d71ae25 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryAndFetchAsyncAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryAndFetchAsyncAction.java @@ -20,166 +20,43 @@ package org.elasticsearch.action.search; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.logging.log4j.util.Supplier; -import org.apache.lucene.search.ScoreDoc; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.search.fetch.QueryFetchSearchResult; import org.elasticsearch.search.fetch.ScrollQueryFetchSearchResult; import org.elasticsearch.search.internal.InternalScrollSearchRequest; -import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.query.ScrollQuerySearchResult; +import org.elasticsearch.transport.Transport; -import java.util.List; -import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.BiFunction; -import static org.elasticsearch.action.search.TransportSearchHelper.internalScrollSearchRequest; +final class SearchScrollQueryAndFetchAsyncAction extends SearchScrollAsyncAction { -final class SearchScrollQueryAndFetchAsyncAction extends AbstractAsyncAction { - - private final Logger logger; - private final SearchPhaseController searchPhaseController; - private final SearchTransportService searchTransportService; - private final SearchScrollRequest 
request; private final SearchTask task; - private final ActionListener listener; - private final ParsedScrollId scrollId; - private final DiscoveryNodes nodes; - private volatile AtomicArray shardFailures; private final AtomicArray queryFetchResults; - private final AtomicInteger successfulOps; - private final AtomicInteger counter; SearchScrollQueryAndFetchAsyncAction(Logger logger, ClusterService clusterService, SearchTransportService searchTransportService, SearchPhaseController searchPhaseController, SearchScrollRequest request, SearchTask task, ParsedScrollId scrollId, ActionListener listener) { - this.logger = logger; - this.searchPhaseController = searchPhaseController; - this.searchTransportService = searchTransportService; - this.request = request; + super(scrollId, logger, clusterService.state().nodes(), listener, searchPhaseController, request, searchTransportService); this.task = task; - this.listener = listener; - this.scrollId = scrollId; - this.nodes = clusterService.state().nodes(); - this.successfulOps = new AtomicInteger(scrollId.getContext().length); - this.counter = new AtomicInteger(scrollId.getContext().length); - this.queryFetchResults = new AtomicArray<>(scrollId.getContext().length); } - private ShardSearchFailure[] buildShardFailures() { - if (shardFailures == null) { - return ShardSearchFailure.EMPTY_ARRAY; - } - List failures = shardFailures.asList(); - return failures.toArray(new ShardSearchFailure[failures.size()]); + @Override + protected void executeInitialPhase(Transport.Connection connection, InternalScrollSearchRequest internalRequest, + SearchActionListener searchActionListener) { + searchTransportService.sendExecuteScrollFetch(connection, internalRequest, task, searchActionListener); } - // we do our best to return the shard failures, but its ok if its not fully concurrently safe - // we simply try and return as much as possible - private void addShardFailure(final int shardIndex, ShardSearchFailure failure) { - if (shardFailures == null) { - shardFailures = new AtomicArray<>(scrollId.getContext().length); - } - shardFailures.set(shardIndex, failure); + @Override + protected SearchPhase moveToNextPhase(BiFunction clusterNodeLookup) { + return sendResponsePhase(searchPhaseController.reducedQueryPhase(queryFetchResults.asList(), true), queryFetchResults); } - public void start() { - if (scrollId.getContext().length == 0) { - listener.onFailure(new SearchPhaseExecutionException("query", "no nodes to search on", ShardSearchFailure.EMPTY_ARRAY)); - return; - } - - ScrollIdForNode[] context = scrollId.getContext(); - for (int i = 0; i < context.length; i++) { - ScrollIdForNode target = context[i]; - DiscoveryNode node = nodes.get(target.getNode()); - if (node != null) { - executePhase(i, node, target.getScrollId()); - } else { - if (logger.isDebugEnabled()) { - logger.debug("Node [{}] not available for scroll request [{}]", target.getNode(), scrollId.getSource()); - } - successfulOps.decrementAndGet(); - if (counter.decrementAndGet() == 0) { - finishHim(); - } - } - } - - for (ScrollIdForNode target : scrollId.getContext()) { - DiscoveryNode node = nodes.get(target.getNode()); - if (node == null) { - if (logger.isDebugEnabled()) { - logger.debug("Node [{}] not available for scroll request [{}]", target.getNode(), scrollId.getSource()); - } - successfulOps.decrementAndGet(); - if (counter.decrementAndGet() == 0) { - finishHim(); - } - } - } - } - - void executePhase(final int shardIndex, DiscoveryNode node, final long searchId) { - InternalScrollSearchRequest 
internalRequest = internalScrollSearchRequest(searchId, request); - searchTransportService.sendExecuteScrollFetch(node, internalRequest, task, - new SearchActionListener(null, shardIndex) { - @Override - protected void setSearchShardTarget(ScrollQueryFetchSearchResult response) { - // don't do this - it's part of the response... - assert response.getSearchShardTarget() != null : "search shard target must not be null"; - } - @Override - protected void innerOnResponse(ScrollQueryFetchSearchResult response) { - queryFetchResults.set(response.getShardIndex(), response.result()); - if (counter.decrementAndGet() == 0) { - finishHim(); - } - } - @Override - public void onFailure(Exception t) { - onPhaseFailure(t, searchId, shardIndex); - } - }); - } - - private void onPhaseFailure(Exception e, long searchId, int shardIndex) { - if (logger.isDebugEnabled()) { - logger.debug((Supplier) () -> new ParameterizedMessage("[{}] Failed to execute query phase", searchId), e); - } - addShardFailure(shardIndex, new ShardSearchFailure(e)); - successfulOps.decrementAndGet(); - if (counter.decrementAndGet() == 0) { - if (successfulOps.get() == 0) { - listener.onFailure(new SearchPhaseExecutionException("query_fetch", "all shards failed", e, buildShardFailures())); - } else { - finishHim(); - } - } - } - - private void finishHim() { - try { - innerFinishHim(); - } catch (Exception e) { - listener.onFailure(new ReduceSearchPhaseException("fetch", "", e, buildShardFailures())); - } - } - - private void innerFinishHim() throws Exception { - List queryFetchSearchResults = queryFetchResults.asList(); - final InternalSearchResponse internalResponse = searchPhaseController.merge(true, - searchPhaseController.reducedQueryPhase(queryFetchSearchResults, true), queryFetchSearchResults, queryFetchResults::get); - String scrollId = null; - if (request.scroll() != null) { - scrollId = request.scrollId(); - } - listener.onResponse(new SearchResponse(internalResponse, scrollId, this.scrollId.getContext().length, successfulOps.get(), - buildTookInMillis(), buildShardFailures())); + @Override + protected void onFirstPhaseResult(int shardId, ScrollQueryFetchSearchResult result) { + queryFetchResults.setOnce(shardId, result.result()); } } diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java b/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java index 709738dcafb..a964d1904ed 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java @@ -21,215 +21,105 @@ package org.elasticsearch.action.search; import com.carrotsearch.hppc.IntArrayList; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.logging.log4j.util.Supplier; import org.apache.lucene.search.ScoreDoc; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchRequest; import org.elasticsearch.search.internal.InternalScrollSearchRequest; -import 
org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.ScrollQuerySearchResult; +import org.elasticsearch.transport.Transport; -import java.util.List; -import java.util.concurrent.atomic.AtomicInteger; +import java.io.IOException; +import java.util.function.BiFunction; -import static org.elasticsearch.action.search.TransportSearchHelper.internalScrollSearchRequest; +final class SearchScrollQueryThenFetchAsyncAction extends SearchScrollAsyncAction { -final class SearchScrollQueryThenFetchAsyncAction extends AbstractAsyncAction { - - private final Logger logger; private final SearchTask task; - private final SearchTransportService searchTransportService; - private final SearchPhaseController searchPhaseController; - private final SearchScrollRequest request; - private final ActionListener listener; - private final ParsedScrollId scrollId; - private final DiscoveryNodes nodes; - private volatile AtomicArray shardFailures; - final AtomicArray queryResults; - final AtomicArray fetchResults; - private final AtomicInteger successfulOps; + private final AtomicArray fetchResults; + private final AtomicArray queryResults; SearchScrollQueryThenFetchAsyncAction(Logger logger, ClusterService clusterService, SearchTransportService searchTransportService, SearchPhaseController searchPhaseController, SearchScrollRequest request, SearchTask task, ParsedScrollId scrollId, ActionListener listener) { - this.logger = logger; - this.searchTransportService = searchTransportService; - this.searchPhaseController = searchPhaseController; - this.request = request; + super(scrollId, logger, clusterService.state().nodes(), listener, searchPhaseController, request, + searchTransportService); this.task = task; - this.listener = listener; - this.scrollId = scrollId; - this.nodes = clusterService.state().nodes(); - this.successfulOps = new AtomicInteger(scrollId.getContext().length); - this.queryResults = new AtomicArray<>(scrollId.getContext().length); this.fetchResults = new AtomicArray<>(scrollId.getContext().length); + this.queryResults = new AtomicArray<>(scrollId.getContext().length); } - private ShardSearchFailure[] buildShardFailures() { - if (shardFailures == null) { - return ShardSearchFailure.EMPTY_ARRAY; - } - List failures = shardFailures.asList(); - return failures.toArray(new ShardSearchFailure[failures.size()]); + protected void onFirstPhaseResult(int shardId, ScrollQuerySearchResult result) { + queryResults.setOnce(shardId, result.queryResult()); } - // we do our best to return the shard failures, but its ok if its not fully concurrently safe - // we simply try and return as much as possible - private void addShardFailure(final int shardIndex, ShardSearchFailure failure) { - if (shardFailures == null) { - shardFailures = new AtomicArray<>(scrollId.getContext().length); - } - shardFailures.set(shardIndex, failure); + @Override + protected void executeInitialPhase(Transport.Connection connection, InternalScrollSearchRequest internalRequest, + SearchActionListener searchActionListener) { + searchTransportService.sendExecuteScrollQuery(connection, internalRequest, task, searchActionListener); } - public void start() { - if (scrollId.getContext().length == 0) { - listener.onFailure(new SearchPhaseExecutionException("query", "no nodes to search on", ShardSearchFailure.EMPTY_ARRAY)); - return; - } - final CountDown counter = new CountDown(scrollId.getContext().length); - ScrollIdForNode[] context = 
scrollId.getContext(); - for (int i = 0; i < context.length; i++) { - ScrollIdForNode target = context[i]; - DiscoveryNode node = nodes.get(target.getNode()); - if (node != null) { - executeQueryPhase(i, counter, node, target.getScrollId()); - } else { - if (logger.isDebugEnabled()) { - logger.debug("Node [{}] not available for scroll request [{}]", target.getNode(), scrollId.getSource()); - } - successfulOps.decrementAndGet(); - if (counter.countDown()) { - try { - executeFetchPhase(); - } catch (Exception e) { - listener.onFailure(new SearchPhaseExecutionException("query", "Fetch failed", e, ShardSearchFailure.EMPTY_ARRAY)); - return; - } - } - } - } - } - - private void executeQueryPhase(final int shardIndex, final CountDown counter, DiscoveryNode node, final long searchId) { - InternalScrollSearchRequest internalRequest = internalScrollSearchRequest(searchId, request); - searchTransportService.sendExecuteScrollQuery(node, internalRequest, task, - new SearchActionListener(null, shardIndex) { - + @Override + protected SearchPhase moveToNextPhase(BiFunction clusterNodeLookup) { + return new SearchPhase("fetch") { @Override - protected void setSearchShardTarget(ScrollQuerySearchResult response) { - // don't do this - it's part of the response... - assert response.getSearchShardTarget() != null : "search shard target must not be null"; - } - - @Override - protected void innerOnResponse(ScrollQuerySearchResult result) { - queryResults.setOnce(result.getShardIndex(), result.queryResult()); - if (counter.countDown()) { - try { - executeFetchPhase(); - } catch (Exception e) { - onFailure(e); - } + public void run() throws IOException { + final SearchPhaseController.ReducedQueryPhase reducedQueryPhase = searchPhaseController.reducedQueryPhase( + queryResults.asList(), true); + if (reducedQueryPhase.scoreDocs.length == 0) { + sendResponse(reducedQueryPhase, fetchResults); + return; } - } - @Override - public void onFailure(Exception t) { - onQueryPhaseFailure(shardIndex, counter, searchId, t); - } - }); - } + final IntArrayList[] docIdsToLoad = searchPhaseController.fillDocIdsToLoad(queryResults.length(), + reducedQueryPhase.scoreDocs); + final ScoreDoc[] lastEmittedDocPerShard = searchPhaseController.getLastEmittedDocPerShard(reducedQueryPhase, + queryResults.length()); + final CountDown counter = new CountDown(docIdsToLoad.length); + for (int i = 0; i < docIdsToLoad.length; i++) { + final int index = i; + final IntArrayList docIds = docIdsToLoad[index]; + if (docIds != null) { + final QuerySearchResult querySearchResult = queryResults.get(index); + ScoreDoc lastEmittedDoc = lastEmittedDocPerShard[index]; + ShardFetchRequest shardFetchRequest = new ShardFetchRequest(querySearchResult.getRequestId(), docIds, + lastEmittedDoc); + SearchShardTarget searchShardTarget = querySearchResult.getSearchShardTarget(); + DiscoveryNode node = clusterNodeLookup.apply(searchShardTarget.getClusterAlias(), searchShardTarget.getNodeId()); + assert node != null : "target node is null in secondary phase"; + Transport.Connection connection = getConnection(searchShardTarget.getClusterAlias(), node); + searchTransportService.sendExecuteFetchScroll(connection, shardFetchRequest, task, + new SearchActionListener(querySearchResult.getSearchShardTarget(), index) { + @Override + protected void innerOnResponse(FetchSearchResult response) { + fetchResults.setOnce(response.getShardIndex(), response); + if (counter.countDown()) { + sendResponse(reducedQueryPhase, fetchResults); + } + } - void onQueryPhaseFailure(final int 
shardIndex, final CountDown counter, final long searchId, Exception failure) { - if (logger.isDebugEnabled()) { - logger.debug((Supplier) () -> new ParameterizedMessage("[{}] Failed to execute query phase", searchId), failure); - } - addShardFailure(shardIndex, new ShardSearchFailure(failure)); - successfulOps.decrementAndGet(); - if (counter.countDown()) { - if (successfulOps.get() == 0) { - listener.onFailure(new SearchPhaseExecutionException("query", "all shards failed", failure, buildShardFailures())); - } else { - try { - executeFetchPhase(); - } catch (Exception e) { - e.addSuppressed(failure); - listener.onFailure(new SearchPhaseExecutionException("query", "Fetch failed", e, ShardSearchFailure.EMPTY_ARRAY)); - } - } - } - } - - private void executeFetchPhase() throws Exception { - final SearchPhaseController.ReducedQueryPhase reducedQueryPhase = searchPhaseController.reducedQueryPhase(queryResults.asList(), - true); - if (reducedQueryPhase.scoreDocs.length == 0) { - finishHim(reducedQueryPhase); - return; - } - - final IntArrayList[] docIdsToLoad = searchPhaseController.fillDocIdsToLoad(queryResults.length(), reducedQueryPhase.scoreDocs); - final ScoreDoc[] lastEmittedDocPerShard = searchPhaseController.getLastEmittedDocPerShard(reducedQueryPhase, queryResults.length()); - final CountDown counter = new CountDown(docIdsToLoad.length); - for (int i = 0; i < docIdsToLoad.length; i++) { - final int index = i; - final IntArrayList docIds = docIdsToLoad[index]; - if (docIds != null) { - final QuerySearchResult querySearchResult = queryResults.get(index); - ScoreDoc lastEmittedDoc = lastEmittedDocPerShard[index]; - ShardFetchRequest shardFetchRequest = new ShardFetchRequest(querySearchResult.getRequestId(), docIds, lastEmittedDoc); - DiscoveryNode node = nodes.get(querySearchResult.getSearchShardTarget().getNodeId()); - searchTransportService.sendExecuteFetchScroll(node, shardFetchRequest, task, - new SearchActionListener(querySearchResult.getSearchShardTarget(), index) { - @Override - protected void innerOnResponse(FetchSearchResult response) { - fetchResults.setOnce(response.getShardIndex(), response); + @Override + public void onFailure(Exception t) { + onShardFailure(getName(), counter, querySearchResult.getRequestId(), + t, querySearchResult.getSearchShardTarget(), + () -> sendResponsePhase(reducedQueryPhase, fetchResults)); + } + }); + } else { + // the counter is set to the total size of docIdsToLoad + // which can have null values so we have to count them down too if (counter.countDown()) { - finishHim(reducedQueryPhase); + sendResponse(reducedQueryPhase, fetchResults); } } - - @Override - public void onFailure(Exception t) { - if (logger.isDebugEnabled()) { - logger.debug("Failed to execute fetch phase", t); - } - successfulOps.decrementAndGet(); - if (counter.countDown()) { - finishHim(reducedQueryPhase); - } - } - }); - } else { - // the counter is set to the total size of docIdsToLoad which can have null values so we have to count them down too - if (counter.countDown()) { - finishHim(reducedQueryPhase); } } - } + }; } - private void finishHim(SearchPhaseController.ReducedQueryPhase queryPhase) { - try { - final InternalSearchResponse internalResponse = searchPhaseController.merge(true, queryPhase, fetchResults.asList(), - fetchResults::get); - String scrollId = null; - if (request.scroll() != null) { - scrollId = request.scrollId(); - } - listener.onResponse(new SearchResponse(internalResponse, scrollId, this.scrollId.getContext().length, successfulOps.get(), - 
buildTookInMillis(), buildShardFailures())); - } catch (Exception e) { - listener.onFailure(new ReduceSearchPhaseException("fetch", "inner finish failed", e, buildShardFailures())); - } - } } diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/core/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index 9dd2125d5e2..5ed41d0fe65 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -98,14 +98,14 @@ public class SearchTransportService extends AbstractComponent { }, SearchFreeContextResponse::new)); } - public void sendFreeContext(DiscoveryNode node, long contextId, final ActionListener listener) { - transportService.sendRequest(node, FREE_CONTEXT_SCROLL_ACTION_NAME, new ScrollFreeContextRequest(contextId), - new ActionListenerResponseHandler<>(listener, SearchFreeContextResponse::new)); + public void sendFreeContext(Transport.Connection connection, long contextId, final ActionListener listener) { + transportService.sendRequest(connection, FREE_CONTEXT_SCROLL_ACTION_NAME, new ScrollFreeContextRequest(contextId), + TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(listener, SearchFreeContextResponse::new)); } - public void sendClearAllScrollContexts(DiscoveryNode node, final ActionListener listener) { - transportService.sendRequest(node, CLEAR_SCROLL_CONTEXTS_ACTION_NAME, TransportRequest.Empty.INSTANCE, - new ActionListenerResponseHandler<>(listener, () -> TransportResponse.Empty.INSTANCE)); + public void sendClearAllScrollContexts(Transport.Connection connection, final ActionListener listener) { + transportService.sendRequest(connection, CLEAR_SCROLL_CONTEXTS_ACTION_NAME, TransportRequest.Empty.INSTANCE, + TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(listener, () -> TransportResponse.Empty.INSTANCE)); } public void sendExecuteDfs(Transport.Connection connection, final ShardSearchTransportRequest request, SearchTask task, @@ -145,15 +145,15 @@ public class SearchTransportService extends AbstractComponent { new ActionListenerResponseHandler<>(listener, QuerySearchResult::new)); } - public void sendExecuteScrollQuery(DiscoveryNode node, final InternalScrollSearchRequest request, SearchTask task, + public void sendExecuteScrollQuery(Transport.Connection connection, final InternalScrollSearchRequest request, SearchTask task, final SearchActionListener listener) { - transportService.sendChildRequest(transportService.getConnection(node), QUERY_SCROLL_ACTION_NAME, request, task, + transportService.sendChildRequest(connection, QUERY_SCROLL_ACTION_NAME, request, task, new ActionListenerResponseHandler<>(listener, ScrollQuerySearchResult::new)); } - public void sendExecuteScrollFetch(DiscoveryNode node, final InternalScrollSearchRequest request, SearchTask task, + public void sendExecuteScrollFetch(Transport.Connection connection, final InternalScrollSearchRequest request, SearchTask task, final SearchActionListener listener) { - transportService.sendChildRequest(transportService.getConnection(node), QUERY_FETCH_SCROLL_ACTION_NAME, request, task, + transportService.sendChildRequest(connection, QUERY_FETCH_SCROLL_ACTION_NAME, request, task, new ActionListenerResponseHandler<>(listener, ScrollQueryFetchSearchResult::new)); } @@ -162,9 +162,9 @@ public class SearchTransportService extends AbstractComponent { sendExecuteFetch(connection, FETCH_ID_ACTION_NAME, request, task, 
listener); } - public void sendExecuteFetchScroll(DiscoveryNode node, final ShardFetchRequest request, SearchTask task, + public void sendExecuteFetchScroll(Transport.Connection connection, final ShardFetchRequest request, SearchTask task, final SearchActionListener listener) { - sendExecuteFetch(transportService.getConnection(node), FETCH_ID_SCROLL_ACTION_NAME, request, task, listener); + sendExecuteFetch(connection, FETCH_ID_SCROLL_ACTION_NAME, request, task, listener); } private void sendExecuteFetch(Transport.Connection connection, String action, final ShardFetchRequest request, SearchTask task, diff --git a/core/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java b/core/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java index 8fed61af294..7eb939ca827 100644 --- a/core/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java +++ b/core/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java @@ -38,8 +38,6 @@ import org.elasticsearch.search.SearchShardTarget; import java.io.IOException; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField; -import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken; /** * Represents a failure to search on a specific shard. @@ -200,16 +198,16 @@ public class ShardSearchFailure implements ShardOperationFailedException { } else if (NODE_FIELD.equals(currentFieldName)) { nodeId = parser.text(); } else { - throwUnknownField(currentFieldName, parser.getTokenLocation()); + parser.skipChildren(); } } else if (token == XContentParser.Token.START_OBJECT) { if (REASON_FIELD.equals(currentFieldName)) { exception = ElasticsearchException.fromXContent(parser); } else { - throwUnknownField(currentFieldName, parser.getTokenLocation()); + parser.skipChildren(); } } else { - throwUnknownToken(token, parser.getTokenLocation()); + parser.skipChildren(); } } return new ShardSearchFailure(exception, diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java index 716077c915d..d9afbdacafe 100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java @@ -19,30 +19,16 @@ package org.elasticsearch.action.search; -import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportService; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReference; - 
-import static org.elasticsearch.action.search.TransportSearchHelper.parseScrollId; - public class TransportClearScrollAction extends HandledTransportAction { private final ClusterService clusterService; @@ -53,105 +39,16 @@ public class TransportClearScrollAction extends HandledTransportAction listener) { - new Async(request, listener, clusterService.state()).run(); - } - - private class Async { - final DiscoveryNodes nodes; - final CountDown expectedOps; - final List contexts = new ArrayList<>(); - final ActionListener listener; - final AtomicReference expHolder; - final AtomicInteger numberOfFreedSearchContexts = new AtomicInteger(0); - - private Async(ClearScrollRequest request, ActionListener listener, ClusterState clusterState) { - int expectedOps = 0; - this.nodes = clusterState.nodes(); - if (request.getScrollIds().size() == 1 && "_all".equals(request.getScrollIds().get(0))) { - expectedOps = nodes.getSize(); - } else { - for (String parsedScrollId : request.getScrollIds()) { - ScrollIdForNode[] context = parseScrollId(parsedScrollId).getContext(); - expectedOps += context.length; - this.contexts.add(context); - } - } - this.listener = listener; - this.expHolder = new AtomicReference<>(); - this.expectedOps = new CountDown(expectedOps); - } - - public void run() { - if (expectedOps.isCountedDown()) { - listener.onResponse(new ClearScrollResponse(true, 0)); - return; - } - - if (contexts.isEmpty()) { - for (final DiscoveryNode node : nodes) { - searchTransportService.sendClearAllScrollContexts(node, new ActionListener() { - @Override - public void onResponse(TransportResponse response) { - onFreedContext(true); - } - - @Override - public void onFailure(Exception e) { - onFailedFreedContext(e, node); - } - }); - } - } else { - for (ScrollIdForNode[] context : contexts) { - for (ScrollIdForNode target : context) { - final DiscoveryNode node = nodes.get(target.getNode()); - if (node == null) { - onFreedContext(false); - continue; - } - - searchTransportService.sendFreeContext(node, target.getScrollId(), new ActionListener() { - @Override - public void onResponse(SearchTransportService.SearchFreeContextResponse freed) { - onFreedContext(freed.isFreed()); - } - - @Override - public void onFailure(Exception e) { - onFailedFreedContext(e, node); - } - }); - } - } - } - } - - void onFreedContext(boolean freed) { - if (freed) { - numberOfFreedSearchContexts.incrementAndGet(); - } - if (expectedOps.countDown()) { - boolean succeeded = expHolder.get() == null; - listener.onResponse(new ClearScrollResponse(succeeded, numberOfFreedSearchContexts.get())); - } - } - - void onFailedFreedContext(Throwable e, DiscoveryNode node) { - logger.warn((Supplier) () -> new ParameterizedMessage("Clear SC failed on node[{}]", node), e); - if (expectedOps.countDown()) { - listener.onResponse(new ClearScrollResponse(false, numberOfFreedSearchContexts.get())); - } else { - expHolder.set(e); - } - } - + Runnable runnable = new ClearScrollController(request, listener, clusterService.state().nodes(), logger, searchTransportService); + runnable.run(); } } diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java b/core/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java index e494bb6768d..7a0bb63478c 100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java @@ -23,7 +23,9 @@ import org.apache.lucene.store.ByteArrayDataInput; import 
org.apache.lucene.store.RAMOutputStream; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.search.SearchPhaseResult; +import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.InternalScrollSearchRequest; +import org.elasticsearch.transport.RemoteClusterAware; import java.io.IOException; import java.util.Base64; @@ -40,7 +42,13 @@ final class TransportSearchHelper { out.writeVInt(searchPhaseResults.asList().size()); for (SearchPhaseResult searchPhaseResult : searchPhaseResults.asList()) { out.writeLong(searchPhaseResult.getRequestId()); - out.writeString(searchPhaseResult.getSearchShardTarget().getNodeId()); + SearchShardTarget searchShardTarget = searchPhaseResult.getSearchShardTarget(); + if (searchShardTarget.getClusterAlias() != null) { + out.writeString(RemoteClusterAware.buildRemoteIndexName(searchShardTarget.getClusterAlias(), + searchShardTarget.getNodeId())); + } else { + out.writeString(searchShardTarget.getNodeId()); + } } byte[] bytes = new byte[(int) out.getFilePointer()]; out.writeTo(bytes, 0); @@ -57,7 +65,15 @@ final class TransportSearchHelper { for (int i = 0; i < context.length; ++i) { long id = in.readLong(); String target = in.readString(); - context[i] = new ScrollIdForNode(target, id); + String clusterAlias; + final int index = target.indexOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR); + if (index == -1) { + clusterAlias = null; + } else { + clusterAlias = target.substring(0, index); + target = target.substring(index+1); + } + context[i] = new ScrollIdForNode(clusterAlias, target, id); } if (in.getPosition() != bytes.length) { throw new IllegalArgumentException("Not all bytes were read"); diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java index 53db483b4ba..e334b951801 100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java @@ -60,7 +60,7 @@ public class TransportSearchScrollAction extends HandledTransportAction listener) { try { ParsedScrollId scrollId = parseScrollId(request.scrollId()); - AbstractAsyncAction action; + Runnable action; switch (scrollId.getType()) { case QUERY_THEN_FETCH_TYPE: action = new SearchScrollQueryThenFetchAsyncAction(logger, clusterService, searchTransportService, @@ -73,7 +73,7 @@ public class TransportSearchScrollAction extends HandledTransportAction implements ActionListener { private final CountDown countDown; private final AtomicInteger pos = new AtomicInteger(); - private final AtomicArray roles; + private final AtomicArray results; private final ActionListener> delegate; private final Collection defaults; private final AtomicReference failure = new AtomicReference<>(); @@ -49,7 +49,7 @@ public final class GroupedActionListener implements ActionListener { */ public GroupedActionListener(ActionListener> delegate, int groupSize, Collection defaults) { - roles = new AtomicArray<>(groupSize); + results = new AtomicArray<>(groupSize); countDown = new CountDown(groupSize); this.delegate = delegate; this.defaults = defaults; @@ -57,12 +57,12 @@ public final class GroupedActionListener implements ActionListener { @Override public void onResponse(T element) { - roles.set(pos.incrementAndGet() - 1, element); + results.setOnce(pos.incrementAndGet() - 1, element); if (countDown.countDown()) { if (failure.get() 
!= null) { delegate.onFailure(failure.get()); } else { - List collect = this.roles.asList(); + List collect = this.results.asList(); collect.addAll(defaults); delegate.onResponse(Collections.unmodifiableList(collect)); } diff --git a/core/src/main/java/org/elasticsearch/action/support/IndicesOptions.java b/core/src/main/java/org/elasticsearch/action/support/IndicesOptions.java index b82bfcc7170..9ab4ee80ccf 100644 --- a/core/src/main/java/org/elasticsearch/action/support/IndicesOptions.java +++ b/core/src/main/java/org/elasticsearch/action/support/IndicesOptions.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.support; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.rest.RestRequest; @@ -43,6 +44,7 @@ public class IndicesOptions { private static final byte EXPAND_WILDCARDS_CLOSED = 8; private static final byte FORBID_ALIASES_TO_MULTIPLE_INDICES = 16; private static final byte FORBID_CLOSED_INDICES = 32; + private static final byte IGNORE_ALIASES = 64; private static final byte STRICT_EXPAND_OPEN = 6; private static final byte LENIENT_EXPAND_OPEN = 7; @@ -51,10 +53,10 @@ public class IndicesOptions { private static final byte STRICT_SINGLE_INDEX_NO_EXPAND_FORBID_CLOSED = 48; static { - byte max = 1 << 6; + short max = 1 << 7; VALUES = new IndicesOptions[max]; - for (byte id = 0; id < max; id++) { - VALUES[id] = new IndicesOptions(id); + for (short id = 0; id < max; id++) { + VALUES[id] = new IndicesOptions((byte)id); } } @@ -106,18 +108,31 @@ public class IndicesOptions { * @return whether aliases pointing to multiple indices are allowed */ public boolean allowAliasesToMultipleIndices() { - //true is default here, for bw comp we keep the first 16 values - //in the array same as before + the default value for the new flag + // true is default here, for bw comp we keep the first 16 values + // in the array same as before + the default value for the new flag return (id & FORBID_ALIASES_TO_MULTIPLE_INDICES) == 0; } + /** + * @return whether aliases should be ignored (when resolving a wildcard) + */ + public boolean ignoreAliases() { + return (id & IGNORE_ALIASES) != 0; + } + public void writeIndicesOptions(StreamOutput out) throws IOException { - out.write(id); + if (out.getVersion().onOrAfter(Version.V_6_0_0_alpha2)) { + out.write(id); + } else { + // if we are talking to a node that doesn't support the newly added flag (ignoreAliases) + // flip to 0 all the bits starting from the 7th + out.write(id & 0x3f); + } } public static IndicesOptions readIndicesOptions(StreamInput in) throws IOException { - //if we read from a node that doesn't support the newly added flag (allowAliasesToMultipleIndices) - //we just receive the old corresponding value with the new flag set to true (default) + //if we read from a node that doesn't support the newly added flag (ignoreAliases) + //we just receive the old corresponding value with the new flag set to false (default) byte id = in.readByte(); if (id >= VALUES.length) { throw new IllegalArgumentException("No valid missing index type id: " + id); @@ -133,8 +148,16 @@ public class IndicesOptions { return fromOptions(ignoreUnavailable, allowNoIndices, expandToOpenIndices, expandToClosedIndices, defaultOptions.allowAliasesToMultipleIndices(), defaultOptions.forbidClosedIndices()); } - static IndicesOptions fromOptions(boolean ignoreUnavailable, boolean allowNoIndices, boolean expandToOpenIndices, boolean expandToClosedIndices, boolean 
allowAliasesToMultipleIndices, boolean forbidClosedIndices) { - byte id = toByte(ignoreUnavailable, allowNoIndices, expandToOpenIndices, expandToClosedIndices, allowAliasesToMultipleIndices, forbidClosedIndices); + public static IndicesOptions fromOptions(boolean ignoreUnavailable, boolean allowNoIndices, boolean expandToOpenIndices, + boolean expandToClosedIndices, boolean allowAliasesToMultipleIndices, boolean forbidClosedIndices) { + return fromOptions(ignoreUnavailable, allowNoIndices, expandToOpenIndices, expandToClosedIndices, allowAliasesToMultipleIndices, + forbidClosedIndices, false); + } + + public static IndicesOptions fromOptions(boolean ignoreUnavailable, boolean allowNoIndices, boolean expandToOpenIndices, + boolean expandToClosedIndices, boolean allowAliasesToMultipleIndices, boolean forbidClosedIndices, boolean ignoreAliases) { + byte id = toByte(ignoreUnavailable, allowNoIndices, expandToOpenIndices, expandToClosedIndices, allowAliasesToMultipleIndices, + forbidClosedIndices, ignoreAliases); return VALUES[id]; } @@ -246,7 +269,7 @@ public class IndicesOptions { } private static byte toByte(boolean ignoreUnavailable, boolean allowNoIndices, boolean wildcardExpandToOpen, - boolean wildcardExpandToClosed, boolean allowAliasesToMultipleIndices, boolean forbidClosedIndices) { + boolean wildcardExpandToClosed, boolean allowAliasesToMultipleIndices, boolean forbidClosedIndices, boolean ignoreAliases) { byte id = 0; if (ignoreUnavailable) { id |= IGNORE_UNAVAILABLE; @@ -268,6 +291,9 @@ public class IndicesOptions { if (forbidClosedIndices) { id |= FORBID_CLOSED_INDICES; } + if (ignoreAliases) { + id |= IGNORE_ALIASES; + } return id; } @@ -281,6 +307,7 @@ public class IndicesOptions { ", expand_wildcards_closed=" + expandWildcardsClosed() + ", allow_aliases_to_multiple_indices=" + allowAliasesToMultipleIndices() + ", forbid_closed_indices=" + forbidClosedIndices() + + ", ignore_aliases=" + ignoreAliases() + ']'; } } diff --git a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index d8010f4381f..4583e47bc1d 100644 --- a/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -106,16 +106,11 @@ public abstract class TransportNodesAction responses = new ArrayList<>(); final List failures = new ArrayList<>(); - final boolean accumulateExceptions = accumulateExceptions(); for (int i = 0; i < nodesResponses.length(); ++i) { Object response = nodesResponses.get(i); if (response instanceof FailedNodeException) { - if (accumulateExceptions) { - failures.add((FailedNodeException)response); - } else { - logger.warn("not accumulating exceptions, excluding exception from response", (FailedNodeException)response); - } + failures.add((FailedNodeException)response); } else { responses.add(nodeResponseClass.cast(response)); } @@ -145,8 +140,6 @@ public abstract class TransportNodesAction) () -> new ParameterizedMessage("failed to execute on node [{}]", nodeId), t); } - if (accumulateExceptions()) { - responses.set(idx, new FailedNodeException(nodeId, "Failed node [" + nodeId + "]", t)); - } + + responses.set(idx, new FailedNodeException(nodeId, "Failed node [" + nodeId + "]", t)); + if (counter.incrementAndGet() == responses.length()) { finishHim(); } diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsResponse.java 
b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsResponse.java index c63400be7e9..7532ade3fa3 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsResponse.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/TermVectorsResponse.java @@ -334,10 +334,6 @@ public class TermVectorsResponse extends ActionResponse implements ToXContentObj return new TimeValue(tookInMillis); } - public long getTookInMillis() { - return tookInMillis; - } - private void buildScore(XContentBuilder builder, BoostAttribute boostAtt) throws IOException { if (hasScores) { builder.field(FieldStrings.SCORE, boostAtt.getBoost()); diff --git a/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java b/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java index c5346bf243d..1959e5e8139 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/JarHell.java @@ -140,7 +140,7 @@ public class JarHell { URL url = PathUtils.get(element).toUri().toURL(); if (urlElements.add(url) == false) { throw new IllegalStateException("jar hell!" + System.lineSeparator() + - "duplicate jar on classpath: " + classPath); + "duplicate jar [" + element + "] on classpath: " + classPath); } } catch (MalformedURLException e) { // should not happen, as we use the filesystem API diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Security.java b/core/src/main/java/org/elasticsearch/bootstrap/Security.java index 384d4dd3352..5ffb89b6ee4 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Security.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Security.java @@ -46,6 +46,7 @@ import java.security.Policy; import java.security.URIParameter; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.LinkedHashSet; import java.util.Map; import java.util.Set; @@ -262,8 +263,22 @@ final class Security { if (environment.sharedDataFile() != null) { addPath(policy, Environment.PATH_SHARED_DATA_SETTING.getKey(), environment.sharedDataFile(), "read,readlink,write,delete"); } + final Set dataFilesPaths = new HashSet<>(); for (Path path : environment.dataFiles()) { addPath(policy, Environment.PATH_DATA_SETTING.getKey(), path, "read,readlink,write,delete"); + /* + * We have to do this after adding the path because a side effect of that is that the directory is created; the Path#toRealPath + * invocation will fail if the directory does not already exist. We use Path#toRealPath to follow symlinks and handle issues + * like unicode normalization or case-insensitivity on some filesystems (e.g., the case-insensitive variant of HFS+ on macOS). + */ + try { + final Path realPath = path.toRealPath(); + if (!dataFilesPaths.add(realPath)) { + throw new IllegalStateException("path [" + realPath + "] is duplicated by [" + path + "]"); + } + } catch (final IOException e) { + throw new IllegalStateException("unable to access [" + path + "]", e); + } } /* * If path.data and default.path.data are set, we need read access to the paths in default.path.data to check for the existence of @@ -392,11 +407,12 @@ final class Security { } /** - * Add access to path (and all files underneath it) - * @param policy current policy to add permissions to + * Add access to path (and all files underneath it); this also creates the directory if it does not exist. 
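+     * The directory is created eagerly because callers may resolve its real path right away, and {@code Path#toRealPath} fails if
+     * the directory does not yet exist.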
+ * + * @param policy current policy to add permissions to * @param configurationName the configuration name associated with the path (for error messages only) - * @param path the path itself - * @param permissions set of file permissions to grant to the path + * @param path the path itself + * @param permissions set of file permissions to grant to the path */ static void addPath(Permissions policy, String configurationName, Path path, String permissions) { // paths may not exist yet, this also checks accessibility diff --git a/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java b/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java index de8205211dd..942d9e50560 100644 --- a/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java +++ b/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java @@ -45,6 +45,9 @@ import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequestBuilder; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageRequest; +import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageRequestBuilder; +import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageResponse; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequestBuilder; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryResponse; @@ -265,8 +268,37 @@ public interface ClusterAdminClient extends ElasticsearchClient { NodesStatsRequestBuilder prepareNodesStats(String... nodesIds); /** - * Returns top N hot-threads samples per node. The hot-threads are only sampled - * for the node ids specified in the request. + * Returns top N hot-threads samples per node. The hot-threads are only + * sampled for the node ids specified in the request. Nodes usage of the + * cluster. + * + * @param request + * The nodes usage request + * @return The result future + * @see org.elasticsearch.client.Requests#nodesUsageRequest(String...) + */ + ActionFuture nodesUsage(NodesUsageRequest request); + + /** + * Nodes usage of the cluster. + * + * @param request + * The nodes usage request + * @param listener + * A listener to be notified with a result + * @see org.elasticsearch.client.Requests#nodesUsageRequest(String...) + */ + void nodesUsage(NodesUsageRequest request, ActionListener listener); + + /** + * Nodes usage of the cluster. + */ + NodesUsageRequestBuilder prepareNodesUsage(String... nodesIds); + + /** + * Returns top N hot-threads samples per node. The hot-threads are only + * sampled for the node ids specified in the request. 
+ * */ ActionFuture nodesHotThreads(NodesHotThreadsRequest request); diff --git a/core/src/main/java/org/elasticsearch/client/Requests.java b/core/src/main/java/org/elasticsearch/client/Requests.java index 6d652bf39d0..74f83452e51 100644 --- a/core/src/main/java/org/elasticsearch/client/Requests.java +++ b/core/src/main/java/org/elasticsearch/client/Requests.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; +import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageRequest; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; @@ -387,6 +388,19 @@ public class Requests { return new NodesStatsRequest(nodesIds); } + /** + * Creates a nodes usage request against one or more nodes. Pass + * null or an empty array for all nodes. + * + * @param nodesIds + * The nodes ids to get the usage for + * @return The nodes usage request + * @see org.elasticsearch.client.ClusterAdminClient#nodesUsage(org.elasticsearch.action.admin.cluster.node.usage.NodesUsageRequest) + */ + public static NodesUsageRequest nodesUsageRequest(String... nodesIds) { + return new NodesUsageRequest(nodesIds); + } + /** * Creates a cluster stats request. * diff --git a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java index 726875a6d5c..e350727e425 100644 --- a/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/core/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -57,6 +57,10 @@ import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequestBuilder; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageAction; +import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageRequest; +import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageRequestBuilder; +import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageResponse; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequestBuilder; @@ -828,6 +832,21 @@ public abstract class AbstractClient extends AbstractComponent implements Client return new NodesStatsRequestBuilder(this, NodesStatsAction.INSTANCE).setNodesIds(nodesIds); } + @Override + public ActionFuture nodesUsage(final NodesUsageRequest request) { + return execute(NodesUsageAction.INSTANCE, request); + } + + @Override + public void nodesUsage(final NodesUsageRequest request, final ActionListener listener) { + execute(NodesUsageAction.INSTANCE, request, listener); + } + + @Override + public NodesUsageRequestBuilder prepareNodesUsage(String... 
nodesIds) { + return new NodesUsageRequestBuilder(this, NodesUsageAction.INSTANCE).setNodesIds(nodesIds); + } + @Override public ActionFuture clusterStats(ClusterStatsRequest request) { return execute(ClusterStatsAction.INSTANCE, request); diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 4942edfc644..5879f1e3579 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -45,8 +45,8 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.indices.breaker.CircuitBreakerService; -import org.elasticsearch.node.Node; import org.elasticsearch.node.InternalSettingsPreparer; +import org.elasticsearch.node.Node; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.NetworkPlugin; import org.elasticsearch.plugins.Plugin; @@ -159,7 +159,7 @@ public abstract class TransportClient extends AbstractClient { modules.add(b -> b.bind(ThreadPool.class).toInstance(threadPool)); ActionModule actionModule = new ActionModule(true, settings, null, settingsModule.getIndexScopedSettings(), settingsModule.getClusterSettings(), settingsModule.getSettingsFilter(), threadPool, - pluginsService.filterPlugins(ActionPlugin.class), null, null); + pluginsService.filterPlugins(ActionPlugin.class), null, null, null); modules.add(actionModule); CircuitBreakerService circuitBreakerService = Node.createCircuitBreakerService(settingsModule.getSettings(), diff --git a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 8973890021f..543118a172f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/core/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -375,7 +375,7 @@ public class ShardStateAction extends AbstractComponent { public void messageReceived(ShardEntry request, TransportChannel channel) throws Exception { logger.debug("{} received shard started for [{}]", request.shardId, request); clusterService.submitStateUpdateTask( - "shard-started", + "shard-started " + request, request, ClusterStateTaskConfig.build(Priority.URGENT), shardStartedClusterStateTaskExecutor, diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 711d685c1d6..0841dd3c6bf 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -50,6 +50,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.SortedMap; import java.util.function.Predicate; import java.util.stream.Collectors; @@ -104,7 +105,7 @@ public class IndexNameExpressionResolver extends AbstractComponent { return concreteIndexNames(context, indexExpressions); } - /** + /** * Translates the provided index expression into actual concrete indices, properly deduplicated. 
* * @param state the cluster state containing all the data to resolve to expressions to concrete indices @@ -181,7 +182,7 @@ public class IndexNameExpressionResolver extends AbstractComponent { final Set concreteIndices = new HashSet<>(expressions.size()); for (String expression : expressions) { AliasOrIndex aliasOrIndex = metaData.getAliasAndIndexLookup().get(expression); - if (aliasOrIndex == null) { + if (aliasOrIndex == null || (aliasOrIndex.isAlias() && context.getOptions().ignoreAliases())) { if (failNoIndices) { IndexNotFoundException infe = new IndexNotFoundException(expression); infe.setResources("index_expression", expression); @@ -638,7 +639,7 @@ public class IndexNameExpressionResolver extends AbstractComponent { } final IndexMetaData.State excludeState = excludeState(options); - final Map matches = matches(metaData, expression); + final Map matches = matches(context, metaData, expression); Set expand = expand(context, excludeState, matches); if (add) { result.addAll(expand); @@ -693,31 +694,44 @@ public class IndexNameExpressionResolver extends AbstractComponent { return excludeState; } - private static Map matches(MetaData metaData, String expression) { + public static Map matches(Context context, MetaData metaData, String expression) { if (Regex.isMatchAllPattern(expression)) { // Can only happen if the expressions was initially: '-*' - return metaData.getAliasAndIndexLookup(); + if (context.getOptions().ignoreAliases()) { + return metaData.getAliasAndIndexLookup().entrySet().stream() + .filter(e -> e.getValue().isAlias() == false) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } else { + return metaData.getAliasAndIndexLookup(); + } } else if (expression.indexOf("*") == expression.length() - 1) { - return suffixWildcard(metaData, expression); + return suffixWildcard(context, metaData, expression); } else { - return otherWildcard(metaData, expression); + return otherWildcard(context, metaData, expression); } } - private static Map suffixWildcard(MetaData metaData, String expression) { + private static Map suffixWildcard(Context context, MetaData metaData, String expression) { assert expression.length() >= 2 : "expression [" + expression + "] should have at least a length of 2"; String fromPrefix = expression.substring(0, expression.length() - 1); char[] toPrefixCharArr = fromPrefix.toCharArray(); toPrefixCharArr[toPrefixCharArr.length - 1]++; String toPrefix = new String(toPrefixCharArr); - return metaData.getAliasAndIndexLookup().subMap(fromPrefix, toPrefix); + SortedMap subMap = metaData.getAliasAndIndexLookup().subMap(fromPrefix, toPrefix); + if (context.getOptions().ignoreAliases()) { + return subMap.entrySet().stream() + .filter(entry -> entry.getValue().isAlias() == false) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + return subMap; } - private static Map otherWildcard(MetaData metaData, String expression) { + private static Map otherWildcard(Context context, MetaData metaData, String expression) { final String pattern = expression; return metaData.getAliasAndIndexLookup() .entrySet() .stream() + .filter(e -> context.getOptions().ignoreAliases() == false || e.getValue().isAlias() == false) .filter(e -> Regex.simpleMatch(pattern, e.getKey())) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index e47585356a0..fcc0fdebdd4 100644 --- 
a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -243,7 +243,8 @@ public class MetaData implements Iterable, Diffable, To * * @param aliases The names of the index aliases to find * @param concreteIndices The concrete indexes the index aliases must point to order to be returned. - * @return the found index aliases grouped by index + * @return a map of index to a list of alias metadata, the list corresponding to a concrete index will be empty if no aliases are + * present for that index */ public ImmutableOpenMap> findAliases(final String[] aliases, String[] concreteIndices) { assert aliases != null; @@ -273,8 +274,8 @@ public class MetaData implements Iterable, Diffable, To return o1.alias().compareTo(o2.alias()); } }); - mapBuilder.put(index, Collections.unmodifiableList(filteredValues)); } + mapBuilder.put(index, Collections.unmodifiableList(filteredValues)); } return mapBuilder.build(); } diff --git a/core/src/main/java/org/elasticsearch/common/ExponentiallyWeightedMovingAverage.java b/core/src/main/java/org/elasticsearch/common/ExponentiallyWeightedMovingAverage.java new file mode 100644 index 00000000000..835f37664ec --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/ExponentiallyWeightedMovingAverage.java @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common; + + +import java.util.concurrent.atomic.AtomicLong; + +/** + * Implements exponentially weighted moving averages (commonly abbreviated EWMA) for a single value. + * This class is safe to share between threads. + */ +public class ExponentiallyWeightedMovingAverage { + + private final double alpha; + private final AtomicLong averageBits; + + /** + * Create a new EWMA with a given {@code alpha} and {@code initialAvg}. A smaller alpha means + * that new data points will have less weight, where a high alpha means older data points will + * have a lower influence. 
+ */ + public ExponentiallyWeightedMovingAverage(double alpha, double initialAvg) { + if (alpha < 0 || alpha > 1) { + throw new IllegalArgumentException("alpha must be greater or equal to 0 and less than or equal to 1"); + } + this.alpha = alpha; + this.averageBits = new AtomicLong(Double.doubleToLongBits(initialAvg)); + } + + public double getAverage() { + return Double.longBitsToDouble(this.averageBits.get()); + } + + public void addValue(double newValue) { + boolean successful = false; + do { + final long currentBits = this.averageBits.get(); + final double currentAvg = getAverage(); + final double newAvg = (alpha * newValue) + ((1 - alpha) * currentAvg); + final long newBits = Double.doubleToLongBits(newAvg); + successful = averageBits.compareAndSet(currentBits, newBits); + } while (successful == false); + } +} diff --git a/core/src/main/java/org/elasticsearch/common/Strings.java b/core/src/main/java/org/elasticsearch/common/Strings.java index 7cce81674d7..e8cc02596a0 100644 --- a/core/src/main/java/org/elasticsearch/common/Strings.java +++ b/core/src/main/java/org/elasticsearch/common/Strings.java @@ -49,14 +49,6 @@ public class Strings { public static final String[] EMPTY_ARRAY = new String[0]; - private static final String FOLDER_SEPARATOR = "/"; - - private static final String WINDOWS_FOLDER_SEPARATOR = "\\"; - - private static final String TOP_PATH = "src/test"; - - private static final String CURRENT_PATH = "."; - public static void spaceify(int spaces, String from, StringBuilder to) throws Exception { try (BufferedReader reader = new BufferedReader(new FastStringReader(from))) { String line; @@ -403,66 +395,6 @@ public class Strings { return true; } - /** - * Normalize the path by suppressing sequences like "path/.." and - * inner simple dots. - *
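Stepping back to the ExponentiallyWeightedMovingAverage class added above: each sample pulls the average toward the new value by a fixed fraction alpha (newAvg = alpha * value + (1 - alpha) * oldAvg), and the compare-and-set loop keeps concurrent writers from losing updates. A minimal sketch of the same update rule, with made-up numbers, purely to show how quickly the average reacts for a given alpha:

public class EwmaSketch {
    public static void main(String[] args) {
        double alpha = 0.3;   // the same default alpha the queue-resizing executor uses further down
        double avg = 100.0;   // hypothetical starting average, e.g. a targeted response time
        for (double sample : new double[] {80.0, 80.0, 80.0}) {
            avg = alpha * sample + (1 - alpha) * avg;
            System.out.printf("after sample %.1f the average is %.2f%n", sample, avg);
        }
        // prints 94.00, then 89.80, then 86.86: the average decays toward 80 at a rate set by alpha
    }
}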
The result is convenient for path comparison. For other uses, - * notice that Windows separators ("\") are replaced by simple slashes. - * - * @param path the original path - * @return the normalized path - */ - public static String cleanPath(String path) { - if (path == null) { - return null; - } - String pathToUse = replace(path, WINDOWS_FOLDER_SEPARATOR, FOLDER_SEPARATOR); - - // Strip prefix from path to analyze, to not treat it as part of the - // first path element. This is necessary to correctly parse paths like - // "file:core/../core/io/Resource.class", where the ".." should just - // strip the first "core" directory while keeping the "file:" prefix. - int prefixIndex = pathToUse.indexOf(":"); - String prefix = ""; - if (prefixIndex != -1) { - prefix = pathToUse.substring(0, prefixIndex + 1); - pathToUse = pathToUse.substring(prefixIndex + 1); - } - if (pathToUse.startsWith(FOLDER_SEPARATOR)) { - prefix = prefix + FOLDER_SEPARATOR; - pathToUse = pathToUse.substring(1); - } - - String[] pathArray = delimitedListToStringArray(pathToUse, FOLDER_SEPARATOR); - List pathElements = new LinkedList<>(); - int tops = 0; - - for (int i = pathArray.length - 1; i >= 0; i--) { - String element = pathArray[i]; - if (CURRENT_PATH.equals(element)) { - // Points to current directory - drop it. - } else if (TOP_PATH.equals(element)) { - // Registering top path found. - tops++; - } else { - if (tops > 0) { - // Merging path element with element corresponding to top path. - tops--; - } else { - // Normal path element found. - pathElements.add(0, element); - } - } - } - - // Remaining top paths need to be retained. - for (int i = 0; i < tops; i++) { - pathElements.add(0, TOP_PATH); - } - - return prefix + collectionToDelimitedString(pathElements, FOLDER_SEPARATOR); - } - /** * Copy the given Collection into a String array. * The Collection must contain String elements only. diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java index a5864146318..aed72f502bf 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java @@ -82,7 +82,7 @@ public class GeoUtils { /** Returns true if longitude is actually a valid longitude value. */ public static boolean isValidLongitude(double longitude) { - if (Double.isNaN(longitude) || Double.isNaN(longitude) || longitude < GeoUtils.MIN_LON || longitude > GeoUtils.MAX_LON) { + if (Double.isNaN(longitude) || Double.isInfinite(longitude) || longitude < GeoUtils.MIN_LON || longitude > GeoUtils.MAX_LON) { return false; } return true; diff --git a/core/src/main/java/org/elasticsearch/common/io/Streams.java b/core/src/main/java/org/elasticsearch/common/io/Streams.java index f24b703251b..b6b9061ad7e 100644 --- a/core/src/main/java/org/elasticsearch/common/io/Streams.java +++ b/core/src/main/java/org/elasticsearch/common/io/Streams.java @@ -23,7 +23,6 @@ import org.apache.lucene.util.IOUtils; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStream; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.util.Callback; import java.io.BufferedReader; import java.io.IOException; @@ -37,6 +36,7 @@ import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import java.util.Objects; +import java.util.function.Consumer; /** * Simple utility methods for file and stream copying. 
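The Streams hunk just below replaces the project-internal Callback interface with java.util.function.Consumer, so callers can pass lambdas or method references. A short caller-side sketch (the input data is invented for illustration):

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

import org.elasticsearch.common.io.Streams;

public class ReadAllLinesExample {
    public static void main(String[] args) throws IOException {
        InputStream input = new ByteArrayInputStream("one\ntwo\nthree".getBytes(StandardCharsets.UTF_8));

        // Collect every line with a method reference; this is what readAllLines(input) now does internally.
        List<String> lines = new ArrayList<>();
        Streams.readAllLines(input, lines::add);
        System.out.println(lines); // [one, two, three]
    }
}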
@@ -222,20 +222,15 @@ public abstract class Streams { public static List readAllLines(InputStream input) throws IOException { final List lines = new ArrayList<>(); - readAllLines(input, new Callback() { - @Override - public void handle(String line) { - lines.add(line); - } - }); + readAllLines(input, lines::add); return lines; } - public static void readAllLines(InputStream input, Callback callback) throws IOException { + public static void readAllLines(InputStream input, Consumer consumer) throws IOException { try (BufferedReader reader = new BufferedReader(new InputStreamReader(input, StandardCharsets.UTF_8))) { String line; while ((line = reader.readLine()) != null) { - callback.handle(line); + consumer.accept(line); } } } diff --git a/core/src/main/java/org/elasticsearch/common/logging/PrefixLogger.java b/core/src/main/java/org/elasticsearch/common/logging/PrefixLogger.java index 32de2afde36..a78330c3e85 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/PrefixLogger.java +++ b/core/src/main/java/org/elasticsearch/common/logging/PrefixLogger.java @@ -26,22 +26,53 @@ import org.apache.logging.log4j.message.Message; import org.apache.logging.log4j.spi.ExtendedLogger; import org.apache.logging.log4j.spi.ExtendedLoggerWrapper; -import java.lang.ref.WeakReference; import java.util.WeakHashMap; +/** + * A logger that prefixes all messages with a fixed prefix specified during construction. The prefix mechanism uses the marker construct, so + * for the prefixes to appear, the logging layout pattern must include the marker in its pattern. + */ class PrefixLogger extends ExtendedLoggerWrapper { - // we can not use the built-in Marker tracking (MarkerManager) because the MarkerManager holds - // a permanent reference to the marker; however, we have transient markers from index-level and - // shard-level components so this would effectively be a memory leak - private static final WeakHashMap> markers = new WeakHashMap<>(); + /* + * We can not use the built-in Marker tracking (MarkerManager) because the MarkerManager holds a permanent reference to the marker; + * however, we have transient markers from index-level and shard-level components so this would effectively be a memory leak. Since we + * can not tie into the lifecycle of these components, we have to use a mechanism that enables garbage collection of such markers when + * they are no longer in use. + */ + private static final WeakHashMap markers = new WeakHashMap<>(); + /** + * Return the size of the cached markers. This size can vary as markers are cached but collected during GC activity when a given prefix + * is no longer in use. + * + * @return the size of the cached markers + */ + static int markersSize() { + return markers.size(); + } + + /** + * The marker for this prefix logger. + */ private final Marker marker; + /** + * Obtain the prefix for this prefix logger. This can be used to create a logger with the same prefix as this one. + * + * @return the prefix + */ public String prefix() { return marker.getName(); } + /** + * Construct a prefix logger with the specified name and prefix. 
+ * + * @param logger the extended logger to wrap + * @param name the name of this prefix logger + * @param prefix the prefix for this prefix logger + */ PrefixLogger(final ExtendedLogger logger, final String name, final String prefix) { super(logger, name, null); @@ -49,11 +80,15 @@ class PrefixLogger extends ExtendedLoggerWrapper { final Marker actualMarker; // markers is not thread-safe, so we synchronize access synchronized (markers) { - final WeakReference marker = markers.get(actualPrefix); - final Marker maybeMarker = marker == null ? null : marker.get(); + final Marker maybeMarker = markers.get(actualPrefix); if (maybeMarker == null) { actualMarker = new MarkerManager.Log4jMarker(actualPrefix); - markers.put(actualPrefix, new WeakReference<>(actualMarker)); + /* + * We must create a new instance here as otherwise the marker will hold a reference to the key in the weak hash map; as + * those references are held strongly, this would give a strong reference back to the key preventing them from ever being + * collected. This also guarantees that no other strong reference can be held to the prefix anywhere. + */ + markers.put(new String(actualPrefix), actualMarker); } else { actualMarker = maybeMarker; } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java index c213c384611..52550f1ba67 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -49,6 +49,7 @@ import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.SimpleCollector; import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedNumericSortField; @@ -245,20 +246,6 @@ public class Lucene { }.run(); } - /** - * Wraps delegate with count based early termination collector with a threshold of maxCountHits - */ - public static final EarlyTerminatingCollector wrapCountBasedEarlyTerminatingCollector(final Collector delegate, int maxCountHits) { - return new EarlyTerminatingCollector(delegate, maxCountHits); - } - - /** - * Wraps delegate with a time limited collector with a timeout of timeoutInMillis - */ - public static final TimeLimitingCollector wrapTimeLimitingCollector(final Collector delegate, final Counter counter, long timeoutInMillis) { - return new TimeLimitingCollector(delegate, counter, timeoutInMillis); - } - /** * Check whether there is one or more documents matching the provided query. 
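The PrefixLogger change above leans on a classic WeakHashMap rule: an entry can only be collected once nothing outside the map strongly references its key, and a value that points back at its own key pins the entry forever. Copying the prefix with new String(...) gives the map a key instance that the cached marker never references. A stripped-down illustration of the pattern (not the Elasticsearch class; Holder stands in for the Log4j marker):

import java.util.Map;
import java.util.WeakHashMap;

public class WeakKeyCacheSketch {
    static final class Holder {
        final String name;              // plays the role of the marker keeping its prefix
        Holder(String name) { this.name = name; }
    }

    private static final Map<String, Holder> CACHE = new WeakHashMap<>();

    static synchronized Holder lookup(String prefix) {
        Holder cached = CACHE.get(prefix);
        if (cached == null) {
            cached = new Holder(prefix);             // the value references the caller's string ...
            CACHE.put(new String(prefix), cached);   // ... so the key must be a distinct instance,
                                                     // or the entry could never be collected
        }
        return cached;
    }
}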
*/ @@ -617,71 +604,6 @@ public class Lucene { } } - /** - * This exception is thrown when {@link org.elasticsearch.common.lucene.Lucene.EarlyTerminatingCollector} - * reaches early termination - * */ - public static final class EarlyTerminationException extends ElasticsearchException { - - public EarlyTerminationException(String msg) { - super(msg); - } - - public EarlyTerminationException(StreamInput in) throws IOException{ - super(in); - } - } - - /** - * A collector that terminates early by throwing {@link org.elasticsearch.common.lucene.Lucene.EarlyTerminationException} - * when count of matched documents has reached maxCountHits - */ - public static final class EarlyTerminatingCollector extends SimpleCollector { - - private final int maxCountHits; - private final Collector delegate; - - private int count = 0; - private LeafCollector leafCollector; - - EarlyTerminatingCollector(final Collector delegate, int maxCountHits) { - this.maxCountHits = maxCountHits; - this.delegate = Objects.requireNonNull(delegate); - } - - public int count() { - return count; - } - - public boolean exists() { - return count > 0; - } - - @Override - public void setScorer(Scorer scorer) throws IOException { - leafCollector.setScorer(scorer); - } - - @Override - public void collect(int doc) throws IOException { - leafCollector.collect(doc); - - if (++count >= maxCountHits) { - throw new EarlyTerminationException("early termination [CountBased]"); - } - } - - @Override - public void doSetNextReader(LeafReaderContext atomicReaderContext) throws IOException { - leafCollector = delegate.getLeafCollector(atomicReaderContext); - } - - @Override - public boolean needsScores() { - return delegate.needsScores(); - } - } - private Lucene() { } @@ -838,14 +760,16 @@ public class Lucene { } /** - * Given a {@link Scorer}, return a {@link Bits} instance that will match + * Given a {@link ScorerSupplier}, return a {@link Bits} instance that will match * all documents contained in the set. Note that the returned {@link Bits} * instance MUST be consumed in order. 
*/ - public static Bits asSequentialAccessBits(final int maxDoc, @Nullable Scorer scorer) throws IOException { - if (scorer == null) { + public static Bits asSequentialAccessBits(final int maxDoc, @Nullable ScorerSupplier scorerSupplier) throws IOException { + if (scorerSupplier == null) { return new Bits.MatchNoBits(maxDoc); } + // Since we want bits, we need random-access + final Scorer scorer = scorerSupplier.get(true); // this never returns null final TwoPhaseIterator twoPhase = scorer.twoPhaseIterator(); final DocIdSetIterator iterator; if (twoPhase == null) { diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/FilteredCollector.java b/core/src/main/java/org/elasticsearch/common/lucene/search/FilteredCollector.java index 3a5d71d1fcd..e9db2928ca7 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/FilteredCollector.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/FilteredCollector.java @@ -22,7 +22,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Collector; import org.apache.lucene.search.FilterLeafCollector; import org.apache.lucene.search.LeafCollector; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; import org.elasticsearch.common.lucene.Lucene; @@ -41,9 +41,9 @@ public class FilteredCollector implements Collector { @Override public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException { - final Scorer filterScorer = filter.scorer(context); + final ScorerSupplier filterScorerSupplier = filter.scorerSupplier(context); final LeafCollector in = collector.getLeafCollector(context); - final Bits bits = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorer); + final Bits bits = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorerSupplier); return new FilterLeafCollector(in) { @Override diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java index dbfc1f0af11..b8e1039b2df 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQuery.java @@ -164,6 +164,11 @@ public class MultiPhrasePrefixQuery extends Query { } } if (terms.isEmpty()) { + if (sizeMinus1 == 0) { + // no prefix and the phrase query is empty + return Queries.newMatchNoDocsQuery("No terms supplied for " + MultiPhrasePrefixQuery.class.getName()); + } + // if the terms does not exist we could return a MatchNoDocsQuery but this would break the unified highlighter // which rewrites query with an empty reader. 
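The asSequentialAccessBits change above accepts a ScorerSupplier so that it, rather than the caller, decides to ask for a random-access scorer (scorerSupplier.get(true)). Callers now simply forward the Weight's supplier, as FilteredCollector and FiltersFunctionScoreQuery do in the surrounding hunks. A hedged sketch of that consumption pattern; note the returned Bits must be probed with increasing doc ids:

import java.io.IOException;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.ScorerSupplier;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.lucene.Lucene;

public class SequentialAccessBitsExample {
    static boolean filterMatches(Weight filterWeight, LeafReaderContext context, int doc) throws IOException {
        ScorerSupplier supplier = filterWeight.scorerSupplier(context); // may be null when nothing matches
        Bits bits = Lucene.asSequentialAccessBits(context.reader().maxDoc(), supplier);
        return bits.get(doc);
    }
}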
return new BooleanQuery.Builder() diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java index 2f2a70537c0..40465dc6ece 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java @@ -27,6 +27,7 @@ import org.apache.lucene.search.FilterScorer; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; import org.elasticsearch.common.io.stream.StreamInput; @@ -174,8 +175,8 @@ public class FiltersFunctionScoreQuery extends Query { for (int i = 0; i < filterFunctions.length; i++) { FilterFunction filterFunction = filterFunctions[i]; functions[i] = filterFunction.function.getLeafScoreFunction(context); - Scorer filterScorer = filterWeights[i].scorer(context); - docSets[i] = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorer); + ScorerSupplier filterScorerSupplier = filterWeights[i].scorerSupplier(context); + docSets[i] = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorerSupplier); } return new FiltersFunctionFactorScorer(this, subQueryScorer, scoreMode, filterFunctions, maxBoost, functions, docSets, combineFunction, needsScores); } @@ -200,7 +201,7 @@ public class FiltersFunctionScoreQuery extends Query { List filterExplanations = new ArrayList<>(); for (int i = 0; i < filterFunctions.length; ++i) { Bits docSet = Lucene.asSequentialAccessBits(context.reader().maxDoc(), - filterWeights[i].scorer(context)); + filterWeights[i].scorerSupplier(context)); if (docSet.get(doc)) { FilterFunction filterFunction = filterFunctions[i]; Explanation functionExplanation = filterFunction.function.getLeafScoreFunction(context).explainScore(doc, expl); diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java index bee7087c1d5..112bf271c4e 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java @@ -24,7 +24,6 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Scorer; import org.elasticsearch.script.ExplainableSearchScript; -import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.GeneralScriptException; import org.elasticsearch.script.SearchScript; @@ -65,10 +64,10 @@ public class ScriptScoreFunction extends ScoreFunction { private final Script sScript; - private final SearchScript script; + private final SearchScript.LeafFactory script; - public ScriptScoreFunction(Script sScript, SearchScript script) { + public ScriptScoreFunction(Script sScript, SearchScript.LeafFactory script) { super(CombineFunction.REPLACE); this.sScript = sScript; this.script = script; @@ -76,7 +75,7 @@ public class ScriptScoreFunction extends ScoreFunction { @Override public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) throws IOException { - final LeafSearchScript 
leafScript = script.getLeafSearchScript(ctx); + final SearchScript leafScript = script.newInstance(ctx); final CannedScorer scorer = new CannedScorer(); leafScript.setScorer(scorer); return new LeafScoreFunction() { diff --git a/core/src/main/java/org/elasticsearch/common/metrics/EWMA.java b/core/src/main/java/org/elasticsearch/common/metrics/EWMA.java deleted file mode 100644 index b2fca5f6605..00000000000 --- a/core/src/main/java/org/elasticsearch/common/metrics/EWMA.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.metrics; - -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.LongAdder; - -/** - * An exponentially-weighted moving average. - * - *
- * Taken from codahale metric module, changed to use LongAdder - * - * @see UNIX Load Average Part 1: How It Works - * @see UNIX Load Average Part 2: Not Your Average Average - */ -public class EWMA { - private static final double M1_ALPHA = 1 - Math.exp(-5 / 60.0); - private static final double M5_ALPHA = 1 - Math.exp(-5 / 60.0 / 5); - private static final double M15_ALPHA = 1 - Math.exp(-5 / 60.0 / 15); - - private volatile boolean initialized = false; - private volatile double rate = 0.0; - - private final LongAdder uncounted = new LongAdder(); - private final double alpha, interval; - - /** - * Creates a new EWMA which is equivalent to the UNIX one minute load average and which expects to be ticked every - * 5 seconds. - * - * @return a one-minute EWMA - */ - public static EWMA oneMinuteEWMA() { - return new EWMA(M1_ALPHA, 5, TimeUnit.SECONDS); - } - - /** - * Creates a new EWMA which is equivalent to the UNIX five minute load average and which expects to be ticked every - * 5 seconds. - * - * @return a five-minute EWMA - */ - public static EWMA fiveMinuteEWMA() { - return new EWMA(M5_ALPHA, 5, TimeUnit.SECONDS); - } - - /** - * Creates a new EWMA which is equivalent to the UNIX fifteen minute load average and which expects to be ticked - * every 5 seconds. - * - * @return a fifteen-minute EWMA - */ - public static EWMA fifteenMinuteEWMA() { - return new EWMA(M15_ALPHA, 5, TimeUnit.SECONDS); - } - - /** - * Create a new EWMA with a specific smoothing constant. - * - * @param alpha the smoothing constant - * @param interval the expected tick interval - * @param intervalUnit the time unit of the tick interval - */ - public EWMA(double alpha, long interval, TimeUnit intervalUnit) { - this.interval = intervalUnit.toNanos(interval); - this.alpha = alpha; - } - - /** - * Update the moving average with a new value. - * - * @param n the new value - */ - public void update(long n) { - uncounted.add(n); - } - - /** - * Mark the passage of time and decay the current rate accordingly. - */ - public void tick() { - final long count = uncounted.sumThenReset(); - double instantRate = count / interval; - if (initialized) { - rate += (alpha * (instantRate - rate)); - } else { - rate = instantRate; - initialized = true; - } - } - - /** - * Returns the rate in the given units of time. - * - * @param rateUnit the unit of time - * @return the rate - */ - public double rate(TimeUnit rateUnit) { - return rate * (double) rateUnit.toNanos(1); - } -} diff --git a/core/src/main/java/org/elasticsearch/common/metrics/MeterMetric.java b/core/src/main/java/org/elasticsearch/common/metrics/MeterMetric.java deleted file mode 100644 index 3a24df0208e..00000000000 --- a/core/src/main/java/org/elasticsearch/common/metrics/MeterMetric.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.metrics; - -import org.elasticsearch.common.util.concurrent.FutureUtils; - -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.ScheduledFuture; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.LongAdder; - -/** - * A meter metric which measures mean throughput and one-, five-, and - * fifteen-minute exponentially-weighted moving average throughputs. - * - *
- * taken from codahale metric module, replaced with LongAdder - * - * @see EMA - */ -public class MeterMetric implements Metric { - private static final long INTERVAL = 5; // seconds - - private final EWMA m1Rate = EWMA.oneMinuteEWMA(); - private final EWMA m5Rate = EWMA.fiveMinuteEWMA(); - private final EWMA m15Rate = EWMA.fifteenMinuteEWMA(); - - private final LongAdder count = new LongAdder(); - private final long startTime = System.nanoTime(); - private final TimeUnit rateUnit; - private final ScheduledFuture future; - - public MeterMetric(ScheduledExecutorService tickThread, TimeUnit rateUnit) { - this.rateUnit = rateUnit; - this.future = tickThread.scheduleAtFixedRate(new Runnable() { - @Override - public void run() { - tick(); - } - }, INTERVAL, INTERVAL, TimeUnit.SECONDS); - } - - public TimeUnit rateUnit() { - return rateUnit; - } - - /** - * Updates the moving averages. - */ - void tick() { - m1Rate.tick(); - m5Rate.tick(); - m15Rate.tick(); - } - - /** - * Mark the occurrence of an event. - */ - public void mark() { - mark(1); - } - - /** - * Mark the occurrence of a given number of events. - * - * @param n the number of events - */ - public void mark(long n) { - count.add(n); - m1Rate.update(n); - m5Rate.update(n); - m15Rate.update(n); - } - - public long count() { - return count.sum(); - } - - public double fifteenMinuteRate() { - return m15Rate.rate(rateUnit); - } - - public double fiveMinuteRate() { - return m5Rate.rate(rateUnit); - } - - public double meanRate() { - long count = count(); - if (count == 0) { - return 0.0; - } else { - final long elapsed = (System.nanoTime() - startTime); - return convertNsRate(count / (double) elapsed); - } - } - - public double oneMinuteRate() { - return m1Rate.rate(rateUnit); - } - - private double convertNsRate(double ratePerNs) { - return ratePerNs * (double) rateUnit.toNanos(1); - } - - public void stop() { FutureUtils.cancel(future);} -} diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java index b0a1f2ed09f..f71ddccd9d3 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -219,7 +219,7 @@ public final class Settings implements ToXContent { */ public Settings getByPrefix(String prefix) { return new Settings(new FilteredMap(this.settings, (k) -> k.startsWith(prefix), prefix), secureSettings == null ? null : - new PrefixedSecureSettings(secureSettings, s -> prefix + s, s -> s.startsWith(prefix))); + new PrefixedSecureSettings(secureSettings, prefix, s -> s.startsWith(prefix))); } /** @@ -227,7 +227,7 @@ public final class Settings implements ToXContent { */ public Settings filter(Predicate predicate) { return new Settings(new FilteredMap(this.settings, predicate, null), secureSettings == null ? 
null : - new PrefixedSecureSettings(secureSettings, UnaryOperator.identity(), predicate)); + new PrefixedSecureSettings(secureSettings, "", predicate)); } /** @@ -507,35 +507,21 @@ public final class Settings implements ToXContent { } private Map getGroupsInternal(String settingPrefix, boolean ignoreNonGrouped) throws SettingsException { - // we don't really care that it might happen twice - Map> map = new LinkedHashMap<>(); - for (Object o : settings.keySet()) { - String setting = (String) o; - if (setting.startsWith(settingPrefix)) { - String nameValue = setting.substring(settingPrefix.length()); - int dotIndex = nameValue.indexOf('.'); - if (dotIndex == -1) { - if (ignoreNonGrouped) { - continue; - } - throw new SettingsException("Failed to get setting group for [" + settingPrefix + "] setting prefix and setting [" - + setting + "] because of a missing '.'"); + Settings prefixSettings = getByPrefix(settingPrefix); + Map groups = new HashMap<>(); + for (String groupName : prefixSettings.names()) { + Settings groupSettings = prefixSettings.getByPrefix(groupName + "."); + if (groupSettings.isEmpty()) { + if (ignoreNonGrouped) { + continue; } - String name = nameValue.substring(0, dotIndex); - String value = nameValue.substring(dotIndex + 1); - Map groupSettings = map.get(name); - if (groupSettings == null) { - groupSettings = new LinkedHashMap<>(); - map.put(name, groupSettings); - } - groupSettings.put(value, get(setting)); + throw new SettingsException("Failed to get setting group for [" + settingPrefix + "] setting prefix and setting [" + + settingPrefix + groupName + "] because of a missing '.'"); } + groups.put(groupName, groupSettings); } - Map retVal = new LinkedHashMap<>(); - for (Map.Entry> entry : map.entrySet()) { - retVal.put(entry.getKey(), new Settings(Collections.unmodifiableMap(entry.getValue()), secureSettings)); - } - return Collections.unmodifiableMap(retVal); + + return Collections.unmodifiableMap(groups); } /** * Returns group settings for the given setting prefix. @@ -721,6 +707,11 @@ public final class Settings implements ToXContent { return map.get(key); } + /** Return the current secure settings, or {@code null} if none have been set. 
*/ + public SecureSettings getSecureSettings() { + return secureSettings.get(); + } + public Builder setSecureSettings(SecureSettings secureSettings) { if (secureSettings.isLoaded() == false) { throw new IllegalStateException("Secure settings must already be loaded"); @@ -1276,13 +1267,15 @@ public final class Settings implements ToXContent { private static class PrefixedSecureSettings implements SecureSettings { private final SecureSettings delegate; - private final UnaryOperator keyTransform; + private final UnaryOperator addPrefix; + private final UnaryOperator removePrefix; private final Predicate keyPredicate; private final SetOnce> settingNames = new SetOnce<>(); - PrefixedSecureSettings(SecureSettings delegate, UnaryOperator keyTransform, Predicate keyPredicate) { + PrefixedSecureSettings(SecureSettings delegate, String prefix, Predicate keyPredicate) { this.delegate = delegate; - this.keyTransform = keyTransform; + this.addPrefix = s -> prefix + s; + this.removePrefix = s -> s.substring(prefix.length()); this.keyPredicate = keyPredicate; } @@ -1295,7 +1288,8 @@ public final class Settings implements ToXContent { public Set getSettingNames() { synchronized (settingNames) { if (settingNames.get() == null) { - Set names = delegate.getSettingNames().stream().filter(keyPredicate).collect(Collectors.toSet()); + Set names = delegate.getSettingNames().stream() + .filter(keyPredicate).map(removePrefix).collect(Collectors.toSet()); settingNames.set(Collections.unmodifiableSet(names)); } } @@ -1304,12 +1298,12 @@ public final class Settings implements ToXContent { @Override public SecureString getString(String setting) throws GeneralSecurityException{ - return delegate.getString(keyTransform.apply(setting)); + return delegate.getString(addPrefix.apply(setting)); } @Override public InputStream getFile(String setting) throws GeneralSecurityException{ - return delegate.getFile(keyTransform.apply(setting)); + return delegate.getFile(addPrefix.apply(setting)); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/util/AbstractArray.java b/core/src/main/java/org/elasticsearch/common/util/AbstractArray.java index 913f1ad26a4..6a4895c7950 100644 --- a/core/src/main/java/org/elasticsearch/common/util/AbstractArray.java +++ b/core/src/main/java/org/elasticsearch/common/util/AbstractArray.java @@ -41,7 +41,7 @@ abstract class AbstractArray implements BigArray { public final void close() { if (closed.compareAndSet(false, true)) { try { - bigArrays.adjustBreaker(-ramBytesUsed()); + bigArrays.adjustBreaker(-ramBytesUsed(), true); } finally { doClose(); } diff --git a/core/src/main/java/org/elasticsearch/common/util/AbstractBigArray.java b/core/src/main/java/org/elasticsearch/common/util/AbstractBigArray.java index f26dad1fdb5..73a05f7f2cf 100644 --- a/core/src/main/java/org/elasticsearch/common/util/AbstractBigArray.java +++ b/core/src/main/java/org/elasticsearch/common/util/AbstractBigArray.java @@ -87,6 +87,11 @@ abstract class AbstractBigArray extends AbstractArray { @Override public final long ramBytesUsed() { + return ramBytesEstimated(size); + } + + /** Given the size of the array, estimate the number of bytes it will use. 
*/ + public final long ramBytesEstimated(final long size) { // rough approximate, we only take into account the size of the values, not the overhead of the array objects return ((long) pageIndex(size - 1) + 1) * pageSize() * numBytesPerElement(); } diff --git a/core/src/main/java/org/elasticsearch/common/util/BigArrays.java b/core/src/main/java/org/elasticsearch/common/util/BigArrays.java index 728db17c2a4..5c539a791cf 100644 --- a/core/src/main/java/org/elasticsearch/common/util/BigArrays.java +++ b/core/src/main/java/org/elasticsearch/common/util/BigArrays.java @@ -25,7 +25,6 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.recycler.Recycler; @@ -91,7 +90,7 @@ public class BigArrays implements Releasable { private abstract static class AbstractArrayWrapper extends AbstractArray implements BigArray { - protected static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(ByteArrayWrapper.class); + static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(ByteArrayWrapper.class); private final Releasable releasable; private final long size; @@ -377,6 +376,7 @@ public class BigArrays implements Releasable { // Checking the breaker is disabled if not specified this(new PageCacheRecycler(settings), breakerService, false); } + // public for tests public BigArrays(PageCacheRecycler recycler, @Nullable final CircuitBreakerService breakerService, boolean checkBreaker) { this.checkBreaker = checkBreaker; @@ -392,9 +392,12 @@ public class BigArrays implements Releasable { /** * Adjust the circuit breaker with the given delta, if the delta is * negative, or checkBreaker is false, the breaker will be adjusted - * without tripping + * without tripping. If the data was already created before calling + * this method, and the breaker trips, we add the delta without breaking + * to account for the created data. If the data has not been created yet, + * we do not add the delta to the breaker if it trips. 
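The two cases described above boil down to an ordering rule: reserve the estimated bytes with the breaker before creating a big array, so a trip costs nothing, but when accounting for data that already exists, add the bytes even on a trip so that closing the array later re-adjusts correctly. A simplified caller-side sketch of both cases (names and the byte estimate are made up; this is not the BigArrays code itself):

import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.breaker.CircuitBreakingException;

public class BreakerAccountingSketch {
    // Case 1: data not created yet. If the breaker trips, the exception simply propagates
    // and there is nothing to unwind.
    static long[] allocateTracked(CircuitBreaker breaker, int size) {
        long estimatedBytes = (long) size * Long.BYTES;
        breaker.addEstimateBytesAndMaybeBreak(estimatedBytes, "example-big-array");
        return new long[size];
    }

    // Case 2: data already exists. On a trip the bytes are still added without breaking,
    // otherwise releasing the array later would subtract bytes that were never counted.
    static void registerExisting(CircuitBreaker breaker, long[] existing) {
        long bytes = (long) existing.length * Long.BYTES;
        try {
            breaker.addEstimateBytesAndMaybeBreak(bytes, "example-big-array");
        } catch (CircuitBreakingException e) {
            breaker.addWithoutBreaking(bytes);
            throw e;
        }
    }
}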
*/ - void adjustBreaker(long delta) { + void adjustBreaker(final long delta, final boolean isDataAlreadyCreated) { if (this.breakerService != null) { CircuitBreaker breaker = this.breakerService.getBreaker(CircuitBreaker.REQUEST); if (this.checkBreaker) { @@ -404,9 +407,11 @@ public class BigArrays implements Releasable { try { breaker.addEstimateBytesAndMaybeBreak(delta, ""); } catch (CircuitBreakingException e) { - // since we've already created the data, we need to - // add it so closing the stream re-adjusts properly - breaker.addWithoutBreaking(delta); + if (isDataAlreadyCreated) { + // since we've already created the data, we need to + // add it so closing the stream re-adjusts properly + breaker.addWithoutBreaking(delta); + } // re-throw the original exception throw e; } @@ -435,15 +440,20 @@ public class BigArrays implements Releasable { private T resizeInPlace(T array, long newSize) { final long oldMemSize = array.ramBytesUsed(); + final long oldSize = array.size(); + assert oldMemSize == array.ramBytesEstimated(oldSize) : + "ram bytes used should equal that which was previously estimated: ramBytesUsed=" + + oldMemSize + ", ramBytesEstimated=" + array.ramBytesEstimated(oldSize); + final long estimatedIncreaseInBytes = array.ramBytesEstimated(newSize) - oldMemSize; + adjustBreaker(estimatedIncreaseInBytes, false); array.resize(newSize); - adjustBreaker(array.ramBytesUsed() - oldMemSize); return array; } private T validate(T array) { boolean success = false; try { - adjustBreaker(array.ramBytesUsed()); + adjustBreaker(array.ramBytesUsed(), true); success = true; } finally { if (!success) { @@ -459,16 +469,17 @@ public class BigArrays implements Releasable { * @param clearOnResize whether values should be set to 0 on initialization and resize */ public ByteArray newByteArray(long size, boolean clearOnResize) { - final ByteArray array; if (size > BYTE_PAGE_SIZE) { - array = new BigByteArray(size, this, clearOnResize); + // when allocating big arrays, we want to first ensure we have the capacity by + // checking with the circuit breaker before attempting to allocate + adjustBreaker(BigByteArray.estimateRamBytes(size), false); + return new BigByteArray(size, this, clearOnResize); } else if (size >= BYTE_PAGE_SIZE / 2 && recycler != null) { final Recycler.V page = recycler.bytePage(clearOnResize); - array = new ByteArrayWrapper(this, page.v(), size, page, clearOnResize); + return validate(new ByteArrayWrapper(this, page.v(), size, page, clearOnResize)); } else { - array = new ByteArrayWrapper(this, new byte[(int) size], size, null, clearOnResize); + return validate(new ByteArrayWrapper(this, new byte[(int) size], size, null, clearOnResize)); } - return validate(array); } /** @@ -541,16 +552,17 @@ public class BigArrays implements Releasable { * @param clearOnResize whether values should be set to 0 on initialization and resize */ public IntArray newIntArray(long size, boolean clearOnResize) { - final IntArray array; if (size > INT_PAGE_SIZE) { - array = new BigIntArray(size, this, clearOnResize); + // when allocating big arrays, we want to first ensure we have the capacity by + // checking with the circuit breaker before attempting to allocate + adjustBreaker(BigIntArray.estimateRamBytes(size), false); + return new BigIntArray(size, this, clearOnResize); } else if (size >= INT_PAGE_SIZE / 2 && recycler != null) { final Recycler.V page = recycler.intPage(clearOnResize); - array = new IntArrayWrapper(this, page.v(), size, page, clearOnResize); + return validate(new IntArrayWrapper(this, 
page.v(), size, page, clearOnResize)); } else { - array = new IntArrayWrapper(this, new int[(int) size], size, null, clearOnResize); + return validate(new IntArrayWrapper(this, new int[(int) size], size, null, clearOnResize)); } - return validate(array); } /** @@ -591,16 +603,17 @@ public class BigArrays implements Releasable { * @param clearOnResize whether values should be set to 0 on initialization and resize */ public LongArray newLongArray(long size, boolean clearOnResize) { - final LongArray array; if (size > LONG_PAGE_SIZE) { - array = new BigLongArray(size, this, clearOnResize); + // when allocating big arrays, we want to first ensure we have the capacity by + // checking with the circuit breaker before attempting to allocate + adjustBreaker(BigLongArray.estimateRamBytes(size), false); + return new BigLongArray(size, this, clearOnResize); } else if (size >= LONG_PAGE_SIZE / 2 && recycler != null) { final Recycler.V page = recycler.longPage(clearOnResize); - array = new LongArrayWrapper(this, page.v(), size, page, clearOnResize); + return validate(new LongArrayWrapper(this, page.v(), size, page, clearOnResize)); } else { - array = new LongArrayWrapper(this, new long[(int) size], size, null, clearOnResize); + return validate(new LongArrayWrapper(this, new long[(int) size], size, null, clearOnResize)); } - return validate(array); } /** @@ -641,16 +654,17 @@ public class BigArrays implements Releasable { * @param clearOnResize whether values should be set to 0 on initialization and resize */ public DoubleArray newDoubleArray(long size, boolean clearOnResize) { - final DoubleArray arr; if (size > LONG_PAGE_SIZE) { - arr = new BigDoubleArray(size, this, clearOnResize); + // when allocating big arrays, we want to first ensure we have the capacity by + // checking with the circuit breaker before attempting to allocate + adjustBreaker(BigDoubleArray.estimateRamBytes(size), false); + return new BigDoubleArray(size, this, clearOnResize); } else if (size >= LONG_PAGE_SIZE / 2 && recycler != null) { final Recycler.V page = recycler.longPage(clearOnResize); - arr = new DoubleArrayWrapper(this, page.v(), size, page, clearOnResize); + return validate(new DoubleArrayWrapper(this, page.v(), size, page, clearOnResize)); } else { - arr = new DoubleArrayWrapper(this, new long[(int) size], size, null, clearOnResize); + return validate(new DoubleArrayWrapper(this, new long[(int) size], size, null, clearOnResize)); } - return validate(arr); } /** Allocate a new {@link DoubleArray} of the given capacity. 
*/ @@ -688,16 +702,17 @@ public class BigArrays implements Releasable { * @param clearOnResize whether values should be set to 0 on initialization and resize */ public FloatArray newFloatArray(long size, boolean clearOnResize) { - final FloatArray array; if (size > INT_PAGE_SIZE) { - array = new BigFloatArray(size, this, clearOnResize); + // when allocating big arrays, we want to first ensure we have the capacity by + // checking with the circuit breaker before attempting to allocate + adjustBreaker(BigFloatArray.estimateRamBytes(size), false); + return new BigFloatArray(size, this, clearOnResize); } else if (size >= INT_PAGE_SIZE / 2 && recycler != null) { final Recycler.V page = recycler.intPage(clearOnResize); - array = new FloatArrayWrapper(this, page.v(), size, page, clearOnResize); + return validate(new FloatArrayWrapper(this, page.v(), size, page, clearOnResize)); } else { - array = new FloatArrayWrapper(this, new int[(int) size], size, null, clearOnResize); + return validate(new FloatArrayWrapper(this, new int[(int) size], size, null, clearOnResize)); } - return validate(array); } /** Allocate a new {@link FloatArray} of the given capacity. */ @@ -736,14 +751,16 @@ public class BigArrays implements Releasable { public ObjectArray newObjectArray(long size) { final ObjectArray array; if (size > OBJECT_PAGE_SIZE) { - array = new BigObjectArray<>(size, this); + // when allocating big arrays, we want to first ensure we have the capacity by + // checking with the circuit breaker before attempting to allocate + adjustBreaker(BigObjectArray.estimateRamBytes(size), false); + return new BigObjectArray<>(size, this); } else if (size >= OBJECT_PAGE_SIZE / 2 && recycler != null) { final Recycler.V page = recycler.objectPage(); - array = new ObjectArrayWrapper<>(this, page.v(), size, page); + return validate(new ObjectArrayWrapper<>(this, page.v(), size, page)); } else { - array = new ObjectArrayWrapper<>(this, new Object[(int) size], size, null); + return validate(new ObjectArrayWrapper<>(this, new Object[(int) size], size, null)); } - return validate(array); } /** Resize the array to the exact provided size. */ diff --git a/core/src/main/java/org/elasticsearch/common/util/BigByteArray.java b/core/src/main/java/org/elasticsearch/common/util/BigByteArray.java index 02f2c627a4c..789e6dc6bba 100644 --- a/core/src/main/java/org/elasticsearch/common/util/BigByteArray.java +++ b/core/src/main/java/org/elasticsearch/common/util/BigByteArray.java @@ -33,6 +33,8 @@ import static org.elasticsearch.common.util.BigArrays.BYTE_PAGE_SIZE; */ final class BigByteArray extends AbstractBigArray implements ByteArray { + private static final BigByteArray ESTIMATOR = new BigByteArray(0, BigArrays.NON_RECYCLING_INSTANCE, false); + private byte[][] pages; /** Constructor. */ @@ -44,7 +46,7 @@ final class BigByteArray extends AbstractBigArray implements ByteArray { pages[i] = newBytePage(i); } } - + @Override public byte get(long index) { final int pageIndex = pageIndex(index); @@ -147,4 +149,9 @@ final class BigByteArray extends AbstractBigArray implements ByteArray { this.size = newSize; } + /** Estimates the number of bytes that would be consumed by an array of the given size. 
*/ + public static long estimateRamBytes(final long size) { + return ESTIMATOR.ramBytesEstimated(size); + } + } diff --git a/core/src/main/java/org/elasticsearch/common/util/BigDoubleArray.java b/core/src/main/java/org/elasticsearch/common/util/BigDoubleArray.java index a8b4503bda6..a2c770ee995 100644 --- a/core/src/main/java/org/elasticsearch/common/util/BigDoubleArray.java +++ b/core/src/main/java/org/elasticsearch/common/util/BigDoubleArray.java @@ -32,6 +32,8 @@ import static org.elasticsearch.common.util.BigArrays.LONG_PAGE_SIZE; */ final class BigDoubleArray extends AbstractBigArray implements DoubleArray { + private static final BigDoubleArray ESTIMATOR = new BigDoubleArray(0, BigArrays.NON_RECYCLING_INSTANCE, false); + private long[][] pages; /** Constructor. */ @@ -110,4 +112,9 @@ final class BigDoubleArray extends AbstractBigArray implements DoubleArray { } } + /** Estimates the number of bytes that would be consumed by an array of the given size. */ + public static long estimateRamBytes(final long size) { + return ESTIMATOR.ramBytesEstimated(size); + } + } diff --git a/core/src/main/java/org/elasticsearch/common/util/BigFloatArray.java b/core/src/main/java/org/elasticsearch/common/util/BigFloatArray.java index cf11eba37ae..b67db2e84de 100644 --- a/core/src/main/java/org/elasticsearch/common/util/BigFloatArray.java +++ b/core/src/main/java/org/elasticsearch/common/util/BigFloatArray.java @@ -32,6 +32,8 @@ import static org.elasticsearch.common.util.BigArrays.INT_PAGE_SIZE; */ final class BigFloatArray extends AbstractBigArray implements FloatArray { + private static final BigFloatArray ESTIMATOR = new BigFloatArray(0, BigArrays.NON_RECYCLING_INSTANCE, false); + private int[][] pages; /** Constructor. */ @@ -110,4 +112,9 @@ final class BigFloatArray extends AbstractBigArray implements FloatArray { } } + /** Estimates the number of bytes that would be consumed by an array of the given size. */ + public static long estimateRamBytes(final long size) { + return ESTIMATOR.ramBytesEstimated(size); + } + } diff --git a/core/src/main/java/org/elasticsearch/common/util/BigIntArray.java b/core/src/main/java/org/elasticsearch/common/util/BigIntArray.java index 16ca3ada24d..d2a1ca3f49c 100644 --- a/core/src/main/java/org/elasticsearch/common/util/BigIntArray.java +++ b/core/src/main/java/org/elasticsearch/common/util/BigIntArray.java @@ -32,6 +32,8 @@ import static org.elasticsearch.common.util.BigArrays.INT_PAGE_SIZE; */ final class BigIntArray extends AbstractBigArray implements IntArray { + private static final BigIntArray ESTIMATOR = new BigIntArray(0, BigArrays.NON_RECYCLING_INSTANCE, false); + private int[][] pages; /** Constructor. */ @@ -108,4 +110,9 @@ final class BigIntArray extends AbstractBigArray implements IntArray { this.size = newSize; } + /** Estimates the number of bytes that would be consumed by an array of the given size. 
*/ + public static long estimateRamBytes(final long size) { + return ESTIMATOR.ramBytesEstimated(size); + } + } diff --git a/core/src/main/java/org/elasticsearch/common/util/BigLongArray.java b/core/src/main/java/org/elasticsearch/common/util/BigLongArray.java index cb9b9e6c332..69f919382f8 100644 --- a/core/src/main/java/org/elasticsearch/common/util/BigLongArray.java +++ b/core/src/main/java/org/elasticsearch/common/util/BigLongArray.java @@ -32,6 +32,8 @@ import static org.elasticsearch.common.util.BigArrays.LONG_PAGE_SIZE; */ final class BigLongArray extends AbstractBigArray implements LongArray { + private static final BigLongArray ESTIMATOR = new BigLongArray(0, BigArrays.NON_RECYCLING_INSTANCE, false); + private long[][] pages; /** Constructor. */ @@ -111,4 +113,9 @@ final class BigLongArray extends AbstractBigArray implements LongArray { } } + /** Estimates the number of bytes that would be consumed by an array of the given size. */ + public static long estimateRamBytes(final long size) { + return ESTIMATOR.ramBytesEstimated(size); + } + } diff --git a/core/src/main/java/org/elasticsearch/common/util/BigObjectArray.java b/core/src/main/java/org/elasticsearch/common/util/BigObjectArray.java index 023c710f3aa..1ed012e2bb3 100644 --- a/core/src/main/java/org/elasticsearch/common/util/BigObjectArray.java +++ b/core/src/main/java/org/elasticsearch/common/util/BigObjectArray.java @@ -32,6 +32,8 @@ import static org.elasticsearch.common.util.BigArrays.OBJECT_PAGE_SIZE; */ final class BigObjectArray extends AbstractBigArray implements ObjectArray { + private static final BigObjectArray ESTIMATOR = new BigObjectArray(0, BigArrays.NON_RECYCLING_INSTANCE); + private Object[][] pages; /** Constructor. */ @@ -85,4 +87,9 @@ final class BigObjectArray extends AbstractBigArray implements ObjectArray this.size = newSize; } -} \ No newline at end of file + /** Estimates the number of bytes that would be consumed by an array of the given size. */ + public static long estimateRamBytes(final long size) { + return ESTIMATOR.ramBytesEstimated(size); + } + +} diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutor.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutor.java index 898c2203d0b..c24b6899bcc 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutor.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutor.java @@ -21,6 +21,7 @@ package org.elasticsearch.common.util.concurrent; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.common.ExponentiallyWeightedMovingAverage; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.unit.TimeValue; @@ -43,8 +44,13 @@ import java.util.stream.Stream; */ public final class QueueResizingEsThreadPoolExecutor extends EsThreadPoolExecutor { + // This is a random starting point alpha. 
TODO: revisit this with actual testing and/or make it configurable + public static double EWMA_ALPHA = 0.3; + private static final Logger logger = ESLoggerFactory.getLogger(QueueResizingEsThreadPoolExecutor.class); + // The amount the queue size is adjusted by for each calculation + private static final int QUEUE_ADJUSTMENT_AMOUNT = 50; private final Function runnableWrapper; private final ResizableBlockingQueue workQueue; @@ -52,8 +58,7 @@ public final class QueueResizingEsThreadPoolExecuto private final int minQueueSize; private final int maxQueueSize; private final long targetedResponseTimeNanos; - // The amount the queue size is adjusted by for each calcuation - private static final int QUEUE_ADJUSTMENT_AMOUNT = 50; + private final ExponentiallyWeightedMovingAverage executionEWMA; private final AtomicLong totalTaskNanos = new AtomicLong(0); private final AtomicInteger taskCount = new AtomicInteger(0); @@ -74,6 +79,9 @@ public final class QueueResizingEsThreadPoolExecuto this.minQueueSize = minQueueSize; this.maxQueueSize = maxQueueSize; this.targetedResponseTimeNanos = targetedResponseTime.getNanos(); + // We choose to start the EWMA with the targeted response time, reasoning that it is a + // better start point for a realistic task execution time than starting at 0 + this.executionEWMA = new ExponentiallyWeightedMovingAverage(EWMA_ALPHA, targetedResponseTimeNanos); logger.debug("thread pool [{}] will adjust queue by [{}] when determining automatic queue size", name, QUEUE_ADJUSTMENT_AMOUNT); } @@ -126,6 +134,13 @@ public final class QueueResizingEsThreadPoolExecuto return workQueue.capacity(); } + /** + * Returns the exponentially weighted moving average of the task execution time + */ + public double getTaskExecutionEWMA() { + return executionEWMA.getAverage(); + } + @Override protected void afterExecute(Runnable r, Throwable t) { super.afterExecute(r, t); @@ -136,6 +151,11 @@ public final class QueueResizingEsThreadPoolExecuto assert r instanceof TimedRunnable : "expected only TimedRunnables in queue"; final long taskNanos = ((TimedRunnable) r).getTotalNanos(); final long totalNanos = totalTaskNanos.addAndGet(taskNanos); + + final long taskExecutionNanos = ((TimedRunnable) r).getTotalExecutionNanos(); + assert taskExecutionNanos >= 0 : "expected task to always take longer than 0 nanoseconds, got: " + taskExecutionNanos; + executionEWMA.addValue(taskExecutionNanos); + if (taskCount.incrementAndGet() == this.tasksPerFrame) { final long endTimeNs = System.nanoTime(); final long totalRuntime = endTimeNs - this.startNs; @@ -149,20 +169,22 @@ public final class QueueResizingEsThreadPoolExecuto try { final double lambda = calculateLambda(tasksPerFrame, totalNanos); final int desiredQueueSize = calculateL(lambda, targetedResponseTimeNanos); + final int oldCapacity = workQueue.capacity(); + if (logger.isDebugEnabled()) { final long avgTaskTime = totalNanos / tasksPerFrame; - logger.debug("[{}]: there were [{}] tasks in [{}], avg task time: [{}], [{} tasks/s], " + - "optimal queue is [{}]", + logger.debug("[{}]: there were [{}] tasks in [{}], avg task time [{}], EWMA task execution [{}], " + + "[{} tasks/s], optimal queue is [{}], current capacity [{}]", name, tasksPerFrame, TimeValue.timeValueNanos(totalRuntime), TimeValue.timeValueNanos(avgTaskTime), + TimeValue.timeValueNanos((long)executionEWMA.getAverage()), String.format(Locale.ROOT, "%.2f",
lambda * TimeValue.timeValueSeconds(1).nanos()), - desiredQueueSize); + desiredQueueSize, + oldCapacity); } - final int oldCapacity = workQueue.capacity(); - // Adjust the queue size towards the desired capacity using an adjust of // QUEUE_ADJUSTMENT_AMOUNT (either up or down), keeping in mind the min and max // values the queue size can have. @@ -223,6 +245,7 @@ public final class QueueResizingEsThreadPoolExecutor extends EsThreadPoolExecuto b.append("max queue capacity = ").append(maxQueueSize).append(", "); b.append("frame size = ").append(tasksPerFrame).append(", "); b.append("targeted response rate = ").append(TimeValue.timeValueNanos(targetedResponseTimeNanos)).append(", "); + b.append("task execution EWMA = ").append(TimeValue.timeValueNanos((long)executionEWMA.getAverage())).append(", "); b.append("adjustment amount = ").append(QUEUE_ADJUSTMENT_AMOUNT).append(", "); /* * ThreadPoolExecutor has some nice information in its toString but we diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/TimedRunnable.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/TimedRunnable.java index 91ad6e46efa..2ee80badb74 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/TimedRunnable.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/TimedRunnable.java @@ -20,12 +20,13 @@ package org.elasticsearch.common.util.concurrent; /** - * A class used to wrap a {@code Runnable} that allows capturing the time the task since creation - * through execution. + * A class used to wrap a {@code Runnable} that allows capturing the time of the task since creation + * through execution as well as only execution time. */ class TimedRunnable implements Runnable { private final Runnable original; private final long creationTimeNanos; + private long startTimeNanos; private long finishTimeNanos = -1; TimedRunnable(Runnable original) { @@ -36,6 +37,7 @@ class TimedRunnable implements Runnable { @Override public void run() { try { + startTimeNanos = System.nanoTime(); original.run(); } finally { finishTimeNanos = System.nanoTime(); @@ -53,4 +55,16 @@ class TimedRunnable implements Runnable { } return finishTimeNanos - creationTimeNanos; } + + /** + * Return the time this task spent being run. + * If the task is still running or has not yet been run, returns -1. 
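The executor now feeds each task's measured execution time into an exponentially weighted moving average. The recurrence behind such an average is newAvg = alpha * sample + (1 - alpha) * oldAvg; the stand-alone sketch below only illustrates that recurrence and the choice of seeding the average with the targeted response time, not the internals of the real ExponentiallyWeightedMovingAverage class (which this diff does not show).

[source,java]
----
// Minimal EWMA sketch: newAvg = alpha * sample + (1 - alpha) * oldAvg.
// Illustrative only; the real class is org.elasticsearch.common.ExponentiallyWeightedMovingAverage.
public class EwmaSketch {
    private final double alpha;
    private double average;

    public EwmaSketch(double alpha, double initialAverage) {
        this.alpha = alpha;
        this.average = initialAverage;
    }

    public void addValue(double value) {
        average = alpha * value + (1 - alpha) * average;
    }

    public double getAverage() {
        return average;
    }

    public static void main(String[] args) {
        // Start at the "targeted response time", as the constructor change above does.
        EwmaSketch ewma = new EwmaSketch(0.3, 1_000_000); // 1 ms in nanoseconds
        for (long taskNanos : new long[] {500_000, 700_000, 2_000_000}) {
            ewma.addValue(taskNanos);
        }
        System.out.printf("EWMA of task execution time: %.0f ns%n", ewma.getAverage());
    }
}
----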
+ */ + long getTotalExecutionNanos() { + if (startTimeNanos == -1 || finishTimeNanos == -1) { + // There must have been an exception thrown, the total time is unknown (-1) + return -1; + } + return finishTimeNanos - startTimeNanos; + } } diff --git a/core/src/main/java/org/elasticsearch/common/util/set/Sets.java b/core/src/main/java/org/elasticsearch/common/util/set/Sets.java index 4b323c42a37..f2bba5cde36 100644 --- a/core/src/main/java/org/elasticsearch/common/util/set/Sets.java +++ b/core/src/main/java/org/elasticsearch/common/util/set/Sets.java @@ -21,11 +21,19 @@ package org.elasticsearch.common.util.set; import java.util.Collection; import java.util.Collections; +import java.util.EnumSet; import java.util.HashSet; import java.util.Iterator; import java.util.Objects; import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; import java.util.concurrent.ConcurrentHashMap; +import java.util.function.BiConsumer; +import java.util.function.BinaryOperator; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.stream.Collector; import java.util.stream.Collectors; public final class Sets { @@ -69,6 +77,47 @@ public final class Sets { return left.stream().filter(k -> !right.contains(k)).collect(Collectors.toSet()); } + public static SortedSet sortedDifference(Set left, Set right) { + Objects.requireNonNull(left); + Objects.requireNonNull(right); + return left.stream().filter(k -> !right.contains(k)).collect(new SortedSetCollector<>()); + } + + private static class SortedSetCollector implements Collector, SortedSet> { + + @Override + public Supplier> supplier() { + return TreeSet::new; + } + + @Override + public BiConsumer, T> accumulator() { + return (s, e) -> s.add(e); + } + + @Override + public BinaryOperator> combiner() { + return (s, t) -> { + s.addAll(t); + return s; + }; + } + + @Override + public Function, SortedSet> finisher() { + return Function.identity(); + } + + static final Set CHARACTERISTICS = + Collections.unmodifiableSet(EnumSet.of(Collector.Characteristics.IDENTITY_FINISH)); + + @Override + public Set characteristics() { + return CHARACTERISTICS; + } + + } + public static Set union(Set left, Set right) { Objects.requireNonNull(left); Objects.requireNonNull(right); diff --git a/core/src/main/java/org/elasticsearch/env/Environment.java b/core/src/main/java/org/elasticsearch/env/Environment.java index bdf3d76fa9a..ce2b15d2d71 100644 --- a/core/src/main/java/org/elasticsearch/env/Environment.java +++ b/core/src/main/java/org/elasticsearch/env/Environment.java @@ -27,23 +27,17 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import java.io.IOException; -import java.io.UncheckedIOException; import java.net.MalformedURLException; import java.net.URISyntaxException; import java.net.URL; import java.nio.file.FileStore; import java.nio.file.Files; import java.nio.file.Path; -import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.Objects; import java.util.function.Function; -import static org.elasticsearch.common.Strings.cleanPath; - /** * The environment of where things exists. 
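Sets.sortedDifference above collects into a TreeSet via a hand-written java.util.stream.Collector. For readers unfamiliar with custom collectors, the same observable result can be obtained from the stock Collectors.toCollection; the small alternative sketch below is not what the change itself uses, just a comparison point.

[source,java]
----
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.stream.Collectors;

public class SortedDifferenceExample {
    // Same observable behaviour as Sets.sortedDifference above, but built on the
    // stock Collectors.toCollection collector instead of a hand-written Collector.
    static <T extends Comparable<? super T>> SortedSet<T> sortedDifference(Set<T> left, Set<T> right) {
        return left.stream()
                .filter(k -> right.contains(k) == false)
                .collect(Collectors.toCollection(TreeSet::new));
    }

    public static void main(String[] args) {
        Set<String> left = new HashSet<>(Arrays.asList("b", "a", "c"));
        Set<String> right = new HashSet<>(Arrays.asList("c"));
        System.out.println(sortedDifference(left, right)); // prints [a, b]
    }
}
----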
*/ @@ -100,14 +94,14 @@ public class Environment { public Environment(Settings settings) { final Path homeFile; if (PATH_HOME_SETTING.exists(settings)) { - homeFile = PathUtils.get(cleanPath(PATH_HOME_SETTING.get(settings))); + homeFile = PathUtils.get(PATH_HOME_SETTING.get(settings)).normalize(); } else { throw new IllegalStateException(PATH_HOME_SETTING.getKey() + " is not configured"); } // this is trappy, Setting#get(Settings) will get a fallback setting yet return false for Settings#exists(Settings) if (PATH_CONF_SETTING.exists(settings) || DEFAULT_PATH_CONF_SETTING.exists(settings)) { - configFile = PathUtils.get(cleanPath(PATH_CONF_SETTING.get(settings))); + configFile = PathUtils.get(PATH_CONF_SETTING.get(settings)).normalize(); } else { configFile = homeFile.resolve("config"); } @@ -128,7 +122,7 @@ public class Environment { dataWithClusterFiles = new Path[]{homeFile.resolve("data").resolve(clusterName.value())}; } if (PATH_SHARED_DATA_SETTING.exists(settings)) { - sharedDataFile = PathUtils.get(cleanPath(PATH_SHARED_DATA_SETTING.get(settings))); + sharedDataFile = PathUtils.get(PATH_SHARED_DATA_SETTING.get(settings)).normalize(); } else { sharedDataFile = null; } @@ -144,13 +138,13 @@ public class Environment { // this is trappy, Setting#get(Settings) will get a fallback setting yet return false for Settings#exists(Settings) if (PATH_LOGS_SETTING.exists(settings) || DEFAULT_PATH_LOGS_SETTING.exists(settings)) { - logsFile = PathUtils.get(cleanPath(PATH_LOGS_SETTING.get(settings))); + logsFile = PathUtils.get(PATH_LOGS_SETTING.get(settings)).normalize(); } else { logsFile = homeFile.resolve("logs"); } if (PIDFILE_SETTING.exists(settings)) { - pidFile = PathUtils.get(cleanPath(PIDFILE_SETTING.get(settings))); + pidFile = PathUtils.get(PIDFILE_SETTING.get(settings)).normalize(); } else { pidFile = null; } diff --git a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java index a24c3591374..91cb96d60d7 100644 --- a/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/core/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -199,7 +199,6 @@ public final class NodeEnvironment implements Closeable { int maxLocalStorageNodes = MAX_LOCAL_STORAGE_NODES_SETTING.get(settings); for (int possibleLockId = 0; possibleLockId < maxLocalStorageNodes; possibleLockId++) { for (int dirIndex = 0; dirIndex < environment.dataFiles().length; dirIndex++) { - Path dataDirWithClusterName = environment.dataWithClusterFiles()[dirIndex]; Path dataDir = environment.dataFiles()[dirIndex]; Path dir = resolveNodePath(dataDir, possibleLockId); Files.createDirectories(dir); @@ -211,7 +210,8 @@ public final class NodeEnvironment implements Closeable { nodePaths[dirIndex] = new NodePath(dir); nodeLockId = possibleLockId; } catch (LockObtainFailedException ex) { - startupTraceLogger.trace("failed to obtain node lock on {}", dir.toAbsolutePath()); + startupTraceLogger.trace( + new ParameterizedMessage("failed to obtain node lock on {}", dir.toAbsolutePath()), ex); // release all the ones that were obtained up until now releaseAndNullLocks(locks); break; diff --git a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java index 13c317c53e9..4247ec131a3 100644 --- a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java +++ 
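The Environment changes swap the home-grown cleanPath(...) for java.nio.file's Path.normalize(), which lexically removes redundant "." and ".." elements without touching the file system. A tiny illustration with a made-up path:

[source,java]
----
import java.nio.file.Path;
import java.nio.file.Paths;

public class NormalizeExample {
    public static void main(String[] args) {
        // normalize() collapses "." and ".." elements purely lexically.
        Path config = Paths.get("/opt/elasticsearch/./bin/../config").normalize();
        System.out.println(config); // /opt/elasticsearch/config
    }
}
----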
b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayMetaState.java @@ -100,11 +100,6 @@ public class TransportNodesListGatewayMetaState extends TransportNodesAction { public Request() { diff --git a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java index f2b046acd97..11df875d4dd 100644 --- a/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java +++ b/core/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java @@ -172,11 +172,6 @@ public class TransportNodesListGatewayStartedShards extends } } - @Override - protected boolean accumulateExceptions() { - return true; - } - public static class Request extends BaseNodesRequest { private ShardId shardId; diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java index a4ab30116f7..523f322da0c 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexService.java +++ b/core/src/main/java/org/elasticsearch/index/IndexService.java @@ -81,7 +81,6 @@ import java.util.Set; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Consumer; import java.util.function.LongSupplier; import java.util.function.Supplier; @@ -514,7 +513,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust } @Override - public void handle(ShardLock lock) { + public void accept(ShardLock lock) { try { assert lock.getShardId().equals(shardId) : "shard id mismatch, expected: " + shardId + " but got: " + lock.getShardId(); onShardClose(lock); diff --git a/core/src/main/java/org/elasticsearch/index/analysis/AbstractTokenizerFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/AbstractTokenizerFactory.java index dfa177a7fbf..bf6b2fd7c5b 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/AbstractTokenizerFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/AbstractTokenizerFactory.java @@ -25,23 +25,14 @@ import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexSettings; public abstract class AbstractTokenizerFactory extends AbstractIndexComponent implements TokenizerFactory { - - private final String name; - protected final Version version; - - public AbstractTokenizerFactory(IndexSettings indexSettings, String name, Settings settings) { + // TODO drop `String ignored` in a followup + public AbstractTokenizerFactory(IndexSettings indexSettings, String ignored, Settings settings) { super(indexSettings); - this.name = name; this.version = Analysis.parseAnalysisVersion(this.indexSettings.getSettings(), settings, logger); } - @Override - public String name() { - return this.name; - } - public final Version version() { return version; } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java index 5d099267c79..4c17773d6df 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java @@ -35,8 +35,6 @@ import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.indices.analysis.AnalysisModule; import 
org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; import org.elasticsearch.indices.analysis.PreBuiltAnalyzers; -import org.elasticsearch.indices.analysis.PreBuiltCharFilters; -import org.elasticsearch.indices.analysis.PreBuiltTokenizers; import java.io.Closeable; import java.io.IOException; @@ -74,6 +72,7 @@ public final class AnalysisRegistry implements Closeable { Map> tokenizers, Map>> analyzers, Map>> normalizers, + Map preConfiguredCharFilters, Map preConfiguredTokenFilters, Map preConfiguredTokenizers) { this.environment = environment; @@ -82,7 +81,7 @@ public final class AnalysisRegistry implements Closeable { this.tokenizers = unmodifiableMap(tokenizers); this.analyzers = unmodifiableMap(analyzers); this.normalizers = unmodifiableMap(normalizers); - prebuiltAnalysis = new PrebuiltAnalysis(preConfiguredTokenFilters, preConfiguredTokenizers); + prebuiltAnalysis = new PrebuiltAnalysis(preConfiguredCharFilters, preConfiguredTokenFilters, preConfiguredTokenizers); } /** @@ -180,7 +179,7 @@ public final class AnalysisRegistry implements Closeable { public Map buildCharFilterFactories(IndexSettings indexSettings) throws IOException { final Map charFiltersSettings = indexSettings.getSettings().getGroups(INDEX_ANALYSIS_CHAR_FILTER); - return buildMapping(Component.CHAR_FILTER, indexSettings, charFiltersSettings, charFilters, prebuiltAnalysis.charFilterFactories); + return buildMapping(Component.CHAR_FILTER, indexSettings, charFiltersSettings, charFilters, prebuiltAnalysis.preConfiguredCharFilterFactories); } public Map> buildAnalyzerFactories(IndexSettings indexSettings) throws IOException { @@ -397,13 +396,13 @@ public final class AnalysisRegistry implements Closeable { final Map>> analyzerProviderFactories; final Map> preConfiguredTokenFilters; final Map> preConfiguredTokenizers; - final Map> charFilterFactories; + final Map> preConfiguredCharFilterFactories; private PrebuiltAnalysis( + Map preConfiguredCharFilters, Map preConfiguredTokenFilters, Map preConfiguredTokenizers) { Map analyzerProviderFactories = new HashMap<>(); - Map charFilterFactories = new HashMap<>(); // Analyzers for (PreBuiltAnalyzers preBuiltAnalyzerEnum : PreBuiltAnalyzers.values()) { @@ -411,22 +410,14 @@ public final class AnalysisRegistry implements Closeable { analyzerProviderFactories.put(name, new PreBuiltAnalyzerProviderFactory(name, AnalyzerScope.INDICES, preBuiltAnalyzerEnum.getAnalyzer(Version.CURRENT))); } - // Char Filters - for (PreBuiltCharFilters preBuiltCharFilter : PreBuiltCharFilters.values()) { - String name = preBuiltCharFilter.name().toLowerCase(Locale.ROOT); - charFilterFactories.put(name, new PreBuiltCharFilterFactoryFactory(preBuiltCharFilter.getCharFilterFactory(Version.CURRENT))); - } - // Char filter aliases - charFilterFactories.put("htmlStrip", new PreBuiltCharFilterFactoryFactory(PreBuiltCharFilters.HTML_STRIP.getCharFilterFactory(Version.CURRENT))); - this.analyzerProviderFactories = Collections.unmodifiableMap(analyzerProviderFactories); - this.charFilterFactories = Collections.unmodifiableMap(charFilterFactories); + this.preConfiguredCharFilterFactories = preConfiguredCharFilters; this.preConfiguredTokenFilters = preConfiguredTokenFilters; this.preConfiguredTokenizers = preConfiguredTokenizers; } public AnalysisModule.AnalysisProvider getCharFilterFactory(String name) { - return charFilterFactories.get(name); + return preConfiguredCharFilterFactories.get(name); } public AnalysisModule.AnalysisProvider getTokenFilterFactory(String name) { diff --git 
a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzer.java b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzer.java index 68799413907..d70b4628f53 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzer.java @@ -27,6 +27,7 @@ import java.io.Reader; public final class CustomAnalyzer extends Analyzer { + private final String tokenizerName; private final TokenizerFactory tokenizerFactory; private final CharFilterFactory[] charFilters; @@ -36,12 +37,14 @@ public final class CustomAnalyzer extends Analyzer { private final int positionIncrementGap; private final int offsetGap; - public CustomAnalyzer(TokenizerFactory tokenizerFactory, CharFilterFactory[] charFilters, TokenFilterFactory[] tokenFilters) { - this(tokenizerFactory, charFilters, tokenFilters, 0, -1); + public CustomAnalyzer(String tokenizerName, TokenizerFactory tokenizerFactory, CharFilterFactory[] charFilters, + TokenFilterFactory[] tokenFilters) { + this(tokenizerName, tokenizerFactory, charFilters, tokenFilters, 0, -1); } - public CustomAnalyzer(TokenizerFactory tokenizerFactory, CharFilterFactory[] charFilters, TokenFilterFactory[] tokenFilters, - int positionIncrementGap, int offsetGap) { + public CustomAnalyzer(String tokenizerName, TokenizerFactory tokenizerFactory, CharFilterFactory[] charFilters, + TokenFilterFactory[] tokenFilters, int positionIncrementGap, int offsetGap) { + this.tokenizerName = tokenizerName; this.tokenizerFactory = tokenizerFactory; this.charFilters = charFilters; this.tokenFilters = tokenFilters; @@ -49,6 +52,12 @@ public final class CustomAnalyzer extends Analyzer { this.offsetGap = offsetGap; } + /** + * The name of the tokenizer as configured by the user. 
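CustomAnalyzer now remembers the tokenizer name exactly as the user configured it, so later consumers can report that name instead of an implementation class. A toy sketch of the pattern follows; the class and names here are invented for illustration, not part of the change.

[source,java]
----
import java.util.function.Supplier;

// Toy version of "remember what the user called this component" so error
// messages can refer to the configured name rather than a Java class name.
public class NamedComponent<T> {
    private final String configuredName;
    private final Supplier<T> factory;

    public NamedComponent(String configuredName, Supplier<T> factory) {
        this.configuredName = configuredName;
        this.factory = factory;
    }

    public String getConfiguredName() {
        return configuredName;
    }

    public T create() {
        return factory.get();
    }

    public static void main(String[] args) {
        NamedComponent<StringBuilder> c = new NamedComponent<>("my_tokenizer", StringBuilder::new);
        // Reports "my_tokenizer" instead of "java.lang.StringBuilder".
        System.out.println("configured as: " + c.getConfiguredName());
    }
}
----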
+ */ + public String getTokenizerName() { + return tokenizerName; + } public TokenizerFactory tokenizerFactory() { return tokenizerFactory; diff --git a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java index f1123045622..3bf5d43375c 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java @@ -80,7 +80,7 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider 1) { - throw new IllegalArgumentException("delimiter can only be a one char value"); + } else if (delimiter.length() != 1) { + throw new IllegalArgumentException("delimiter must be a one char value"); } else { this.delimiter = delimiter.charAt(0); } @@ -50,8 +50,8 @@ public class PathHierarchyTokenizerFactory extends AbstractTokenizerFactory { String replacement = settings.get("replacement"); if (replacement == null) { this.replacement = this.delimiter; - } else if (replacement.length() > 1) { - throw new IllegalArgumentException("replacement can only be a one char value"); + } else if (replacement.length() != 1) { + throw new IllegalArgumentException("replacement must be a one char value"); } else { this.replacement = replacement.charAt(0); } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/PatternAnalyzer.java b/core/src/main/java/org/elasticsearch/index/analysis/PatternAnalyzer.java index 7554f459bfa..5d4d9f2df3f 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/PatternAnalyzer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/PatternAnalyzer.java @@ -53,4 +53,13 @@ public final class PatternAnalyzer extends Analyzer { } return new TokenStreamComponents(tokenizer, stream); } + + @Override + protected TokenStream normalize(String fieldName, TokenStream in) { + TokenStream stream = in; + if (lowercase) { + stream = new LowerCaseFilter(stream); + } + return stream; + } } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/PreBuiltCharFilterFactoryFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/PreBuiltCharFilterFactoryFactory.java deleted file mode 100644 index 62a8ff1ff3e..00000000000 --- a/core/src/main/java/org/elasticsearch/index/analysis/PreBuiltCharFilterFactoryFactory.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
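The PathHierarchyTokenizerFactory change above, from length() > 1 to length() != 1, is more than wording: an empty delimiter used to pass the old check and only fail later at charAt(0). The small stand-alone demonstration below reproduces that difference; it is not the factory itself.

[source,java]
----
public class DelimiterValidation {
    // Old check: only rejected strings longer than one char, so "" slipped through
    // and charAt(0) blew up later with a StringIndexOutOfBoundsException.
    static char oldStyle(String delimiter) {
        if (delimiter.length() > 1) {
            throw new IllegalArgumentException("delimiter can only be a one char value");
        }
        return delimiter.charAt(0);
    }

    // New check: anything that is not exactly one char is rejected up front.
    static char newStyle(String delimiter) {
        if (delimiter.length() != 1) {
            throw new IllegalArgumentException("delimiter must be a one char value");
        }
        return delimiter.charAt(0);
    }

    public static void main(String[] args) {
        System.out.println(newStyle("/"));
        try {
            oldStyle("");
        } catch (StringIndexOutOfBoundsException e) {
            System.out.println("old check let the empty string through: " + e);
        }
        try {
            newStyle("");
        } catch (IllegalArgumentException e) {
            System.out.println("new check rejects it early: " + e.getMessage());
        }
    }
}
----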
- */ - -package org.elasticsearch.index.analysis; - -import org.elasticsearch.Version; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.indices.analysis.AnalysisModule; -import org.elasticsearch.indices.analysis.PreBuiltCharFilters; - -import java.io.IOException; - -public class PreBuiltCharFilterFactoryFactory implements AnalysisModule.AnalysisProvider { - - private final CharFilterFactory charFilterFactory; - - public PreBuiltCharFilterFactoryFactory(CharFilterFactory charFilterFactory) { - this.charFilterFactory = charFilterFactory; - } - - @Override - public CharFilterFactory get(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException { - Version indexVersion = Version.indexCreated(settings); - if (!Version.CURRENT.equals(indexVersion)) { - PreBuiltCharFilters preBuiltCharFilters = PreBuiltCharFilters.getOrDefault(name, null); - if (preBuiltCharFilters != null) { - return preBuiltCharFilters.getCharFilterFactory(indexVersion); - } - } - - return charFilterFactory; - } -} diff --git a/core/src/main/java/org/elasticsearch/index/analysis/PreConfiguredCharFilter.java b/core/src/main/java/org/elasticsearch/index/analysis/PreConfiguredCharFilter.java new file mode 100644 index 00000000000..a979e9e34fe --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/analysis/PreConfiguredCharFilter.java @@ -0,0 +1,112 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.analysis; + +import org.apache.lucene.analysis.CharFilter; +import org.apache.lucene.analysis.TokenFilter; +import org.elasticsearch.Version; +import org.elasticsearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy; + +import java.io.Reader; +import java.util.function.BiFunction; +import java.util.function.Function; + +/** + * Provides pre-configured, shared {@link CharFilter}s. + */ +public class PreConfiguredCharFilter extends PreConfiguredAnalysisComponent { + /** + * Create a pre-configured char filter that may not vary at all. + */ + public static PreConfiguredCharFilter singleton(String name, boolean useFilterForMultitermQueries, Function create) { + return new PreConfiguredCharFilter(name, CachingStrategy.ONE, useFilterForMultitermQueries, + (reader, version) -> create.apply(reader)); + } + + /** + * Create a pre-configured token filter that may vary based on the Lucene version. 
+ */ + public static PreConfiguredCharFilter luceneVersion(String name, boolean useFilterForMultitermQueries, + BiFunction create) { + return new PreConfiguredCharFilter(name, CachingStrategy.LUCENE, useFilterForMultitermQueries, + (reader, version) -> create.apply(reader, version.luceneVersion)); + } + + /** + * Create a pre-configured token filter that may vary based on the Elasticsearch version. + */ + public static PreConfiguredCharFilter elasticsearchVersion(String name, boolean useFilterForMultitermQueries, + BiFunction create) { + return new PreConfiguredCharFilter(name, CachingStrategy.ELASTICSEARCH, useFilterForMultitermQueries, create); + } + + private final boolean useFilterForMultitermQueries; + private final BiFunction create; + + protected PreConfiguredCharFilter(String name, CachingStrategy cache, boolean useFilterForMultitermQueries, + BiFunction create) { + super(name, cache); + this.useFilterForMultitermQueries = useFilterForMultitermQueries; + this.create = create; + } + + /** + * Can this {@link TokenFilter} be used in multi-term queries? + */ + public boolean shouldUseFilterForMultitermQueries() { + return useFilterForMultitermQueries; + } + + private interface MultiTermAwareCharFilterFactory extends CharFilterFactory, MultiTermAwareComponent {} + + @Override + protected CharFilterFactory create(Version version) { + if (useFilterForMultitermQueries) { + return new MultiTermAwareCharFilterFactory() { + @Override + public String name() { + return getName(); + } + + @Override + public Reader create(Reader reader) { + return create.apply(reader, version); + } + + @Override + public Object getMultiTermComponent() { + return this; + } + }; + } + return new CharFilterFactory() { + @Override + public Reader create(Reader reader) { + return create.apply(reader, version); + } + + @Override + public String name() { + return getName(); + } + }; + } + +} diff --git a/core/src/main/java/org/elasticsearch/index/analysis/PreConfiguredTokenizer.java b/core/src/main/java/org/elasticsearch/index/analysis/PreConfiguredTokenizer.java index 6d1842c7a36..131246d0b76 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/PreConfiguredTokenizer.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/PreConfiguredTokenizer.java @@ -96,11 +96,6 @@ public final class PreConfiguredTokenizer extends PreConfiguredAnalysisComponent protected TokenizerFactory create(Version version) { if (multiTermComponent != null) { return new MultiTermAwareTokenizerFactory() { - @Override - public String name() { - return getName(); - } - @Override public Tokenizer create() { return create.apply(version); @@ -112,17 +107,7 @@ public final class PreConfiguredTokenizer extends PreConfiguredAnalysisComponent } }; } else { - return new TokenizerFactory() { - @Override - public String name() { - return getName(); - } - - @Override - public Tokenizer create() { - return create.apply(version); - } - }; + return () -> create.apply(version); } } } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java index 6ca9d457cbc..be96dbd6560 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java @@ -21,9 +21,6 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Tokenizer; -public interface TokenizerFactory { - - String name(); - +public interface TokenizerFactory { // TODO 
replace with Supplier Tokenizer create(); } diff --git a/core/src/main/java/org/elasticsearch/index/engine/CombinedDeletionPolicy.java b/core/src/main/java/org/elasticsearch/index/engine/CombinedDeletionPolicy.java new file mode 100644 index 00000000000..69173cc4216 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/engine/CombinedDeletionPolicy.java @@ -0,0 +1,90 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.engine; + +import org.apache.lucene.index.IndexCommit; +import org.apache.lucene.index.IndexDeletionPolicy; +import org.apache.lucene.index.SnapshotDeletionPolicy; +import org.elasticsearch.index.translog.Translog; +import org.elasticsearch.index.translog.TranslogDeletionPolicy; + +import java.io.IOException; +import java.util.List; + +/** + * An {@link IndexDeletionPolicy} that coordinates between Lucene's commits and the retention of translog generation files, + * making sure that all translog files that are needed to recover from the Lucene commit are not deleted. + */ +class CombinedDeletionPolicy extends IndexDeletionPolicy { + + private final TranslogDeletionPolicy translogDeletionPolicy; + private final EngineConfig.OpenMode openMode; + + private final SnapshotDeletionPolicy indexDeletionPolicy; + + CombinedDeletionPolicy(SnapshotDeletionPolicy indexDeletionPolicy, TranslogDeletionPolicy translogDeletionPolicy, + EngineConfig.OpenMode openMode) { + this.indexDeletionPolicy = indexDeletionPolicy; + this.translogDeletionPolicy = translogDeletionPolicy; + this.openMode = openMode; + } + + @Override + public void onInit(List commits) throws IOException { + indexDeletionPolicy.onInit(commits); + switch (openMode) { + case CREATE_INDEX_AND_TRANSLOG: + assert commits.isEmpty() : "index is being created but we already have commits"; + break; + case OPEN_INDEX_CREATE_TRANSLOG: + assert commits.isEmpty() == false : "index is opened, but we have no commits"; + break; + case OPEN_INDEX_AND_TRANSLOG: + assert commits.isEmpty() == false : "index is opened, but we have no commits"; + setLastCommittedTranslogGeneration(commits); + break; + default: + throw new IllegalArgumentException("unknown openMode [" + openMode + "]"); + } + } + + @Override + public void onCommit(List commits) throws IOException { + indexDeletionPolicy.onCommit(commits); + setLastCommittedTranslogGeneration(commits); + } + + private void setLastCommittedTranslogGeneration(List commits) throws IOException { + // when opening an existing lucene index, we currently always open the last commit. 
+ // we therefore use the translog gen as the one that will be required for recovery + final IndexCommit indexCommit = commits.get(commits.size() - 1); + assert indexCommit.isDeleted() == false : "last commit is deleted"; + long minGen = Long.parseLong(indexCommit.getUserData().get(Translog.TRANSLOG_GENERATION_KEY)); + translogDeletionPolicy.setMinTranslogGenerationForRecovery(minGen); + } + + public SnapshotDeletionPolicy getIndexDeletionPolicy() { + return indexDeletionPolicy; + } + + public TranslogDeletionPolicy getTranslogDeletionPolicy() { + return translogDeletionPolicy; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/engine/Engine.java b/core/src/main/java/org/elasticsearch/index/engine/Engine.java index 70ec03c09fd..6e93d1feed5 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -485,10 +485,6 @@ public abstract class Engine implements Closeable { } } - public final GetResult get(Get get) throws EngineException { - return get(get, this::acquireSearcher); - } - public abstract GetResult get(Get get, Function searcherFactory) throws EngineException; /** @@ -1012,7 +1008,7 @@ public abstract class Engine implements Closeable { abstract String id(); - abstract TYPE operationType(); + public abstract TYPE operationType(); } public static class Index extends Operation { @@ -1054,7 +1050,7 @@ public abstract class Engine implements Closeable { } @Override - TYPE operationType() { + public TYPE operationType() { return TYPE.INDEX; } @@ -1106,8 +1102,8 @@ public abstract class Engine implements Closeable { public Delete(String type, String id, Term uid, long seqNo, long primaryTerm, long version, VersionType versionType, Origin origin, long startTime) { super(uid, seqNo, primaryTerm, version, versionType, origin, startTime); - this.type = type; - this.id = id; + this.type = Objects.requireNonNull(type); + this.id = Objects.requireNonNull(id); } public Delete(String type, String id, Term uid) { @@ -1130,7 +1126,7 @@ public abstract class Engine implements Closeable { } @Override - TYPE operationType() { + public TYPE operationType() { return TYPE.DELETE; } @@ -1180,7 +1176,7 @@ public abstract class Engine implements Closeable { } @Override - TYPE operationType() { + public TYPE operationType() { return TYPE.NO_OP; } diff --git a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java index 19ec3e036e5..d7019c77321 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java +++ b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java @@ -35,12 +35,15 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.shard.TranslogRecoveryPerformer; import org.elasticsearch.index.store.Store; +import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.indices.IndexingMemoryController; import org.elasticsearch.threadpool.ThreadPool; +import java.io.IOException; +import java.util.List; + /* * Holds all the configuration that is used to create an {@link Engine}. 
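The heart of the new CombinedDeletionPolicy above is setLastCommittedTranslogGeneration: read which translog generation the newest surviving commit requires and push that value into the translog deletion policy as a retention floor. A stripped-down sketch of that coordination follows, using invented stand-in types instead of Lucene's IndexCommit and the real TranslogDeletionPolicy.

[source,java]
----
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;

// Invented stand-ins for Lucene's IndexCommit and the translog deletion policy,
// just to show how the newest commit's user data drives translog retention.
class FakeCommit {
    final Map<String, String> userData;

    FakeCommit(Map<String, String> userData) {
        this.userData = userData;
    }
}

class FakeTranslogDeletionPolicy {
    private long minTranslogGenerationForRecovery;

    void setMinTranslogGenerationForRecovery(long gen) {
        this.minTranslogGenerationForRecovery = gen;
    }

    long getMinTranslogGenerationForRecovery() {
        return minTranslogGenerationForRecovery;
    }
}

public class CombinedPolicySketch {
    // Mirrors the idea of setLastCommittedTranslogGeneration: the last (newest) commit
    // decides the minimum translog generation that must be kept for recovery.
    static void onCommit(List<FakeCommit> commits, FakeTranslogDeletionPolicy translogPolicy) {
        FakeCommit last = commits.get(commits.size() - 1);
        long minGen = Long.parseLong(last.userData.get("translog_generation"));
        translogPolicy.setMinTranslogGenerationForRecovery(minGen);
    }

    public static void main(String[] args) {
        FakeTranslogDeletionPolicy policy = new FakeTranslogDeletionPolicy();
        onCommit(Arrays.asList(
                new FakeCommit(Collections.singletonMap("translog_generation", "3")),
                new FakeCommit(Collections.singletonMap("translog_generation", "7"))), policy);
        // Generations below 7 can now be trimmed; generation 7 and later must be retained.
        System.out.println("retention floor: " + policy.getMinTranslogGenerationForRecovery());
    }
}
----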
* Once {@link Engine} has been created with this object, changes to this @@ -48,7 +51,6 @@ import org.elasticsearch.threadpool.ThreadPool; */ public final class EngineConfig { private final ShardId shardId; - private final TranslogRecoveryPerformer translogRecoveryPerformer; private final IndexSettings indexSettings; private final ByteSizeValue indexingBufferSize; private volatile boolean enableGcDeletes = true; @@ -65,9 +67,10 @@ public final class EngineConfig { private final QueryCache queryCache; private final QueryCachingPolicy queryCachingPolicy; @Nullable - private final ReferenceManager.RefreshListener refreshListeners; + private final List refreshListeners; @Nullable private final Sort indexSort; + private final TranslogRecoveryRunner translogRecoveryRunner; /** * Index setting to change the low level lucene codec used for writing new segments. @@ -110,9 +113,9 @@ public final class EngineConfig { IndexSettings indexSettings, Engine.Warmer warmer, Store store, MergePolicy mergePolicy, Analyzer analyzer, Similarity similarity, CodecService codecService, Engine.EventListener eventListener, - TranslogRecoveryPerformer translogRecoveryPerformer, QueryCache queryCache, QueryCachingPolicy queryCachingPolicy, - TranslogConfig translogConfig, TimeValue flushMergesAfter, ReferenceManager.RefreshListener refreshListeners, - Sort indexSort) { + QueryCache queryCache, QueryCachingPolicy queryCachingPolicy, + TranslogConfig translogConfig, TimeValue flushMergesAfter, List refreshListeners, + Sort indexSort, TranslogRecoveryRunner translogRecoveryRunner) { if (openMode == null) { throw new IllegalArgumentException("openMode must not be null"); } @@ -131,7 +134,6 @@ public final class EngineConfig { // there are not too many shards allocated to this node. Instead, IndexingMemoryController periodically checks // and refreshes the most heap-consuming shards when total indexing heap usage across all shards is too high: indexingBufferSize = new ByteSizeValue(256, ByteSizeUnit.MB); - this.translogRecoveryPerformer = translogRecoveryPerformer; this.queryCache = queryCache; this.queryCachingPolicy = queryCachingPolicy; this.translogConfig = translogConfig; @@ -139,6 +141,7 @@ public final class EngineConfig { this.openMode = openMode; this.refreshListeners = refreshListeners; this.indexSort = indexSort; + this.translogRecoveryRunner = translogRecoveryRunner; } /** @@ -251,15 +254,6 @@ public final class EngineConfig { return similarity; } - /** - * Returns the {@link org.elasticsearch.index.shard.TranslogRecoveryPerformer} for this engine. This class is used - * to apply transaction log operations to the engine. It encapsulates all the logic to transfer the translog entry into - * an indexing operation. - */ - public TranslogRecoveryPerformer getTranslogRecoveryPerformer() { - return translogRecoveryPerformer; - } - /** * Return the cache to use for queries. */ @@ -295,6 +289,18 @@ public final class EngineConfig { return openMode; } + @FunctionalInterface + public interface TranslogRecoveryRunner { + int run(Engine engine, Translog.Snapshot snapshot) throws IOException; + } + + /** + * Returns a runner that implements the translog recovery from the given snapshot + */ + public TranslogRecoveryRunner getTranslogRecoveryRunner() { + return translogRecoveryRunner; + } + /** * Engine open mode defines how the engine should be opened or in other words what the engine should expect * to recover from. We either create a brand new engine with a new index and translog or we recover from an existing index. 
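Replacing the TranslogRecoveryPerformer dependency with the TranslogRecoveryRunner functional interface above means the engine only depends on "something that can replay a snapshot", which callers can supply as a lambda. Below is a hedged, self-contained sketch of that shape; the Snapshot and engine types are stand-ins, not the Translog.Snapshot API.

[source,java]
----
import java.io.IOException;
import java.util.Arrays;
import java.util.Iterator;

public class RecoveryRunnerSketch {
    // Stand-in for Translog.Snapshot: just something that yields operations.
    interface Snapshot {
        Iterator<String> operations();
    }

    // Shape of a recovery runner: given an engine and a snapshot, replay the
    // operations and report how many were applied.
    @FunctionalInterface
    interface RecoveryRunner<E> {
        int run(E engine, Snapshot snapshot) throws IOException;
    }

    public static void main(String[] args) throws IOException {
        Snapshot snapshot = () -> Arrays.asList("index doc1", "delete doc2").iterator();
        // The caller supplies the replay logic as a lambda instead of a dedicated performer class.
        RecoveryRunner<StringBuilder> runner = (engine, snap) -> {
            int ops = 0;
            for (Iterator<String> it = snap.operations(); it.hasNext(); ) {
                engine.append(it.next()).append('\n');
                ops++;
            }
            return ops;
        };
        StringBuilder fakeEngine = new StringBuilder();
        System.out.println("recovered " + runner.run(fakeEngine, snapshot) + " ops");
    }
}
----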
@@ -310,9 +316,9 @@ public final class EngineConfig { } /** - * {@linkplain ReferenceManager.RefreshListener} instance to configure. + * The refresh listeners to add to Lucene */ - public ReferenceManager.RefreshListener getRefreshListeners() { + public List getRefreshListeners() { return refreshListeners; } diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 40c5c0af5e3..f84f76b537e 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -34,6 +34,7 @@ import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SnapshotDeletionPolicy; import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.ReferenceManager; import org.apache.lucene.search.SearcherFactory; import org.apache.lucene.search.SearcherManager; import org.apache.lucene.search.TermQuery; @@ -71,10 +72,10 @@ import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.seqno.SequenceNumbersService; import org.elasticsearch.index.shard.ElasticsearchMergePolicy; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.shard.TranslogRecoveryPerformer; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.index.translog.TranslogCorruptedException; +import org.elasticsearch.index.translog.TranslogDeletionPolicy; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; @@ -127,7 +128,7 @@ public class InternalEngine extends Engine { private final String uidField; - private final SnapshotDeletionPolicy deletionPolicy; + private final CombinedDeletionPolicy deletionPolicy; // How many callers are currently requesting index throttling. Currently there are only two situations where we do this: when merges // are falling behind and when writing indexing buffer to disk is too slow. When this is 0, there is no throttling, else we throttling @@ -147,9 +148,11 @@ public class InternalEngine extends Engine { if (engineConfig.isAutoGeneratedIDsOptimizationEnabled() == false) { maxUnsafeAutoIdTimestamp.set(Long.MAX_VALUE); } - deletionPolicy = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy()); this.uidField = engineConfig.getIndexSettings().isSingleType() ? 
IdFieldMapper.NAME : UidFieldMapper.NAME; this.versionMap = new LiveVersionMap(); + final TranslogDeletionPolicy translogDeletionPolicy = new TranslogDeletionPolicy(); + this.deletionPolicy = new CombinedDeletionPolicy( + new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy()), translogDeletionPolicy, openMode); store.incRef(); IndexWriter writer = null; Translog translog = null; @@ -188,7 +191,7 @@ public class InternalEngine extends Engine { seqNoService = sequenceNumberService(shardId, engineConfig.getIndexSettings(), seqNoStats); updateMaxUnsafeAutoIdTimestampFromWriter(writer); indexWriter = writer; - translog = openTranslog(engineConfig, writer, () -> seqNoService().getGlobalCheckpoint()); + translog = openTranslog(engineConfig, writer, translogDeletionPolicy, () -> seqNoService().getGlobalCheckpoint()); assert translog.getGeneration() != null; } catch (IOException | TranslogCorruptedException e) { throw new EngineCreationFailureException(shardId, "failed to create engine", e); @@ -209,8 +212,8 @@ public class InternalEngine extends Engine { assert pendingTranslogRecovery.get() == false : "translog recovery can't be pending before we set it"; // don't allow commits until we are done with recovering pendingTranslogRecovery.set(openMode == EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG); - if (engineConfig.getRefreshListeners() != null) { - searcherManager.addListener(engineConfig.getRefreshListeners()); + for (ReferenceManager.RefreshListener listener: engineConfig.getRefreshListeners()) { + searcherManager.addListener(listener); } success = true; } finally { @@ -282,7 +285,7 @@ public class InternalEngine extends Engine { throw new IllegalStateException("Engine has already been recovered"); } try { - recoverFromTranslog(engineConfig.getTranslogRecoveryPerformer()); + recoverFromTranslogInternal(); } catch (Exception e) { try { pendingTranslogRecovery.set(true); // just play safe and never allow commits on this see #ensureCanFlush @@ -298,12 +301,13 @@ public class InternalEngine extends Engine { return this; } - private void recoverFromTranslog(TranslogRecoveryPerformer handler) throws IOException { + private void recoverFromTranslogInternal() throws IOException { Translog.TranslogGeneration translogGeneration = translog.getGeneration(); final int opsRecovered; try { - Translog.Snapshot snapshot = translog.newSnapshot(); - opsRecovered = handler.recoveryFromSnapshot(this, snapshot); + final long translogGen = Long.parseLong(lastCommittedSegmentInfos.getUserData().get(Translog.TRANSLOG_GENERATION_KEY)); + Translog.Snapshot snapshot = translog.newSnapshot(translogGen); + opsRecovered = config().getTranslogRecoveryRunner().run(this, snapshot); } catch (Exception e) { throw new EngineException(shardId, "failed to recover from translog", e); } @@ -318,31 +322,25 @@ public class InternalEngine extends Engine { } else if (translog.isCurrent(translogGeneration) == false) { commitIndexWriter(indexWriter, translog, lastCommittedSegmentInfos.getUserData().get(Engine.SYNC_COMMIT_ID)); } + // clean up what's not needed + translog.trimUnreferencedReaders(); } - private Translog openTranslog(EngineConfig engineConfig, IndexWriter writer, LongSupplier globalCheckpointSupplier) throws IOException { + private Translog openTranslog(EngineConfig engineConfig, IndexWriter writer, TranslogDeletionPolicy translogDeletionPolicy, LongSupplier globalCheckpointSupplier) throws IOException { assert openMode != null; final TranslogConfig translogConfig = engineConfig.getTranslogConfig(); - 
Translog.TranslogGeneration generation = null; + String translogUUID = null; if (openMode == EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG) { - generation = loadTranslogIdFromCommit(writer); + translogUUID = loadTranslogUUIDFromCommit(writer); // We expect that this shard already exists, so it must already have an existing translog else something is badly wrong! - if (generation == null) { - throw new IllegalStateException("no translog generation present in commit data but translog is expected to exist"); - } - if (generation.translogUUID == null) { + if (translogUUID == null) { throw new IndexFormatTooOldException("translog", "translog has no generation nor a UUID - this might be an index from a previous version consider upgrading to N-1 first"); } } - final Translog translog = new Translog(translogConfig, generation, globalCheckpointSupplier); - if (generation == null || generation.translogUUID == null) { + final Translog translog = new Translog(translogConfig, translogUUID, translogDeletionPolicy, globalCheckpointSupplier); + if (translogUUID == null) { assert openMode != EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG : "OpenMode must not be " + EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG; - if (generation == null) { - logger.debug("no translog ID present in the current generation - creating one"); - } else if (generation.translogUUID == null) { - logger.debug("upgraded translog to pre 2.0 format, associating translog with index - writing translog UUID"); - } boolean success = false; try { commitIndexWriter(writer, translog, openMode == EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG @@ -368,22 +366,18 @@ public class InternalEngine extends Engine { * translog id into lucene and returns null. */ @Nullable - private Translog.TranslogGeneration loadTranslogIdFromCommit(IndexWriter writer) throws IOException { + private String loadTranslogUUIDFromCommit(IndexWriter writer) throws IOException { // commit on a just opened writer will commit even if there are no changes done to it // we rely on that for the commit data translog id key final Map commitUserData = commitDataAsMap(writer); - if (commitUserData.containsKey("translog_id")) { - assert commitUserData.containsKey(Translog.TRANSLOG_UUID_KEY) == false : "legacy commit contains translog UUID"; - return new Translog.TranslogGeneration(null, Long.parseLong(commitUserData.get("translog_id"))); - } else if (commitUserData.containsKey(Translog.TRANSLOG_GENERATION_KEY)) { - if (commitUserData.containsKey(Translog.TRANSLOG_UUID_KEY) == false) { - throw new IllegalStateException("commit doesn't contain translog UUID"); + if (commitUserData.containsKey(Translog.TRANSLOG_UUID_KEY)) { + if (commitUserData.containsKey(Translog.TRANSLOG_GENERATION_KEY) == false) { + throw new IllegalStateException("commit doesn't contain translog generation id"); } - final String translogUUID = commitUserData.get(Translog.TRANSLOG_UUID_KEY); - final long translogGen = Long.parseLong(commitUserData.get(Translog.TRANSLOG_GENERATION_KEY)); - return new Translog.TranslogGeneration(translogUUID, translogGen); + return commitUserData.get(Translog.TRANSLOG_UUID_KEY); + } else { + return null; } - return null; } private SearcherManager createSearcherManager() throws EngineException { @@ -1269,14 +1263,13 @@ public class InternalEngine extends Engine { if (indexWriter.hasUncommittedChanges() || force) { ensureCanFlush(); try { - translog.prepareCommit(); + translog.rollGeneration(); logger.trace("starting commit for flush; commitTranslog=true"); - final long 
committedGeneration = commitIndexWriter(indexWriter, translog, null); + commitIndexWriter(indexWriter, translog, null); logger.trace("finished commit for flush"); // we need to refresh in order to clear older version values refresh("version_table_flush"); - // after refresh documents can be retrieved from the index so we can now commit the translog - translog.commit(committedGeneration); + translog.trimUnreferencedReaders(); } catch (Exception e) { throw new FlushFailedEngineException(shardId, e); } @@ -1428,9 +1421,8 @@ public class InternalEngine extends Engine { logger.trace("finish flush for snapshot"); } try (ReleasableLock lock = readLock.acquire()) { - ensureOpen(); logger.trace("pulling snapshot"); - return new IndexCommitRef(deletionPolicy); + return new IndexCommitRef(deletionPolicy.getIndexDeletionPolicy()); } catch (IOException e) { throw new SnapshotFailedEngineException(shardId, e); } @@ -1781,10 +1773,9 @@ public class InternalEngine extends Engine { * @param writer the index writer to commit * @param translog the translog * @param syncId the sync flush ID ({@code null} if not committing a synced flush) - * @return the minimum translog generation for the local checkpoint committed with the specified index writer * @throws IOException if an I/O exception occurs committing the specfied writer */ - private long commitIndexWriter(final IndexWriter writer, final Translog translog, @Nullable final String syncId) throws IOException { + protected void commitIndexWriter(final IndexWriter writer, final Translog translog, @Nullable final String syncId) throws IOException { ensureCanFlush(); try { final long localCheckpoint = seqNoService().getLocalCheckpoint(); @@ -1817,7 +1808,6 @@ public class InternalEngine extends Engine { }); writer.commit(); - return translogGeneration.translogFileGeneration; } catch (final Exception ex) { try { failEngine("lucene commit failed", ex); diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java index 339e70c50b1..3ae4e6ebc81 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java @@ -85,76 +85,6 @@ public abstract class ScriptDocValues extends AbstractList { throw new UnsupportedOperationException("doc values are unmodifiable"); } - public static final class Strings extends ScriptDocValues { - - private final SortedBinaryDocValues in; - private BytesRefBuilder[] values = new BytesRefBuilder[0]; - private int count; - - public Strings(SortedBinaryDocValues in) { - this.in = in; - } - - @Override - public void setNextDocId(int docId) throws IOException { - if (in.advanceExact(docId)) { - resize(in.docValueCount()); - for (int i = 0; i < count; i++) { - values[i].copyBytes(in.nextValue()); - } - } else { - resize(0); - } - } - - /** - * Set the {@link #size()} and ensure that the {@link #values} array can - * store at least that many entries. 
- */ - protected void resize(int newSize) { - count = newSize; - if (newSize > values.length) { - final int oldLength = values.length; - values = ArrayUtil.grow(values, count); - for (int i = oldLength; i < values.length; ++i) { - values[i] = new BytesRefBuilder(); - } - } - } - - public SortedBinaryDocValues getInternalValues() { - return this.in; - } - - public BytesRef getBytesValue() { - if (size() > 0) { - return values[0].get(); - } else { - return null; - } - } - - public String getValue() { - BytesRef value = getBytesValue(); - if (value == null) { - return null; - } else { - return value.utf8ToString(); - } - } - - @Override - public String get(int index) { - return values[index].get().utf8ToString(); - } - - @Override - public int size() { - return count; - } - - } - public static final class Longs extends ScriptDocValues { protected static final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger(Longs.class)); @@ -570,13 +500,13 @@ public abstract class ScriptDocValues extends AbstractList { } - public static final class BytesRefs extends ScriptDocValues { + abstract static class BinaryScriptDocValues extends ScriptDocValues { private final SortedBinaryDocValues in; - private BytesRef[] values; - private int count; + protected BytesRefBuilder[] values = new BytesRefBuilder[0]; + protected int count; - public BytesRefs(SortedBinaryDocValues in) { + BinaryScriptDocValues(SortedBinaryDocValues in) { this.in = in; } @@ -585,7 +515,10 @@ public abstract class ScriptDocValues extends AbstractList { if (in.advanceExact(docId)) { resize(in.docValueCount()); for (int i = 0; i < count; i++) { - values[i] = in.nextValue(); + // We need to make a copy here, because BytesBinaryDVAtomicFieldData's SortedBinaryDocValues + // implementation reuses the returned BytesRef. Otherwise we would end up with the same BytesRef + // instance for all slots in the values array. 
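Editorial note (not part of the patch): the comment above is the key to this refactoring. The sketch below is a minimal, JDK-only illustration of the same pitfall, assuming a producer that reuses a single buffer the way a SortedBinaryDocValues implementation may reuse its returned BytesRef; it is not Elasticsearch code.

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

class ReusedBufferSketch {
    // Simulates an iterator that returns the same mutable byte[] instance for every value.
    static final byte[] buffer = new byte[3];

    static byte[] nextValue(String s) {
        byte[] bytes = s.getBytes(StandardCharsets.UTF_8);
        System.arraycopy(bytes, 0, buffer, 0, bytes.length);
        return buffer; // same array every time, like a reused BytesRef
    }

    public static void main(String[] args) {
        byte[][] wrong = new byte[2][];
        byte[][] right = new byte[2][];
        String[] docValues = {"foo", "bar"};
        for (int i = 0; i < docValues.length; i++) {
            byte[] v = nextValue(docValues[i]);
            wrong[i] = v;                          // keeps a reference to the shared buffer
            right[i] = Arrays.copyOf(v, v.length); // copies the bytes, analogous to BytesRefBuilder#copyBytes
        }
        System.out.println(new String(wrong[0], StandardCharsets.UTF_8)); // prints "bar" - slot 0 was clobbered
        System.out.println(new String(right[0], StandardCharsets.UTF_8)); // prints "foo"
    }
}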
+ values[i].copyBytes(in.nextValue()); } } else { resize(0); @@ -598,32 +531,69 @@ public abstract class ScriptDocValues extends AbstractList { */ protected void resize(int newSize) { count = newSize; - if (values == null) { - values = new BytesRef[newSize]; - } else { + if (newSize > values.length) { + final int oldLength = values.length; values = ArrayUtil.grow(values, count); + for (int i = oldLength; i < values.length; ++i) { + values[i] = new BytesRefBuilder(); + } } } - public SortedBinaryDocValues getInternalValues() { - return this.in; - } - - public BytesRef getValue() { - if (count == 0) { - return new BytesRef(); - } - return values[0]; - } - - @Override - public BytesRef get(int index) { - return values[index]; - } - @Override public int size() { return count; } + + } + + public static final class Strings extends BinaryScriptDocValues { + + public Strings(SortedBinaryDocValues in) { + super(in); + } + + @Override + public String get(int index) { + return values[index].get().utf8ToString(); + } + + public BytesRef getBytesValue() { + if (size() > 0) { + return values[0].get(); + } else { + return null; + } + } + + public String getValue() { + BytesRef value = getBytesValue(); + if (value == null) { + return null; + } else { + return value.utf8ToString(); + } + } + + } + + public static final class BytesRefs extends BinaryScriptDocValues { + + public BytesRefs(SortedBinaryDocValues in) { + super(in); + } + + @Override + public BytesRef get(int index) { + return values[index].get(); + } + + public BytesRef getValue() { + if (count == 0) { + return new BytesRef(); + } + return values[0].get(); + } + } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/AllFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/AllFieldMapper.java index f3001db3926..0f88d3223ed 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/AllFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/AllFieldMapper.java @@ -133,6 +133,7 @@ public class AllFieldMapper extends MetadataFieldMapper { } parseTextField(builder, builder.name, node, parserContext); + boolean enabledSet = false; for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); String fieldName = entry.getKey(); @@ -140,9 +141,16 @@ public class AllFieldMapper extends MetadataFieldMapper { if (fieldName.equals("enabled")) { boolean enabled = TypeParsers.nodeBooleanValueLenient(name, "enabled", fieldNode); builder.enabled(enabled ? 
EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED); + enabledSet = true; iterator.remove(); } } + if (enabledSet == false && parserContext.indexVersionCreated().before(Version.V_6_0_0_alpha1)) { + // So there is no "enabled" field, however, the index was created prior to 6.0, + // and therefore the default for this particular index should be "true" for + // enabling _all + builder.enabled(EnabledAttributeMapper.ENABLED); + } return builder; } @@ -150,7 +158,13 @@ public class AllFieldMapper extends MetadataFieldMapper { public MetadataFieldMapper getDefault(MappedFieldType fieldType, ParserContext context) { final Settings indexSettings = context.mapperService().getIndexSettings().getSettings(); if (fieldType != null) { - return new AllFieldMapper(indexSettings, fieldType); + if (context.indexVersionCreated().before(Version.V_6_0_0_alpha1)) { + // The index was created prior to 6.0, and therefore the default for this + // particular index should be "true" for enabling _all + return new AllFieldMapper(fieldType.clone(), EnabledAttributeMapper.ENABLED, indexSettings); + } else { + return new AllFieldMapper(indexSettings, fieldType); + } } else { return parse(NAME, Collections.emptyMap(), context) .build(new BuilderContext(indexSettings, new ContentPath(1))); @@ -197,7 +211,6 @@ public class AllFieldMapper extends MetadataFieldMapper { private AllFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled, Settings indexSettings) { super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings); this.enabledState = enabled; - } public boolean enabled() { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 489f4702bc3..c2de26c96b3 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -424,15 +424,33 @@ final class DocumentParser { context = context.createNestedContext(mapper.fullPath()); ParseContext.Document nestedDoc = context.doc(); ParseContext.Document parentDoc = nestedDoc.getParent(); - // pre add the uid field if possible (id was already provided) - IndexableField uidField = parentDoc.getField(UidFieldMapper.NAME); - if (uidField != null) { - // we don't need to add it as a full uid field in nested docs, since we don't need versioning - // we also rely on this for UidField#loadVersion - // this is a deeply nested field - nestedDoc.add(new Field(UidFieldMapper.NAME, uidField.stringValue(), UidFieldMapper.Defaults.NESTED_FIELD_TYPE)); + // We need to add the uid or id to this nested Lucene document too. + // If we do not do this, then when a document gets deleted only the root Lucene document gets deleted and + // not the nested Lucene documents! Besides the fact that we would have zombie Lucene documents, the ordering of + // documents inside the Lucene index (document blocks) will be incorrect, as nested documents of different root + // documents are then aligned with other root documents. This will cause the nested query, sorting, aggregations + // and inner hits to fail or yield incorrect results. + if (context.mapperService().getIndexSettings().isSingleType()) { + IndexableField idField = parentDoc.getField(IdFieldMapper.NAME); + if (idField != null) { + // We just need to store the id as an indexed field, so that IndexWriter#deleteDocuments(term) can then + // delete it when the root document is deleted too.
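Editorial note (not part of the patch): the comment above explains why nested Lucene documents must also index the id term. As a hedged, self-contained illustration, the toy model below mimics delete-by-term over a document block using plain JDK collections rather than the Lucene API; the "_id" and "kind" field names are placeholders.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

class DeleteByTermModel {
    // Each "document" is just field -> value here; a root and its nested children form one block.
    static final List<Map<String, String>> index = new ArrayList<>();

    // Models IndexWriter#deleteDocuments(Term): removes every document matching the term.
    static void deleteByTerm(String field, String value) {
        index.removeIf(doc -> value.equals(doc.get(field)));
    }

    public static void main(String[] args) {
        // Block for document "1": two nested children followed by the root (the root is last in a block).
        index.add(Map.of("_id", "1", "kind", "nested")); // child carries the id -> deleted with the root
        index.add(Map.of("kind", "nested"));             // child without the id -> left behind as an orphan
        index.add(Map.of("_id", "1", "kind", "root"));
        deleteByTerm("_id", "1");
        System.out.println(index); // only the orphaned child remains
    }
}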
+ nestedDoc.add(new Field(IdFieldMapper.NAME, idField.stringValue(), IdFieldMapper.Defaults.NESTED_FIELD_TYPE)); + } else { + throw new IllegalStateException("The root document of a nested document should have an id field"); + } + } else { + IndexableField uidField = parentDoc.getField(UidFieldMapper.NAME); + if (uidField != null) { + // We just need to store the uid as an indexed field, so that IndexWriter#deleteDocuments(term) can then + // delete it when the root document is deleted too. + nestedDoc.add(new Field(UidFieldMapper.NAME, uidField.stringValue(), UidFieldMapper.Defaults.NESTED_FIELD_TYPE)); + } else { + throw new IllegalStateException("The root document of a nested document should have a uid field"); + } } + // the type of the nested doc starts with __, so we can identify that it's a nested one in filters // note, we don't prefix it with the type of the doc since it allows us to execute a nested query // across types (for example, with similar nested objects) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java index a9a765f1c3a..813a546aaed 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java @@ -52,6 +52,7 @@ public class IdFieldMapper extends MetadataFieldMapper { public static final String NAME = IdFieldMapper.NAME; public static final MappedFieldType FIELD_TYPE = new IdFieldType(); + public static final MappedFieldType NESTED_FIELD_TYPE; static { FIELD_TYPE.setTokenized(false); @@ -62,6 +63,10 @@ public class IdFieldMapper extends MetadataFieldMapper { FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); FIELD_TYPE.setName(NAME); FIELD_TYPE.freeze(); + + NESTED_FIELD_TYPE = FIELD_TYPE.clone(); + NESTED_FIELD_TYPE.setStored(false); + NESTED_FIELD_TYPE.freeze(); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ParentFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ParentFieldMapper.java index 3ef9b73708f..49f5b820f8e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ParentFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ParentFieldMapper.java @@ -65,6 +65,7 @@ public class ParentFieldMapper extends MetadataFieldMapper { FIELD_TYPE.setIndexOptions(IndexOptions.NONE); FIELD_TYPE.setHasDocValues(true); FIELD_TYPE.setDocValuesType(DocValuesType.SORTED); + FIELD_TYPE.setEagerGlobalOrdinals(true); FIELD_TYPE.freeze(); } } @@ -295,9 +296,10 @@ public class ParentFieldMapper extends MetadataFieldMapper { @Override protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { + ParentFieldType currentFieldType = (ParentFieldType) fieldType.clone(); super.doMerge(mergeWith, updateAllTypes); ParentFieldMapper fieldMergeWith = (ParentFieldMapper) mergeWith; - if (Objects.equals(parentType, fieldMergeWith.parentType) == false) { + if (fieldMergeWith.parentType != null && Objects.equals(parentType, fieldMergeWith.parentType) == false) { throw new IllegalArgumentException("The _parent field's type option can't be changed: [" + parentType + "]->[" + fieldMergeWith.parentType + "]"); } @@ -308,7 +310,7 @@ public class ParentFieldMapper extends MetadataFieldMapper { } if (active()) { - fieldType = fieldMergeWith.fieldType.clone(); + fieldType = currentFieldType; } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/TokenCountFieldMapper.java 
b/core/src/main/java/org/elasticsearch/index/mapper/TokenCountFieldMapper.java index 2ed6658e87c..c18b66cf618 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/TokenCountFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/TokenCountFieldMapper.java @@ -134,6 +134,10 @@ public class TokenCountFieldMapper extends FieldMapper { value = context.parser().textOrNull(); } + if (value == null && fieldType().nullValue() == null) { + return; + } + final int tokenCount; if (value == null) { tokenCount = (Integer) fieldType().nullValue(); diff --git a/core/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java b/core/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java index 74c80554109..f13aa22f7d9 100644 --- a/core/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java @@ -74,8 +74,9 @@ public abstract class InnerHitContextBuilder { } if (innerHitBuilder.getScriptFields() != null) { for (SearchSourceBuilder.ScriptField field : innerHitBuilder.getScriptFields()) { - SearchScript searchScript = innerHitsContext.getQueryShardContext().getSearchScript(field.script(), - SearchScript.CONTEXT); + QueryShardContext innerContext = innerHitsContext.getQueryShardContext(); + SearchScript.Factory factory = innerContext.getScriptService().compile(field.script(), SearchScript.CONTEXT); + SearchScript.LeafFactory searchScript = factory.newFactory(field.script().getParams(), innerHitsContext.lookup()); innerHitsContext.scriptFields().add(new org.elasticsearch.search.fetch.subphase.ScriptFieldsContext.ScriptField( field.fieldName(), searchScript, field.ignoreFailure())); } diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java index df0493d61c8..0aa9a43a312 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java @@ -471,14 +471,6 @@ public abstract class QueryBuilders { return moreLikeThisQuery(null, null, likeItems); } - /** - * Constructs a new parent id query that returns all child documents of the specified type that - * point to the specified id. 
- */ - public static ParentIdQueryBuilder parentId(String type, String id) { - return new ParentIdQueryBuilder(type, id); - } - public static NestedQueryBuilder nestedQuery(String path, QueryBuilder query, ScoreMode scoreMode) { return new NestedQueryBuilder(path, query, scoreMode); } diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java index 02da924f931..3fefb7141bd 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java @@ -24,11 +24,9 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.template.CompiledTemplate; +import org.elasticsearch.script.TemplateScript; import java.util.function.LongSupplier; @@ -78,6 +76,11 @@ public class QueryRewriteContext { return mapperService; } + /** Return the script service to allow compiling scripts within queries. */ + public ScriptService getScriptService() { + return scriptService; + } + /** Return the current {@link IndexReader}, or {@code null} if no index reader is available, for * instance if we are on the coordinating node or if this rewrite context is used to index * queries (percolation). */ @@ -104,7 +107,7 @@ public class QueryRewriteContext { } public String getTemplateBytes(Script template) { - CompiledTemplate compiledTemplate = scriptService.compileTemplate(template, ExecutableScript.CONTEXT); - return compiledTemplate.run(template.getParams()); + TemplateScript compiledTemplate = scriptService.compile(template, TemplateScript.CONTEXT).newInstance(template.getParams()); + return compiledTemplate.execute(); } } diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java index 1ab89a0fe01..2ce5abd213c 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java @@ -325,47 +325,10 @@ public class QueryShardContext extends QueryRewriteContext { return indexSettings.getIndex(); } - /** - * Compiles (or retrieves from cache) and binds the parameters to the - * provided script - */ - public final SearchScript getSearchScript(Script script, ScriptContext context) { + /** Return the script service to allow compiling scripts. */ + public final ScriptService getScriptService() { failIfFrozen(); - SearchScript.Factory factory = scriptService.compile(script, context); - return factory.newInstance(script.getParams(), lookup()); - } - /** - * Returns a lazily created {@link SearchScript} that is compiled immediately but can be pulled later once all - * parameters are available. 
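Editorial note (not part of the patch): the helpers removed around here are replaced by callers that use the script service's factory directly, i.e. compile once and bind parameters and a per-segment lookup later. The sketch below illustrates that compile-once/bind-late shape with hypothetical interface names (CompiledScript, BoundScript); it is not the actual SearchScript or ExecutableScript API.

import java.util.HashMap;
import java.util.Map;

class FactoryChainSketch {
    /** Result of the expensive, cacheable compilation step. */
    interface CompiledScript {
        BoundScript bind(Map<String, Object> params); // cheap, done per request once params are known
    }
    interface BoundScript {
        double execute(int doc);
    }

    // "Compile" once; the returned factory can be reused for many requests with different params.
    static CompiledScript compile(String source) {
        return params -> doc -> doc + ((Number) params.getOrDefault("boost", 1)).doubleValue();
    }

    public static void main(String[] args) {
        CompiledScript compiled = compile("doc + boost"); // expensive step happens once
        Map<String, Object> params = new HashMap<>();
        params.put("boost", 2);
        BoundScript perRequest = compiled.bind(params);   // late binding of parameters
        System.out.println(perRequest.execute(3));        // 5.0
    }
}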
- */ - public final Function, SearchScript> getLazySearchScript( - Script script, ScriptContext context) { - // TODO: this "lazy" binding can be removed once scripted metric aggs have their own contexts, which take _agg/_aggs as a parameter - failIfFrozen(); - SearchScript.Factory factory = scriptService.compile(script, context); - return (p) -> factory.newInstance(p, lookup()); - } - - /** - * Compiles (or retrieves from cache) and binds the parameters to the - * provided script - */ - public final ExecutableScript getExecutableScript(Script script, ScriptContext context) { - failIfFrozen(); - ExecutableScript.Factory factory = scriptService.compile(script, context); - return factory.newInstance(script.getParams()); - } - - /** - * Returns a lazily created {@link ExecutableScript} that is compiled immediately but can be pulled later once all - * parameters are available. - */ - public final Function, ExecutableScript> getLazyExecutableScript( - Script script, ScriptContext context) { - // TODO: this "lazy" binding can be removed once scripted metric aggs have their own contexts, which take _agg/_aggs as a parameter - failIfFrozen(); - ExecutableScript.Factory factory = scriptService.compile(script, context); - return factory::newInstance; + return scriptService; } /** diff --git a/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java index 88fde50eb1b..0d608fd5f11 100644 --- a/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java @@ -34,9 +34,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.SearchScript; import java.io.IOException; @@ -131,15 +129,17 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder @Override protected Query doToQuery(QueryShardContext context) throws IOException { - return new ScriptQuery(script, context.getSearchScript(script, SearchScript.CONTEXT)); + SearchScript.Factory factory = context.getScriptService().compile(script, SearchScript.CONTEXT); + SearchScript.LeafFactory searchScript = factory.newFactory(script.getParams(), context.lookup()); + return new ScriptQuery(script, searchScript); } static class ScriptQuery extends Query { final Script script; - final SearchScript searchScript; + final SearchScript.LeafFactory searchScript; - ScriptQuery(Script script, SearchScript searchScript) { + ScriptQuery(Script script, SearchScript.LeafFactory searchScript) { this.script = script; this.searchScript = searchScript; } @@ -181,7 +181,7 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder @Override public Scorer scorer(LeafReaderContext context) throws IOException { DocIdSetIterator approximation = DocIdSetIterator.all(context.reader().maxDoc()); - final LeafSearchScript leafScript = searchScript.getLeafSearchScript(context); + final SearchScript leafScript = searchScript.newInstance(context); TwoPhaseIterator twoPhase = new TwoPhaseIterator(approximation) { @Override diff --git a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java 
b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java index 9899ba9a748..f6e7dd32eb2 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java @@ -27,6 +27,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; @@ -36,6 +37,7 @@ import org.elasticsearch.index.analysis.ShingleTokenFilterFactory; import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; +import java.util.Iterator; import java.util.Map; import java.util.Objects; import java.util.List; @@ -79,18 +81,21 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp @Override public Query newDefaultQuery(String text) { - BooleanQuery.Builder bq = new BooleanQuery.Builder(); + List disjuncts = new ArrayList<>(); for (Map.Entry entry : weights.entrySet()) { try { Query q = createBooleanQuery(entry.getKey(), text, super.getDefaultOperator()); if (q != null) { - bq.add(wrapWithBoost(q, entry.getValue()), BooleanClause.Occur.SHOULD); + disjuncts.add(wrapWithBoost(q, entry.getValue())); } } catch (RuntimeException e) { rethrowUnlessLenient(e); } } - return super.simplify(bq.build()); + if (disjuncts.size() == 1) { + return disjuncts.get(0); + } + return new DisjunctionMaxQuery(disjuncts, 1.0f); } /** @@ -99,23 +104,26 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp */ @Override public Query newFuzzyQuery(String text, int fuzziness) { - BooleanQuery.Builder bq = new BooleanQuery.Builder(); + List disjuncts = new ArrayList<>(); for (Map.Entry entry : weights.entrySet()) { final String fieldName = entry.getKey(); try { final BytesRef term = getAnalyzer().normalize(fieldName, text); Query query = new FuzzyQuery(new Term(fieldName, term), fuzziness); - bq.add(wrapWithBoost(query, entry.getValue()), BooleanClause.Occur.SHOULD); + disjuncts.add(wrapWithBoost(query, entry.getValue())); } catch (RuntimeException e) { rethrowUnlessLenient(e); } } - return super.simplify(bq.build()); + if (disjuncts.size() == 1) { + return disjuncts.get(0); + } + return new DisjunctionMaxQuery(disjuncts, 1.0f); } @Override public Query newPhraseQuery(String text, int slop) { - BooleanQuery.Builder bq = new BooleanQuery.Builder(); + List disjuncts = new ArrayList<>(); for (Map.Entry entry : weights.entrySet()) { try { String field = entry.getKey(); @@ -129,13 +137,16 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp Float boost = entry.getValue(); Query q = createPhraseQuery(field, text, slop); if (q != null) { - bq.add(wrapWithBoost(q, boost), BooleanClause.Occur.SHOULD); + disjuncts.add(wrapWithBoost(q, boost)); } } catch (RuntimeException e) { rethrowUnlessLenient(e); } } - return super.simplify(bq.build()); + if (disjuncts.size() == 1) { + return disjuncts.get(0); + } + return new DisjunctionMaxQuery(disjuncts, 1.0f); } /** @@ -144,25 +155,28 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp */ @Override public Query newPrefixQuery(String text) { - BooleanQuery.Builder bq = new BooleanQuery.Builder(); + List disjuncts = new ArrayList<>(); for (Map.Entry entry : weights.entrySet()) { final 
String fieldName = entry.getKey(); try { if (settings.analyzeWildcard()) { Query analyzedQuery = newPossiblyAnalyzedQuery(fieldName, text); if (analyzedQuery != null) { - bq.add(wrapWithBoost(analyzedQuery, entry.getValue()), BooleanClause.Occur.SHOULD); + disjuncts.add(wrapWithBoost(analyzedQuery, entry.getValue())); } } else { Term term = new Term(fieldName, getAnalyzer().normalize(fieldName, text)); Query query = new PrefixQuery(term); - bq.add(wrapWithBoost(query, entry.getValue()), BooleanClause.Occur.SHOULD); + disjuncts.add(wrapWithBoost(query, entry.getValue())); } } catch (RuntimeException e) { return rethrowUnlessLenient(e); } } - return super.simplify(bq.build()); + if (disjuncts.size() == 1) { + return disjuncts.get(0); + } + return new DisjunctionMaxQuery(disjuncts, 1.0f); } /** diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScriptScoreFunctionBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScriptScoreFunctionBuilder.java index 2af4c9dd751..7c66a7de99c 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/ScriptScoreFunctionBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/ScriptScoreFunctionBuilder.java @@ -94,7 +94,8 @@ public class ScriptScoreFunctionBuilder extends ScoreFunctionBuilder queries = new ArrayList<>(); - for (Query query : groupQuery) { - queries.add(query); - } - return new DisjunctionMaxQuery(queries, tieBreaker); - } else { - final BooleanQuery.Builder booleanQuery = new BooleanQuery.Builder(); - for (Query query : groupQuery) { - booleanQuery.add(query, BooleanClause.Occur.SHOULD); - } - return booleanQuery.build(); + List queries = new ArrayList<>(); + for (Query query : groupQuery) { + queries.add(query); } + return new DisjunctionMaxQuery(queries, tieBreaker); } public Query blendTerm(Term term, MappedFieldType fieldType) { @@ -165,8 +148,8 @@ public class MultiMatchQuery extends MatchQuery { final class CrossFieldsQueryBuilder extends QueryBuilder { private FieldAndFieldType[] blendedFields; - CrossFieldsQueryBuilder(float tieBreaker) { - super(false, tieBreaker); + CrossFieldsQueryBuilder() { + super(0.0f); } @Override @@ -306,8 +289,6 @@ public class MultiMatchQuery extends MatchQuery { blendedBoost = Arrays.copyOf(blendedBoost, i); if (commonTermsCutoff != null) { queries.add(BlendedTermQuery.commonTermsBlendedQuery(terms, blendedBoost, commonTermsCutoff)); - } else if (tieBreaker == 1.0f) { - queries.add(BlendedTermQuery.booleanBlendedQuery(terms, blendedBoost)); } else { queries.add(BlendedTermQuery.dismaxBlendedQuery(terms, blendedBoost, tieBreaker)); } @@ -318,11 +299,7 @@ public class MultiMatchQuery extends MatchQuery { // best effort: add clauses that are not term queries so that they have an opportunity to match // however their score contribution will be different // TODO: can we improve this? 
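Editorial note (not part of the patch): the parser changes above replace a BooleanQuery of SHOULD clauses, which roughly sums the per-field scores, with a DisjunctionMaxQuery, which takes the best field's score plus tie_breaker times the rest. A small arithmetic sketch of the two formulas (not Lucene's actual scoring code) shows why a tie_breaker of 1.0 preserves the old totals:

import java.util.Arrays;

class DisMaxScoringSketch {
    // Boolean SHOULD over N fields: score is approximately the sum of the matching clauses.
    static double booleanShould(double[] fieldScores) {
        return Arrays.stream(fieldScores).sum();
    }

    // Dismax: the best field wins, the remaining fields each contribute tieBreaker * score.
    static double disMax(double[] fieldScores, double tieBreaker) {
        double max = Arrays.stream(fieldScores).max().orElse(0);
        double sum = Arrays.stream(fieldScores).sum();
        return max + tieBreaker * (sum - max);
    }

    public static void main(String[] args) {
        double[] scores = {0.4, 2.0, 1.1};
        System.out.println(disMax(scores, 0.0));   // 2.0: pure best-field behavior
        System.out.println(disMax(scores, 1.0));   // 3.5: same total as the old boolean query
        System.out.println(booleanShould(scores)); // 3.5
    }
}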
- BooleanQuery.Builder bq = new BooleanQuery.Builder(); - for (Query query : queries) { - bq.add(query, Occur.SHOULD); - } - return bq.build(); + return new DisjunctionMaxQuery(queries, 1.0f); } } diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 5f1f94f72f7..18f025c27c3 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -19,18 +19,19 @@ package org.elasticsearch.index.shard; -import org.apache.logging.log4j.Logger; import org.apache.lucene.index.CheckIndex; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.Term; import org.apache.lucene.search.QueryCachingPolicy; +import org.apache.lucene.search.ReferenceManager; import org.apache.lucene.search.Sort; import org.apache.lucene.search.UsageTrackingQueryCachingPolicy; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.ThreadInterruptedException; +import org.elasticsearch.Assertions; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; @@ -54,7 +55,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.Callback; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.AsyncIOProcessor; import org.elasticsearch.index.Index; @@ -115,6 +115,7 @@ import org.elasticsearch.indices.recovery.RecoveryFailedException; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.suggest.completion.CompletionFieldStats; import org.elasticsearch.search.suggest.completion.CompletionStats; import org.elasticsearch.threadpool.ThreadPool; @@ -124,6 +125,7 @@ import java.io.PrintStream; import java.nio.channels.ClosedByInterruptException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.Arrays; import java.util.EnumSet; import java.util.List; import java.util.Locale; @@ -131,6 +133,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; @@ -164,6 +167,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl private final IndexEventListener indexEventListener; private final QueryCachingPolicy cachingPolicy; private final Supplier indexSortSupplier; + private final TranslogOpToEngineOpConverter translogOpToEngineOpConverter; /** * How many bytes we are currently moving to disk, via either IndexWriter.flush or refresh. 
IndexingMemoryController polls this @@ -255,8 +259,8 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl logger.debug("state: [CREATED]"); this.checkIndexOnStartup = indexSettings.getValue(IndexSettings.INDEX_CHECK_ON_STARTUP); - this.translogConfig = new TranslogConfig(shardId, shardPath().resolveTranslog(), indexSettings, - bigArrays); + this.translogConfig = new TranslogConfig(shardId, shardPath().resolveTranslog(), indexSettings, bigArrays); + this.translogOpToEngineOpConverter = new TranslogOpToEngineOpConverter(shardId, mapperService); // the query cache is a node-level thing, however we want the most popular filters // to be computed on a per-shard basis if (IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.get(settings)) { @@ -332,12 +336,14 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl } /** - * notifies the shard of an increase in the primary term + * Notifies the shard of an increase in the primary term. + * + * @param newPrimaryTerm the new primary term */ - public void updatePrimaryTerm(final long newTerm) { + public void updatePrimaryTerm(final long newPrimaryTerm) { assert shardRouting.primary() : "primary term can only be explicitly updated on a primary shard"; synchronized (mutex) { - if (newTerm != primaryTerm) { + if (newPrimaryTerm != primaryTerm) { // Note that due to cluster state batching an initializing primary shard term can failed and re-assigned // in one state causing it's term to be incremented. Note that if both current shard state and new // shard state are initializing, we could replace the current shard and reinitialize it. It is however @@ -354,10 +360,29 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl "a started primary shard should never update its term; " + "shard " + shardRouting + ", " + "current term [" + primaryTerm + "], " - + "new term [" + newTerm + "]"; - assert newTerm > primaryTerm : - "primary terms can only go up; current term [" + primaryTerm + "], new term [" + newTerm + "]"; - primaryTerm = newTerm; + + "new term [" + newPrimaryTerm + "]"; + assert newPrimaryTerm > primaryTerm : + "primary terms can only go up; current term [" + primaryTerm + "], new term [" + newPrimaryTerm + "]"; + /* + * Before this call returns, we are guaranteed that all future operations are delayed and so this happens before we + * increment the primary term. The latch is needed to ensure that we do not unblock operations before the primary term is + * incremented. + */ + final CountDownLatch latch = new CountDownLatch(1); + indexShardOperationPermits.asyncBlockOperations( + 30, + TimeUnit.MINUTES, + () -> { + latch.await(); + try { + getEngine().fillSeqNoGaps(newPrimaryTerm); + } catch (final AlreadyClosedException e) { + // okay, the index was deleted + } + }, + e -> failShard("exception during primary term transition", e)); + primaryTerm = newPrimaryTerm; + latch.countDown(); } } } @@ -548,6 +573,37 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl return new Engine.Index(uid, doc, seqNo, primaryTerm, version, versionType, origin, startTime, autoGeneratedIdTimestamp, isRetry); } + /** + * Applies an engine operation to the shard, which can be either an index, delete or noop operation. 
+ */ + public Engine.Result applyOperation(Engine.Operation operation) throws IOException { + return applyOperation(getEngine(), operation); + } + + private Engine.Result applyOperation(Engine engine, Engine.Operation operation) throws IOException { + switch (operation.operationType()) { + case INDEX: + Engine.Index engineIndex = (Engine.Index) operation; + return index(engine, engineIndex); + case DELETE: + final Engine.Delete engineDelete = (Engine.Delete) operation; + return delete(engine, engineDelete); + case NO_OP: + final Engine.NoOp engineNoOp = (Engine.NoOp) operation; + return noOp(engine, engineNoOp); + default: + throw new IllegalStateException("No operation defined for [" + operation + "]"); + } + } + + private Engine.NoOpResult noOp(Engine engine, Engine.NoOp noOp) { + active.set(true); + if (logger.isTraceEnabled()) { + logger.trace("noop (seq# [{}])", noOp.seqNo()); + } + return engine.noOp(noOp); + } + public Engine.IndexResult index(Engine.Index index) throws IOException { ensureWriteAllowed(index); Engine engine = getEngine(); @@ -572,10 +628,11 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl return result; } - public Engine.NoOp prepareMarkingSeqNoAsNoOp(long seqNo, String reason) { + public Engine.NoOp prepareMarkingSeqNoAsNoOpOnReplica(long seqNo, long opPrimaryTerm, String reason) { verifyReplicationTarget(); + assert opPrimaryTerm <= this.primaryTerm : "op term [ " + opPrimaryTerm + " ] > shard term [" + this.primaryTerm + "]"; long startTime = System.nanoTime(); - return new Engine.NoOp(seqNo, primaryTerm, Engine.Operation.Origin.REPLICA, startTime, reason); + return new Engine.NoOp(seqNo, opPrimaryTerm, Engine.Operation.Origin.REPLICA, startTime, reason); } public Engine.NoOpResult markSeqNoAsNoOp(Engine.NoOp noOp) throws IOException { @@ -656,9 +713,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl if (logger.isTraceEnabled()) { logger.trace("refresh with source [{}] indexBufferRAMBytesUsed [{}]", source, new ByteSizeValue(bytes)); } - long time = System.nanoTime(); getEngine().refresh(source); - refreshMetric.inc(System.nanoTime() - time); } finally { if (logger.isTraceEnabled()) { logger.trace("remove [{}] writing bytes for shard [{}]", new ByteSizeValue(bytes), shardId()); @@ -669,9 +724,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl if (logger.isTraceEnabled()) { logger.trace("refresh with source [{}]", source); } - long time = System.nanoTime(); getEngine().refresh(source); - refreshMetric.inc(System.nanoTime() - time); } } @@ -1000,21 +1053,33 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl assert currentEngineReference.get() == null; } - /** - * Applies all operations in the iterable to the current engine and returns the number of operations applied. - * This operation will stop applying operations once an operation failed to apply. - * Note: This method is typically used in peer recovery to replay remote transaction log entries. 
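Editorial note (not part of the patch): the primary term update above delays operations, forks the blocking work to another thread, and uses a latch so that the forked work cannot run before the new term is visible. A minimal, JDK-only sketch of that ordering follows, with hypothetical names standing in for the IndexShard internals:

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

class TermTransitionSketch {
    static volatile long primaryTerm = 1;

    public static void main(String[] args) throws Exception {
        ExecutorService generic = Executors.newSingleThreadExecutor();
        long newPrimaryTerm = 2;
        CountDownLatch latch = new CountDownLatch(1);
        // Fork the work that must run under the operation block; it waits on the latch so it cannot
        // observe the old term even if the executor schedules it immediately.
        generic.execute(() -> {
            try {
                latch.await();
                System.out.println("filling seq# gaps under term " + primaryTerm); // always prints 2
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        });
        primaryTerm = newPrimaryTerm; // publish the new term first...
        latch.countDown();            // ...then let the forked block proceed
        generic.shutdown();
        generic.awaitTermination(1, TimeUnit.SECONDS);
    }
}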
- */ - public int performBatchRecovery(Iterable operations) { - if (state != IndexShardState.RECOVERING) { - throw new IndexShardNotRecoveringException(shardId, state); + public Engine.Operation convertToEngineOp(Translog.Operation operation, Engine.Operation.Origin origin) { + return translogOpToEngineOpConverter.convertToEngineOp(operation, origin); + } + + // package-private for testing + int runTranslogRecovery(Engine engine, Translog.Snapshot snapshot) throws IOException { + recoveryState.getTranslog().totalOperations(snapshot.totalOperations()); + recoveryState.getTranslog().totalOperationsOnStart(snapshot.totalOperations()); + int opsRecovered = 0; + Translog.Operation operation; + while ((operation = snapshot.next()) != null) { + try { + logger.trace("[translog] recover op {}", operation); + Engine.Operation engineOp = convertToEngineOp(operation, Engine.Operation.Origin.LOCAL_TRANSLOG_RECOVERY); + applyOperation(engine, engineOp); + opsRecovered++; + recoveryState.getTranslog().incrementRecoveredOperations(); + } catch (ElasticsearchException e) { + if (e.status() == RestStatus.BAD_REQUEST) { + // mainly for MapperParsingException and Failure to detect xcontent + logger.info("ignoring recovery of a corrupt translog entry", e); + } else { + throw e; + } + } } - // We set active because we are now writing operations to the engine; this way, if we go idle after some time and become inactive, - // we still invoke any onShardInactive listeners ... we won't sync'd flush in this case because we only do that on primary and this - // is a replica - active.set(true); - Engine engine = getEngine(); - return engine.config().getTranslogRecoveryPerformer().performBatchRecovery(engine, operations); + return opsRecovered; } /** @@ -1234,7 +1299,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl } } - public void addShardFailureCallback(Callback onShardFailure) { + public void addShardFailureCallback(Consumer onShardFailure) { this.shardEventListener.delegates.add(onShardFailure); } @@ -1746,15 +1811,15 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl } class ShardEventListener implements Engine.EventListener { - private final CopyOnWriteArrayList> delegates = new CopyOnWriteArrayList<>(); + private final CopyOnWriteArrayList> delegates = new CopyOnWriteArrayList<>(); // called by the current engine @Override public void onFailedEngine(String reason, @Nullable Exception failure) { final ShardFailure shardFailure = new ShardFailure(shardRouting, reason, failure); - for (Callback listener : delegates) { + for (Consumer listener : delegates) { try { - listener.handle(shardFailure); + listener.accept(shardFailure); } catch (Exception inner) { inner.addSuppressed(failure); logger.warn("exception while notifying engine failure", inner); @@ -1822,12 +1887,14 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl } private EngineConfig newEngineConfig(EngineConfig.OpenMode openMode) { - final IndexShardRecoveryPerformer translogRecoveryPerformer = new IndexShardRecoveryPerformer(shardId, mapperService, logger); Sort indexSort = indexSortSupplier.get(); return new EngineConfig(openMode, shardId, threadPool, indexSettings, warmer, store, indexSettings.getMergePolicy(), - mapperService.indexAnalyzer(), similarityService.similarity(mapperService), codecService, shardEventListener, translogRecoveryPerformer, indexCache.query(), cachingPolicy, translogConfig, - 
IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING.get(indexSettings.getSettings()), refreshListeners, indexSort); + mapperService.indexAnalyzer(), similarityService.similarity(mapperService), codecService, shardEventListener, + indexCache.query(), cachingPolicy, translogConfig, + IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING.get(indexSettings.getSettings()), + Arrays.asList(refreshListeners, new RefreshMetricUpdater(refreshMetric)), indexSort, + this::runTranslogRecovery); } /** @@ -1940,6 +2007,11 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl translogSyncProcessor.put(location, syncListener); } + public final void sync() throws IOException { + verifyNotClosed(); + getEngine().getTranslog().sync(); + } + /** * Returns the current translog durability mode */ @@ -2030,7 +2102,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl /** * Simple struct encapsulating a shard failure * - * @see IndexShard#addShardFailureCallback(Callback) + * @see IndexShard#addShardFailureCallback(Consumer) */ public static final class ShardFailure { public final ShardRouting routing; @@ -2071,36 +2143,35 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl refreshListeners.addOrNotify(location, listener); } - private class IndexShardRecoveryPerformer extends TranslogRecoveryPerformer { + private static class RefreshMetricUpdater implements ReferenceManager.RefreshListener { - protected IndexShardRecoveryPerformer(ShardId shardId, MapperService mapperService, Logger logger) { - super(shardId, mapperService, logger); + private final MeanMetric refreshMetric; + private long currentRefreshStartTime; + private Thread callingThread = null; + + private RefreshMetricUpdater(MeanMetric refreshMetric) { + this.refreshMetric = refreshMetric; } @Override - protected void operationProcessed() { - assert recoveryState != null; - recoveryState.getTranslog().incrementRecoveredOperations(); + public void beforeRefresh() throws IOException { + if (Assertions.ENABLED) { + assert callingThread == null : "beforeRefresh was called by " + callingThread.getName() + + " without a corresponding call to afterRefresh"; + callingThread = Thread.currentThread(); + } + currentRefreshStartTime = System.nanoTime(); } @Override - public int recoveryFromSnapshot(Engine engine, Translog.Snapshot snapshot) throws IOException { - assert recoveryState != null; - RecoveryState.Translog translogStats = recoveryState.getTranslog(); - translogStats.totalOperations(snapshot.totalOperations()); - translogStats.totalOperationsOnStart(snapshot.totalOperations()); - return super.recoveryFromSnapshot(engine, snapshot); - } - - @Override - protected void index(Engine engine, Engine.Index engineIndex) throws IOException { - IndexShard.this.index(engine, engineIndex); - } - - @Override - protected void delete(Engine engine, Engine.Delete engineDelete) throws IOException { - IndexShard.this.delete(engine, engineDelete); + public void afterRefresh(boolean didRefresh) throws IOException { + if (Assertions.ENABLED) { + assert callingThread != null : "afterRefresh called but not beforeRefresh"; + assert callingThread == Thread.currentThread() : "beforeRefresh was called by a different thread. 
current [" + + Thread.currentThread().getName() + "], thread that called beforeRefresh [" + callingThread.getName() + "]"; + callingThread = null; + } + refreshMetric.inc(System.nanoTime() - currentRefreshStartTime); } } - } diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java index fea26168efa..de539026e7a 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java @@ -20,12 +20,13 @@ package org.elasticsearch.index.shard; import org.apache.logging.log4j.Logger; +import org.elasticsearch.Assertions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.ThreadedActionListener; import org.elasticsearch.common.CheckedRunnable; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ThreadContext.StoredContext; import org.elasticsearch.threadpool.ThreadPool; @@ -36,20 +37,35 @@ import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; import java.util.function.Supplier; +/** + * Tracks shard operation permits. Each operation on the shard obtains a permit. When we need to block operations (e.g., to transition + * between terms) we immediately delay all operations to a queue, obtain all available permits, and wait for outstanding operations to drain + * and return their permits. Delayed operations will acquire permits and be completed after the operation that blocked all operations has + * completed. + */ final class IndexShardOperationPermits implements Closeable { + private final ShardId shardId; private final Logger logger; private final ThreadPool threadPool; - private static final int TOTAL_PERMITS = Integer.MAX_VALUE; - // fair semaphore to ensure that blockOperations() does not starve under thread contention - final Semaphore semaphore = new Semaphore(TOTAL_PERMITS, true); - @Nullable private List> delayedOperations; // operations that are delayed + static final int TOTAL_PERMITS = Integer.MAX_VALUE; + final Semaphore semaphore = new Semaphore(TOTAL_PERMITS, true); // fair to ensure a blocking thread is not starved + private final List> delayedOperations = new ArrayList<>(); // operations that are delayed private volatile boolean closed; + private boolean delayed; // does not need to be volatile as all accesses are done under a lock on this - IndexShardOperationPermits(ShardId shardId, Logger logger, ThreadPool threadPool) { + /** + * Construct operation permits for the specified shards. + * + * @param shardId the shard + * @param logger the logger for the shard + * @param threadPool the thread pool (used to execute delayed operations) + */ + IndexShardOperationPermits(final ShardId shardId, final Logger logger, final ThreadPool threadPool) { this.shardId = shardId; this.logger = logger; this.threadPool = threadPool; @@ -61,115 +77,196 @@ final class IndexShardOperationPermits implements Closeable { } /** - * Wait for in-flight operations to finish and executes onBlocked under the guarantee that no new operations are started. 
Queues - * operations that are occurring in the meanwhile and runs them once onBlocked has executed. + * Wait for in-flight operations to finish and executes {@code onBlocked} under the guarantee that no new operations are started. Queues + * operations that are occurring in the meanwhile and runs them once {@code onBlocked} has executed. * - * @param timeout the maximum time to wait for the in-flight operations block - * @param timeUnit the time unit of the {@code timeout} argument + * @param timeout the maximum time to wait for the in-flight operations block + * @param timeUnit the time unit of the {@code timeout} argument * @param onBlocked the action to run once the block has been acquired - * @throws InterruptedException if calling thread is interrupted - * @throws TimeoutException if timed out waiting for in-flight operations to finish + * @param the type of checked exception thrown by {@code onBlocked} + * @throws InterruptedException if calling thread is interrupted + * @throws TimeoutException if timed out waiting for in-flight operations to finish * @throws IndexShardClosedException if operation permit has been closed */ - public void blockOperations(long timeout, TimeUnit timeUnit, CheckedRunnable onBlocked) throws - InterruptedException, TimeoutException, E { + void blockOperations( + final long timeout, + final TimeUnit timeUnit, + final CheckedRunnable onBlocked) throws InterruptedException, TimeoutException, E { if (closed) { throw new IndexShardClosedException(shardId); } + delayOperations(); try { - if (semaphore.tryAcquire(TOTAL_PERMITS, timeout, timeUnit)) { - assert semaphore.availablePermits() == 0; - try { - onBlocked.run(); - } finally { - semaphore.release(TOTAL_PERMITS); - } - } else { - throw new TimeoutException("timed out during blockOperations"); - } + doBlockOperations(timeout, timeUnit, onBlocked); } finally { - final List> queuedActions; + releaseDelayedOperations(); + } + } + + /** + * Immediately delays operations and on another thread waits for in-flight operations to finish and then executes {@code onBlocked} + * under the guarantee that no new operations are started. Delayed operations are run after {@code onBlocked} has executed. After + * operations are delayed and the blocking is forked to another thread, returns to the caller. If a failure occurs while blocking + * operations or executing {@code onBlocked} then the {@code onFailure} handler will be invoked. 
+ * + * @param timeout the maximum time to wait for the in-flight operations block + * @param timeUnit the time unit of the {@code timeout} argument + * @param onBlocked the action to run once the block has been acquired + * @param onFailure the action to run if a failure occurs while blocking operations + * @param the type of checked exception thrown by {@code onBlocked} (not thrown on the calling thread) + */ + void asyncBlockOperations( + final long timeout, final TimeUnit timeUnit, final CheckedRunnable onBlocked, final Consumer onFailure) { + delayOperations(); + threadPool.executor(ThreadPool.Names.GENERIC).execute(new AbstractRunnable() { + @Override + public void onFailure(final Exception e) { + onFailure.accept(e); + } + + @Override + protected void doRun() throws Exception { + doBlockOperations(timeout, timeUnit, onBlocked); + } + + @Override + public void onAfter() { + releaseDelayedOperations(); + } + }); + } + + private void delayOperations() { + synchronized (this) { + if (delayed) { + throw new IllegalStateException("operations are already delayed"); + } else { + assert delayedOperations.isEmpty(); + delayed = true; + } + } + } + + private void doBlockOperations( + final long timeout, + final TimeUnit timeUnit, + final CheckedRunnable onBlocked) throws InterruptedException, TimeoutException, E { + if (Assertions.ENABLED) { + // since delayed is not volatile, we have to synchronize even here for visibility synchronized (this) { - queuedActions = delayedOperations; - delayedOperations = null; + assert delayed; } - if (queuedActions != null) { - // Try acquiring permits on fresh thread (for two reasons): - // - blockOperations can be called on recovery thread which can be expected to be interrupted when recovery is cancelled. - // Interruptions are bad here as permit acquisition will throw an InterruptedException which will be swallowed by - // ThreadedActionListener if the queue of the thread pool on which it submits is full. - // - if permit is acquired and queue of the thread pool which the ThreadedActionListener uses is full, the onFailure - // handler is executed on the calling thread. This should not be the recovery thread as it would delay the recovery. 
- threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> { - for (ActionListener queuedAction : queuedActions) { - acquire(queuedAction, null, false); - } - }); + } + if (semaphore.tryAcquire(TOTAL_PERMITS, timeout, timeUnit)) { + assert semaphore.availablePermits() == 0; + try { + onBlocked.run(); + } finally { + semaphore.release(TOTAL_PERMITS); } + } else { + throw new TimeoutException("timeout while blocking operations"); + } + } + + private void releaseDelayedOperations() { + final List> queuedActions; + synchronized (this) { + assert delayed; + queuedActions = new ArrayList<>(delayedOperations); + delayedOperations.clear(); + delayed = false; + } + if (!queuedActions.isEmpty()) { + /* + * Try acquiring permits on fresh thread (for two reasons): + * - blockOperations can be called on a recovery thread which can be expected to be interrupted when recovery is cancelled; + * interruptions are bad here as permit acquisition will throw an interrupted exception which will be swallowed by + * the threaded action listener if the queue of the thread pool on which it submits is full + * - if a permit is acquired and the queue of the thread pool which the the threaded action listener uses is full, the + * onFailure handler is executed on the calling thread; this should not be the recovery thread as it would delay the + * recovery + */ + threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> { + for (ActionListener queuedAction : queuedActions) { + acquire(queuedAction, null, false); + } + }); } } /** * Acquires a permit whenever permit acquisition is not blocked. If the permit is directly available, the provided * {@link ActionListener} will be called on the calling thread. During calls of - * {@link #blockOperations(long, TimeUnit, CheckedRunnable)}, permit acquisition can be delayed. The provided ActionListener will - * then be called using the provided executor once operations are no longer blocked. + * {@link #blockOperations(long, TimeUnit, CheckedRunnable)}, permit acquisition can be delayed. The provided {@link ActionListener} + * will then be called using the provided executor once operations are no longer blocked. 
* * @param onAcquired {@link ActionListener} that is invoked once acquisition is successful or failed * @param executorOnDelay executor to use for delayed call * @param forceExecution whether the runnable should force its execution in case it gets rejected */ - public void acquire(ActionListener onAcquired, String executorOnDelay, boolean forceExecution) { + public void acquire(final ActionListener onAcquired, final String executorOnDelay, final boolean forceExecution) { if (closed) { onAcquired.onFailure(new IndexShardClosedException(shardId)); return; } - Releasable releasable; + final Releasable releasable; try { synchronized (this) { - releasable = tryAcquire(); - if (releasable == null) { - // blockOperations is executing, this operation will be retried by blockOperations once it finishes - if (delayedOperations == null) { - delayedOperations = new ArrayList<>(); - } + if (delayed) { final Supplier contextSupplier = threadPool.getThreadContext().newRestorableContext(false); if (executorOnDelay != null) { delayedOperations.add( - new ThreadedActionListener<>(logger, threadPool, executorOnDelay, - new ContextPreservingActionListener<>(contextSupplier, onAcquired), forceExecution)); + new ThreadedActionListener<>(logger, threadPool, executorOnDelay, + new ContextPreservingActionListener<>(contextSupplier, onAcquired), forceExecution)); } else { delayedOperations.add(new ContextPreservingActionListener<>(contextSupplier, onAcquired)); } return; + } else { + releasable = acquire(); } } - } catch (InterruptedException e) { + } catch (final InterruptedException e) { onAcquired.onFailure(e); return; } + // execute this outside the synchronized block! onAcquired.onResponse(releasable); } - @Nullable private Releasable tryAcquire() throws InterruptedException { - if (semaphore.tryAcquire(1, 0, TimeUnit.SECONDS)) { // the untimed tryAcquire methods do not honor the fairness setting - AtomicBoolean closed = new AtomicBoolean(); + private Releasable acquire() throws InterruptedException { + assert Thread.holdsLock(this); + if (semaphore.tryAcquire(1, 0, TimeUnit.SECONDS)) { // the un-timed tryAcquire methods do not honor the fairness setting + final AtomicBoolean closed = new AtomicBoolean(); return () -> { if (closed.compareAndSet(false, true)) { semaphore.release(1); } }; + } else { + // this should never happen; if it does, something is deeply wrong + throw new IllegalStateException("failed to obtain permit but operations are not delayed"); } - return null; } - public int getActiveOperationsCount() { + /** + * Obtain the active operation count, or zero if all permits are held (even if there are outstanding operations in flight). + * + * @return the active operation count, or zero when all permits are held + */ + int getActiveOperationsCount() { int availablePermits = semaphore.availablePermits(); if (availablePermits == 0) { - // when blockOperations is holding all permits + /* + * This occurs when either doBlockOperations is holding all the permits or there are outstanding operations in flight and the + * remainder of the permits are held by doBlockOperations. We do not distinguish between these two cases and simply say that + * the active operations count is zero. 
+ */ return 0; } else { return TOTAL_PERMITS - availablePermits; } } + } diff --git a/core/src/main/java/org/elasticsearch/index/shard/SearchOperationListener.java b/core/src/main/java/org/elasticsearch/index/shard/SearchOperationListener.java index 583bcbc561d..153a985ab08 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/SearchOperationListener.java +++ b/core/src/main/java/org/elasticsearch/index/shard/SearchOperationListener.java @@ -23,6 +23,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.transport.TransportRequest; import java.util.List; @@ -110,8 +111,9 @@ public interface SearchOperationListener { * from the active contexts. If the context is deemed invalid a runtime * exception can be thrown, which will prevent the context from being used. * @param context the context retrieved from the active contexts + * @param transportRequest the request that is going to use the search context */ - default void validateSearchContext(SearchContext context) {} + default void validateSearchContext(SearchContext context, TransportRequest transportRequest) {} /** * A Composite listener that multiplexes calls to each of the listeners methods. @@ -236,11 +238,11 @@ public interface SearchOperationListener { } @Override - public void validateSearchContext(SearchContext context) { + public void validateSearchContext(SearchContext context, TransportRequest request) { Exception exception = null; for (SearchOperationListener listener : listeners) { try { - listener.validateSearchContext(context); + listener.validateSearchContext(context, request); } catch (Exception e) { exception = ExceptionsHelper.useOrSuppress(exception, e); } diff --git a/core/src/main/java/org/elasticsearch/index/shard/TranslogOpToEngineOpConverter.java b/core/src/main/java/org/elasticsearch/index/shard/TranslogOpToEngineOpConverter.java new file mode 100644 index 00000000000..372e8f4e25a --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/shard/TranslogOpToEngineOpConverter.java @@ -0,0 +1,73 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
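For context on the validateSearchContext change above: the composite listener simply lets every registered listener veto the context, keeping the first failure and suppressing the rest. A minimal, self-contained sketch of that pattern, with generic placeholder types instead of SearchContext/TransportRequest and the useOrSuppress helper inlined:

import java.util.List;

// Illustrative composite-listener sketch: every listener may reject the context;
// the first exception wins and later ones are attached as suppressed exceptions.
final class CompositeValidator<C, R> {
    interface Validator<C, R> {
        void validateSearchContext(C context, R request);
    }

    private final List<Validator<C, R>> listeners;

    CompositeValidator(List<Validator<C, R>> listeners) {
        this.listeners = listeners;
    }

    void validateSearchContext(C context, R request) {
        RuntimeException failure = null;
        for (Validator<C, R> listener : listeners) {
            try {
                listener.validateSearchContext(context, request);
            } catch (RuntimeException e) {
                if (failure == null) {
                    failure = e;              // first failure is kept
                } else {
                    failure.addSuppressed(e); // later failures are suppressed, not lost
                }
            }
        }
        if (failure != null) {
            throw failure; // rejecting the context prevents it from being used
        }
    }
}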
+ */ +package org.elasticsearch.index.shard; + +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.mapper.DocumentMapperForType; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.translog.Translog; + +import static org.elasticsearch.index.mapper.SourceToParse.source; + +/** + * The TranslogOpToEngineOpConverter encapsulates all the logic needed to transform a translog entry into an + * indexing operation including source parsing and field creation from the source. + */ +public class TranslogOpToEngineOpConverter { + private final MapperService mapperService; + private final ShardId shardId; + + protected TranslogOpToEngineOpConverter(ShardId shardId, MapperService mapperService) { + this.shardId = shardId; + this.mapperService = mapperService; + } + + protected DocumentMapperForType docMapper(String type) { + return mapperService.documentMapperWithAutoCreate(type); // protected for testing + } + + public Engine.Operation convertToEngineOp(Translog.Operation operation, Engine.Operation.Origin origin) { + switch (operation.opType()) { + case INDEX: + final Translog.Index index = (Translog.Index) operation; + // we set canHaveDuplicates to true all the time such that we de-optimze the translog case and ensure that all + // autoGeneratedID docs that are coming from the primary are updated correctly. + final Engine.Index engineIndex = IndexShard.prepareIndex(docMapper(index.type()), + source(shardId.getIndexName(), index.type(), index.id(), index.source(), XContentFactory.xContentType(index.source())) + .routing(index.routing()).parent(index.parent()), index.seqNo(), index.primaryTerm(), + index.version(), index.versionType().versionTypeForReplicationAndRecovery(), origin, + index.getAutoGeneratedIdTimestamp(), true); + return engineIndex; + case DELETE: + final Translog.Delete delete = (Translog.Delete) operation; + final Engine.Delete engineDelete = new Engine.Delete(delete.type(), delete.id(), delete.uid(), delete.seqNo(), + delete.primaryTerm(), delete.version(), delete.versionType().versionTypeForReplicationAndRecovery(), + origin, System.nanoTime()); + return engineDelete; + case NO_OP: + final Translog.NoOp noOp = (Translog.NoOp) operation; + final Engine.NoOp engineNoOp = + new Engine.NoOp(noOp.seqNo(), noOp.primaryTerm(), origin, System.nanoTime(), noOp.reason()); + return engineNoOp; + default: + throw new IllegalStateException("No operation defined for [" + operation + "]"); + } + } +} diff --git a/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java b/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java deleted file mode 100644 index 668e957ae52..00000000000 --- a/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java +++ /dev/null @@ -1,214 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.shard; - -import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.mapper.DocumentMapperForType; -import org.elasticsearch.index.mapper.MapperException; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.Mapping; -import org.elasticsearch.index.translog.Translog; -import org.elasticsearch.rest.RestStatus; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -import static org.elasticsearch.index.mapper.SourceToParse.source; - -/** - * The TranslogRecoveryPerformer encapsulates all the logic needed to transform a translog entry into an - * indexing operation including source parsing and field creation from the source. - */ -public class TranslogRecoveryPerformer { - private final MapperService mapperService; - private final Logger logger; - private final Map recoveredTypes = new HashMap<>(); - private final ShardId shardId; - - protected TranslogRecoveryPerformer(ShardId shardId, MapperService mapperService, Logger logger) { - this.shardId = shardId; - this.mapperService = mapperService; - this.logger = logger; - } - - protected DocumentMapperForType docMapper(String type) { - return mapperService.documentMapperWithAutoCreate(type); // protected for testing - } - - /** - * Applies all operations in the iterable to the current engine and returns the number of operations applied. - * This operation will stop applying operations once an operation failed to apply. - * - * Throws a {@link MapperException} to be thrown if a mapping update is encountered. 
- */ - int performBatchRecovery(Engine engine, Iterable operations) { - int numOps = 0; - try { - for (Translog.Operation operation : operations) { - performRecoveryOperation(engine, operation, false, Engine.Operation.Origin.PEER_RECOVERY); - numOps++; - } - engine.getTranslog().sync(); - } catch (Exception e) { - throw new BatchOperationException(shardId, "failed to apply batch translog operation", numOps, e); - } - return numOps; - } - - public int recoveryFromSnapshot(Engine engine, Translog.Snapshot snapshot) throws IOException { - Translog.Operation operation; - int opsRecovered = 0; - while ((operation = snapshot.next()) != null) { - try { - performRecoveryOperation(engine, operation, true, Engine.Operation.Origin.LOCAL_TRANSLOG_RECOVERY); - opsRecovered++; - } catch (ElasticsearchException e) { - if (e.status() == RestStatus.BAD_REQUEST) { - // mainly for MapperParsingException and Failure to detect xcontent - logger.info("ignoring recovery of a corrupt translog entry", e); - } else { - throw e; - } - } - } - - return opsRecovered; - } - - public static class BatchOperationException extends ElasticsearchException { - - private final int completedOperations; - - public BatchOperationException(ShardId shardId, String msg, int completedOperations, Throwable cause) { - super(msg, cause); - setShard(shardId); - this.completedOperations = completedOperations; - } - - public BatchOperationException(StreamInput in) throws IOException{ - super(in); - completedOperations = in.readInt(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeInt(completedOperations); - } - - /** the number of successful operations performed before the exception was thrown */ - public int completedOperations() { - return completedOperations; - } - } - - private void maybeAddMappingUpdate(String type, Mapping update, String docId, boolean allowMappingUpdates) { - if (update == null) { - return; - } - if (allowMappingUpdates == false) { - throw new MapperException("mapping updates are not allowed (type: [" + type + "], id: [" + docId + "])"); - } - Mapping currentUpdate = recoveredTypes.get(type); - if (currentUpdate == null) { - recoveredTypes.put(type, update); - } else { - currentUpdate = currentUpdate.merge(update, false); - } - } - - /** - * Performs a single recovery operation. - * - * @param allowMappingUpdates true if mapping update should be accepted (but collected). Setting it to false will - * cause a {@link MapperException} to be thrown if an update - * is encountered. - */ - private void performRecoveryOperation(Engine engine, Translog.Operation operation, boolean allowMappingUpdates, Engine.Operation.Origin origin) throws IOException { - switch (operation.opType()) { - case INDEX: - Translog.Index index = (Translog.Index) operation; - // we set canHaveDuplicates to true all the time such that we de-optimze the translog case and ensure that all - // autoGeneratedID docs that are coming from the primary are updated correctly. 
- Engine.Index engineIndex = IndexShard.prepareIndex(docMapper(index.type()), - source(shardId.getIndexName(), index.type(), index.id(), index.source(), XContentFactory.xContentType(index.source())) - .routing(index.routing()).parent(index.parent()), index.seqNo(), index.primaryTerm(), - index.version(), index.versionType().versionTypeForReplicationAndRecovery(), origin, index.getAutoGeneratedIdTimestamp(), true); - maybeAddMappingUpdate(engineIndex.type(), engineIndex.parsedDoc().dynamicMappingsUpdate(), engineIndex.id(), allowMappingUpdates); - logger.trace("[translog] recover [index] op [({}, {})] of [{}][{}]", index.seqNo(), index.primaryTerm(), index.type(), index.id()); - index(engine, engineIndex); - break; - case DELETE: - Translog.Delete delete = (Translog.Delete) operation; - logger.trace("[translog] recover [delete] op [({}, {})] of [{}][{}]", delete.seqNo(), delete.primaryTerm(), delete.type(), delete.id()); - final Engine.Delete engineDelete = new Engine.Delete(delete.type(), delete.id(), delete.uid(), delete.seqNo(), - delete.primaryTerm(), delete.version(), delete.versionType().versionTypeForReplicationAndRecovery(), - origin, System.nanoTime()); - delete(engine, engineDelete); - break; - case NO_OP: - final Translog.NoOp noOp = (Translog.NoOp) operation; - final long seqNo = noOp.seqNo(); - final long primaryTerm = noOp.primaryTerm(); - final String reason = noOp.reason(); - logger.trace("[translog] recover [no_op] op [({}, {})] of [{}]", seqNo, primaryTerm, reason); - final Engine.NoOp engineNoOp = - new Engine.NoOp(seqNo, primaryTerm, origin, System.nanoTime(), reason); - noOp(engine, engineNoOp); - break; - default: - throw new IllegalStateException("No operation defined for [" + operation + "]"); - } - operationProcessed(); - } - - protected void index(Engine engine, Engine.Index engineIndex) throws IOException { - engine.index(engineIndex); - } - - protected void delete(Engine engine, Engine.Delete engineDelete) throws IOException { - engine.delete(engineDelete); - } - - protected void noOp(Engine engine, Engine.NoOp engineNoOp) { - engine.noOp(engineNoOp); - } - - /** - * Called once for every processed operation by this recovery performer. - * This can be used to get progress information on the translog execution. 
- */ - protected void operationProcessed() { - // noop - } - - - /** - * Returns the recovered types modifying the mapping during the recovery - */ - public Map getRecoveredTypes() { - return recoveredTypes; - } -} diff --git a/core/src/main/java/org/elasticsearch/index/store/Store.java b/core/src/main/java/org/elasticsearch/index/store/Store.java index c157b78e231..6700a005c9c 100644 --- a/core/src/main/java/org/elasticsearch/index/store/Store.java +++ b/core/src/main/java/org/elasticsearch/index/store/Store.java @@ -24,7 +24,6 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.index.CorruptIndexException; -import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.IndexFormatTooNewException; @@ -62,11 +61,10 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.store.ByteArrayIndexInput; import org.elasticsearch.common.lucene.store.InputStreamIndexInput; -import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.Callback; import org.elasticsearch.common.util.SingleObjectCache; import org.elasticsearch.common.util.concurrent.AbstractRefCounted; import org.elasticsearch.common.util.concurrent.RefCounted; @@ -99,8 +97,8 @@ import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.locks.ReentrantLock; import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.function.Consumer; import java.util.zip.CRC32; import java.util.zip.Checksum; @@ -412,7 +410,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref try { directory.innerClose(); // this closes the distributorDirectory as well } finally { - onClose.handle(shardLock); + onClose.accept(shardLock); } } catch (IOException e) { logger.debug("failed to close directory", e); @@ -1371,14 +1369,14 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref /** * A listener that is executed once the store is closed and all references to it are released */ - public interface OnClose extends Callback { + public interface OnClose extends Consumer { OnClose EMPTY = new OnClose() { /** * This method is called while the provided {@link org.elasticsearch.env.ShardLock} is held. * This method is only called once after all resources for a store are released. 
*/ @Override - public void handle(ShardLock Lock) { + public void accept(ShardLock Lock) { } }; } diff --git a/core/src/main/java/org/elasticsearch/index/translog/Checkpoint.java b/core/src/main/java/org/elasticsearch/index/translog/Checkpoint.java index ce5cc8e7601..547d5aa499f 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Checkpoint.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Checkpoint.java @@ -44,6 +44,7 @@ final class Checkpoint { final long minSeqNo; final long maxSeqNo; final long globalCheckpoint; + final long minTranslogGeneration; private static final int INITIAL_VERSION = 1; // start with 1, just to recognize there was some magic serialization logic before private static final int CURRENT_VERSION = 2; // introduction of global checkpoints @@ -58,6 +59,7 @@ final class Checkpoint { + Long.BYTES // minimum sequence number, introduced in 6.0.0 + Long.BYTES // maximum sequence number, introduced in 6.0.0 + Long.BYTES // global checkpoint, introduced in 6.0.0 + + Long.BYTES // minimum translog generation in the translog - introduced in 6.0.0 + CodecUtil.footerLength(); // size of 5.0.0 checkpoint @@ -76,15 +78,19 @@ final class Checkpoint { * @param minSeqNo the current minimum sequence number of all operations in the translog * @param maxSeqNo the current maximum sequence number of all operations in the translog * @param globalCheckpoint the last-known global checkpoint + * @param minTranslogGeneration the minimum generation referenced by the translog at this moment. */ - Checkpoint(long offset, int numOps, long generation, long minSeqNo, long maxSeqNo, long globalCheckpoint) { - assert minSeqNo <= maxSeqNo; + Checkpoint(long offset, int numOps, long generation, long minSeqNo, long maxSeqNo, long globalCheckpoint, long minTranslogGeneration) { + assert minSeqNo <= maxSeqNo : "minSeqNo [" + minSeqNo + "] is higher than maxSeqNo [" + maxSeqNo + "]"; + assert minTranslogGeneration <= generation : + "minTranslogGen [" + minTranslogGeneration + "] is higher than generation [" + generation + "]"; this.offset = offset; this.numOps = numOps; this.generation = generation; this.minSeqNo = minSeqNo; this.maxSeqNo = maxSeqNo; this.globalCheckpoint = globalCheckpoint; + this.minTranslogGeneration = minTranslogGeneration; } private void write(DataOutput out) throws IOException { @@ -94,16 +100,18 @@ final class Checkpoint { out.writeLong(minSeqNo); out.writeLong(maxSeqNo); out.writeLong(globalCheckpoint); + out.writeLong(minTranslogGeneration); } - static Checkpoint emptyTranslogCheckpoint(final long offset, final long generation, final long globalCheckpoint) { + static Checkpoint emptyTranslogCheckpoint(final long offset, final long generation, final long globalCheckpoint, + long minTranslogGeneration) { final long minSeqNo = SequenceNumbersService.NO_OPS_PERFORMED; final long maxSeqNo = SequenceNumbersService.NO_OPS_PERFORMED; - return new Checkpoint(offset, 0, generation, minSeqNo, maxSeqNo, globalCheckpoint); + return new Checkpoint(offset, 0, generation, minSeqNo, maxSeqNo, globalCheckpoint, minTranslogGeneration); } static Checkpoint readCheckpointV6_0_0(final DataInput in) throws IOException { - return new Checkpoint(in.readLong(), in.readInt(), in.readLong(), in.readLong(), in.readLong(), in.readLong()); + return new Checkpoint(in.readLong(), in.readInt(), in.readLong(), in.readLong(), in.readLong(), in.readLong(), in.readLong()); } // reads a checksummed checkpoint introduced in ES 5.0.0 @@ -111,7 +119,8 @@ final class Checkpoint { final long 
minSeqNo = SequenceNumbersService.NO_OPS_PERFORMED; final long maxSeqNo = SequenceNumbersService.NO_OPS_PERFORMED; final long globalCheckpoint = SequenceNumbersService.UNASSIGNED_SEQ_NO; - return new Checkpoint(in.readLong(), in.readInt(), in.readLong(), minSeqNo, maxSeqNo, globalCheckpoint); + final long minTranslogGeneration = -1L; + return new Checkpoint(in.readLong(), in.readInt(), in.readLong(), minSeqNo, maxSeqNo, globalCheckpoint, minTranslogGeneration); } @Override @@ -123,6 +132,7 @@ final class Checkpoint { ", minSeqNo=" + minSeqNo + ", maxSeqNo=" + maxSeqNo + ", globalCheckpoint=" + globalCheckpoint + + ", minTranslogGeneration=" + minTranslogGeneration + '}'; } diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java index 7b6922e7867..d4a5fe0d99f 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -24,6 +24,7 @@ import org.apache.logging.log4j.util.Supplier; import org.apache.lucene.index.Term; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.util.IOUtils; +import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesArray; @@ -37,10 +38,11 @@ import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.ReleasableLock; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.seqno.SequenceNumbersService; import org.elasticsearch.index.shard.AbstractIndexShardComponent; import org.elasticsearch.index.shard.IndexShardComponent; @@ -54,17 +56,17 @@ import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.nio.file.StandardOpenOption; import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; import java.util.List; -import java.util.Locale; +import java.util.Objects; import java.util.Optional; -import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.function.LongSupplier; import java.util.regex.Matcher; import java.util.regex.Pattern; -import java.util.stream.Collectors; import java.util.stream.Stream; /** @@ -76,19 +78,17 @@ import java.util.stream.Stream; * between the lucene index an the transaction log file. This UUID is used to prevent accidental recovery from a transaction log that belongs to a * different engine. *
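As a rough illustration of the fixed-size checkpoint record that the Checkpoint diff above extends with a minTranslogGeneration field, the following self-contained sketch uses plain java.io instead of Lucene's DataOutput and omits the CodecUtil header and footer; the field names mirror the patch, everything else is assumed:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Illustrative checkpoint record: a handful of longs and one int, so every write has the same size.
final class CheckpointRecordSketch {
    final long offset;
    final int numOps;
    final long generation;
    final long minSeqNo;
    final long maxSeqNo;
    final long globalCheckpoint;
    final long minTranslogGeneration;

    CheckpointRecordSketch(long offset, int numOps, long generation, long minSeqNo, long maxSeqNo,
                           long globalCheckpoint, long minTranslogGeneration) {
        assert minSeqNo <= maxSeqNo;
        assert minTranslogGeneration <= generation;
        this.offset = offset;
        this.numOps = numOps;
        this.generation = generation;
        this.minSeqNo = minSeqNo;
        this.maxSeqNo = maxSeqNo;
        this.globalCheckpoint = globalCheckpoint;
        this.minTranslogGeneration = minTranslogGeneration;
    }

    // Serializes the record; the result always has the same length, which is what makes the
    // single-block checkpoint write effectively atomic.
    byte[] toBytes() throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bytes)) {
            out.writeLong(offset);
            out.writeInt(numOps);
            out.writeLong(generation);
            out.writeLong(minSeqNo);
            out.writeLong(maxSeqNo);
            out.writeLong(globalCheckpoint);
            out.writeLong(minTranslogGeneration); // new field introduced by this change
        }
        return bytes.toByteArray();
    }
}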
- * Each Translog has only one translog file open at any time referenced by a translog generation ID. This ID is written to a translog.ckp file that is designed
- * to fit in a single disk block such that a write of the file is atomic. The checkpoint file is written on each fsync operation of the translog and records the number of operations
- * written, the current translogs file generation and it's fsynced offset in bytes.
+ * Each Translog has only one translog file open for writes at any time referenced by a translog generation ID. This ID is written to a
+ * translog.ckp file that is designed to fit in a single disk block such that a write of the file is atomic. The checkpoint file
+ * is written on each fsync operation of the translog and records the number of operations written, the current translog's file generation,
+ * its fsynced offset in bytes, and other important statistics.
 *
 *
- * When a translog is opened the checkpoint is use to retrieve the latest translog file generation and subsequently to open the last written file to recovery operations.
- * The {@link org.elasticsearch.index.translog.Translog.TranslogGeneration}, given when the translog is opened / constructed is compared against
- * the latest generation and all consecutive translog files singe the given generation and the last generation in the checkpoint will be recovered and preserved until the next
- * generation is committed using {@link Translog#commit(long)}. In the common case the translog file generation in the checkpoint and the generation passed to the translog on creation are
- * the same. The only situation when they can be different is when an actual translog commit fails in between {@link Translog#prepareCommit()} and {@link Translog#commit(long)}. In such a case
- * the currently being committed translog file will not be deleted since it's commit was not successful. Yet, a new/current translog file is already opened at that point such that there is more than
- * one translog file present. Such an uncommitted translog file always has a translog-${gen}.ckp associated with it which is an fsynced copy of the it's last translog.ckp such that in
- * disaster recovery last fsynced offsets, number of operation etc. are still preserved.
+ * When the current translog file reaches a certain size ({@link IndexSettings#INDEX_TRANSLOG_GENERATION_THRESHOLD_SIZE_SETTING}), or when
+ * a clear separation between old and new operations is needed (upon change in primary term), the current file is reopened for reading only
+ * and a new write-only file is created. Any non-current, read-only translog file always has a translog-${gen}.ckp associated with it
+ * which is an fsynced copy of its last translog.ckp such that in disaster recovery last fsynced offsets, number of
+ * operation etc. are still preserved.
 *
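The generation-based file layout described in the rewritten javadoc above boils down to two files per generation. A small, self-contained sketch of the naming scheme and of which files become deletable once a minimum required generation is known; the translog-${gen}.tlog / translog-${gen}.ckp names follow the text above, while the helper itself is illustrative:

import java.util.ArrayList;
import java.util.List;

// Illustrative helper: one .tlog file per generation plus a frozen per-generation .ckp copy.
final class TranslogFileNamesSketch {
    static String translogFile(long generation) {
        return "translog-" + generation + ".tlog";
    }

    static String checkpointFile(long generation) {
        return "translog-" + generation + ".ckp";
    }

    // Whole generations below the minimum required one can be deleted, never partial files.
    static List<String> deletable(long minExistingGen, long minRequiredGen) {
        List<String> files = new ArrayList<>();
        for (long gen = minExistingGen; gen < minRequiredGen; gen++) {
            files.add(translogFile(gen));
            files.add(checkpointFile(gen));
        }
        return files;
    }
}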
*/ public class Translog extends AbstractIndexShardComponent implements IndexShardComponent, Closeable { @@ -111,23 +111,17 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC // the list of translog readers is guaranteed to be in order of translog generation private final List readers = new ArrayList<>(); - // this is a concurrent set and is not protected by any of the locks. The main reason - // is that is being accessed by two separate classes (additions & reading are done by Translog, remove by View when closed) - private final Set outstandingViews = ConcurrentCollections.newConcurrentSet(); private BigArrays bigArrays; protected final ReleasableLock readLock; protected final ReleasableLock writeLock; private final Path location; private TranslogWriter current; - private static final long NOT_SET_GENERATION = -1; // -1 is safe as it will not cause a translog deletion. - - private volatile long currentCommittingGeneration = NOT_SET_GENERATION; - private volatile long lastCommittedTranslogFileGeneration = NOT_SET_GENERATION; private final AtomicBoolean closed = new AtomicBoolean(); private final TranslogConfig config; private final LongSupplier globalCheckpointSupplier; private final String translogUUID; + private final TranslogDeletionPolicy deletionPolicy; /** * Creates a new Translog instance. This method will create a new transaction log unless the given {@link TranslogGeneration} is @@ -137,20 +131,22 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC * translog file referenced by this generation. The translog creation will fail if this generation can't be opened. * * @param config the configuration of this translog - * @param translogGeneration the translog generation to open + * @param expectedTranslogUUID the translog uuid to open, null for a new translog + * @param deletionPolicy an instance of {@link TranslogDeletionPolicy} that controls when a translog file can be safely + * deleted * @param globalCheckpointSupplier a supplier for the global checkpoint */ public Translog( - final TranslogConfig config, - final TranslogGeneration translogGeneration, + final TranslogConfig config, final String expectedTranslogUUID, TranslogDeletionPolicy deletionPolicy, final LongSupplier globalCheckpointSupplier) throws IOException { super(config.getShardId(), config.getIndexSettings()); this.config = config; this.globalCheckpointSupplier = globalCheckpointSupplier; - if (translogGeneration == null || translogGeneration.translogUUID == null) { // legacy case + this.deletionPolicy = deletionPolicy; + if (expectedTranslogUUID == null) { translogUUID = UUIDs.randomBase64UUID(); } else { - translogUUID = translogGeneration.translogUUID; + translogUUID = expectedTranslogUUID; } bigArrays = config.getBigArrays(); ReadWriteLock rwl = new ReentrantReadWriteLock(); @@ -160,7 +156,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC Files.createDirectories(this.location); try { - if (translogGeneration != null) { + if (expectedTranslogUUID != null) { final Checkpoint checkpoint = readCheckpoint(location); final Path nextTranslogFile = location.resolve(getFilename(checkpoint.generation + 1)); final Path currentCheckpointFile = location.resolve(getCommitCheckpointFileName(checkpoint.generation)); @@ -172,19 +168,19 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC // // For this to happen we must have already copied the translog.ckp file into translog-gen.ckp so we first check 
if that file exists // if not we don't even try to clean it up and wait until we fail creating it - assert Files.exists(nextTranslogFile) == false || Files.size(nextTranslogFile) <= TranslogWriter.getHeaderLength(translogUUID) : "unexpected translog file: [" + nextTranslogFile + "]"; + assert Files.exists(nextTranslogFile) == false || Files.size(nextTranslogFile) <= TranslogWriter.getHeaderLength(expectedTranslogUUID) : "unexpected translog file: [" + nextTranslogFile + "]"; if (Files.exists(currentCheckpointFile) // current checkpoint is already copied && Files.deleteIfExists(nextTranslogFile)) { // delete it and log a warning logger.warn("deleted previously created, but not yet committed, next generation [{}]. This can happen due to a tragic exception when creating a new generation", nextTranslogFile.getFileName()); } - this.readers.addAll(recoverFromFiles(translogGeneration, checkpoint)); + this.readers.addAll(recoverFromFiles(checkpoint)); if (readers.isEmpty()) { throw new IllegalStateException("at least one reader must be recovered"); } boolean success = false; + current = null; try { current = createWriter(checkpoint.generation + 1); - this.lastCommittedTranslogFileGeneration = translogGeneration.translogFileGeneration; success = true; } finally { // we have to close all the recovered ones otherwise we leak file handles here @@ -196,18 +192,17 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC } } else { IOUtils.rm(location); - logger.debug("wipe translog location - creating new translog"); + // start from whatever generation lucene points to + final long generation = deletionPolicy.getMinTranslogGenerationForRecovery(); + logger.debug("wipe translog location - creating new translog, starting generation [{}]", generation); Files.createDirectories(location); - final long generation = 1; - final Checkpoint checkpoint = Checkpoint.emptyTranslogCheckpoint(0, generation, globalCheckpointSupplier.getAsLong()); + final Checkpoint checkpoint = Checkpoint.emptyTranslogCheckpoint(0, generation, globalCheckpointSupplier.getAsLong(), generation); final Path checkpointFile = location.resolve(CHECKPOINT_FILE_NAME); Checkpoint.write(getChannelFactory(), checkpointFile, checkpoint, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW); IOUtils.fsync(checkpointFile, false); - current = createWriter(generation); - this.lastCommittedTranslogFileGeneration = NOT_SET_GENERATION; - + current = createWriter(generation, generation); + readers.clear(); } - // now that we know which files are there, create a new current one. } catch (Exception e) { // close the opened translog files if we fail to create a new translog... 
 IOUtils.closeWhileHandlingException(current);
@@ -217,24 +212,46 @@
 }
 /** recover all translog files found on disk */
- private ArrayList<TranslogReader> recoverFromFiles(TranslogGeneration translogGeneration, Checkpoint checkpoint) throws IOException {
+ private ArrayList<TranslogReader> recoverFromFiles(Checkpoint checkpoint) throws IOException {
 boolean success = false;
 ArrayList<TranslogReader> foundTranslogs = new ArrayList<>();
 final Path tempFile = Files.createTempFile(location, TRANSLOG_FILE_PREFIX, TRANSLOG_FILE_SUFFIX); // a temp file to copy checkpoint to - note it must be in on the same FS otherwise atomic move won't work
 boolean tempFileRenamed = false;
 try (ReleasableLock lock = writeLock.acquire()) {
 logger.debug("open uncommitted translog checkpoint {}", checkpoint);
+
+ final long minGenerationToRecoverFrom;
+ if (checkpoint.minTranslogGeneration < 0) {
+ final Version indexVersionCreated = indexSettings().getIndexVersionCreated();
+ assert indexVersionCreated.before(Version.V_6_0_0_alpha3) :
+ "no minTranslogGeneration in checkpoint, but index was created with version [" + indexVersionCreated + "]";
+ minGenerationToRecoverFrom = deletionPolicy.getMinTranslogGenerationForRecovery();
+ } else {
+ minGenerationToRecoverFrom = checkpoint.minTranslogGeneration;
+ }
+
 final String checkpointTranslogFile = getFilename(checkpoint.generation);
- for (long i = translogGeneration.translogFileGeneration; i < checkpoint.generation; i++) {
+ // we open files in reverse order in order to validate the translog uuid before we start traversing the translog based on
+ // the generation id we found in the lucene commit. This gives better error messages if the wrong
+ // translog was found.
+ foundTranslogs.add(openReader(location.resolve(checkpointTranslogFile), checkpoint));
+ for (long i = checkpoint.generation - 1; i >= minGenerationToRecoverFrom; i--) {
 Path committedTranslogFile = location.resolve(getFilename(i));
 if (Files.exists(committedTranslogFile) == false) {
- throw new IllegalStateException("translog file doesn't exist with generation: " + i + " lastCommitted: " + lastCommittedTranslogFileGeneration + " checkpoint: " + checkpoint.generation + " - translog ids must be consecutive");
+ throw new IllegalStateException("translog file doesn't exist with generation: " + i + " recovering from: " +
+ minGenerationToRecoverFrom + " checkpoint: " + checkpoint.generation + " - translog ids must be consecutive");
 }
 final TranslogReader reader = openReader(committedTranslogFile, Checkpoint.read(location.resolve(getCommitCheckpointFileName(i))));
 foundTranslogs.add(reader);
 logger.debug("recovered local translog from checkpoint {}", checkpoint);
 }
- foundTranslogs.add(openReader(location.resolve(checkpointTranslogFile), checkpoint));
+ Collections.reverse(foundTranslogs);
+
+ // when we clean up files, we first update the checkpoint with a new minReferencedTranslog and then delete them;
+ // if we crash just at the wrong moment, it may be that we leave one unreferenced file behind so we delete it if it is there
+ IOUtils.deleteFilesIgnoringExceptions(location.resolve(getFilename(minGenerationToRecoverFrom - 1)),
+ location.resolve(getCommitCheckpointFileName(minGenerationToRecoverFrom - 1)));
+
 Path commitCheckpoint = location.resolve(getCommitCheckpointFileName(checkpoint.generation));
 if (Files.exists(commitCheckpoint)) {
 Checkpoint checkpointFromDisk = Checkpoint.read(commitCheckpoint);
@@ -335,18 +352,32 @@ public class Translog extends
AbstractIndexShardComponent implements IndexShardC } } + /** + * Returns the minimum file generation referenced by the translog + */ + long getMinFileGeneration() { + try (ReleasableLock ignored = readLock.acquire()) { + if (readers.isEmpty()) { + return current.getGeneration(); + } else { + return readers.get(0).getGeneration(); + } + } + } + + /** * Returns the number of operations in the transaction files that aren't committed to lucene.. */ public int totalOperations() { - return totalOperations(lastCommittedTranslogFileGeneration); + return totalOperations(deletionPolicy.getMinTranslogGenerationForRecovery()); } /** * Returns the size in bytes of the translog files that aren't committed to lucene. */ public long sizeInBytes() { - return sizeInBytes(lastCommittedTranslogFileGeneration); + return sizeInBytes(deletionPolicy.getMinTranslogGenerationForRecovery()); } /** @@ -375,7 +406,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC } } - /** * Creates a new translog for the specified generation. * @@ -384,6 +414,18 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC * @throws IOException if creating the translog failed */ TranslogWriter createWriter(long fileGeneration) throws IOException { + return createWriter(fileGeneration, getMinFileGeneration()); + } + + /** + * creates a new writer + * + * @param fileGeneration the generation of the write to be written + * @param initialMinTranslogGen the minimum translog generation to be written in the first checkpoint. This is + * needed to solve and initialization problem while constructing an empty translog. + * With no readers and no current, a call to {@link #getMinFileGeneration()} would not work. + */ + private TranslogWriter createWriter(long fileGeneration, long initialMinTranslogGen) throws IOException { final TranslogWriter newFile; try { newFile = TranslogWriter.create( @@ -393,7 +435,9 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC location.resolve(getFilename(fileGeneration)), getChannelFactory(), config.getBufferSize(), - globalCheckpointSupplier); + globalCheckpointSupplier, + initialMinTranslogGen, + this::getMinFileGeneration); } catch (final IOException e) { throw new TranslogException(shardId, "failed to create new translog file", e); } @@ -497,12 +541,18 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC * Snapshots are fixed in time and will not be updated with future operations. */ public Snapshot newSnapshot() { - return createSnapshot(Long.MIN_VALUE); + try (ReleasableLock ignored = readLock.acquire()) { + return newSnapshot(getMinFileGeneration()); + } } - private Snapshot createSnapshot(long minGeneration) { + public Snapshot newSnapshot(long minGeneration) { try (ReleasableLock ignored = readLock.acquire()) { ensureOpen(); + if (minGeneration < getMinFileGeneration()) { + throw new IllegalArgumentException("requested snapshot generation [" + minGeneration + "] is not available. 
" + + "Min referenced generation is [" + getMinFileGeneration() + "]"); + } Snapshot[] snapshots = Stream.concat(readers.stream(), Stream.of(current)) .filter(reader -> reader.getGeneration() >= minGeneration) .map(BaseTranslogReader::newSnapshot).toArray(Snapshot[]::new); @@ -517,9 +567,13 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC public Translog.View newView() { try (ReleasableLock lock = readLock.acquire()) { ensureOpen(); - View view = new View(lastCommittedTranslogFileGeneration); - outstandingViews.add(view); - return view; + final long viewGen = deletionPolicy.acquireTranslogGenForView(); + try { + return new View(viewGen); + } catch (Exception e) { + deletionPolicy.releaseTranslogGenView(viewGen); + throw e; + } } } @@ -628,6 +682,11 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC return config; } + // public for testing + public TranslogDeletionPolicy getDeletionPolicy() { + return deletionPolicy; + } + /** * a view into the translog, capturing all translog file at the moment of creation * and updated with any future translog. @@ -667,7 +726,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC /** create a snapshot from this view */ public Snapshot snapshot() { ensureOpen(); - return Translog.this.createSnapshot(minGeneration); + return Translog.this.newSnapshot(minGeneration); } void ensureOpen() { @@ -679,9 +738,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC @Override public void close() throws IOException { if (closed.getAndSet(true) == false) { - logger.trace("closing view starting at translog [{}]", minTranslogGeneration()); - boolean removed = outstandingViews.remove(this); - assert removed : "View was never set but was supposed to be removed"; + logger.trace("closing view starting at translog [{}]", minGeneration); + deletionPolicy.releaseTranslogGenView(minGeneration); trimUnreferencedReaders(); closeFilesIfNoPendingViews(); } @@ -863,8 +921,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC private final String id; private final long autoGeneratedIdTimestamp; private final String type; - private long seqNo = SequenceNumbersService.UNASSIGNED_SEQ_NO; - private long primaryTerm = 0; + private final long seqNo; + private final long primaryTerm; private final long version; private final VersionType versionType; private final BytesReference source; @@ -894,6 +952,9 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC if (format >= FORMAT_SEQ_NO) { seqNo = in.readLong(); primaryTerm = in.readLong(); + } else { + seqNo = SequenceNumbersService.UNASSIGNED_SEQ_NO; + primaryTerm = 0; } } @@ -911,15 +972,21 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC } public Index(String type, String id, long seqNo, byte[] source) { + this(type, id, seqNo, Versions.MATCH_ANY, VersionType.INTERNAL, source, null, null, -1); + } + + public Index(String type, String id, long seqNo, long version, VersionType versionType, byte[] source, String routing, + String parent, long autoGeneratedIdTimestamp) { this.type = type; this.id = id; this.source = new BytesArray(source); this.seqNo = seqNo; - version = Versions.MATCH_ANY; - versionType = VersionType.INTERNAL; - routing = null; - parent = null; - autoGeneratedIdTimestamp = -1; + this.primaryTerm = 0; + this.version = version; + this.versionType = versionType; + this.routing = routing; + this.parent = 
parent; + this.autoGeneratedIdTimestamp = autoGeneratedIdTimestamp; } @Override @@ -1052,27 +1119,42 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC public static class Delete implements Operation { - private static final int FORMAT_5_X = 2; - private static final int FORMAT_SEQ_NO = FORMAT_5_X + 1; + public static final int FORMAT_5_0 = 2; // 5.0 - 5.5 + private static final int FORMAT_SINGLE_TYPE = FORMAT_5_0 + 1; // 5.5 - 6.0 + private static final int FORMAT_SEQ_NO = FORMAT_SINGLE_TYPE + 1; // 6.0 - * public static final int SERIALIZATION_FORMAT = FORMAT_SEQ_NO; - private String type, id; - private Term uid; - private long seqNo = SequenceNumbersService.UNASSIGNED_SEQ_NO; - private long primaryTerm = 0; - private long version = Versions.MATCH_ANY; - private VersionType versionType = VersionType.INTERNAL; + private final String type, id; + private final Term uid; + private final long seqNo; + private final long primaryTerm; + private final long version; + private final VersionType versionType; public Delete(StreamInput in) throws IOException { final int format = in.readVInt();// SERIALIZATION_FORMAT - assert format >= FORMAT_5_X : "format was: " + format; - uid = new Term(in.readString(), in.readString()); + assert format >= FORMAT_5_0 : "format was: " + format; + if (format >= FORMAT_SINGLE_TYPE) { + type = in.readString(); + id = in.readString(); + uid = new Term(in.readString(), in.readString()); + } else { + uid = new Term(in.readString(), in.readString()); + // the uid was constructed from the type and id so we can + // extract them back + Uid uidObject = Uid.createUid(uid.text()); + type = uidObject.type(); + id = uidObject.id(); + } this.version = in.readLong(); this.versionType = VersionType.fromValue(in.readByte()); assert versionType.validateVersionForWrites(this.version); if (format >= FORMAT_SEQ_NO) { seqNo = in.readLong(); primaryTerm = in.readLong(); + } else { + seqNo = SequenceNumbersService.UNASSIGNED_SEQ_NO; + primaryTerm = 0; } } @@ -1086,8 +1168,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC } public Delete(String type, String id, Term uid, long seqNo, long primaryTerm, long version, VersionType versionType) { - this.type = type; - this.id = id; + this.type = Objects.requireNonNull(type); + this.id = Objects.requireNonNull(id); this.uid = uid; this.seqNo = seqNo; this.primaryTerm = primaryTerm; @@ -1143,6 +1225,8 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(SERIALIZATION_FORMAT); + out.writeString(type); + out.writeString(id); out.writeString(uid.field()); out.writeString(uid.text()); out.writeLong(version); @@ -1429,88 +1513,64 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC } /** - * Prepares a translog commit by setting the current committing generation and rolling the translog generation. 
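The Delete serialization change above follows the usual format-flag pattern: always write the newest format, branch on the format found when reading, and fill in defaults for fields that older formats did not carry. A self-contained sketch with plain java.io streams; the real code uses StreamInput/StreamOutput, and -2 merely stands in for UNASSIGNED_SEQ_NO:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Illustrative versioned read/write of a delete-like operation.
final class VersionedDeleteSketch {
    static final int FORMAT_WITHOUT_SEQ_NO = 3; // documents the older on-disk format
    static final int FORMAT_WITH_SEQ_NO = 4;    // current format, always written

    final String type;
    final String id;
    final long seqNo;

    VersionedDeleteSketch(String type, String id, long seqNo) {
        this.type = type;
        this.id = id;
        this.seqNo = seqNo;
    }

    byte[] write() throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bytes)) {
            out.writeInt(FORMAT_WITH_SEQ_NO); // writers always emit the newest format
            out.writeUTF(type);
            out.writeUTF(id);
            out.writeLong(seqNo);
        }
        return bytes.toByteArray();
    }

    static VersionedDeleteSketch read(byte[] data) throws IOException {
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(data))) {
            int format = in.readInt();
            String type = in.readUTF();
            String id = in.readUTF();
            long seqNo = format >= FORMAT_WITH_SEQ_NO ? in.readLong() : -2; // default for older formats
            return new VersionedDeleteSketch(type, id, seqNo);
        }
    }
}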
- * - * @throws IOException if an I/O exception occurred while rolling the translog generation + * Trims unreferenced translog generations by asking {@link TranslogDeletionPolicy} for the minimum + * required generation */ - public void prepareCommit() throws IOException { - try (ReleasableLock ignored = writeLock.acquire()) { - ensureOpen(); - if (currentCommittingGeneration != NOT_SET_GENERATION) { - final String message = - String.format(Locale.ROOT, "already committing a translog with generation [%d]", currentCommittingGeneration); - throw new IllegalStateException(message); - } - currentCommittingGeneration = current.getGeneration(); - rollGeneration(); - } - } - - /** - * Commits the translog and sets the last committed translog generation to the specified generation. The specified committed generation - * will be used when trimming unreferenced translog generations such that generations from the committed generation will be preserved. - * - * If {@link Translog#prepareCommit()} was not called before calling commit, this method will be invoked too causing the translog - * generation to be rolled. - * - * @param committedGeneration the minimum translog generation to preserve after trimming unreferenced generations - * @throws IOException if an I/O exception occurred preparing the translog commit - */ - public void commit(final long committedGeneration) throws IOException { - try (ReleasableLock ignored = writeLock.acquire()) { - ensureOpen(); - assert assertCommittedGenerationIsInValidRange(committedGeneration); - if (currentCommittingGeneration == NOT_SET_GENERATION) { - prepareCommit(); - } - assert currentCommittingGeneration != NOT_SET_GENERATION; - assert readers.stream().anyMatch(r -> r.getGeneration() == currentCommittingGeneration) - : "readers missing committing generation [" + currentCommittingGeneration + "]"; - // set the last committed generation otherwise old files will not be cleaned up - lastCommittedTranslogFileGeneration = committedGeneration; - currentCommittingGeneration = NOT_SET_GENERATION; - trimUnreferencedReaders(); - } - } - - private boolean assertCommittedGenerationIsInValidRange(final long committedGeneration) { - assert committedGeneration <= current.generation - : "tried to commit generation [" + committedGeneration + "] after current generation [" + current.generation + "]"; - final long min = readers.stream().map(TranslogReader::getGeneration).min(Long::compareTo).orElse(Long.MIN_VALUE); - assert committedGeneration >= min - : "tried to commit generation [" + committedGeneration + "] before minimum generation [" + min + "]"; - return true; - } - - /** - * Trims unreferenced translog generations. The guarantee here is that translog generations will be preserved for all outstanding views - * and from the last committed translog generation defined by {@link Translog#lastCommittedTranslogFileGeneration}. 
- */ - void trimUnreferencedReaders() { + public void trimUnreferencedReaders() throws IOException { try (ReleasableLock ignored = writeLock.acquire()) { if (closed.get()) { // we're shutdown potentially on some tragic event, don't delete anything return; } - long minReferencedGen = Math.min( - lastCommittedTranslogFileGeneration, - outstandingViews.stream().mapToLong(View::minTranslogGeneration).min().orElse(Long.MAX_VALUE)); - final List unreferenced = - readers.stream().filter(r -> r.getGeneration() < minReferencedGen).collect(Collectors.toList()); - for (final TranslogReader unreferencedReader : unreferenced) { - final Path translogPath = unreferencedReader.path(); + long minReferencedGen = deletionPolicy.minTranslogGenRequired(); + assert minReferencedGen >= getMinFileGeneration() : + "deletion policy requires a minReferenceGen of [" + minReferencedGen + "] but the lowest gen available is [" + + getMinFileGeneration() + "]"; + assert minReferencedGen <= currentFileGeneration() : + "deletion policy requires a minReferenceGen of [" + minReferencedGen + "] which is higher than the current generation [" + + currentFileGeneration() + "]"; + + + for (Iterator iterator = readers.iterator(); iterator.hasNext(); ) { + TranslogReader reader = iterator.next(); + if (reader.getGeneration() >= minReferencedGen) { + break; + } + iterator.remove(); + IOUtils.closeWhileHandlingException(reader); + final Path translogPath = reader.path(); logger.trace("delete translog file [{}], not referenced and not current anymore", translogPath); - IOUtils.closeWhileHandlingException(unreferencedReader); - IOUtils.deleteFilesIgnoringExceptions(translogPath, - translogPath.resolveSibling(getCommitCheckpointFileName(unreferencedReader.getGeneration()))); + // The checkpoint is used when opening the translog to know which files should be recovered from. + // We now update the checkpoint to ignore the file we are going to remove. + // Note that there is a provision in recoverFromFiles to allow for the case where we synced the checkpoint + // but crashed before we could delete the file. + current.sync(); + deleteReaderFiles(reader); } - readers.removeAll(unreferenced); + assert readers.isEmpty() == false || current.generation == minReferencedGen : + "all readers were cleaned but the minReferenceGen [" + minReferencedGen + "] is not the current writer's gen [" + + current.generation + "]"; + } catch (Exception ex) { + try { + closeOnTragicEvent(ex); + } catch (final Exception inner) { + ex.addSuppressed(inner); + } + throw ex; } } + /** + * deletes all files associated with a reader. package-private to be able to simulate node failures at this point + */ + void deleteReaderFiles(TranslogReader reader) { + IOUtils.deleteFilesIgnoringExceptions(reader.path(), + reader.path().resolveSibling(getCommitCheckpointFileName(reader.getGeneration()))); + } + void closeFilesIfNoPendingViews() throws IOException { try (ReleasableLock ignored = writeLock.acquire()) { - if (closed.get() && outstandingViews.isEmpty()) { + if (closed.get() && deletionPolicy.pendingViewsCount() == 0) { logger.trace("closing files. 
translog is closed and there are no pending views"); ArrayList toClose = new ArrayList<>(readers); toClose.add(current); @@ -1567,13 +1627,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC } } - /** - * The number of currently open views - */ - int getNumOpenViews() { - return outstandingViews.size(); - } - ChannelFactory getChannelFactory() { return FileChannel::open; } diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogDeletionPolicy.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogDeletionPolicy.java new file mode 100644 index 00000000000..84f61a642cc --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogDeletionPolicy.java @@ -0,0 +1,87 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.translog; + +import org.apache.lucene.util.Counter; + +import java.util.HashMap; +import java.util.Map; + +public class TranslogDeletionPolicy { + + /** Records how many views are held against each + * translog generation */ + private final Map translogRefCounts = new HashMap<>(); + + /** + * the translog generation that is requires to properly recover from the oldest non deleted + * {@link org.apache.lucene.index.IndexCommit}. + */ + private long minTranslogGenerationForRecovery = 1; + + public synchronized void setMinTranslogGenerationForRecovery(long newGen) { + if (newGen < minTranslogGenerationForRecovery) { + throw new IllegalArgumentException("minTranslogGenerationForRecovery can't go backwards. new [" + newGen + "] current [" + + minTranslogGenerationForRecovery+ "]"); + } + minTranslogGenerationForRecovery = newGen; + } + + /** + * acquires the basis generation for a new view. Any translog generation above, and including, the returned generation + * will not be deleted until a corresponding call to {@link #releaseTranslogGenView(long)} is called. 
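The TranslogDeletionPolicy introduced above is essentially a reference-counted map from pinned generations to open views, with a floor set by the last commit. A minimal sketch of that bookkeeping, using only the JDK and illustrative names:

import java.util.HashMap;
import java.util.Map;

// Illustrative sketch: views pin the minimum generation that existed when they were opened;
// nothing below the smallest pinned generation (or the recovery generation) may be deleted.
final class GenerationRefCountsSketch {
    private final Map<Long, Integer> refCounts = new HashMap<>();
    private long minGenForRecovery = 1;

    synchronized void setMinGenForRecovery(long gen) {
        if (gen < minGenForRecovery) {
            throw new IllegalArgumentException("generation can't go backwards");
        }
        minGenForRecovery = gen;
    }

    // A new view pins the current recovery generation and returns it.
    synchronized long acquire() {
        refCounts.merge(minGenForRecovery, 1, Integer::sum);
        return minGenForRecovery;
    }

    synchronized void release(long gen) {
        int remaining = refCounts.getOrDefault(gen, 0) - 1;
        if (remaining < 0) {
            throw new IllegalArgumentException("generation [" + gen + "] was not acquired");
        }
        if (remaining == 0) {
            refCounts.remove(gen);
        } else {
            refCounts.put(gen, remaining);
        }
    }

    // Everything strictly below this generation can be safely deleted.
    synchronized long minRequiredGen() {
        long minPinned = refCounts.keySet().stream().min(Long::compare).orElse(Long.MAX_VALUE);
        return Math.min(minPinned, minGenForRecovery);
    }
}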
+ */ + synchronized long acquireTranslogGenForView() { + translogRefCounts.computeIfAbsent(minTranslogGenerationForRecovery, l -> Counter.newCounter(false)).addAndGet(1); + return minTranslogGenerationForRecovery; + } + + /** returns the number of generations that were acquired for views */ + synchronized int pendingViewsCount() { + return translogRefCounts.size(); + } + + /** + * releases a generation that was acquired by {@link #acquireTranslogGenForView()} + */ + synchronized void releaseTranslogGenView(long translogGen) { + Counter current = translogRefCounts.get(translogGen); + if (current == null || current.get() <= 0) { + throw new IllegalArgumentException("translog gen [" + translogGen + "] wasn't acquired"); + } + if (current.addAndGet(-1) == 0) { + translogRefCounts.remove(translogGen); + } + } + + /** + * returns the minimum translog generation that is still required by the system. Any generation below + * the returned value may be safely deleted + */ + synchronized long minTranslogGenRequired() { + long viewRefs = translogRefCounts.keySet().stream().reduce(Math::min).orElse(Long.MAX_VALUE); + return Math.min(viewRefs, minTranslogGenerationForRecovery); + } + + /** returns the translog generation that will be used as a basis of a future store/peer recovery */ + public synchronized long getMinTranslogGenerationForRecovery() { + return minTranslogGenerationForRecovery; + } +} diff --git a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index 4a98365e02f..d637c9da79f 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/core/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -71,6 +71,7 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { private volatile long maxSeqNo; private final LongSupplier globalCheckpointSupplier; + private final LongSupplier minTranslogGenerationSupplier; protected final AtomicBoolean closed = new AtomicBoolean(false); // lock order synchronized(syncLock) -> synchronized(this) @@ -85,10 +86,11 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { final FileChannel channel, final Path path, final ByteSizeValue bufferSize, - final LongSupplier globalCheckpointSupplier) throws IOException { + final LongSupplier globalCheckpointSupplier, LongSupplier minTranslogGenerationSupplier) throws IOException { super(initialCheckpoint.generation, channel, path, channel.position()); this.shardId = shardId; this.channelFactory = channelFactory; + this.minTranslogGenerationSupplier = minTranslogGenerationSupplier; this.outputStream = new BufferedChannelOutputStream(java.nio.channels.Channels.newOutputStream(channel), bufferSize.bytesAsInt()); this.lastSyncedCheckpoint = initialCheckpoint; this.totalOffset = initialCheckpoint.offset; @@ -121,7 +123,9 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { Path file, ChannelFactory channelFactory, ByteSizeValue bufferSize, - final LongSupplier globalCheckpointSupplier) throws IOException { + final LongSupplier globalCheckpointSupplier, + final long initialMinTranslogGen, + final LongSupplier minTranslogGenerationSupplier) throws IOException { final BytesRef ref = new BytesRef(translogUUID); final int headerLength = getHeaderLength(ref.length); final FileChannel channel = channelFactory.open(file); @@ -132,9 +136,11 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { 
writeHeader(out, ref); channel.force(true); final Checkpoint checkpoint = - Checkpoint.emptyTranslogCheckpoint(headerLength, fileGeneration, globalCheckpointSupplier.getAsLong()); + Checkpoint.emptyTranslogCheckpoint(headerLength, fileGeneration, globalCheckpointSupplier.getAsLong(), + initialMinTranslogGen); writeCheckpoint(channelFactory, file.getParent(), checkpoint); - return new TranslogWriter(channelFactory, shardId, checkpoint, channel, file, bufferSize, globalCheckpointSupplier); + return new TranslogWriter(channelFactory, shardId, checkpoint, channel, file, bufferSize, globalCheckpointSupplier, + minTranslogGenerationSupplier); } catch (Exception exception) { // if we fail to bake the file-generation into the checkpoint we stick with the file and once we recover and that // file exists we remove it. We only apply this logic to the checkpoint.generation+1 any other file with a higher generation is an error condition @@ -242,7 +248,9 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { * checkpoint has not yet been fsynced */ public boolean syncNeeded() { - return totalOffset != lastSyncedCheckpoint.offset || globalCheckpointSupplier.getAsLong() != lastSyncedCheckpoint.globalCheckpoint; + return totalOffset != lastSyncedCheckpoint.offset || + globalCheckpointSupplier.getAsLong() != lastSyncedCheckpoint.globalCheckpoint || + minTranslogGenerationSupplier.getAsLong() != lastSyncedCheckpoint.minTranslogGeneration; } @Override @@ -330,6 +338,7 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { final long currentMinSeqNo; final long currentMaxSeqNo; final long currentGlobalCheckpoint; + final long currentMinTranslogGeneration; synchronized (this) { ensureOpen(); try { @@ -339,6 +348,7 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { currentMinSeqNo = minSeqNo; currentMaxSeqNo = maxSeqNo; currentGlobalCheckpoint = globalCheckpointSupplier.getAsLong(); + currentMinTranslogGeneration = minTranslogGenerationSupplier.getAsLong(); } catch (Exception ex) { try { closeWithTragicEvent(ex); @@ -354,7 +364,8 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { try { channel.force(false); checkpoint = - writeCheckpoint(channelFactory, offsetToSync, opsCounter, currentMinSeqNo, currentMaxSeqNo, currentGlobalCheckpoint, path.getParent(), generation); + writeCheckpoint(channelFactory, offsetToSync, opsCounter, currentMinSeqNo, currentMaxSeqNo, + currentGlobalCheckpoint, currentMinTranslogGeneration, path.getParent(), generation); } catch (Exception ex) { try { closeWithTragicEvent(ex); @@ -398,9 +409,11 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable { long minSeqNo, long maxSeqNo, long globalCheckpoint, + long minTranslogGeneration, Path translogFile, long generation) throws IOException { - final Checkpoint checkpoint = new Checkpoint(syncPosition, numOperations, generation, minSeqNo, maxSeqNo, globalCheckpoint); + final Checkpoint checkpoint = + new Checkpoint(syncPosition, numOperations, generation, minSeqNo, maxSeqNo, globalCheckpoint, minTranslogGeneration); writeCheckpoint(channelFactory, translogFile, checkpoint); return checkpoint; } diff --git a/core/src/main/java/org/elasticsearch/index/translog/TruncateTranslogCommand.java b/core/src/main/java/org/elasticsearch/index/translog/TruncateTranslogCommand.java index ea1f4c13dfd..408691692ca 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/TruncateTranslogCommand.java +++ 
b/core/src/main/java/org/elasticsearch/index/translog/TruncateTranslogCommand.java @@ -168,8 +168,8 @@ public class TruncateTranslogCommand extends EnvironmentAwareCommand { /** Write a checkpoint file to the given location with the given generation */ public static void writeEmptyCheckpoint(Path filename, int translogLength, long translogGeneration) throws IOException { - Checkpoint emptyCheckpoint = - Checkpoint.emptyTranslogCheckpoint(translogLength, translogGeneration, SequenceNumbersService.UNASSIGNED_SEQ_NO); + Checkpoint emptyCheckpoint = Checkpoint.emptyTranslogCheckpoint(translogLength, translogGeneration, + SequenceNumbersService.UNASSIGNED_SEQ_NO, translogGeneration); Checkpoint.write(FileChannel::open, filename, emptyCheckpoint, StandardOpenOption.WRITE, StandardOpenOption.READ, StandardOpenOption.CREATE_NEW); // fsync with metadata here to make sure. diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index b2843bf9f8a..e486aede53f 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -55,7 +55,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lease.Releasable; -import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -63,7 +62,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.Callback; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -491,7 +489,7 @@ public class IndicesService extends AbstractLifecycleComponent @Override public IndexShard createShard(ShardRouting shardRouting, RecoveryState recoveryState, PeerRecoveryTargetService recoveryTargetService, PeerRecoveryTargetService.RecoveryListener recoveryListener, RepositoriesService repositoriesService, - Callback onShardFailure) throws IOException { + Consumer onShardFailure) throws IOException { ensureChangesAllowed(); IndexService indexService = indexService(shardRouting.index()); IndexShard indexShard = indexService.createShard(shardRouting); diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java b/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java index 4dd146599c9..3f26b722f41 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java @@ -81,7 +81,6 @@ import org.elasticsearch.index.analysis.KStemTokenFilterFactory; import org.elasticsearch.index.analysis.KeepTypesFilterFactory; import org.elasticsearch.index.analysis.KeepWordFilterFactory; import org.elasticsearch.index.analysis.KeywordAnalyzerProvider; -import org.elasticsearch.index.analysis.KeywordMarkerTokenFilterFactory; import org.elasticsearch.index.analysis.KeywordTokenizerFactory; import org.elasticsearch.index.analysis.LatvianAnalyzerProvider; import 
org.elasticsearch.index.analysis.LengthTokenFilterFactory; @@ -101,8 +100,8 @@ import org.elasticsearch.index.analysis.PatternReplaceTokenFilterFactory; import org.elasticsearch.index.analysis.PatternTokenizerFactory; import org.elasticsearch.index.analysis.PersianAnalyzerProvider; import org.elasticsearch.index.analysis.PersianNormalizationFilterFactory; -import org.elasticsearch.index.analysis.PorterStemTokenFilterFactory; import org.elasticsearch.index.analysis.PortugueseAnalyzerProvider; +import org.elasticsearch.index.analysis.PreConfiguredCharFilter; import org.elasticsearch.index.analysis.PreConfiguredTokenFilter; import org.elasticsearch.index.analysis.PreConfiguredTokenizer; import org.elasticsearch.index.analysis.ReverseTokenFilterFactory; @@ -115,7 +114,6 @@ import org.elasticsearch.index.analysis.SerbianNormalizationFilterFactory; import org.elasticsearch.index.analysis.ShingleTokenFilterFactory; import org.elasticsearch.index.analysis.SimpleAnalyzerProvider; import org.elasticsearch.index.analysis.SnowballAnalyzerProvider; -import org.elasticsearch.index.analysis.SnowballTokenFilterFactory; import org.elasticsearch.index.analysis.SoraniAnalyzerProvider; import org.elasticsearch.index.analysis.SoraniNormalizationFilterFactory; import org.elasticsearch.index.analysis.SpanishAnalyzerProvider; @@ -132,7 +130,6 @@ import org.elasticsearch.index.analysis.ThaiAnalyzerProvider; import org.elasticsearch.index.analysis.ThaiTokenizerFactory; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; -import org.elasticsearch.index.analysis.TrimTokenFilterFactory; import org.elasticsearch.index.analysis.TruncateTokenFilterFactory; import org.elasticsearch.index.analysis.TurkishAnalyzerProvider; import org.elasticsearch.index.analysis.UAX29URLEmailTokenizerFactory; @@ -177,11 +174,14 @@ public final class AnalysisModule { NamedRegistry>> analyzers = setupAnalyzers(plugins); NamedRegistry>> normalizers = setupNormalizers(plugins); + Map preConfiguredCharFilters = setupPreConfiguredCharFilters(plugins); Map preConfiguredTokenFilters = setupPreConfiguredTokenFilters(plugins); Map preConfiguredTokenizers = setupPreConfiguredTokenizers(plugins); - analysisRegistry = new AnalysisRegistry(environment, charFilters.getRegistry(), tokenFilters.getRegistry(), tokenizers - .getRegistry(), analyzers.getRegistry(), normalizers.getRegistry(), preConfiguredTokenFilters, preConfiguredTokenizers); + analysisRegistry = new AnalysisRegistry(environment, + charFilters.getRegistry(), tokenFilters.getRegistry(), tokenizers.getRegistry(), + analyzers.getRegistry(), normalizers.getRegistry(), + preConfiguredCharFilters, preConfiguredTokenFilters, preConfiguredTokenizers); } HunspellService getHunspellService() { @@ -212,7 +212,6 @@ public final class AnalysisModule { tokenFilters.register("length", LengthTokenFilterFactory::new); tokenFilters.register("lowercase", LowerCaseTokenFilterFactory::new); tokenFilters.register("uppercase", UpperCaseTokenFilterFactory::new); - tokenFilters.register("porter_stem", PorterStemTokenFilterFactory::new); tokenFilters.register("kstem", KStemTokenFilterFactory::new); tokenFilters.register("standard", StandardTokenFilterFactory::new); tokenFilters.register("nGram", NGramTokenFilterFactory::new); @@ -223,10 +222,8 @@ public final class AnalysisModule { tokenFilters.register("min_hash", MinHashTokenFilterFactory::new); tokenFilters.register("unique", UniqueTokenFilterFactory::new); tokenFilters.register("truncate", 
requriesAnalysisSettings(TruncateTokenFilterFactory::new)); - tokenFilters.register("trim", TrimTokenFilterFactory::new); tokenFilters.register("limit", LimitTokenCountFilterFactory::new); tokenFilters.register("common_grams", requriesAnalysisSettings(CommonGramsTokenFilterFactory::new)); - tokenFilters.register("snowball", SnowballTokenFilterFactory::new); tokenFilters.register("stemmer", StemmerTokenFilterFactory::new); tokenFilters.register("delimited_payload_filter", DelimitedPayloadTokenFilterFactory::new); tokenFilters.register("elision", ElisionTokenFilterFactory::new); @@ -244,7 +241,6 @@ public final class AnalysisModule { tokenFilters.register("french_stem", FrenchStemTokenFilterFactory::new); tokenFilters.register("german_stem", GermanStemTokenFilterFactory::new); tokenFilters.register("russian_stem", RussianStemTokenFilterFactory::new); - tokenFilters.register("keyword_marker", requriesAnalysisSettings(KeywordMarkerTokenFilterFactory::new)); tokenFilters.register("stemmer_override", requriesAnalysisSettings(StemmerOverrideTokenFilterFactory::new)); tokenFilters.register("arabic_normalization", ArabicNormalizationFilterFactory::new); tokenFilters.register("german_normalization", GermanNormalizationFilterFactory::new); @@ -269,6 +265,19 @@ public final class AnalysisModule { return tokenFilters; } + static Map setupPreConfiguredCharFilters(List plugins) { + NamedRegistry preConfiguredCharFilters = new NamedRegistry<>("pre-configured char_filter"); + + // No char filter are available in lucene-core so none are built in to Elasticsearch core + + for (AnalysisPlugin plugin: plugins) { + for (PreConfiguredCharFilter filter : plugin.getPreConfiguredCharFilters()) { + preConfiguredCharFilters.register(filter.getName(), filter); + } + } + return unmodifiableMap(preConfiguredCharFilters.getRegistry()); + } + static Map setupPreConfiguredTokenFilters(List plugins) { NamedRegistry preConfiguredTokenFilters = new NamedRegistry<>("pre-configured token_filter"); diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltCharFilters.java b/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltCharFilters.java deleted file mode 100644 index 063763006a0..00000000000 --- a/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltCharFilters.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.indices.analysis; - -import org.apache.lucene.analysis.charfilter.HTMLStripCharFilter; -import org.elasticsearch.Version; -import org.elasticsearch.index.analysis.CharFilterFactory; -import org.elasticsearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy; - -import java.io.Reader; -import java.util.Locale; - -public enum PreBuiltCharFilters { - - HTML_STRIP(CachingStrategy.ONE) { - @Override - public Reader create(Reader tokenStream, Version version) { - return new HTMLStripCharFilter(tokenStream); - } - }; - - public abstract Reader create(Reader tokenStream, Version version); - - protected final PreBuiltCacheFactory.PreBuiltCache cache; - - PreBuiltCharFilters(CachingStrategy cachingStrategy) { - cache = PreBuiltCacheFactory.getCache(cachingStrategy); - } - - public synchronized CharFilterFactory getCharFilterFactory(final Version version) { - CharFilterFactory charFilterFactory = cache.get(version); - if (charFilterFactory == null) { - final String finalName = name(); - - charFilterFactory = new CharFilterFactory() { - @Override - public String name() { - return finalName.toLowerCase(Locale.ROOT); - } - - @Override - public Reader create(Reader tokenStream) { - return valueOf(finalName).create(tokenStream, version); - } - }; - cache.put(version, charFilterFactory); - } - - return charFilterFactory; - } - - /** - * Get a pre built CharFilter by its name or fallback to the default one - * @param name CharFilter name - * @param defaultCharFilter default CharFilter if name not found - */ - public static PreBuiltCharFilters getOrDefault(String name, PreBuiltCharFilters defaultCharFilter) { - try { - return valueOf(name.toUpperCase(Locale.ROOT)); - } catch (IllegalArgumentException e) { - return defaultCharFilter; - } - } -} diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenizers.java b/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenizers.java index 52e7ff6c9c4..9cc9ed1ea23 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenizers.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenizers.java @@ -38,8 +38,6 @@ import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy; -import java.util.Locale; - public enum PreBuiltTokenizers { STANDARD(CachingStrategy.ONE) { @@ -148,14 +146,8 @@ public enum PreBuiltTokenizers { public synchronized TokenizerFactory getTokenizerFactory(final Version version) { TokenizerFactory tokenizerFactory = cache.get(version); if (tokenizerFactory == null) { - final String finalName = name().toLowerCase(Locale.ROOT); if (getMultiTermComponent(version) != null) { tokenizerFactory = new MultiTermAwareTokenizerFactory() { - @Override - public String name() { - return finalName; - } - @Override public Tokenizer create() { return PreBuiltTokenizers.this.create(version); @@ -168,11 +160,6 @@ public enum PreBuiltTokenizers { }; } else { tokenizerFactory = new TokenizerFactory() { - @Override - public String name() { - return finalName; - } - @Override public Tokenizer create() { return PreBuiltTokenizers.this.create(version); diff --git a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index d230785a23e..9d091429f22 100644 --- 
a/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ b/core/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -45,7 +45,6 @@ import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.Callback; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.env.ShardLockObtainFailedException; @@ -54,8 +53,8 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexComponent; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.seqno.GlobalCheckpointTracker; import org.elasticsearch.index.seqno.GlobalCheckpointSyncAction; +import org.elasticsearch.index.seqno.GlobalCheckpointTracker; import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardRelocatedException; @@ -694,9 +693,9 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent imple } } - private class FailedShardHandler implements Callback { + private class FailedShardHandler implements Consumer { @Override - public void handle(final IndexShard.ShardFailure shardFailure) { + public void accept(final IndexShard.ShardFailure shardFailure) { final ShardRouting shardRouting = shardFailure.routing; threadPool.generic().execute(() -> { synchronized (IndicesClusterStateService.this) { @@ -832,7 +831,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent imple */ T createShard(ShardRouting shardRouting, RecoveryState recoveryState, PeerRecoveryTargetService recoveryTargetService, PeerRecoveryTargetService.RecoveryListener recoveryListener, RepositoriesService repositoriesService, - Callback onShardFailure) throws IOException; + Consumer onShardFailure) throws IOException; /** * Returns shard for the specified id if it exists otherwise returns null. 
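The FailedShardHandler change just above is part of the wider cleanup in this patch that retires the internal org.elasticsearch.common.util.Callback interface in favor of the standard java.util.function types (Consumer here, LongConsumer in RecoveriesCollection further down). The migration is mechanical, sketched here with a stand-in payload type; the practical gain is that call sites such as createShard(...) can now pass a lambda or method reference:

    import java.util.function.Consumer;

    // Illustrative sketch; ShardFailureInfo stands in for IndexShard.ShardFailure.
    class ShardFailureInfo {
        final String reason;
        ShardFailureInfo(String reason) { this.reason = reason; }
    }

    class FailedShardHandlerSketch implements Consumer<ShardFailureInfo> {
        @Override
        public void accept(ShardFailureInfo failure) { // was: handle(...) on the removed Callback<T>
            System.out.println("failing shard: " + failure.reason);
        }
    }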
diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java index 8435fe4ee1e..4823edcc2f1 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java @@ -48,7 +48,6 @@ import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardNotFoundException; -import org.elasticsearch.index.shard.TranslogRecoveryPerformer; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.recovery.RecoveriesCollection.RecoveryRef; @@ -423,22 +422,10 @@ public class PeerRecoveryTargetService extends AbstractComponent implements Inde try { recoveryTarget.indexTranslogOperations(request.operations(), request.totalTranslogOps()); channel.sendResponse(new RecoveryTranslogOperationsResponse(recoveryTarget.indexShard().getLocalCheckpoint())); - } catch (TranslogRecoveryPerformer.BatchOperationException exception) { - MapperException mapperException = (MapperException) ExceptionsHelper.unwrap(exception, MapperException.class); - if (mapperException == null) { - throw exception; - } + } catch (MapperException exception) { // in very rare cases a translog replay from primary is processed before a mapping update on this node // which causes local mapping changes since the mapping (clusterstate) might not have arrived on this node. - // we want to wait until these mappings are processed but also need to do some maintenance and roll back the - // number of processed (completed) operations in this batch to ensure accounting is correct. 
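Because the target now converts the whole batch to engine operations before applying any of them (see the RecoveryTarget change further down), a missing mapping surfaces as a plain MapperException before a single operation has been indexed, so the stats rollback being removed here is no longer needed: the handler only logs, waits for the mapping-carrying cluster state, and replays the identical batch. A stripped-down sketch of that detect-then-defer flow, with hypothetical names standing in for the real plumbing:

    import java.util.List;

    // Illustrative sketch only; names are stand-ins for the real recovery code.
    class BatchReplaySketch {
        static class MappingMissingException extends RuntimeException {}

        void indexBatch(List<String> ops) {
            try {
                // the real code converts every translog op to an engine op first, which
                // fails fast if any op would require a dynamic mapping update
                applyAll(ops);
            } catch (MappingMissingException e) {
                // nothing was applied, so there is no per-batch accounting to roll back;
                // retry the same batch once the next cluster state (with the mapping) arrives
                onNextClusterState(() -> indexBatch(ops));
            }
        }

        void applyAll(List<String> ops) { /* convert, check mappings, then apply */ }
        void onNextClusterState(Runnable retry) { /* register a cluster-state listener */ }
    }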
- logger.trace( - (Supplier) () -> new ParameterizedMessage( - "delaying recovery due to missing mapping changes (rolling back stats for [{}] ops)", - exception.completedOperations()), - exception); - final RecoveryState.Translog translog = recoveryTarget.state().getTranslog(); - translog.decrementRecoveredOperations(exception.completedOperations()); // do the maintainance and rollback competed ops + logger.debug("delaying recovery due to missing mapping changes", exception); // we do not need to use a timeout here since the entire recovery mechanism has an inactivity protection (it will be // canceled) observer.waitForNextChange(new ClusterStateObserver.Listener() { diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveriesCollection.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveriesCollection.java index 4fb2e398e52..6b81d34ab5f 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveriesCollection.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveriesCollection.java @@ -25,8 +25,6 @@ import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.Callback; -import org.elasticsearch.common.util.CancellableThreads; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.index.shard.IndexShard; @@ -39,7 +37,7 @@ import java.util.Iterator; import java.util.List; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Function; +import java.util.function.LongConsumer; /** * This class holds a collection of all on going recoveries on the current node (i.e., the node is the target node @@ -54,9 +52,9 @@ public class RecoveriesCollection { private final Logger logger; private final ThreadPool threadPool; - private final Callback ensureClusterStateVersionCallback; + private final LongConsumer ensureClusterStateVersionCallback; - public RecoveriesCollection(Logger logger, ThreadPool threadPool, Callback ensureClusterStateVersionCallback) { + public RecoveriesCollection(Logger logger, ThreadPool threadPool, LongConsumer ensureClusterStateVersionCallback) { this.logger = logger; this.threadPool = threadPool; this.ensureClusterStateVersionCallback = ensureClusterStateVersionCallback; diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 5c7787999da..8abd3a05d8e 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -510,7 +510,7 @@ public class RecoverySourceHandler { logger.trace("no translog operations to send"); } - final CancellableThreads.Interruptable sendBatch = + final CancellableThreads.IOInterruptable sendBatch = () -> targetLocalCheckpoint.set(recoveryTarget.indexTranslogOperations(operations, expectedTotalOps)); // send operations in batches @@ -536,7 +536,7 @@ public class RecoverySourceHandler { // check if this request is past bytes threshold, and if so, send it off if (size >= chunkSizeInBytes) { - cancellableThreads.execute(sendBatch); + cancellableThreads.executeIO(sendBatch); logger.trace("sent batch of 
[{}][{}] (total: [{}]) translog operations", ops, new ByteSizeValue(size), expectedTotalOps); ops = 0; size = 0; @@ -546,7 +546,7 @@ public class RecoverySourceHandler { if (!operations.isEmpty() || totalSentOps == 0) { // send the leftover operations or if no operations were sent, request the target to respond with its local checkpoint - cancellableThreads.execute(sendBatch); + cancellableThreads.executeIO(sendBatch); } assert expectedTotalOps == skippedOps + totalSentOps diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java index 18557b5c7b8..6a465f11115 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java @@ -36,13 +36,15 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.lucene.Lucene; -import org.elasticsearch.common.util.Callback; import org.elasticsearch.common.util.CancellableThreads; import org.elasticsearch.common.util.concurrent.AbstractRefCounted; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.mapper.MapperException; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.IndexShardNotRecoveringException; +import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.shard.TranslogRecoveryPerformer; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.StoreFileMetaData; import org.elasticsearch.index.translog.Translog; @@ -59,6 +61,8 @@ import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; +import java.util.stream.Collectors; +import java.util.function.LongConsumer; /** * Represents a recovery where the current node is the target node of the recovery. 
To track recoveries in a central place, instances of @@ -79,7 +83,7 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget private final String tempFilePrefix; private final Store store; private final PeerRecoveryTargetService.RecoveryListener listener; - private final Callback ensureClusterStateVersionCallback; + private final LongConsumer ensureClusterStateVersionCallback; private final AtomicBoolean finished = new AtomicBoolean(); @@ -107,7 +111,7 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget public RecoveryTarget(final IndexShard indexShard, final DiscoveryNode sourceNode, final PeerRecoveryTargetService.RecoveryListener listener, - final Callback ensureClusterStateVersionCallback) { + final LongConsumer ensureClusterStateVersionCallback) { super("recovery_status"); this.cancellableThreads = new CancellableThreads(); this.recoveryId = idGenerator.incrementAndGet(); @@ -371,16 +375,34 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget @Override public void ensureClusterStateVersion(long clusterStateVersion) { - ensureClusterStateVersionCallback.handle(clusterStateVersion); + ensureClusterStateVersionCallback.accept(clusterStateVersion); } @Override - public long indexTranslogOperations( - List operations, int totalTranslogOps) throws TranslogRecoveryPerformer.BatchOperationException { + public long indexTranslogOperations(List operations, int totalTranslogOps) throws MapperException, IOException { final RecoveryState.Translog translog = state().getTranslog(); translog.totalOperations(totalTranslogOps); assert indexShard().recoveryState() == state(); - indexShard().performBatchRecovery(operations); + if (indexShard().state() != IndexShardState.RECOVERING) { + throw new IndexShardNotRecoveringException(shardId, indexShard().state()); + } + // first convert all translog operations to engine operations to check for mapping updates + List engineOps = operations.stream().map( + op -> { + Engine.Operation engineOp = indexShard().convertToEngineOp(op, Engine.Operation.Origin.PEER_RECOVERY); + if (engineOp instanceof Engine.Index && ((Engine.Index) engineOp).parsedDoc().dynamicMappingsUpdate() != null) { + throw new MapperException("mapping updates are not allowed (type: [" + engineOp.type() + "], id: [" + + ((Engine.Index) engineOp).id() + "])"); + } + return engineOp; + } + ).collect(Collectors.toList()); + // actually apply engine operations + for (Engine.Operation engineOp : engineOps) { + indexShard().applyOperation(engineOp); + translog.incrementRecoveredOperations(); + } + indexShard().sync(); return indexShard().getLocalCheckpoint(); } @@ -476,5 +498,4 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget Path translogLocation() { return indexShard().shardPath().resolveTranslog(); } - } diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java index 38f412fed73..42cf1bc1ce1 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java @@ -56,7 +56,7 @@ public interface RecoveryTargetHandler { * * @return the local checkpoint on the target shard */ - long indexTranslogOperations(List operations, int totalTranslogOps); + long indexTranslogOperations(List operations, int totalTranslogOps) throws IOException; /** * Notifies the 
target of the files it is going to receive diff --git a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java index 84d3354ba4e..404a19b0ab3 100644 --- a/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java +++ b/core/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java @@ -159,11 +159,6 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction, Streamable { private ShardId shardId; Store.MetadataSnapshot metadataSnapshot; diff --git a/core/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java b/core/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java index 08669188a9f..25772d2d9a4 100644 --- a/core/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java +++ b/core/src/main/java/org/elasticsearch/ingest/ConfigurationUtils.java @@ -22,6 +22,10 @@ package org.elasticsearch.ingest; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptType; +import org.elasticsearch.script.TemplateScript; import java.util.ArrayList; import java.util.Arrays; @@ -29,6 +33,8 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import static org.elasticsearch.script.Script.DEFAULT_TEMPLATE_LANG; + public final class ConfigurationUtils { public static final String TAG_KEY = "tag"; @@ -265,10 +271,24 @@ public final class ConfigurationUtils { return processors; } - public static TemplateService.Template compileTemplate(String processorType, String processorTag, String propertyName, - String propertyValue, TemplateService templateService) { + public static TemplateScript.Factory compileTemplate(String processorType, String processorTag, String propertyName, + String propertyValue, ScriptService scriptService) { try { - return templateService.compile(propertyValue); + // This check is here because the DEFAULT_TEMPLATE_LANG(mustache) is not + // installed for use by REST tests. `propertyValue` will not be + // modified if templating is not available so a script that simply returns an unmodified `propertyValue` + // is returned. 
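Either branch of compileTemplate above hands back a TemplateScript.Factory, so callers never need to know whether mustache is actually installed: with the plugin present the factory renders the compiled template, without it the factory simply echoes the original string. A hedged usage sketch (compileTemplate and its parameters are from the patch; the surrounding processor-factory context and the example values are illustrative):

    import java.util.Collections;
    import java.util.Map;
    import org.elasticsearch.ingest.ConfigurationUtils;
    import org.elasticsearch.script.TemplateScript;

    // scriptService is assumed to be provided to the processor factory, as in Processor.Parameters
    TemplateScript.Factory field = ConfigurationUtils.compileTemplate(
            "set", "my_tag", "field", "user_{{name}}", scriptService);
    Map<String, Object> model = Collections.<String, Object>singletonMap("name", "kimchy");
    String resolved = field.newInstance(model).execute();
    // "user_kimchy" when mustache is installed; the literal "user_{{name}}" otherwise

The same newInstance(model).execute() chain is what IngestDocument uses below in renderTemplate, appendFieldValue and setFieldValue.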
+ if (scriptService.isLangSupported(DEFAULT_TEMPLATE_LANG)) { + Script script = new Script(ScriptType.INLINE, DEFAULT_TEMPLATE_LANG, propertyValue, Collections.emptyMap()); + return scriptService.compile(script, TemplateScript.CONTEXT); + } else { + return (params) -> new TemplateScript(params) { + @Override + public String execute() { + return propertyValue; + } + }; + } } catch (Exception e) { throw ConfigurationUtils.newConfigurationException(processorType, processorTag, propertyName, e); } diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 05b92b57723..2ebb919d51b 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -27,6 +27,8 @@ import org.elasticsearch.index.mapper.ParentFieldMapper; import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TypeFieldMapper; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.TemplateScript; import java.time.ZoneOffset; import java.time.ZonedDateTime; @@ -144,7 +146,7 @@ public final class IngestDocument { * @throws IllegalArgumentException if the pathTemplate is null, empty, invalid, if the field doesn't exist, * or if the field that is found at the provided path is not of the expected type. */ - public T getFieldValue(TemplateService.Template pathTemplate, Class clazz) { + public T getFieldValue(TemplateScript.Factory pathTemplate, Class clazz) { return getFieldValue(renderTemplate(pathTemplate), clazz); } @@ -191,7 +193,7 @@ public final class IngestDocument { * @return true if the document contains a value for the field, false otherwise * @throws IllegalArgumentException if the path is null, empty or invalid */ - public boolean hasField(TemplateService.Template fieldPathTemplate) { + public boolean hasField(TemplateScript.Factory fieldPathTemplate) { return hasField(renderTemplate(fieldPathTemplate)); } @@ -280,7 +282,7 @@ public final class IngestDocument { * @param fieldPathTemplate Resolves to the path with dot-notation within the document * @throws IllegalArgumentException if the path is null, empty, invalid or if the field doesn't exist. */ - public void removeField(TemplateService.Template fieldPathTemplate) { + public void removeField(TemplateScript.Factory fieldPathTemplate) { removeField(renderTemplate(fieldPathTemplate)); } @@ -391,9 +393,9 @@ public final class IngestDocument { * @param valueSource The value source that will produce the value or values to append to the existing ones * @throws IllegalArgumentException if the path is null, empty or invalid. */ - public void appendFieldValue(TemplateService.Template fieldPathTemplate, ValueSource valueSource) { + public void appendFieldValue(TemplateScript.Factory fieldPathTemplate, ValueSource valueSource) { Map model = createTemplateModel(); - appendFieldValue(fieldPathTemplate.execute(model), valueSource.copyAndResolve(model)); + appendFieldValue(fieldPathTemplate.newInstance(model).execute(), valueSource.copyAndResolve(model)); } /** @@ -419,9 +421,9 @@ public final class IngestDocument { * @throws IllegalArgumentException if the path is null, empty, invalid or if the value cannot be set to the * item identified by the provided path. 
*/ - public void setFieldValue(TemplateService.Template fieldPathTemplate, ValueSource valueSource) { + public void setFieldValue(TemplateScript.Factory fieldPathTemplate, ValueSource valueSource) { Map model = createTemplateModel(); - setFieldValue(fieldPathTemplate.execute(model), valueSource.copyAndResolve(model), false); + setFieldValue(fieldPathTemplate.newInstance(model).execute(), valueSource.copyAndResolve(model), false); } private void setFieldValue(String path, Object value, boolean append) { @@ -549,8 +551,8 @@ public final class IngestDocument { clazz.getName() + "]"); } - public String renderTemplate(TemplateService.Template template) { - return template.execute(createTemplateModel()); + public String renderTemplate(TemplateScript.Factory template) { + return template.newInstance(createTemplateModel()).execute(); } private Map createTemplateModel() { diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestService.java b/core/src/main/java/org/elasticsearch/ingest/IngestService.java index 1455e37588a..d869d892124 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -49,9 +49,7 @@ public class IngestService { public IngestService(ClusterSettings clusterSettings, Settings settings, ThreadPool threadPool, Environment env, ScriptService scriptService, AnalysisRegistry analysisRegistry, List ingestPlugins) { - - final TemplateService templateService = new InternalTemplateService(scriptService); - Processor.Parameters parameters = new Processor.Parameters(env, scriptService, templateService, + Processor.Parameters parameters = new Processor.Parameters(env, scriptService, analysisRegistry, threadPool.getThreadContext()); Map processorFactories = new HashMap<>(); for (IngestPlugin ingestPlugin : ingestPlugins) { diff --git a/core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java b/core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java deleted file mode 100644 index fa5444102a8..00000000000 --- a/core/src/main/java/org/elasticsearch/ingest/InternalTemplateService.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.ingest; - -import java.util.Collections; -import java.util.Map; - -import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.script.ExecutableScript; -import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptContext; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.script.ScriptType; -import org.elasticsearch.template.CompiledTemplate; - -public class InternalTemplateService implements TemplateService { - - private final ScriptService scriptService; - - InternalTemplateService(ScriptService scriptService) { - this.scriptService = scriptService; - } - - @Override - public Template compile(String template) { - int mustacheStart = template.indexOf("{{"); - int mustacheEnd = template.indexOf("}}"); - if (mustacheStart != -1 && mustacheEnd != -1 && mustacheStart < mustacheEnd) { - Script script = new Script(ScriptType.INLINE, "mustache", template, Collections.emptyMap()); - CompiledTemplate compiledTemplate = scriptService.compileTemplate(script, ExecutableScript.INGEST_CONTEXT); - return new Template() { - @Override - public String execute(Map model) { - return compiledTemplate.run(model); - } - - @Override - public String getKey() { - return template; - } - }; - } else { - return new StringTemplate(template); - } - } - - class StringTemplate implements Template { - - private final String value; - - StringTemplate(String value) { - this.value = value; - } - - @Override - public String execute(Map model) { - return value; - } - - @Override - public String getKey() { - return value; - } - } -} diff --git a/core/src/main/java/org/elasticsearch/ingest/Processor.java b/core/src/main/java/org/elasticsearch/ingest/Processor.java index 228ca5f4930..39d74fb09a9 100644 --- a/core/src/main/java/org/elasticsearch/ingest/Processor.java +++ b/core/src/main/java/org/elasticsearch/ingest/Processor.java @@ -84,11 +84,6 @@ public interface Processor { */ public final ScriptService scriptService; - /** - * Provides template support to pipeline settings. - */ - public final TemplateService templateService; - /** * Provide analyzer support */ @@ -100,11 +95,10 @@ public interface Processor { */ public final ThreadContext threadContext; - public Parameters(Environment env, ScriptService scriptService, TemplateService templateService, + public Parameters(Environment env, ScriptService scriptService, AnalysisRegistry analysisRegistry, ThreadContext threadContext) { this.env = env; this.scriptService = scriptService; - this.templateService = templateService; this.threadContext = threadContext; this.analysisRegistry = analysisRegistry; } diff --git a/core/src/main/java/org/elasticsearch/ingest/TemplateService.java b/core/src/main/java/org/elasticsearch/ingest/TemplateService.java deleted file mode 100644 index 2ece5a94304..00000000000 --- a/core/src/main/java/org/elasticsearch/ingest/TemplateService.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.ingest; - -import java.util.Map; - -/** - * Abstraction for the ingest template engine used to decouple {@link IngestDocument} from {@link org.elasticsearch.script.ScriptService}. - * Allows to compile a template into an ingest {@link Template} object. - * A compiled template can be executed by calling its {@link Template#execute(Map)} method. - */ -public interface TemplateService { - - Template compile(String template); - - interface Template { - - String execute(Map model); - - String getKey(); - } -} diff --git a/core/src/main/java/org/elasticsearch/ingest/ValueSource.java b/core/src/main/java/org/elasticsearch/ingest/ValueSource.java index fa483a5fbee..4e2787c0235 100644 --- a/core/src/main/java/org/elasticsearch/ingest/ValueSource.java +++ b/core/src/main/java/org/elasticsearch/ingest/ValueSource.java @@ -19,13 +19,21 @@ package org.elasticsearch.ingest; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptType; +import org.elasticsearch.script.TemplateScript; + import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import static org.elasticsearch.script.Script.DEFAULT_TEMPLATE_LANG; + /** * Holds a value. If the value is requested a copy is made and optionally template snippets are resolved too. */ @@ -41,13 +49,14 @@ public interface ValueSource { */ Object copyAndResolve(Map model); - static ValueSource wrap(Object value, TemplateService templateService) { + static ValueSource wrap(Object value, ScriptService scriptService) { + if (value instanceof Map) { @SuppressWarnings("unchecked") Map mapValue = (Map) value; Map valueTypeMap = new HashMap<>(mapValue.size()); for (Map.Entry entry : mapValue.entrySet()) { - valueTypeMap.put(wrap(entry.getKey(), templateService), wrap(entry.getValue(), templateService)); + valueTypeMap.put(wrap(entry.getKey(), scriptService), wrap(entry.getValue(), scriptService)); } return new MapValue(valueTypeMap); } else if (value instanceof List) { @@ -55,7 +64,7 @@ public interface ValueSource { List listValue = (List) value; List valueSourceList = new ArrayList<>(listValue.size()); for (Object item : listValue) { - valueSourceList.add(wrap(item, templateService)); + valueSourceList.add(wrap(item, scriptService)); } return new ListValue(valueSourceList); } else if (value == null || value instanceof Number || value instanceof Boolean) { @@ -63,7 +72,15 @@ public interface ValueSource { } else if (value instanceof byte[]) { return new ByteValue((byte[]) value); } else if (value instanceof String) { - return new TemplatedValue(templateService.compile((String) value)); + // This check is here because the DEFAULT_TEMPLATE_LANG(mustache) is not + // installed for use by REST tests. 
`value` will not be + // modified if templating is not available + if (scriptService.isLangSupported(DEFAULT_TEMPLATE_LANG)) { + Script script = new Script(ScriptType.INLINE, DEFAULT_TEMPLATE_LANG, (String) value, Collections.emptyMap()); + return new TemplatedValue(scriptService.compile(script, TemplateScript.CONTEXT)); + } else { + return new ObjectValue(value); + } } else { throw new IllegalArgumentException("unexpected value type [" + value.getClass() + "]"); } @@ -194,15 +211,15 @@ public interface ValueSource { final class TemplatedValue implements ValueSource { - private final TemplateService.Template template; + private final TemplateScript.Factory template; - TemplatedValue(TemplateService.Template template) { + TemplatedValue(TemplateScript.Factory template) { this.template = template; } @Override public Object copyAndResolve(Map model) { - return template.execute(model); + return template.newInstance(model).execute(); } @Override @@ -211,12 +228,12 @@ public interface ValueSource { if (o == null || getClass() != o.getClass()) return false; TemplatedValue templatedValue = (TemplatedValue) o; - return Objects.equals(template.getKey(), templatedValue.template.getKey()); + return Objects.equals(template, templatedValue.template); } @Override public int hashCode() { - return Objects.hashCode(template.getKey()); + return Objects.hashCode(template); } } diff --git a/core/src/main/java/org/elasticsearch/node/InternalSettingsPreparer.java b/core/src/main/java/org/elasticsearch/node/InternalSettingsPreparer.java index 32ad3e93329..9ee08126420 100644 --- a/core/src/main/java/org/elasticsearch/node/InternalSettingsPreparer.java +++ b/core/src/main/java/org/elasticsearch/node/InternalSettingsPreparer.java @@ -24,10 +24,8 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.function.Function; import org.elasticsearch.cli.Terminal; @@ -38,8 +36,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.env.Environment; -import static org.elasticsearch.common.Strings.cleanPath; - public class InternalSettingsPreparer { private static final String[] ALLOWED_SUFFIXES = {".yml", ".yaml", ".json"}; @@ -111,7 +107,7 @@ public class InternalSettingsPreparer { environment = new Environment(output.build()); // we put back the path.logs so we can use it in the logging configuration file - output.put(Environment.PATH_LOGS_SETTING.getKey(), cleanPath(environment.logsFile().toAbsolutePath().toString())); + output.put(Environment.PATH_LOGS_SETTING.getKey(), environment.logsFile().toAbsolutePath().normalize().toString()); return new Environment(output.build()); } diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index dd50b0526ee..13c829844e1 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -132,6 +132,7 @@ import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportInterceptor; import org.elasticsearch.transport.TransportService; import org.elasticsearch.tribe.TribeService; +import org.elasticsearch.usage.UsageService; import org.elasticsearch.watcher.ResourceWatcherService; import java.io.BufferedWriter; @@ -340,6 +341,7 @@ public class Node implements Closeable { final 
IngestService ingestService = new IngestService(clusterService.getClusterSettings(), settings, threadPool, this.environment, scriptModule.getScriptService(), analysisModule.getAnalysisRegistry(), pluginsService.filterPlugins(IngestPlugin.class)); final ClusterInfoService clusterInfoService = newClusterInfoService(settings, clusterService, threadPool, client); + final UsageService usageService = new UsageService(settings); ModulesBuilder modules = new ModulesBuilder(); // plugin modules must be added here, before others or we can get crazy injection errors... @@ -360,7 +362,7 @@ public class Node implements Closeable { resourcesToClose.add(circuitBreakerService); ActionModule actionModule = new ActionModule(false, settings, clusterModule.getIndexNameExpressionResolver(), settingsModule.getIndexScopedSettings(), settingsModule.getClusterSettings(), settingsModule.getSettingsFilter(), - threadPool, pluginsService.filterPlugins(ActionPlugin.class), client, circuitBreakerService); + threadPool, pluginsService.filterPlugins(ActionPlugin.class), client, circuitBreakerService, usageService); modules.add(actionModule); modules.add(new GatewayModule()); @@ -455,6 +457,7 @@ public class Node implements Closeable { b.bind(ScriptService.class).toInstance(scriptModule.getScriptService()); b.bind(AnalysisRegistry.class).toInstance(analysisModule.getAnalysisRegistry()); b.bind(IngestService.class).toInstance(ingestService); + b.bind(UsageService.class).toInstance(usageService); b.bind(NamedWriteableRegistry.class).toInstance(namedWriteableRegistry); b.bind(MetaDataUpgrader.class).toInstance(metaDataUpgrader); b.bind(MetaStateService.class).toInstance(metaStateService); diff --git a/core/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java b/core/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java index 99b4117f112..cc04ed875d9 100644 --- a/core/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java +++ b/core/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java @@ -28,8 +28,9 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalyzerProvider; import org.elasticsearch.index.analysis.CharFilterFactory; -import org.elasticsearch.index.analysis.PreConfiguredTokenizer; +import org.elasticsearch.index.analysis.PreConfiguredCharFilter; import org.elasticsearch.index.analysis.PreConfiguredTokenFilter; +import org.elasticsearch.index.analysis.PreConfiguredTokenizer; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; @@ -91,6 +92,13 @@ public interface AnalysisPlugin { return emptyMap(); } + /** + * Override to add additional pre-configured {@link CharFilter}s. + */ + default List getPreConfiguredCharFilters() { + return emptyList(); + } + /** * Override to add additional pre-configured {@link TokenFilter}s. 
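lucene-core ships no char filters, so after this change every pre-configured char_filter has to be contributed through this new hook (html_strip, previously hard-wired in the deleted PreBuiltCharFilters enum, moves out to a plugin elsewhere in this change set). A sketch of what an override could look like, assuming PreConfiguredCharFilter exposes a singleton(...) factory analogous to the existing PreConfiguredTokenFilter one; that factory method is not shown in this hunk:

    import java.util.List;
    import org.apache.lucene.analysis.charfilter.HTMLStripCharFilter;
    import org.elasticsearch.index.analysis.PreConfiguredCharFilter;
    import org.elasticsearch.plugins.AnalysisPlugin;
    import org.elasticsearch.plugins.Plugin;

    import static java.util.Collections.singletonList;

    public class MyAnalysisPlugin extends Plugin implements AnalysisPlugin {
        @Override
        public List<PreConfiguredCharFilter> getPreConfiguredCharFilters() {
            // assumed factory method mirroring PreConfiguredTokenFilter.singleton(name, multiTermAware, create)
            return singletonList(PreConfiguredCharFilter.singleton("html_strip", false, HTMLStripCharFilter::new));
        }
    }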
*/ diff --git a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java index ae2f330b717..b2cea4c0ad2 100644 --- a/core/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/core/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -52,6 +52,7 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; @@ -60,6 +61,7 @@ import java.util.Objects; import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory; @@ -285,6 +287,27 @@ public class PluginsService extends AbstractComponent { return bundles; } + static void checkForFailedPluginRemovals(final Path pluginsDirectory) throws IOException { + /* + * Check for the existence of a marker file that indicates any plugins are in a garbage state from a failed attempt to remove the + * plugin. + */ + try (DirectoryStream stream = Files.newDirectoryStream(pluginsDirectory, ".removing-*")) { + final Iterator iterator = stream.iterator(); + if (iterator.hasNext()) { + final Path removing = iterator.next(); + final String fileName = removing.getFileName().toString(); + final String name = fileName.substring(1 + fileName.indexOf("-")); + final String message = String.format( + Locale.ROOT, + "found file [%s] from a failed attempt to remove the plugin [%s]; execute [elasticsearch-plugin remove %2$s]", + removing, + name); + throw new IllegalStateException(message); + } + } + } + static Set getPluginBundles(Path pluginsDirectory) throws IOException { Logger logger = Loggers.getLogger(PluginsService.class); @@ -295,6 +318,8 @@ public class PluginsService extends AbstractComponent { Set bundles = new LinkedHashSet<>(); + checkForFailedPluginRemovals(pluginsDirectory); + try (DirectoryStream stream = Files.newDirectoryStream(pluginsDirectory)) { for (Path plugin : stream) { logger.trace("--- adding plugin [{}]", plugin.toAbsolutePath()); @@ -305,19 +330,6 @@ public class PluginsService extends AbstractComponent { throw new IllegalStateException("Could not load plugin descriptor for existing plugin [" + plugin.getFileName() + "]. Was the plugin built before 2.0?", e); } - /* - * Check for the existence of a marker file that indicates the plugin is in a garbage state from a failed attempt to remove - * the plugin. 
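The marker check also moves out of the per-plugin loop: instead of resolving .removing-<name> inside each plugin directory (the per-plugin lookup being removed here), the new checkForFailedPluginRemovals glob-scans the plugins directory once, so a stray marker is reported even if its plugin directory has already been deleted. The detection is just a DirectoryStream glob over JDK NIO; a standalone sketch of the same logic:

    import java.io.IOException;
    import java.nio.file.DirectoryStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class RemovalMarkerCheck {
        static void checkForFailedPluginRemovals(Path pluginsDirectory) throws IOException {
            try (DirectoryStream<Path> stream = Files.newDirectoryStream(pluginsDirectory, ".removing-*")) {
                for (Path removing : stream) {
                    String fileName = removing.getFileName().toString();
                    String pluginName = fileName.substring(1 + fileName.indexOf("-"));
                    throw new IllegalStateException("found [" + removing + "] from a failed attempt to remove ["
                            + pluginName + "]; execute [elasticsearch-plugin remove " + pluginName + "]");
                }
            }
        }

        public static void main(String[] args) throws IOException {
            checkForFailedPluginRemovals(Paths.get(args.length > 0 ? args[0] : "plugins"));
        }
    }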
- */ - final Path removing = plugin.resolve(".removing-" + info.getName()); - if (Files.exists(removing)) { - final String message = String.format( - Locale.ROOT, - "found file [%s] from a failed attempt to remove the plugin [%s]; execute [elasticsearch-plugin remove %2$s]", - removing, - info.getName()); - throw new IllegalStateException(message); - } Set urls = new LinkedHashSet<>(); try (DirectoryStream jarStream = Files.newDirectoryStream(plugin, "*.jar")) { diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 4c9ebd94de6..29b12666c6c 100644 --- a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -106,6 +106,7 @@ import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; import java.nio.file.DirectoryNotEmptyException; +import java.nio.file.FileAlreadyExistsException; import java.nio.file.NoSuchFileException; import java.util.ArrayList; import java.util.Collection; @@ -463,21 +464,24 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp final int totalShards, final List shardFailures, final long repositoryStateId) { + + SnapshotInfo blobStoreSnapshot = new SnapshotInfo(snapshotId, + indices.stream().map(IndexId::getName).collect(Collectors.toList()), + startTime, failure, System.currentTimeMillis(), totalShards, shardFailures); try { - SnapshotInfo blobStoreSnapshot = new SnapshotInfo(snapshotId, - indices.stream().map(IndexId::getName).collect(Collectors.toList()), - startTime, - failure, - System.currentTimeMillis(), - totalShards, - shardFailures); snapshotFormat.write(blobStoreSnapshot, snapshotsBlobContainer, snapshotId.getUUID()); final RepositoryData repositoryData = getRepositoryData(); writeIndexGen(repositoryData.addSnapshot(snapshotId, blobStoreSnapshot.state(), indices), repositoryStateId); - return blobStoreSnapshot; + } catch (FileAlreadyExistsException ex) { + // if another master was elected and took over finalizing the snapshot, it is possible + // that both nodes try to finalize the snapshot and write to the same blobs, so we just + // log a warning here and carry on + throw new RepositoryException(metadata.name(), "Blob already exists while " + + "finalizing snapshot, assume the snapshot has already been saved", ex); } catch (IOException ex) { throw new RepositoryException(metadata.name(), "failed to update snapshot in repository", ex); } + return blobStoreSnapshot; } @Override diff --git a/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index 3db635b044c..585713b641f 100644 --- a/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/core/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.rest.action.admin.cluster.RestNodesUsageAction; import java.io.IOException; import java.util.ArrayList; @@ -39,6 +40,7 @@ import java.util.Locale; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; +import java.util.concurrent.atomic.LongAdder; import java.util.stream.Collectors; /** @@ 
-54,10 +56,24 @@ public abstract class BaseRestHandler extends AbstractComponent implements RestH public static final Setting MULTI_ALLOW_EXPLICIT_INDEX = Setting.boolSetting("rest.action.multi.allow_explicit_index", true, Property.NodeScope); + private final LongAdder usageCount = new LongAdder(); + protected BaseRestHandler(Settings settings) { super(settings); } + public final long getUsageCount() { + return usageCount.sum(); + } + + /** + * @return the name of this handler. The name should be human readable and + * should describe the action that will performed when this API is + * called. This name is used in the response to the + * {@link RestNodesUsageAction}. + */ + public abstract String getName(); + @Override public final void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { // prepare the request for execution; has the side effect of touching the request parameters @@ -76,6 +92,7 @@ public abstract class BaseRestHandler extends AbstractComponent implements RestH throw new IllegalArgumentException(unrecognized(request, unconsumedParams, candidateParams, "parameter")); } + usageCount.increment(); // execute the action action.accept(channel); } diff --git a/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java b/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java index 55991b35413..11daaddd147 100644 --- a/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java +++ b/core/src/main/java/org/elasticsearch/rest/BytesRestResponse.java @@ -93,14 +93,9 @@ public class BytesRestResponse extends RestResponse { public BytesRestResponse(RestChannel channel, RestStatus status, Exception e) throws IOException { this.status = status; - if (channel.request().method() == RestRequest.Method.HEAD) { - this.content = BytesArray.EMPTY; - this.contentType = TEXT_CONTENT_TYPE; - } else { - try (XContentBuilder builder = build(channel, status, e)) { - this.content = builder.bytes(); - this.contentType = builder.contentType().mediaType(); - } + try (XContentBuilder builder = build(channel, status, e)) { + this.content = builder.bytes(); + this.contentType = builder.contentType().mediaType(); } if (e instanceof ElasticsearchException) { copyHeaders(((ElasticsearchException) e)); diff --git a/core/src/main/java/org/elasticsearch/rest/RestController.java b/core/src/main/java/org/elasticsearch/rest/RestController.java index a3d8a4b7db5..addc6bec51c 100644 --- a/core/src/main/java/org/elasticsearch/rest/RestController.java +++ b/core/src/main/java/org/elasticsearch/rest/RestController.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.usage.UsageService; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -72,11 +73,13 @@ public class RestController extends AbstractComponent implements HttpServerTrans /** Rest headers that are copied to internal requests made during a rest request. 
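A minimal sketch of the usage-tracking pattern introduced in BaseRestHandler above: every handler exposes a human-readable name and bumps a LongAdder once per dispatched request. The NamedHandler, MainHandler and UsageCountingDemo classes below are illustrative stand-ins, not the Elasticsearch types:

import java.util.concurrent.atomic.LongAdder;

abstract class NamedHandler {
    // LongAdder keeps the per-request increment cheap under concurrency;
    // the sum is only computed when usage statistics are requested.
    private final LongAdder usageCount = new LongAdder();

    /** Human readable name describing what this handler does. */
    public abstract String getName();

    public final long getUsageCount() {
        return usageCount.sum();
    }

    public final void handleRequest(String request) {
        // parameter validation would happen before this point, so only dispatched requests are counted
        usageCount.increment();
        execute(request);
    }

    protected abstract void execute(String request);
}

class MainHandler extends NamedHandler {
    @Override
    public String getName() {
        return "main_action";
    }

    @Override
    protected void execute(String request) {
        // no-op for the sketch
    }
}

public class UsageCountingDemo {
    public static void main(String[] args) {
        MainHandler handler = new MainHandler();
        handler.handleRequest("GET /");
        handler.handleRequest("GET /");
        System.out.println(handler.getName() + " was called " + handler.getUsageCount() + " times"); // 2 times
    }
}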
*/ private final Set headersToCopy; + private UsageService usageService; public RestController(Settings settings, Set headersToCopy, UnaryOperator handlerWrapper, - NodeClient client, CircuitBreakerService circuitBreakerService) { + NodeClient client, CircuitBreakerService circuitBreakerService, UsageService usageService) { super(settings); this.headersToCopy = headersToCopy; + this.usageService = usageService; if (handlerWrapper == null) { handlerWrapper = h -> h; // passthrough if no wrapper set } @@ -148,6 +151,9 @@ public class RestController extends AbstractComponent implements HttpServerTrans PathTrie handlers = getHandlersForMethod(method); if (handlers != null) { handlers.insert(path, handler); + if (handler instanceof BaseRestHandler) { + usageService.addRestHandler((BaseRestHandler) handler); + } } else { throw new IllegalArgumentException("Can't handle [" + method + "] for path [" + path + "]"); } diff --git a/core/src/main/java/org/elasticsearch/rest/RestRequest.java b/core/src/main/java/org/elasticsearch/rest/RestRequest.java index 509bfa7a3c0..1a74a6c6811 100644 --- a/core/src/main/java/org/elasticsearch/rest/RestRequest.java +++ b/core/src/main/java/org/elasticsearch/rest/RestRequest.java @@ -136,6 +136,18 @@ public abstract class RestRequest implements ToXContent.Params { public abstract BytesReference content(); + /** + * @return content of the request body or throw an exception if the body or content type is missing + */ + public final BytesReference requiredContent() { + if (hasContent() == false) { + throw new ElasticsearchParseException("request body is required"); + } else if (xContentType.get() == null) { + throw new IllegalStateException("unknown content type"); + } + return content(); + } + /** * Get the value of the header or {@code null} if not found. This method only retrieves the first header value if multiple values are * sent. Use of {@link #getAllHeaderValues(String)} should be preferred @@ -329,12 +341,7 @@ public abstract class RestRequest implements ToXContent.Params { * {@link #contentOrSourceParamParser()} for requests that support specifying the request body in the {@code source} param. */ public final XContentParser contentParser() throws IOException { - BytesReference content = content(); - if (content.length() == 0) { - throw new ElasticsearchParseException("Body required"); - } else if (xContentType.get() == null) { - throw new IllegalStateException("unknown content type"); - } + BytesReference content = requiredContent(); // will throw exception if body or content type missing return xContentType.get().xContent().createParser(xContentRegistry, content); } @@ -364,11 +371,7 @@ public abstract class RestRequest implements ToXContent.Params { */ public final XContentParser contentOrSourceParamParser() throws IOException { Tuple tuple = contentOrSourceParam(); - BytesReference content = tuple.v2(); - if (content.length() == 0) { - throw new ElasticsearchParseException("Body required"); - } - return tuple.v1().xContent().createParser(xContentRegistry, content); + return tuple.v1().xContent().createParser(xContentRegistry, tuple.v2()); } /** @@ -377,10 +380,10 @@ public abstract class RestRequest implements ToXContent.Params { * back to the user when there isn't request content. 
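The requiredContent() method added above centralizes the missing-body and unknown-content-type checks that the parser helpers now rely on. Below is a small standalone sketch of the same guard, using plain JDK exceptions and an illustrative class in place of RestRequest:

import java.nio.charset.StandardCharsets;

public class RequiredContentSketch {
    private final byte[] body;        // empty when the request carried no body
    private final String contentType; // null when the content type was missing or unrecognized

    RequiredContentSketch(byte[] body, String contentType) {
        this.body = body;
        this.contentType = contentType;
    }

    boolean hasContent() {
        return body.length > 0;
    }

    // Mirrors the requiredContent() idea: fail fast with a clear message instead of
    // letting a parser choke later on an empty body or an unknown content type.
    byte[] requiredContent() {
        if (hasContent() == false) {
            throw new IllegalArgumentException("request body is required");
        } else if (contentType == null) {
            throw new IllegalStateException("unknown content type");
        }
        return body;
    }

    public static void main(String[] args) {
        RequiredContentSketch ok =
            new RequiredContentSketch("{\"q\":1}".getBytes(StandardCharsets.UTF_8), "application/json");
        System.out.println(new String(ok.requiredContent(), StandardCharsets.UTF_8));
        try {
            new RequiredContentSketch(new byte[0], null).requiredContent();
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}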
*/ public final void withContentOrSourceParamParserOrNull(CheckedConsumer withParser) throws IOException { - Tuple tuple = contentOrSourceParam(); - BytesReference content = tuple.v2(); - XContentType xContentType = tuple.v1(); - if (content.length() > 0) { + if (hasContentOrSourceParam()) { + Tuple tuple = contentOrSourceParam(); + BytesReference content = tuple.v2(); + XContentType xContentType = tuple.v1(); try (XContentParser parser = xContentType.xContent().createParser(xContentRegistry, content)) { withParser.accept(parser); } @@ -390,28 +393,26 @@ public abstract class RestRequest implements ToXContent.Params { } /** - * Get the content of the request or the contents of the {@code source} param. Prefer {@link #contentOrSourceParamParser()} or - * {@link #withContentOrSourceParamParserOrNull(CheckedConsumer)} if you need a parser. + * Get the content of the request or the contents of the {@code source} param or throw an exception if both are missing. + * Prefer {@link #contentOrSourceParamParser()} or {@link #withContentOrSourceParamParserOrNull(CheckedConsumer)} if you need a parser. */ public final Tuple contentOrSourceParam() { - if (hasContent()) { - if (xContentType.get() == null) { - throw new IllegalStateException("unknown content type"); - } - return new Tuple<>(xContentType.get(), content()); + if (hasContentOrSourceParam() == false) { + throw new ElasticsearchParseException("request body or source parameter is required"); + } else if (hasContent()) { + return new Tuple<>(xContentType.get(), requiredContent()); } - String source = param("source"); String typeParam = param("source_content_type"); - if (source != null && typeParam != null) { - BytesArray bytes = new BytesArray(source); - final XContentType xContentType = parseContentType(Collections.singletonList(typeParam)); - if (xContentType == null) { - throw new IllegalStateException("Unknown value for source_content_type [" + typeParam + "]"); - } - return new Tuple<>(xContentType, bytes); + if (source == null || typeParam == null) { + throw new IllegalStateException("source and source_content_type parameters are required"); } - return new Tuple<>(XContentType.JSON, BytesArray.EMPTY); + BytesArray bytes = new BytesArray(source); + final XContentType xContentType = parseContentType(Collections.singletonList(typeParam)); + if (xContentType == null) { + throw new IllegalStateException("Unknown value for source_content_type [" + typeParam + "]"); + } + return new Tuple<>(xContentType, bytes); } /** diff --git a/core/src/main/java/org/elasticsearch/rest/action/RestFieldCapabilitiesAction.java b/core/src/main/java/org/elasticsearch/rest/action/RestFieldCapabilitiesAction.java index e983bdc182a..470f98a1e63 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/RestFieldCapabilitiesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/RestFieldCapabilitiesAction.java @@ -38,7 +38,6 @@ import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; -import static org.elasticsearch.rest.RestStatus.NOT_FOUND; import static org.elasticsearch.rest.RestStatus.OK; public class RestFieldCapabilitiesAction extends BaseRestHandler { @@ -50,6 +49,11 @@ public class RestFieldCapabilitiesAction extends BaseRestHandler { controller.registerHandler(POST, "/{index}/_field_caps", this); } + @Override + public String getName() { + return "field_capabilities_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest 
request, final NodeClient client) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/rest/action/RestFieldStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/RestFieldStatsAction.java index 34e7b636aeb..e087a5fb5ed 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/RestFieldStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/RestFieldStatsAction.java @@ -60,6 +60,11 @@ public class RestFieldStatsAction extends BaseRestHandler { "run a min/max aggregations on the desired fields."; } + @Override + public String getName() { + return "field_stats_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/rest/action/RestMainAction.java b/core/src/main/java/org/elasticsearch/rest/action/RestMainAction.java index 006b6d71db4..c155b76cb81 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/RestMainAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/RestMainAction.java @@ -44,6 +44,11 @@ public class RestMainAction extends BaseRestHandler { controller.registerHandler(HEAD, "/", this); } + @Override + public String getName() { + return "main_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { return channel -> client.execute(MainAction.INSTANCE, new MainRequest(), new RestBuilderListener(channel) { diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCancelTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCancelTasksAction.java index 28631501b7f..bd94e7351eb 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCancelTasksAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCancelTasksAction.java @@ -46,6 +46,11 @@ public class RestCancelTasksAction extends BaseRestHandler { controller.registerHandler(POST, "/_tasks/{task_id}/_cancel", this); } + @Override + public String getName() { + return "cancel_tasks_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final String[] nodesIds = Strings.splitStringByCommaToArray(request.param("nodes")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterAllocationExplainAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterAllocationExplainAction.java index 8855e65f976..504c8f365d7 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterAllocationExplainAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterAllocationExplainAction.java @@ -46,6 +46,11 @@ public class RestClusterAllocationExplainAction extends BaseRestHandler { controller.registerHandler(RestRequest.Method.POST, "/_cluster/allocation/explain", this); } + @Override + public String getName() { + return "cluster_allocation_explain_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { ClusterAllocationExplainRequest req; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java index 5c3be8f4347..f9716d8d1ba 100644 --- 
a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterGetSettingsAction.java @@ -53,6 +53,11 @@ public class RestClusterGetSettingsAction extends BaseRestHandler { this.settingsFilter = settingsFilter; } + @Override + public String getName() { + return "cluster_get_settings_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { ClusterStateRequest clusterStateRequest = Requests.clusterStateRequest() diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterHealthAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterHealthAction.java index d8dd34d4a28..f3dd274e040 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterHealthAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterHealthAction.java @@ -46,6 +46,11 @@ public class RestClusterHealthAction extends BaseRestHandler { controller.registerHandler(RestRequest.Method.GET, "/_cluster/health/{index}", this); } + @Override + public String getName() { + return "cluster_health_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { ClusterHealthRequest clusterHealthRequest = clusterHealthRequest(Strings.splitStringByCommaToArray(request.param("index"))); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java index 08ed23c7fb4..1eafe29cfe7 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java @@ -63,6 +63,11 @@ public class RestClusterRerouteAction extends BaseRestHandler { controller.registerHandler(RestRequest.Method.POST, "/_cluster/reroute", this); } + @Override + public String getName() { + return "cluster_reroute_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { ClusterRerouteRequest clusterRerouteRequest = createRequest(request); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterSearchShardsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterSearchShardsAction.java index 90673dccd68..e761e848023 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterSearchShardsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterSearchShardsAction.java @@ -44,6 +44,11 @@ public class RestClusterSearchShardsAction extends BaseRestHandler { controller.registerHandler(POST, "/{index}/_search_shards", this); } + @Override + public String getName() { + return "cluster_search_shards_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { String[] indices = Strings.splitStringByCommaToArray(request.param("index")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterStateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterStateAction.java index 
c25b612d78e..5e374363717 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterStateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterStateAction.java @@ -56,6 +56,11 @@ public class RestClusterStateAction extends BaseRestHandler { this.settingsFilter = settingsFilter; } + @Override + public String getName() { + return "cluster_state_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final ClusterStateRequest clusterStateRequest = Requests.clusterStateRequest(); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterStatsAction.java index e58cf72ffca..830fc3041f9 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterStatsAction.java @@ -36,6 +36,11 @@ public class RestClusterStatsAction extends BaseRestHandler { controller.registerHandler(RestRequest.Method.GET, "/_cluster/stats/nodes/{nodeId}", this); } + @Override + public String getName() { + return "cluster_stats_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { ClusterStatsRequest clusterStatsRequest = new ClusterStatsRequest().nodesIds(request.paramAsStringArray("nodeId", null)); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterUpdateSettingsAction.java index 02b62b8e4bf..88e8ada5597 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterUpdateSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterUpdateSettingsAction.java @@ -41,6 +41,11 @@ public class RestClusterUpdateSettingsAction extends BaseRestHandler { controller.registerHandler(RestRequest.Method.PUT, "/_cluster/settings", this); } + @Override + public String getName() { + return "cluster_update_settings_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final ClusterUpdateSettingsRequest clusterUpdateSettingsRequest = Requests.clusterUpdateSettingsRequest(); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCreateSnapshotAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCreateSnapshotAction.java index bd0d7e2a9d7..bf2866b5771 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCreateSnapshotAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCreateSnapshotAction.java @@ -43,6 +43,11 @@ public class RestCreateSnapshotAction extends BaseRestHandler { controller.registerHandler(POST, "/_snapshot/{repository}/{snapshot}", this); } + @Override + public String getName() { + return "create_snapshot_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { CreateSnapshotRequest createSnapshotRequest = createSnapshotRequest(request.param("repository"), request.param("snapshot")); diff --git 
a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteRepositoryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteRepositoryAction.java index 2019e04be55..33455365dcc 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteRepositoryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteRepositoryAction.java @@ -41,6 +41,11 @@ public class RestDeleteRepositoryAction extends BaseRestHandler { controller.registerHandler(DELETE, "/_snapshot/{repository}", this); } + @Override + public String getName() { + return "delete_repository_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { DeleteRepositoryRequest deleteRepositoryRequest = deleteRepositoryRequest(request.param("repository")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteSnapshotAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteSnapshotAction.java index a11d47278a8..fbd8822a84e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteSnapshotAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteSnapshotAction.java @@ -41,6 +41,11 @@ public class RestDeleteSnapshotAction extends BaseRestHandler { controller.registerHandler(DELETE, "/_snapshot/{repository}/{snapshot}", this); } + @Override + public String getName() { + return "delete_snapshot_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { DeleteSnapshotRequest deleteSnapshotRequest = deleteSnapshotRequest(request.param("repository"), request.param("snapshot")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteStoredScriptAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteStoredScriptAction.java index 102ae6bd571..8aadba3adf4 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteStoredScriptAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestDeleteStoredScriptAction.java @@ -42,6 +42,11 @@ public class RestDeleteStoredScriptAction extends BaseRestHandler { controller.registerHandler(DELETE, "/_scripts/{lang}/{id}", this); } + @Override + public String getName() { + return "delete_stored_script_action"; + } + @Override public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { String id = request.param("id"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java index 1af138400e3..ed6fe2f95f4 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java @@ -56,6 +56,11 @@ public class RestGetRepositoriesAction extends BaseRestHandler { this.settingsFilter = settingsFilter; } + @Override + public String getName() { + return "get_respositories_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final String[] repositories = request.paramAsStringArray("repository", Strings.EMPTY_ARRAY); diff --git 
a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetSnapshotsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetSnapshotsAction.java index 7348cb5896c..f42180b5029 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetSnapshotsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetSnapshotsAction.java @@ -42,6 +42,11 @@ public class RestGetSnapshotsAction extends BaseRestHandler { controller.registerHandler(GET, "/_snapshot/{repository}/{snapshot}", this); } + @Override + public String getName() { + return "get_snapshots_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { String repository = request.param("repository"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetStoredScriptAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetStoredScriptAction.java index 0c3857ffb58..f6299fcac58 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetStoredScriptAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetStoredScriptAction.java @@ -53,6 +53,11 @@ public class RestGetStoredScriptAction extends BaseRestHandler { controller.registerHandler(GET, "/_scripts/{lang}/{id}", this); } + @Override + public String getName() { + return "get_stored_scripts_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, NodeClient client) throws IOException { String id; @@ -93,7 +98,7 @@ public class RestGetStoredScriptAction extends BaseRestHandler { if (lang == null) { builder.startObject(StoredScriptSource.SCRIPT_PARSE_FIELD.getPreferredName()); builder.field(StoredScriptSource.LANG_PARSE_FIELD.getPreferredName(), source.getLang()); - builder.field(StoredScriptSource.CODE_PARSE_FIELD.getPreferredName(), source.getCode()); + builder.field(StoredScriptSource.SOURCE_PARSE_FIELD.getPreferredName(), source.getSource()); if (source.getOptions().isEmpty() == false) { builder.field(StoredScriptSource.OPTIONS_PARSE_FIELD.getPreferredName(), source.getOptions()); @@ -101,7 +106,7 @@ public class RestGetStoredScriptAction extends BaseRestHandler { builder.endObject(); } else { - builder.field(StoredScriptSource.SCRIPT_PARSE_FIELD.getPreferredName(), source.getCode()); + builder.field(StoredScriptSource.SCRIPT_PARSE_FIELD.getPreferredName(), source.getSource()); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetTaskAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetTaskAction.java index e013970553f..8ff823ea9e4 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetTaskAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetTaskAction.java @@ -39,6 +39,11 @@ public class RestGetTaskAction extends BaseRestHandler { controller.registerHandler(GET, "/_tasks/{taskId}", this); } + @Override + public String getName() { + return "get_task_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { TaskId taskId = new TaskId(request.param("taskId")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestListTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestListTasksAction.java index 
4177386eff9..6ef5d5a2de2 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestListTasksAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestListTasksAction.java @@ -55,6 +55,11 @@ public class RestListTasksAction extends BaseRestHandler { controller.registerHandler(GET, "/_tasks", this); } + @Override + public String getName() { + return "list_tasks_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final ListTasksRequest listTasksRequest = generateListTasksRequest(request); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java index 48ad588688d..be163d0431a 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java @@ -51,6 +51,11 @@ public class RestNodesHotThreadsAction extends BaseRestHandler { controller.registerHandler(RestRequest.Method.GET, "/_nodes/{nodeId}/hot_threads", this); } + @Override + public String getName() { + return "nodes_hot_threads_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { String[] nodesIds = Strings.splitStringByCommaToArray(request.param("nodeId")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesInfoAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesInfoAction.java index dfe3b08697c..256693fc392 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesInfoAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesInfoAction.java @@ -62,6 +62,11 @@ public class RestNodesInfoAction extends BaseRestHandler { this.settingsFilter = settingsFilter; } + @Override + public String getName() { + return "nodes_info_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { String[] nodeIds; diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsAction.java index f42a8fcf077..28f8163760f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsAction.java @@ -84,6 +84,11 @@ public class RestNodesStatsAction extends BaseRestHandler { FLAGS = Collections.unmodifiableMap(flags); } + @Override + public String getName() { + return "nodes_stats_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { String[] nodesIds = Strings.splitStringByCommaToArray(request.param("nodeId")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesUsageAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesUsageAction.java new file mode 100644 index 00000000000..b22f63ca78d --- /dev/null +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesUsageAction.java @@ -0,0 +1,98 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * 
license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.rest.action.admin.cluster; + +import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageRequest; +import org.elasticsearch.action.admin.cluster.node.usage.NodesUsageResponse; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.RestActions; +import org.elasticsearch.rest.action.RestBuilderListener; + +import java.io.IOException; +import java.util.Locale; +import java.util.Set; + +import static org.elasticsearch.rest.RestRequest.Method.GET; + +public class RestNodesUsageAction extends BaseRestHandler { + + @Inject + public RestNodesUsageAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(GET, "/_nodes/usage", this); + controller.registerHandler(GET, "/_nodes/{nodeId}/usage", this); + + controller.registerHandler(GET, "/_nodes/usage/{metric}", this); + controller.registerHandler(GET, "/_nodes/{nodeId}/usage/{metric}", this); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + String[] nodesIds = Strings.splitStringByCommaToArray(request.param("nodeId")); + Set metrics = Strings.splitStringByCommaToSet(request.param("metric", "_all")); + + NodesUsageRequest nodesUsageRequest = new NodesUsageRequest(nodesIds); + nodesUsageRequest.timeout(request.param("timeout")); + + if (metrics.size() == 1 && metrics.contains("_all")) { + nodesUsageRequest.all(); + } else if (metrics.contains("_all")) { + throw new IllegalArgumentException(String.format(Locale.ROOT, "request [%s] contains _all and individual metrics [%s]", + request.path(), request.param("metric"))); + } else { + nodesUsageRequest.clear(); + nodesUsageRequest.restActions(metrics.contains("rest_actions")); + } + + return channel -> client.admin().cluster().nodesUsage(nodesUsageRequest, new RestBuilderListener(channel) { + + @Override + public RestResponse buildResponse(NodesUsageResponse response, XContentBuilder builder) throws Exception { + builder.startObject(); + RestActions.buildNodesHeader(builder, channel.request(), response); + builder.field("cluster_name", response.getClusterName().value()); + response.toXContent(builder, channel.request()); + builder.endObject(); + + return new BytesRestResponse(RestStatus.OK, builder); + } + }); + } + + @Override 
+ public String getName() { + return "nodes_usage_action"; + } + + @Override + public boolean canTripCircuitBreaker() { + return false; + } +} diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPendingClusterTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPendingClusterTasksAction.java index 29b2b72895b..ab2beda1d03 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPendingClusterTasksAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPendingClusterTasksAction.java @@ -35,6 +35,11 @@ public class RestPendingClusterTasksAction extends BaseRestHandler { controller.registerHandler(RestRequest.Method.GET, "/_cluster/pending_tasks", this); } + @Override + public String getName() { + return "pending_cluster_tasks_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { PendingClusterTasksRequest pendingClusterTasksRequest = new PendingClusterTasksRequest(); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutRepositoryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutRepositoryAction.java index afd2fd851ae..b87871e064e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutRepositoryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutRepositoryAction.java @@ -44,6 +44,10 @@ public class RestPutRepositoryAction extends BaseRestHandler { controller.registerHandler(POST, "/_snapshot/{repository}", this); } + @Override + public String getName() { + return "put_repository_action"; + } @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutStoredScriptAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutStoredScriptAction.java index 358c3656ced..84c2aa0a9e2 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutStoredScriptAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPutStoredScriptAction.java @@ -46,10 +46,16 @@ public class RestPutStoredScriptAction extends BaseRestHandler { controller.registerHandler(PUT, "/_scripts/{lang}/{id}", this); } + @Override + public String getName() { + return "put_stored_script_action"; + } + @Override public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { String id = request.param("id"); String lang = request.param("lang"); + String context = request.param("context"); // In the case where only {lang} is not null, we make it {id} because of // name ordering issues in the handlers' paths. 
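A rough way to exercise the usage endpoint registered above once a node is running; this sketch assumes a node listening on localhost:9200 and uses only java.net.HttpURLConnection, neither of which is part of the change itself:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class NodesUsageProbe {
    public static void main(String[] args) throws Exception {
        // GET /_nodes/usage/{metric} with metric=rest_actions asks each node for its REST action usage counters
        URL url = new URL("http://localhost:9200/_nodes/usage/rest_actions");
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        connection.setRequestMethod("GET");
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        } finally {
            connection.disconnect();
        }
    }
}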
@@ -58,14 +64,14 @@ public class RestPutStoredScriptAction extends BaseRestHandler { lang = null; } - BytesReference content = request.content(); + BytesReference content = request.requiredContent(); if (lang != null) { deprecationLogger.deprecated( "specifying lang [" + lang + "] as part of the url path is deprecated, use request content instead"); } - PutStoredScriptRequest putRequest = new PutStoredScriptRequest(id, lang, content, request.getXContentType()); + PutStoredScriptRequest putRequest = new PutStoredScriptRequest(id, lang, context, content, request.getXContentType()); return channel -> client.admin().cluster().putStoredScript(putRequest, new AcknowledgedRestListener<>(channel)); } } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestRemoteClusterInfoAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestRemoteClusterInfoAction.java index c15b2553e5d..75baf8cecaa 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestRemoteClusterInfoAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestRemoteClusterInfoAction.java @@ -44,6 +44,11 @@ public final class RestRemoteClusterInfoAction extends BaseRestHandler { controller.registerHandler(GET, "_remote/info", this); } + @Override + public String getName() { + return "remote_cluster_info_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestRestoreSnapshotAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestRestoreSnapshotAction.java index 3948921c12d..2bd077037ec 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestRestoreSnapshotAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestRestoreSnapshotAction.java @@ -41,6 +41,11 @@ public class RestRestoreSnapshotAction extends BaseRestHandler { controller.registerHandler(POST, "/_snapshot/{repository}/{snapshot}/_restore", this); } + @Override + public String getName() { + return "restore_snapshot_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { RestoreSnapshotRequest restoreSnapshotRequest = restoreSnapshotRequest(request.param("repository"), request.param("snapshot")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestSnapshotsStatusAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestSnapshotsStatusAction.java index c5122641c1a..a06a916420e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestSnapshotsStatusAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestSnapshotsStatusAction.java @@ -44,6 +44,11 @@ public class RestSnapshotsStatusAction extends BaseRestHandler { controller.registerHandler(GET, "/_snapshot/_status", this); } + @Override + public String getName() { + return "snapshot_status_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { String repository = request.param("repository", "_all"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestVerifyRepositoryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestVerifyRepositoryAction.java index a9fb44eae64..92debec6bf8 100644 
--- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestVerifyRepositoryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestVerifyRepositoryAction.java @@ -38,6 +38,11 @@ public class RestVerifyRepositoryAction extends BaseRestHandler { controller.registerHandler(POST, "/_snapshot/{repository}/_verify", this); } + @Override + public String getName() { + return "verify_repository_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { VerifyRepositoryRequest verifyRepositoryRequest = verifyRepositoryRequest(request.param("repository")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java index 52ba58e4c38..44ff79c4d9f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java @@ -56,6 +56,11 @@ public class RestAnalyzeAction extends BaseRestHandler { controller.registerHandler(POST, "/{index}/_analyze", this); } + @Override + public String getName() { + return "analyze_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java index 1544a01f9f0..6e38b867edc 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestClearIndicesCacheAction.java @@ -52,6 +52,11 @@ public class RestClearIndicesCacheAction extends BaseRestHandler { controller.registerHandler(GET, "/{index}/_cache/clear", this); } + @Override + public String getName() { + return "clear_indices_cache_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { ClearIndicesCacheRequest clearIndicesCacheRequest = new ClearIndicesCacheRequest( diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCloseIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCloseIndexAction.java index 2e0a46747ca..d750dedd4b8 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCloseIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCloseIndexAction.java @@ -38,6 +38,11 @@ public class RestCloseIndexAction extends BaseRestHandler { controller.registerHandler(RestRequest.Method.POST, "/{index}/_close", this); } + @Override + public String getName() { + return "close_index_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { CloseIndexRequest closeIndexRequest = new CloseIndexRequest(Strings.splitStringByCommaToArray(request.param("index"))); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java index 8c314efee63..6a741fd3951 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java +++ 
b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java @@ -38,6 +38,11 @@ public class RestCreateIndexAction extends BaseRestHandler { controller.registerHandler(RestRequest.Method.PUT, "/{index}", this); } + @Override + public String getName() { + return "create_index_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { CreateIndexRequest createIndexRequest = new CreateIndexRequest(request.param("index")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestDeleteIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestDeleteIndexAction.java index 6ca806bcffe..d3f25660261 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestDeleteIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestDeleteIndexAction.java @@ -38,6 +38,11 @@ public class RestDeleteIndexAction extends BaseRestHandler { controller.registerHandler(RestRequest.Method.DELETE, "/{index}", this); } + @Override + public String getName() { + return "delete_index_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(Strings.splitStringByCommaToArray(request.param("index"))); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestDeleteIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestDeleteIndexTemplateAction.java index e9f315e8aee..c3fad980877 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestDeleteIndexTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestDeleteIndexTemplateAction.java @@ -34,6 +34,11 @@ public class RestDeleteIndexTemplateAction extends BaseRestHandler { controller.registerHandler(RestRequest.Method.DELETE, "/_template/{name}", this); } + @Override + public String getName() { + return "delete_index_template_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { DeleteIndexTemplateRequest deleteIndexTemplateRequest = new DeleteIndexTemplateRequest(request.param("name")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestFlushAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestFlushAction.java index ff084e023a8..be64b655758 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestFlushAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestFlushAction.java @@ -50,6 +50,11 @@ public class RestFlushAction extends BaseRestHandler { controller.registerHandler(GET, "/{index}/_flush", this); } + @Override + public String getName() { + return "flush_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { FlushRequest flushRequest = new FlushRequest(Strings.splitStringByCommaToArray(request.param("index"))); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java index 32820edda0d..79beb66d40b 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java 
+++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java @@ -46,6 +46,11 @@ public class RestForceMergeAction extends BaseRestHandler { controller.registerHandler(POST, "/{index}/_forcemerge", this); } + @Override + public String getName() { + return "force_merge_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { ForceMergeRequest mergeRequest = new ForceMergeRequest(Strings.splitStringByCommaToArray(request.param("index"))); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAliasesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAliasesAction.java index e7b53961b98..51ff743d2d1 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAliasesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAliasesAction.java @@ -19,6 +19,7 @@ package org.elasticsearch.rest.action.admin.indices; +import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; @@ -26,7 +27,10 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; @@ -38,14 +42,17 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestBuilderListener; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.HashSet; import java.util.List; import java.util.Locale; +import java.util.Set; +import java.util.SortedSet; import java.util.stream.Collectors; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.HEAD; -import static org.elasticsearch.rest.RestStatus.OK; /** * The REST handler for get alias and head alias APIs. 
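The rewritten alias response logic just below reports a not-found status only for requested aliases that match nothing, treating a wildcard pattern as satisfied if it simple-matches any existing alias. A self-contained sketch of that resolution follows; the simpleMatch helper is a small stand-in for Regex.simpleMatch, and the sample alias names are invented:

import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Pattern;

public class MissingAliasResolver {

    // Tiny stand-in for Regex.simpleMatch: '*' is the only wildcard, everything else is literal.
    static boolean simpleMatch(String pattern, String value) {
        String[] parts = pattern.split("\\*", -1);
        StringBuilder regex = new StringBuilder();
        for (int i = 0; i < parts.length; i++) {
            if (i > 0) {
                regex.append(".*");
            }
            if (parts[i].isEmpty() == false) {
                regex.append(Pattern.quote(parts[i]));
            }
        }
        return value.matches(regex.toString());
    }

    // Requested names or patterns that match no existing alias: exact matches are removed
    // first, then any wildcard pattern that simple-matches at least one alias.
    static Set<String> missingAliases(List<String> requested, Set<String> existing) {
        Set<String> missing = new TreeSet<>(requested);
        missing.removeAll(existing);
        missing.removeIf(pattern -> pattern.contains("*")
                && existing.stream().anyMatch(alias -> simpleMatch(pattern, alias)));
        return missing;
    }

    public static void main(String[] args) {
        Set<String> existing = new TreeSet<>(Arrays.asList("2017-04", "2017-05", "logs_write"));
        // "logs_write" matches exactly and "2017-*" matches by wildcard, so only "metrics-*"
        // is reported missing and the handler would answer with a not-found status.
        System.out.println(missingAliases(Arrays.asList("logs_write", "2017-*", "metrics-*"), existing));
    }
}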
@@ -56,12 +63,20 @@ public class RestGetAliasesAction extends BaseRestHandler { super(settings); controller.registerHandler(GET, "/_alias/{name}", this); controller.registerHandler(HEAD, "/_alias/{name}", this); + controller.registerHandler(GET, "/{index}/_alias", this); + controller.registerHandler(HEAD, "/{index}/_alias", this); controller.registerHandler(GET, "/{index}/_alias/{name}", this); controller.registerHandler(HEAD, "/{index}/_alias/{name}", this); } + @Override + public String getName() { + return "get_aliases_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + final boolean namesProvided = request.hasParam("name"); final String[] aliases = request.paramAsStringArrayOrEmptyIfAll("name"); final GetAliasesRequest getAliasesRequest = new GetAliasesRequest(aliases); final String[] indices = Strings.splitStringByCommaToArray(request.param("index")); @@ -72,24 +87,56 @@ public class RestGetAliasesAction extends BaseRestHandler { return channel -> client.admin().indices().getAliases(getAliasesRequest, new RestBuilderListener(channel) { @Override public RestResponse buildResponse(GetAliasesResponse response, XContentBuilder builder) throws Exception { - if (response.getAliases().isEmpty()) { - // empty body if indices were specified but no matching aliases exist - if (indices.length > 0) { - return new BytesRestResponse(OK, builder.startObject().endObject()); - } else { - final String message = String.format(Locale.ROOT, "alias [%s] missing", toNamesString(getAliasesRequest.aliases())); - builder.startObject(); - { - builder.field("error", message); - builder.field("status", RestStatus.NOT_FOUND.getStatus()); + final ImmutableOpenMap> aliasMap = response.getAliases(); + + final Set aliasNames = new HashSet<>(); + final Set indicesToDisplay = new HashSet<>(); + for (final ObjectObjectCursor> cursor : aliasMap) { + for (final AliasMetaData aliasMetaData : cursor.value) { + aliasNames.add(aliasMetaData.alias()); + if (namesProvided) { + indicesToDisplay.add(cursor.key); } - builder.endObject(); - return new BytesRestResponse(RestStatus.NOT_FOUND, builder); } - } else { - builder.startObject(); - { - for (final ObjectObjectCursor> entry : response.getAliases()) { + } + + // first remove requested aliases that are exact matches + final SortedSet difference = Sets.sortedDifference(Arrays.stream(aliases).collect(Collectors.toSet()), aliasNames); + + // now remove requested aliases that contain wildcards that are simple matches + final List matches = new ArrayList<>(); + outer: + for (final String pattern : difference) { + if (pattern.contains("*")) { + for (final String aliasName : aliasNames) { + if (Regex.simpleMatch(pattern, aliasName)) { + matches.add(pattern); + continue outer; + } + } + } + } + difference.removeAll(matches); + + final RestStatus status; + builder.startObject(); + { + if (difference.isEmpty()) { + status = RestStatus.OK; + } else { + status = RestStatus.NOT_FOUND; + final String message; + if (difference.size() == 1) { + message = String.format(Locale.ROOT, "alias [%s] missing", toNamesString(difference.iterator().next())); + } else { + message = String.format(Locale.ROOT, "aliases [%s] missing", toNamesString(difference.toArray(new String[0]))); + } + builder.field("error", message); + builder.field("status", status.getStatus()); + } + + for (final ObjectObjectCursor> entry : response.getAliases()) { + if (namesProvided == false || (namesProvided && 
indicesToDisplay.contains(entry.key))) { builder.startObject(entry.key); { builder.startObject("aliases"); @@ -103,10 +150,11 @@ public class RestGetAliasesAction extends BaseRestHandler { builder.endObject(); } } - builder.endObject(); - return new BytesRestResponse(OK, builder); } + builder.endObject(); + return new BytesRestResponse(status, builder); } + }); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAllAliasesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAllAliasesAction.java new file mode 100644 index 00000000000..0d10ac5800e --- /dev/null +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAllAliasesAction.java @@ -0,0 +1,107 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.rest.action.admin.indices; + +import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.get.GetIndexRequest.Feature; +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.xcontent.ToXContent.Params; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.action.RestBuilderListener; + +import java.io.IOException; +import java.util.List; +import java.util.Set; + +import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.rest.RestRequest.Method.HEAD; +import static org.elasticsearch.rest.RestStatus.OK; + +/** + * The REST handler for retrieving all aliases + */ +public class RestGetAllAliasesAction extends BaseRestHandler { + + public RestGetAllAliasesAction(final Settings settings, final RestController controller) { + super(settings); + controller.registerHandler(GET, "/_alias", this); + controller.registerHandler(GET, "/_aliases", this); + } + + @Override + public String getName() { + return "get_all_aliases_action"; + } + + @Override + public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + final GetIndexRequest getIndexRequest = new GetIndexRequest(); + getIndexRequest.indices(Strings.EMPTY_ARRAY); + 
getIndexRequest.features(Feature.ALIASES); + getIndexRequest.indicesOptions(IndicesOptions.fromRequest(request, getIndexRequest.indicesOptions())); + getIndexRequest.local(request.paramAsBoolean("local", getIndexRequest.local())); + getIndexRequest.humanReadable(request.paramAsBoolean("human", false)); + return channel -> client.admin().indices().getIndex(getIndexRequest, new RestBuilderListener(channel) { + + @Override + public RestResponse buildResponse(final GetIndexResponse response, final XContentBuilder builder) throws Exception { + builder.startObject(); + { + for (final String index : response.indices()) { + builder.startObject(index); + { + writeAliases(response.aliases().get(index), builder, request); + } + builder.endObject(); + } + } + builder.endObject(); + + return new BytesRestResponse(OK, builder); + } + + private void writeAliases(final List aliases, final XContentBuilder builder, + final Params params) throws IOException { + builder.startObject("aliases"); + { + if (aliases != null) { + for (final AliasMetaData alias : aliases) { + AliasMetaData.Builder.toXContent(alias, builder, params); + } + } + } + builder.endObject(); + } + }); + } + +} diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAllMappingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAllMappingsAction.java new file mode 100644 index 00000000000..9892717cd77 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAllMappingsAction.java @@ -0,0 +1,109 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.rest.action.admin.indices; + +import com.carrotsearch.hppc.cursors.ObjectObjectCursor; +import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.get.GetIndexRequest.Feature; +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.xcontent.ToXContent.Params; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.action.RestBuilderListener; + +import java.io.IOException; +import java.util.List; +import java.util.Set; + +import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.rest.RestRequest.Method.HEAD; +import static org.elasticsearch.rest.RestStatus.OK; + +/** + * The REST handler for retrieving all mappings + */ +public class RestGetAllMappingsAction extends BaseRestHandler { + + public RestGetAllMappingsAction(final Settings settings, final RestController controller) { + super(settings); + controller.registerHandler(GET, "/_mapping", this); + controller.registerHandler(GET, "/_mappings", this); + } + + @Override + public String getName() { + return "get_all_mappings_action"; + } + + @Override + public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + final GetIndexRequest getIndexRequest = new GetIndexRequest(); + getIndexRequest.indices(Strings.EMPTY_ARRAY); + getIndexRequest.features(Feature.MAPPINGS); + getIndexRequest.indicesOptions(IndicesOptions.fromRequest(request, getIndexRequest.indicesOptions())); + getIndexRequest.local(request.paramAsBoolean("local", getIndexRequest.local())); + getIndexRequest.humanReadable(request.paramAsBoolean("human", false)); + return channel -> client.admin().indices().getIndex(getIndexRequest, new RestBuilderListener(channel) { + + @Override + public RestResponse buildResponse(final GetIndexResponse response, final XContentBuilder builder) throws Exception { + builder.startObject(); + { + for (final String index : response.indices()) { + builder.startObject(index); + { + writeMappings(response.mappings().get(index), builder); + } + builder.endObject(); + } + } + builder.endObject(); + + return new BytesRestResponse(OK, builder); + } + + private void writeMappings(final ImmutableOpenMap mappings, + final XContentBuilder builder) throws IOException { + builder.startObject("mappings"); + { + for (final ObjectObjectCursor typeEntry : mappings) { + builder.field(typeEntry.key); + builder.map(typeEntry.value.sourceAsMap()); + } + } + builder.endObject(); + } + }); + } + +} diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAllSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAllSettingsAction.java 
new file mode 100644 index 00000000000..f51cee37ad3 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAllSettingsAction.java @@ -0,0 +1,121 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.rest.action.admin.indices; + +import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.get.GetIndexRequest.Feature; +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.xcontent.ToXContent.Params; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.action.RestBuilderListener; + +import java.io.IOException; +import java.util.List; +import java.util.Set; + +import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.rest.RestRequest.Method.HEAD; +import static org.elasticsearch.rest.RestStatus.OK; + +/** + * The REST handler for retrieving all settings + */ +public class RestGetAllSettingsAction extends BaseRestHandler { + + private final IndexScopedSettings indexScopedSettings; + private final SettingsFilter settingsFilter; + + public RestGetAllSettingsAction(final Settings settings, final RestController controller, + final IndexScopedSettings indexScopedSettings, final SettingsFilter settingsFilter) { + super(settings); + this.indexScopedSettings = indexScopedSettings; + controller.registerHandler(GET, "/_settings", this); + this.settingsFilter = settingsFilter; + } + + @Override + public String getName() { + return "get_all_settings_action"; + } + + @Override + public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + final GetIndexRequest getIndexRequest = new GetIndexRequest(); + getIndexRequest.indices(Strings.EMPTY_ARRAY); + getIndexRequest.features(Feature.SETTINGS); + getIndexRequest.indicesOptions(IndicesOptions.fromRequest(request, getIndexRequest.indicesOptions())); + getIndexRequest.local(request.paramAsBoolean("local", getIndexRequest.local())); + getIndexRequest.humanReadable(request.paramAsBoolean("human", false)); + // This is required so the 
"flat_settings" parameter counts as consumed + request.paramAsBoolean("flat_settings", false); + final boolean defaults = request.paramAsBoolean("include_defaults", false); + return channel -> client.admin().indices().getIndex(getIndexRequest, new RestBuilderListener(channel) { + + @Override + public RestResponse buildResponse(final GetIndexResponse response, final XContentBuilder builder) throws Exception { + builder.startObject(); + { + for (final String index : response.indices()) { + builder.startObject(index); + { + writeSettings(response.settings().get(index), builder, request, defaults); + } + builder.endObject(); + } + } + builder.endObject(); + + return new BytesRestResponse(OK, builder); + } + + + private void writeSettings(final Settings settings, final XContentBuilder builder, + final Params params, final boolean defaults) throws IOException { + builder.startObject("settings"); + { + settings.toXContent(builder, params); + } + builder.endObject(); + if (defaults) { + builder.startObject("defaults"); + { + settingsFilter + .filter(indexScopedSettings.diff(settings, RestGetAllSettingsAction.this.settings)) + .toXContent(builder, request); + } + builder.endObject(); + } + } + }); + } + +} diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java index fe6eb9a552f..ea68d9cc3c0 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java @@ -52,6 +52,11 @@ public class RestGetFieldMappingAction extends BaseRestHandler { controller.registerHandler(GET, "/{index}/_mapping/{type}/field/{fields}", this); } + @Override + public String getName() { + return "get_field_mapping_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final String[] indices = Strings.splitStringByCommaToArray(request.param("index")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndexTemplateAction.java index 1814894636f..38c1cb76611 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndexTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndexTemplateAction.java @@ -50,6 +50,11 @@ public class RestGetIndexTemplateAction extends BaseRestHandler { controller.registerHandler(HEAD, "/_template/{name}", this); } + @Override + public String getName() { + return "get_index_template_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final String[] names = Strings.splitStringByCommaToArray(request.param("name")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java index fc6682347fd..e9552d47526 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java @@ -67,25 +67,19 @@ public class RestGetIndicesAction extends BaseRestHandler { this.indexScopedSettings = indexScopedSettings; 
controller.registerHandler(GET, "/{index}", this); controller.registerHandler(HEAD, "/{index}", this); - controller.registerHandler(GET, "/{index}/{type}", this); this.settingsFilter = settingsFilter; } + @Override + public String getName() { + return "get_indices_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { String[] indices = Strings.splitStringByCommaToArray(request.param("index")); - String[] featureParams = request.paramAsStringArray("type", null); - // Work out if the indices is a list of features - if (featureParams == null && indices.length > 0 && indices[0] != null && indices[0].startsWith("_") && !"_all".equals(indices[0])) { - featureParams = indices; - indices = new String[]{"_all"}; - } final GetIndexRequest getIndexRequest = new GetIndexRequest(); getIndexRequest.indices(indices); - if (featureParams != null) { - Feature[] features = Feature.convertToFeatures(featureParams); - getIndexRequest.features(features); - } getIndexRequest.indicesOptions(IndicesOptions.fromRequest(request, getIndexRequest.indicesOptions())); getIndexRequest.local(request.paramAsBoolean("local", getIndexRequest.local())); getIndexRequest.humanReadable(request.paramAsBoolean("human", false)); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java index 66e23fd5f47..99b8215025e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java @@ -19,6 +19,7 @@ package org.elasticsearch.rest.action.admin.indices; +import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; @@ -28,7 +29,9 @@ import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.TypeMissingException; @@ -37,71 +40,126 @@ import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestBuilderListener; import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Set; +import java.util.SortedSet; +import java.util.stream.Collectors; import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.rest.RestRequest.Method.HEAD; import static org.elasticsearch.rest.RestStatus.OK; public class RestGetMappingAction extends BaseRestHandler { - public RestGetMappingAction(Settings settings, RestController controller) { + + public RestGetMappingAction(final Settings settings, final RestController controller) { super(settings); controller.registerHandler(GET, "/{index}/{type}/_mapping", this); + 
controller.registerHandler(GET, "/{index}/_mappings", this); + controller.registerHandler(GET, "/{index}/_mapping", this); controller.registerHandler(GET, "/{index}/_mappings/{type}", this); controller.registerHandler(GET, "/{index}/_mapping/{type}", this); + controller.registerHandler(HEAD, "/{index}/_mapping/{type}", this); controller.registerHandler(GET, "/_mapping/{type}", this); } + @Override + public String getName() { + return "get_mapping_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final String[] indices = Strings.splitStringByCommaToArray(request.param("index")); final String[] types = request.paramAsStringArrayOrEmptyIfAll("type"); - GetMappingsRequest getMappingsRequest = new GetMappingsRequest(); + final GetMappingsRequest getMappingsRequest = new GetMappingsRequest(); getMappingsRequest.indices(indices).types(types); getMappingsRequest.indicesOptions(IndicesOptions.fromRequest(request, getMappingsRequest.indicesOptions())); getMappingsRequest.local(request.paramAsBoolean("local", getMappingsRequest.local())); return channel -> client.admin().indices().getMappings(getMappingsRequest, new RestBuilderListener(channel) { @Override - public RestResponse buildResponse(GetMappingsResponse response, XContentBuilder builder) throws Exception { - - ImmutableOpenMap> mappingsByIndex = response.getMappings(); - if (mappingsByIndex.isEmpty()) { - if (indices.length != 0 && types.length != 0) { - return new BytesRestResponse(OK, builder.startObject().endObject()); - } else if (indices.length != 0) { + public RestResponse buildResponse(final GetMappingsResponse response, final XContentBuilder builder) throws Exception { + final ImmutableOpenMap> mappingsByIndex = response.getMappings(); + if (mappingsByIndex.isEmpty() && (indices.length != 0 || types.length != 0)) { + if (indices.length != 0 && types.length == 0) { builder.close(); - return new BytesRestResponse(channel, new IndexNotFoundException(indices[0])); - } else if (types.length != 0) { - builder.close(); - return new BytesRestResponse(channel, new TypeMissingException("_all", types[0])); + return new BytesRestResponse(channel, new IndexNotFoundException(String.join(",", indices))); } else { - return new BytesRestResponse(OK, builder.startObject().endObject()); + builder.close(); + return new BytesRestResponse(channel, new TypeMissingException("_all", String.join(",", types))); } } + final Set typeNames = new HashSet<>(); + for (final ObjectCursor> cursor : mappingsByIndex.values()) { + for (final ObjectCursor inner : cursor.value.keys()) { + typeNames.add(inner.value); + } + } + + final SortedSet difference = Sets.sortedDifference(Arrays.stream(types).collect(Collectors.toSet()), typeNames); + + // now remove requested aliases that contain wildcards that are simple matches + final List matches = new ArrayList<>(); + outer: + for (final String pattern : difference) { + if (pattern.contains("*")) { + for (final String typeName : typeNames) { + if (Regex.simpleMatch(pattern, typeName)) { + matches.add(pattern); + continue outer; + } + } + } + } + difference.removeAll(matches); + + final RestStatus status; builder.startObject(); - for (ObjectObjectCursor> indexEntry : mappingsByIndex) { - if (indexEntry.value.isEmpty()) { - continue; + { + if (difference.isEmpty()) { + status = RestStatus.OK; + } else { + status = RestStatus.NOT_FOUND; + final String message; + if (difference.size() == 1) { + message = String.format(Locale.ROOT, "type [%s] 
missing", toNamesString(difference.iterator().next())); + } else { + message = String.format(Locale.ROOT, "types [%s] missing", toNamesString(difference.toArray(new String[0]))); + } + builder.field("error", message); + builder.field("status", status.getStatus()); } - builder.startObject(indexEntry.key); - builder.startObject(Fields.MAPPINGS); - for (ObjectObjectCursor typeEntry : indexEntry.value) { - builder.field(typeEntry.key); - builder.map(typeEntry.value.sourceAsMap()); - } - builder.endObject(); - builder.endObject(); - } + for (final ObjectObjectCursor> indexEntry : mappingsByIndex) { + builder.startObject(indexEntry.key); + { + builder.startObject("mappings"); + { + for (final ObjectObjectCursor typeEntry : indexEntry.value) { + builder.field(typeEntry.key, typeEntry.value.sourceAsMap()); + } + } + builder.endObject(); + } + builder.endObject(); + } + } builder.endObject(); - return new BytesRestResponse(OK, builder); + return new BytesRestResponse(status, builder); } }); } - static class Fields { - static final String MAPPINGS = "mappings"; + private static String toNamesString(final String... names) { + return Arrays.stream(names).collect(Collectors.joining(",")); } + } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetSettingsAction.java index fcad131a359..8ac7f12312a 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetSettingsAction.java @@ -51,16 +51,24 @@ public class RestGetSettingsAction extends BaseRestHandler { final SettingsFilter settingsFilter) { super(settings); this.indexScopedSettings = indexScopedSettings; - controller.registerHandler(GET, "/{index}/_settings/{name}", this); controller.registerHandler(GET, "/_settings/{name}", this); + controller.registerHandler(GET, "/{index}/_settings", this); + controller.registerHandler(GET, "/{index}/_settings/{name}", this); controller.registerHandler(GET, "/{index}/_setting/{name}", this); this.settingsFilter = settingsFilter; } + @Override + public String getName() { + return "get_settings_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final String[] names = request.paramAsStringArrayOrEmptyIfAll("name"); final boolean renderDefaults = request.paramAsBoolean("include_defaults", false); + // This is required so the "flat_settings" parameter counts as consumed + request.paramAsBoolean("flat_settings", false); GetSettingsRequest getSettingsRequest = new GetSettingsRequest() .indices(Strings.splitStringByCommaToArray(request.param("index"))) .indicesOptions(IndicesOptions.fromRequest(request, IndicesOptions.strictExpandOpen())) diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexDeleteAliasesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexDeleteAliasesAction.java index 2068faa83e7..696b7768404 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexDeleteAliasesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexDeleteAliasesAction.java @@ -39,6 +39,11 @@ public class RestIndexDeleteAliasesAction extends BaseRestHandler { controller.registerHandler(DELETE, "/{index}/_aliases/{name}", this); } + @Override + public String getName() { + return 
"index_delete_aliases_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final String[] indices = Strings.splitStringByCommaToArray(request.param("index")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexPutAliasAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexPutAliasAction.java index 2c68c488610..dc95e15802e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexPutAliasAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexPutAliasAction.java @@ -53,6 +53,11 @@ public class RestIndexPutAliasAction extends BaseRestHandler { //we cannot add POST for "/_aliases" because this is the _aliases api already defined in RestIndicesAliasesAction } + @Override + public String getName() { + return "index_put_alias_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { String[] indices = Strings.splitStringByCommaToArray(request.param("index")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesAliasesAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesAliasesAction.java index 58add2b4ea8..b0c8122d4df 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesAliasesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesAliasesAction.java @@ -45,6 +45,11 @@ public class RestIndicesAliasesAction extends BaseRestHandler { }, AliasActions.PARSER, new ParseField("actions")); } + @Override + public String getName() { + return "indices_aliases_action"; + } + public RestIndicesAliasesAction(Settings settings, RestController controller) { super(settings); controller.registerHandler(POST, "/_aliases", this); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesSegmentsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesSegmentsAction.java index 6852b8527c7..a57a404baf2 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesSegmentsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesSegmentsAction.java @@ -46,6 +46,11 @@ public class RestIndicesSegmentsAction extends BaseRestHandler { controller.registerHandler(GET, "/{index}/_segments", this); } + @Override + public String getName() { + return "indices_segments_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { IndicesSegmentsRequest indicesSegmentsRequest = new IndicesSegmentsRequest( diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesShardStoresAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesShardStoresAction.java index c00e6efffb0..a498557ab37 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesShardStoresAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesShardStoresAction.java @@ -49,6 +49,11 @@ public class RestIndicesShardStoresAction extends BaseRestHandler { controller.registerHandler(GET, "/{index}/_shard_stores", this); } + @Override + public String getName() { + return "indices_shard_stores_action"; + } + @Override public 
RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { IndicesShardStoresRequest indicesShardStoresRequest = new IndicesShardStoresRequest( diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsAction.java index ef91ad6accd..5458a60e141 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsAction.java @@ -55,6 +55,11 @@ public class RestIndicesStatsAction extends BaseRestHandler { controller.registerHandler(GET, "/{index}/_stats/{metric}", this); } + @Override + public String getName() { + return "indices_stats_action"; + } + static final Map> METRICS; static { diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestOpenIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestOpenIndexAction.java index 4bde6fdb722..0e6ca47dd1a 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestOpenIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestOpenIndexAction.java @@ -39,6 +39,11 @@ public class RestOpenIndexAction extends BaseRestHandler { controller.registerHandler(RestRequest.Method.POST, "/{index}/_open", this); } + @Override + public String getName() { + return "open_index_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { OpenIndexRequest openIndexRequest = new OpenIndexRequest(Strings.splitStringByCommaToArray(request.param("index"))); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java index b376f3dab30..c96f127ee7a 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutIndexTemplateAction.java @@ -44,6 +44,11 @@ public class RestPutIndexTemplateAction extends BaseRestHandler { controller.registerHandler(RestRequest.Method.POST, "/_template/{name}", this); } + @Override + public String getName() { + return "put_index_template_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest(request.param("name")); @@ -57,7 +62,7 @@ public class RestPutIndexTemplateAction extends BaseRestHandler { putRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putRequest.masterNodeTimeout())); putRequest.create(request.paramAsBoolean("create", false)); putRequest.cause(request.param("cause", "")); - putRequest.source(request.content(), request.getXContentType()); + putRequest.source(request.requiredContent(), request.getXContentType()); return channel -> client.admin().indices().putTemplate(putRequest, new AcknowledgedRestListener<>(channel)); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java index 06fe6792645..8d7e4a9e6c8 100644 --- 
a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutMappingAction.java @@ -60,11 +60,16 @@ public class RestPutMappingAction extends BaseRestHandler { controller.registerHandler(POST, "/_mappings/{type}", this); } + @Override + public String getName() { + return "put_mapping_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { PutMappingRequest putMappingRequest = putMappingRequest(Strings.splitStringByCommaToArray(request.param("index"))); putMappingRequest.type(request.param("type")); - putMappingRequest.source(request.content(), request.getXContentType()); + putMappingRequest.source(request.requiredContent(), request.getXContentType()); putMappingRequest.updateAllTypes(request.paramAsBoolean("update_all_types", false)); putMappingRequest.timeout(request.paramAsTime("timeout", putMappingRequest.timeout())); putMappingRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putMappingRequest.masterNodeTimeout())); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRecoveryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRecoveryAction.java index ab1295e9c60..4516ebeeb56 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRecoveryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRecoveryAction.java @@ -48,6 +48,11 @@ public class RestRecoveryAction extends BaseRestHandler { controller.registerHandler(GET, "/{index}/_recovery", this); } + @Override + public String getName() { + return "recovery_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRefreshAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRefreshAction.java index 7b317ab404e..46dadf9040c 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRefreshAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRefreshAction.java @@ -37,7 +37,6 @@ import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; -import static org.elasticsearch.rest.RestStatus.OK; import static org.elasticsearch.rest.action.RestActions.buildBroadcastShardsHeader; public class RestRefreshAction extends BaseRestHandler { @@ -50,6 +49,11 @@ public class RestRefreshAction extends BaseRestHandler { controller.registerHandler(GET, "/{index}/_refresh", this); } + @Override + public String getName() { + return "refresh_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { RefreshRequest refreshRequest = new RefreshRequest(Strings.splitStringByCommaToArray(request.param("index"))); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java index 59068f277d6..bd31ec3a70c 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java @@ -37,6 
+37,11 @@ public class RestRolloverIndexAction extends BaseRestHandler { controller.registerHandler(RestRequest.Method.POST, "/{index}/_rollover/{new_index}", this); } + @Override + public String getName() { + return "rollover_index_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { RolloverRequest rolloverIndexRequest = new RolloverRequest(request.param("index"), request.param("new_index")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestShrinkIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestShrinkIndexAction.java index 1b63eb7f6d3..10b46be6760 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestShrinkIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestShrinkIndexAction.java @@ -39,6 +39,11 @@ public class RestShrinkIndexAction extends BaseRestHandler { controller.registerHandler(RestRequest.Method.POST, "/{index}/_shrink/{target}", this); } + @Override + public String getName() { + return "shrink_index_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { if (request.param("target") == null) { diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSyncedFlushAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSyncedFlushAction.java index 8a311331729..4824fe4a842 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSyncedFlushAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSyncedFlushAction.java @@ -48,6 +48,11 @@ public class RestSyncedFlushAction extends BaseRestHandler { controller.registerHandler(GET, "/{index}/_flush/synced", this); } + @Override + public String getName() { + return "synced_flush_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { IndicesOptions indicesOptions = IndicesOptions.fromRequest(request, IndicesOptions.lenientExpandOpen()); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestTypesExistsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestTypesExistsAction.java deleted file mode 100644 index dcded02eb0d..00000000000 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestTypesExistsAction.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.rest.action.admin.indices; - -import org.elasticsearch.action.admin.indices.exists.types.TypesExistsRequest; -import org.elasticsearch.action.admin.indices.exists.types.TypesExistsResponse; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.BytesRestResponse; -import org.elasticsearch.rest.RestController; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestResponse; -import org.elasticsearch.rest.action.RestResponseListener; - -import java.io.IOException; - -import static org.elasticsearch.rest.RestRequest.Method.HEAD; -import static org.elasticsearch.rest.RestStatus.NOT_FOUND; -import static org.elasticsearch.rest.RestStatus.OK; - -/** - * Rest api for checking if a type exists. - */ -public class RestTypesExistsAction extends BaseRestHandler { - public RestTypesExistsAction(Settings settings, RestController controller) { - super(settings); - controller.registerWithDeprecatedHandler( - HEAD, "/{index}/_mapping/{type}", this, - HEAD, "/{index}/{type}", deprecationLogger); - } - - @Override - public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - TypesExistsRequest typesExistsRequest = new TypesExistsRequest( - Strings.splitStringByCommaToArray(request.param("index")), Strings.splitStringByCommaToArray(request.param("type")) - ); - typesExistsRequest.local(request.paramAsBoolean("local", typesExistsRequest.local())); - typesExistsRequest.indicesOptions(IndicesOptions.fromRequest(request, typesExistsRequest.indicesOptions())); - return channel -> client.admin().indices().typesExists(typesExistsRequest, new RestResponseListener(channel) { - @Override - public RestResponse buildResponse(TypesExistsResponse response) throws Exception { - if (response.isExists()) { - return new BytesRestResponse(OK, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY); - } else { - return new BytesRestResponse(NOT_FOUND, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY); - } - } - }); - } -} diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpdateSettingsAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpdateSettingsAction.java index 9a168e84dd6..c2b6b09fa3a 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpdateSettingsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpdateSettingsAction.java @@ -45,6 +45,11 @@ public class RestUpdateSettingsAction extends BaseRestHandler { controller.registerHandler(RestRequest.Method.PUT, "/_settings", this); } + @Override + public String getName() { + return "update_settings_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { UpdateSettingsRequest updateSettingsRequest = updateSettingsRequest(Strings.splitStringByCommaToArray(request.param("index"))); @@ -54,18 +59,16 @@ public class RestUpdateSettingsAction extends BaseRestHandler { updateSettingsRequest.indicesOptions(IndicesOptions.fromRequest(request, updateSettingsRequest.indicesOptions())); Map settings = new HashMap<>(); - if (request.hasContent()) { - try (XContentParser parser = request.contentParser()) 
{ - Map bodySettings = parser.map(); - Object innerBodySettings = bodySettings.get("settings"); - // clean up in case the body is wrapped with "settings" : { ... } - if (innerBodySettings instanceof Map) { - @SuppressWarnings("unchecked") - Map innerBodySettingsMap = (Map) innerBodySettings; - settings.putAll(innerBodySettingsMap); - } else { - settings.putAll(bodySettings); - } + try (XContentParser parser = request.contentParser()) { + Map bodySettings = parser.map(); + Object innerBodySettings = bodySettings.get("settings"); + // clean up in case the body is wrapped with "settings" : { ... } + if (innerBodySettings instanceof Map) { + @SuppressWarnings("unchecked") + Map innerBodySettingsMap = (Map) innerBodySettings; + settings.putAll(innerBodySettingsMap); + } else { + settings.putAll(bodySettings); } } updateSettingsRequest.settings(settings); diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpgradeAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpgradeAction.java index 9437ad5eada..1d32c14655a 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpgradeAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpgradeAction.java @@ -55,6 +55,11 @@ public class RestUpgradeAction extends BaseRestHandler { controller.registerHandler(GET, "/{index}/_upgrade", this); } + @Override + public String getName() { + return "upgrade_action"; + } + @Override public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { if (request.method().equals(RestRequest.Method.GET)) { diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java index 0c2374045dd..df1c14c4806 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java @@ -55,6 +55,11 @@ public class RestValidateQueryAction extends BaseRestHandler { controller.registerHandler(POST, "/{index}/{type}/_validate/query", this); } + @Override + public String getName() { + return "validate_query_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { ValidateQueryRequest validateQueryRequest = new ValidateQueryRequest(Strings.splitStringByCommaToArray(request.param("index"))); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java index a783a9c2a82..774a603cb4b 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestAliasAction.java @@ -43,6 +43,11 @@ public class RestAliasAction extends AbstractCatAction { controller.registerHandler(GET, "/_cat/aliases/{alias}", this); } + @Override + public String getName() { + return "cat_alias_action"; + } + @Override protected RestChannelConsumer doCatRequest(final RestRequest request, final NodeClient client) { final GetAliasesRequest getAliasesRequest = request.hasParam("alias") ? 
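The 404 handling added to RestGetAliasesAction and RestGetMappingAction earlier in this patch follows one rule: a requested name is reported as missing only if no existing name equals it and, when the requested name contains a `*` wildcard, no existing name matches it as a simple pattern; only the names that survive that filter flip the response to 404 and add the "error"/"status" fields. A minimal standalone sketch of that rule, using plain java.util collections and a hand-rolled simpleMatch rather than the Elasticsearch Sets/Regex helpers (class and method names here are illustrative only):

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Pattern;

public class MissingNamesSketch {

    /**
     * Returns the requested names that no existing name satisfies. A requested
     * name containing '*' is treated as a wildcard pattern and is satisfied as
     * soon as any existing name matches it.
     */
    static Set<String> missing(List<String> requested, Set<String> existing) {
        Set<String> missing = new TreeSet<>();
        for (String name : requested) {
            if (existing.contains(name)) {
                continue; // exact match
            }
            if (name.contains("*") && existing.stream().anyMatch(candidate -> simpleMatch(name, candidate))) {
                continue; // wildcard match
            }
            missing.add(name);
        }
        return missing;
    }

    // '*' matches any run of characters, everything else is literal
    static boolean simpleMatch(String pattern, String value) {
        String regex = ("\\Q" + pattern + "\\E").replace("*", "\\E.*\\Q");
        return Pattern.matches(regex, value);
    }

    public static void main(String[] args) {
        Set<String> existing = new HashSet<>(Arrays.asList("logs-2017", "alias1"));
        // "logs-*" is satisfied by "logs-2017"; "metrics" matches nothing, so it alone is missing
        System.out.println(missing(Arrays.asList("logs-*", "metrics"), existing)); // [metrics]
    }
}

In the handlers above the same outcome is reached with Sets.sortedDifference(...) followed by a Regex.simpleMatch pass, and the surviving names drive the choice between RestStatus.OK and RestStatus.NOT_FOUND.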
diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestAllocationAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestAllocationAction.java index 0077297cbf3..60b76269ebc 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestAllocationAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestAllocationAction.java @@ -50,6 +50,11 @@ public class RestAllocationAction extends AbstractCatAction { controller.registerHandler(GET, "/_cat/allocation/{nodes}", this); } + @Override + public String getName() { + return "cat_allocation_action"; + } + @Override protected void documentation(StringBuilder sb) { sb.append("/_cat/allocation\n"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java index 7442a7d85ee..d52449ea66f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestCatAction.java @@ -51,6 +51,11 @@ public class RestCatAction extends BaseRestHandler { HELP = sb.toString(); } + @Override + public String getName() { + return "cat_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { return channel -> channel.sendResponse(new BytesRestResponse(RestStatus.OK, HELP)); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java index bbc7ffa250c..840f4de5ea7 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java @@ -45,6 +45,11 @@ public class RestCountAction extends AbstractCatAction { restController.registerHandler(GET, "/_cat/count/{index}", this); } + @Override + public String getName() { + return "cat_count_action"; + } + @Override protected void documentation(StringBuilder sb) { sb.append("/_cat/count\n"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestFielddataAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestFielddataAction.java index 4156ea46192..120ea603271 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestFielddataAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestFielddataAction.java @@ -45,6 +45,11 @@ public class RestFielddataAction extends AbstractCatAction { controller.registerHandler(GET, "/_cat/fielddata/{fields}", this); } + @Override + public String getName() { + return "cat_fielddata_action"; + } + @Override protected RestChannelConsumer doCatRequest(final RestRequest request, final NodeClient client) { final NodesStatsRequest nodesStatsRequest = new NodesStatsRequest("data:true"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestHealthAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestHealthAction.java index 2bc1c11b9a1..d7e72f207ff 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestHealthAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestHealthAction.java @@ -39,6 +39,11 @@ public class RestHealthAction extends AbstractCatAction { controller.registerHandler(GET, "/_cat/health", this); } + @Override + public String getName() { + return "cat_health_action"; + } + @Override protected void documentation(StringBuilder sb) { sb.append("/_cat/health\n"); 
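Several handlers in this patch (put index template and put mapping above, bulk and document index further down) switch from request.content() to request.requiredContent(). Assuming the intended difference is only that requiredContent() rejects a request without a body up front instead of handing an empty payload to the parser, this is a hedged sketch of that guard; the class below is illustrative and is not the RestRequest API:

import java.nio.charset.StandardCharsets;

public class RequiredContentSketch {

    private final byte[] body; // null or empty means the request had no body

    RequiredContentSketch(byte[] body) {
        this.body = body;
    }

    /** Mirrors content(): hands back whatever was sent, even nothing. */
    byte[] content() {
        return body == null ? new byte[0] : body;
    }

    /** Mirrors requiredContent(): fails fast when the request has no body. */
    byte[] requiredContent() {
        if (body == null || body.length == 0) {
            throw new IllegalStateException("request body is required");
        }
        return body;
    }

    public static void main(String[] args) {
        RequiredContentSketch withBody = new RequiredContentSketch("{\"field\":1}".getBytes(StandardCharsets.UTF_8));
        System.out.println(withBody.requiredContent().length); // 11

        RequiredContentSketch noBody = new RequiredContentSketch(null);
        try {
            noBody.requiredContent();
        } catch (IllegalStateException e) {
            System.out.println(e.getMessage()); // request body is required
        }
    }
}

Under that assumption, a missing body becomes an immediate client error rather than an empty put-mapping, put-template, bulk, or index source.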
diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java index 46bbc4652cf..6bcb073d110 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java @@ -66,6 +66,11 @@ public class RestIndicesAction extends AbstractCatAction { controller.registerHandler(GET, "/_cat/indices/{index}", this); } + @Override + public String getName() { + return "cat_indices_action"; + } + @Override protected void documentation(StringBuilder sb) { sb.append("/_cat/indices\n"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestMasterAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestMasterAction.java index db5c1149cc0..d1753aa868f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestMasterAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestMasterAction.java @@ -39,6 +39,11 @@ public class RestMasterAction extends AbstractCatAction { controller.registerHandler(GET, "/_cat/master", this); } + @Override + public String getName() { + return "cat_master_action"; + } + @Override protected void documentation(StringBuilder sb) { sb.append("/_cat/master\n"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodeAttrsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodeAttrsAction.java index 6d9c5bd57e5..2e396ac59b7 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodeAttrsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodeAttrsAction.java @@ -46,6 +46,11 @@ public class RestNodeAttrsAction extends AbstractCatAction { controller.registerHandler(GET, "/_cat/nodeattrs", this); } + @Override + public String getName() { + return "cat_node_attrs_action"; + } + @Override protected void documentation(StringBuilder sb) { sb.append("/_cat/nodeattrs\n"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java index faa07e7ddaa..104ffe420ab 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestNodesAction.java @@ -71,6 +71,11 @@ public class RestNodesAction extends AbstractCatAction { controller.registerHandler(GET, "/_cat/nodes", this); } + @Override + public String getName() { + return "cat_nodes_action"; + } + @Override protected void documentation(StringBuilder sb) { sb.append("/_cat/nodes\n"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestPendingClusterTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestPendingClusterTasksAction.java index a9f044c1446..21a3c0c3c82 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestPendingClusterTasksAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestPendingClusterTasksAction.java @@ -38,6 +38,11 @@ public class RestPendingClusterTasksAction extends AbstractCatAction { controller.registerHandler(GET, "/_cat/pending_tasks", this); } + @Override + public String getName() { + return "cat_pending_cluster_tasks_action"; + } + @Override protected void documentation(StringBuilder sb) { sb.append("/_cat/pending_tasks\n"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java 
b/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java index 7851c15b32f..f61a5fdeb7e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestPluginsAction.java @@ -44,6 +44,11 @@ public class RestPluginsAction extends AbstractCatAction { controller.registerHandler(GET, "/_cat/plugins", this); } + @Override + public String getName() { + return "cat_plugins_action"; + } + @Override protected void documentation(StringBuilder sb) { sb.append("/_cat/plugins\n"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestRecoveryAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestRecoveryAction.java index e2e831f890d..a66741c2d94 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestRecoveryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestRecoveryAction.java @@ -54,6 +54,11 @@ public class RestRecoveryAction extends AbstractCatAction { restController.registerHandler(GET, "/_cat/recovery/{index}", this); } + @Override + public String getName() { + return "cat_recovery_action"; + } + @Override protected void documentation(StringBuilder sb) { sb.append("/_cat/recovery\n"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestRepositoriesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestRepositoriesAction.java index 631c0305049..42cb904f2f0 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestRepositoriesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestRepositoriesAction.java @@ -58,6 +58,11 @@ public class RestRepositoriesAction extends AbstractCatAction { }); } + @Override + public String getName() { + return "cat_repositories_action"; + } + @Override protected void documentation(StringBuilder sb) { sb.append("/_cat/repositories\n"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestSegmentsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestSegmentsAction.java index 48983ab836b..0d84549f145 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestSegmentsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestSegmentsAction.java @@ -50,6 +50,11 @@ public class RestSegmentsAction extends AbstractCatAction { controller.registerHandler(GET, "/_cat/segments/{index}", this); } + @Override + public String getName() { + return "cat_segments_action"; + } + @Override protected RestChannelConsumer doCatRequest(final RestRequest request, final NodeClient client) { final String[] indices = Strings.splitStringByCommaToArray(request.param("index")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java index e7451001238..d985c86d9b3 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java @@ -52,6 +52,11 @@ public class RestShardsAction extends AbstractCatAction { controller.registerHandler(GET, "/_cat/shards/{index}", this); } + @Override + public String getName() { + return "cat_shards_action"; + } + @Override protected void documentation(StringBuilder sb) { sb.append("/_cat/shards\n"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java 
index 54337e5e143..6d44e9aa856 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestSnapshotAction.java @@ -49,6 +49,11 @@ public class RestSnapshotAction extends AbstractCatAction { controller.registerHandler(GET, "/_cat/snapshots/{repository}", this); } + @Override + public String getName() { + return "cat_snapshot_action"; + } + @Override protected RestChannelConsumer doCatRequest(final RestRequest request, NodeClient client) { GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest() diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestTasksAction.java index 5a49feb9c4b..c0ebddc2908 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestTasksAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestTasksAction.java @@ -55,6 +55,11 @@ public class RestTasksAction extends AbstractCatAction { this.nodesInCluster = nodesInCluster; } + @Override + public String getName() { + return "cat_tasks_action"; + } + @Override protected void documentation(StringBuilder sb) { sb.append("/_cat/tasks\n"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestTemplatesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestTemplatesAction.java index b624a9cea18..b6b63348882 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestTemplatesAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestTemplatesAction.java @@ -43,6 +43,11 @@ public class RestTemplatesAction extends AbstractCatAction { controller.registerHandler(GET, "/_cat/templates/{name}", this); } + @Override + public String getName() { + return "cat_templates_action"; + } + @Override protected void documentation(StringBuilder sb) { sb.append("/_cat/templates\n"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestThreadPoolAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestThreadPoolAction.java index 74b2a261713..0e0f4fe8c15 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestThreadPoolAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestThreadPoolAction.java @@ -57,6 +57,11 @@ public class RestThreadPoolAction extends AbstractCatAction { controller.registerHandler(GET, "/_cat/thread_pool/{thread_pool_patterns}", this); } + @Override + public String getName() { + return "cat_threadpool_action"; + } + @Override protected void documentation(StringBuilder sb) { sb.append("/_cat/thread_pool\n"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java index d6af84d9472..671917c380c 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java @@ -67,6 +67,11 @@ public class RestBulkAction extends BaseRestHandler { this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings); } + @Override + public String getName() { + return "bulk_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { BulkRequest bulkRequest = Requests.bulkRequest(); @@ -86,7 +91,7 @@ public class RestBulkAction extends BaseRestHandler { } bulkRequest.timeout(request.paramAsTime("timeout", 
BulkShardRequest.DEFAULT_TIMEOUT)); bulkRequest.setRefreshPolicy(request.param("refresh")); - bulkRequest.add(request.content(), defaultIndex, defaultType, defaultRouting, defaultFields, + bulkRequest.add(request.requiredContent(), defaultIndex, defaultType, defaultRouting, defaultFields, defaultFetchSourceContext, defaultPipeline, null, allowExplicitIndex, request.getXContentType()); return channel -> client.bulk(bulkRequest, new RestStatusToXContentListener<>(channel)); diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestCountAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestCountAction.java index 004fab81b47..ed9a31aebd5 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestCountAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestCountAction.java @@ -54,6 +54,11 @@ public class RestCountAction extends BaseRestHandler { controller.registerHandler(GET, "/{index}/{type}/_count", this); } + @Override + public String getName() { + return "count_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { SearchRequest countRequest = new SearchRequest(Strings.splitStringByCommaToArray(request.param("index"))); diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java index 832228bcd8a..0db997c1dae 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestDeleteAction.java @@ -40,6 +40,11 @@ public class RestDeleteAction extends BaseRestHandler { controller.registerHandler(DELETE, "/{index}/{type}/{id}", this); } + @Override + public String getName() { + return "document_delete_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { DeleteRequest deleteRequest = new DeleteRequest(request.param("index"), request.param("type"), request.param("id")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java index e6a56f2c429..857af483325 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java @@ -48,6 +48,11 @@ public class RestGetAction extends BaseRestHandler { controller.registerHandler(HEAD, "/{index}/{type}/{id}", this); } + @Override + public String getName() { + return "docuemnt_get_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final GetRequest getRequest = new GetRequest(request.param("index"), request.param("type"), request.param("id")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java index 341c1ddc917..fead47db744 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java @@ -51,6 +51,11 @@ public class RestGetSourceAction extends BaseRestHandler { controller.registerHandler(HEAD, "/{index}/{type}/{id}/_source", this); } + @Override + public 
String getName() { + return "document_get_source_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final GetRequest getRequest = new GetRequest(request.param("index"), request.param("type"), request.param("id")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java index a32848841a4..68b09a4d867 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java @@ -46,11 +46,21 @@ public class RestIndexAction extends BaseRestHandler { controller.registerHandler(POST, "/{index}/{type}/{id}/_create", createHandler); } + @Override + public String getName() { + return "document_index_action"; + } + final class CreateHandler extends BaseRestHandler { protected CreateHandler(Settings settings) { super(settings); } + @Override + public String getName() { + return "document_create_action"; + } + @Override public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient client) throws IOException { request.params().put("op_type", "create"); @@ -64,7 +74,7 @@ public class RestIndexAction extends BaseRestHandler { indexRequest.routing(request.param("routing")); indexRequest.parent(request.param("parent")); indexRequest.setPipeline(request.param("pipeline")); - indexRequest.source(request.content(), request.getXContentType()); + indexRequest.source(request.requiredContent(), request.getXContentType()); indexRequest.timeout(request.paramAsTime("timeout", IndexRequest.DEFAULT_TIMEOUT)); indexRequest.setRefreshPolicy(request.param("refresh")); indexRequest.version(RestActions.parseVersion(request)); diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java index cba8fafbda3..923a466d8ec 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestMultiGetAction.java @@ -51,6 +51,11 @@ public class RestMultiGetAction extends BaseRestHandler { this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings); } + @Override + public String getName() { + return "document_mget_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { MultiGetRequest multiGetRequest = new MultiGetRequest(); diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java index 605c8654f50..f8dc2c01670 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestMultiTermVectorsAction.java @@ -46,6 +46,11 @@ public class RestMultiTermVectorsAction extends BaseRestHandler { controller.registerHandler(POST, "/{index}/{type}/_mtermvectors", this); } + @Override + public String getName() { + return "document_multi_term_vectors_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { MultiTermVectorsRequest multiTermVectorsRequest = new MultiTermVectorsRequest(); diff --git 
a/core/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java index a649e5eff47..a12b7ce16a7 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestTermVectorsAction.java @@ -57,6 +57,11 @@ public class RestTermVectorsAction extends BaseRestHandler { controller.registerHandler(POST, "/{index}/{type}/{id}/_termvector", this); } + @Override + public String getName() { + return "document_term_vectors_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { TermVectorsRequest termVectorsRequest = new TermVectorsRequest(request.param("index"), request.param("type"), request.param("id")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java index 10a02d75bfd..3204ce68abb 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java @@ -48,6 +48,11 @@ public class RestUpdateAction extends BaseRestHandler { controller.registerHandler(POST, "/{index}/{type}/{id}/_update", this); } + @Override + public String getName() { + return "document_update_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { UpdateRequest updateRequest = new UpdateRequest(request.param("index"), request.param("type"), request.param("id")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java index b776d2475ce..5ad478deea6 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java @@ -35,6 +35,11 @@ public class RestDeletePipelineAction extends BaseRestHandler { controller.registerHandler(RestRequest.Method.DELETE, "/_ingest/pipeline/{id}", this); } + @Override + public String getName() { + return "ingest_delete_pipeline_action"; + } + @Override public RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { DeletePipelineRequest request = new DeletePipelineRequest(restRequest.param("id")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java index c8facf7b4cc..92330d5bc45 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java @@ -37,6 +37,11 @@ public class RestGetPipelineAction extends BaseRestHandler { controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/{id}", this); } + @Override + public String getName() { + return "ingest_get_pipeline_action"; + } + @Override public RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { GetPipelineRequest request = new GetPipelineRequest(Strings.splitStringByCommaToArray(restRequest.param("id"))); diff --git 
a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java index 2496c9b4a24..aa3149fc13d 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java @@ -39,6 +39,11 @@ public class RestPutPipelineAction extends BaseRestHandler { controller.registerHandler(RestRequest.Method.PUT, "/_ingest/pipeline/{id}", this); } + @Override + public String getName() { + return "ingest_put_pipeline_action"; + } + @Override public RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { Tuple sourceTuple = restRequest.contentOrSourceParam(); diff --git a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java index 9dbe1808a8c..c5f01e25a9c 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulatePipelineAction.java @@ -41,6 +41,11 @@ public class RestSimulatePipelineAction extends BaseRestHandler { controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/_simulate", this); } + @Override + public String getName() { + return "ingest_simulate_pipeline_action"; + } + @Override public RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { Tuple sourceTuple = restRequest.contentOrSourceParam(); diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java index 80d833ed311..dc2474c6533 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestClearScrollAction.java @@ -41,6 +41,11 @@ public class RestClearScrollAction extends BaseRestHandler { controller.registerHandler(DELETE, "/_search/scroll/{scroll_id}", this); } + @Override + public String getName() { + return "clear_scroll_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { String scrollIds = request.param("scroll_id"); diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java index 7339718c28b..b0adc27f447 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java @@ -54,6 +54,11 @@ public class RestExplainAction extends BaseRestHandler { controller.registerHandler(POST, "/{index}/{type}/{id}/_explain", this); } + @Override + public String getName() { + return "explain_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final ExplainRequest explainRequest = new ExplainRequest(request.param("index"), request.param("type"), request.param("id")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index f27ba5018e4..3857a4c5b42 100644 --- 
a/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -69,6 +69,11 @@ public class RestMultiSearchAction extends BaseRestHandler { this.allowExplicitIndex = MULTI_ALLOW_EXPLICIT_INDEX.get(settings); } + @Override + public String getName() { + return "msearch_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { MultiSearchRequest multiSearchRequest = parseRequest(request, allowExplicitIndex); diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 89e2f23861c..e08fd0c6528 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -66,6 +66,11 @@ public class RestSearchAction extends BaseRestHandler { controller.registerHandler(POST, "/{index}/{type}/_search", this); } + @Override + public String getName() { + return "search_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { SearchRequest searchRequest = new SearchRequest(); @@ -192,6 +197,10 @@ public class RestSearchAction extends BaseRestHandler { searchSourceBuilder.trackScores(request.paramAsBoolean("track_scores", false)); } + if (request.hasParam("track_total_hits")) { + searchSourceBuilder.trackTotalHits(request.paramAsBoolean("track_total_hits", true)); + } + String sSorts = request.param("sort"); if (sSorts != null) { String[] sorts = Strings.splitStringByCommaToArray(sSorts); diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java index 59b7c660fa1..bc3b0ccb56a 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java @@ -44,6 +44,11 @@ public class RestSearchScrollAction extends BaseRestHandler { controller.registerHandler(POST, "/_search/scroll/{scroll_id}", this); } + @Override + public String getName() { + return "search_scroll_action"; + } + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { String scrollId = request.param("scroll_id"); diff --git a/core/src/main/java/org/elasticsearch/script/ExplainableSearchScript.java b/core/src/main/java/org/elasticsearch/script/ExplainableSearchScript.java index 9284b36c388..c3970212751 100644 --- a/core/src/main/java/org/elasticsearch/script/ExplainableSearchScript.java +++ b/core/src/main/java/org/elasticsearch/script/ExplainableSearchScript.java @@ -47,7 +47,7 @@ import java.io.IOException; * This is currently not used inside elasticsearch but it is used, see for example here: * https://github.com/elastic/elasticsearch/issues/8561 */ -public interface ExplainableSearchScript extends LeafSearchScript { +public interface ExplainableSearchScript { /** * Build the explanation of the current document being scored diff --git a/core/src/main/java/org/elasticsearch/script/LeafSearchScript.java b/core/src/main/java/org/elasticsearch/script/LeafSearchScript.java deleted file mode 100644 index 762168d3c90..00000000000 --- 
a/core/src/main/java/org/elasticsearch/script/LeafSearchScript.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.script; - -import org.apache.lucene.search.Scorer; -import org.elasticsearch.common.lucene.ScorerAware; - -import java.util.Map; - -/** - * A per-segment {@link SearchScript}. - * - * This is effectively a functional interface, requiring at least implementing {@link #runAsDouble()}. - */ -public interface LeafSearchScript extends ScorerAware, ExecutableScript { - - /** - * Set the document this script will process next. - */ - default void setDocument(int doc) {} - - @Override - default void setScorer(Scorer scorer) {} - - /** - * Set the source for the current document. - */ - default void setSource(Map source) {} - - /** - * Sets per-document aggregation {@code _value}. - *

- * The default implementation just calls {@code setNextVar("_value", value)} but - * some engines might want to handle this differently for better performance. - *

- * @param value per-document value, typically a String, Long, or Double - */ - default void setNextAggregationValue(Object value) { - setNextVar("_value", value); - } - - @Override - default void setNextVar(String field, Object value) {} - - /** - * Return the result as a long. This is used by aggregation scripts over long fields. - */ - default long runAsLong() { - throw new UnsupportedOperationException("runAsLong is not implemented"); - } - - @Override - default Object run() { - return runAsDouble(); - } - - /** - * Return the result as a double. This is the main use case of search script, used for document scoring. - */ - double runAsDouble(); -} diff --git a/core/src/main/java/org/elasticsearch/script/Script.java b/core/src/main/java/org/elasticsearch/script/Script.java index 994ac13d6c9..eee228eb815 100644 --- a/core/src/main/java/org/elasticsearch/script/Script.java +++ b/core/src/main/java/org/elasticsearch/script/Script.java @@ -113,6 +113,11 @@ public final class Script implements ToXContentObject, Writeable { */ public static final ParseField SCRIPT_PARSE_FIELD = new ParseField("script"); + /** + * Standard {@link ParseField} for source on the inner level. + */ + public static final ParseField SOURCE_PARSE_FIELD = new ParseField("source"); + /** * Standard {@link ParseField} for lang on the inner level. */ @@ -218,9 +223,7 @@ public final class Script implements ToXContentObject, Writeable { */ private Script build(String defaultLang) { if (type == null) { - throw new IllegalArgumentException( - "must specify either code for an [" + ScriptType.INLINE.getParseField().getPreferredName() + "] script " + - "or an id for a [" + ScriptType.STORED.getParseField().getPreferredName() + "] script"); + throw new IllegalArgumentException("must specify either [source] for an inline script or [id] for a stored script"); } if (type == ScriptType.INLINE) { @@ -299,7 +302,10 @@ public final class Script implements ToXContentObject, Writeable { * * {@code * { - * "" : "", + * // Exactly one of "id" or "source" must be specified + * "id" : "", + * // OR + * "source": "", * "lang" : "", * "options" : { * "option0" : "", @@ -317,7 +323,7 @@ public final class Script implements ToXContentObject, Writeable { * Example: * {@code * { - * "inline" : "return Math.log(doc.popularity) * params.multiplier", + * "source" : "return Math.log(doc.popularity) * params.multiplier", * "lang" : "painless", * "params" : { * "multiplier" : 100.0 @@ -330,7 +336,7 @@ public final class Script implements ToXContentObject, Writeable { * * {@code * { - * "inline" : { "query" : ... }, + * "source" : { "query" : ... }, * "lang" : "", * "options" : { * "option0" : "", @@ -567,7 +573,7 @@ public final class Script implements ToXContentObject, Writeable { * * {@code * { - * "" : "", + * "<(id, source)>" : "", * "lang" : "", * "options" : { * "option0" : "", @@ -585,7 +591,7 @@ public final class Script implements ToXContentObject, Writeable { * Example: * {@code * { - * "inline" : "return Math.log(doc.popularity) * params.multiplier;", + * "source" : "return Math.log(doc.popularity) * params.multiplier;", * "lang" : "painless", * "params" : { * "multiplier" : 100.0 @@ -600,7 +606,7 @@ public final class Script implements ToXContentObject, Writeable { * * {@code * { - * "inline" : { "query" : ... }, + * "source" : { "query" : ... }, * "lang" : "", * "options" : { * "option0" : "", @@ -621,10 +627,14 @@ public final class Script implements ToXContentObject, Writeable { String contentType = options == null ? 
null : options.get(CONTENT_TYPE_OPTION); - if (type == ScriptType.INLINE && contentType != null && builder.contentType().mediaType().equals(contentType)) { - builder.rawField(type.getParseField().getPreferredName(), new BytesArray(idOrCode)); + if (type == ScriptType.INLINE) { + if (contentType != null && builder.contentType().mediaType().equals(contentType)) { + builder.rawField(SOURCE_PARSE_FIELD.getPreferredName(), new BytesArray(idOrCode)); + } else { + builder.field(SOURCE_PARSE_FIELD.getPreferredName(), idOrCode); + } } else { - builder.field(type.getParseField().getPreferredName(), idOrCode); + builder.field("id", idOrCode); } if (lang != null) { diff --git a/core/src/main/java/org/elasticsearch/script/ScriptContext.java b/core/src/main/java/org/elasticsearch/script/ScriptContext.java index 420257e0e50..3f931f659ed 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptContext.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptContext.java @@ -27,27 +27,37 @@ import java.lang.reflect.Method; * A {@link ScriptContext} contains the information related to a single use case and the interfaces * and methods necessary for a {@link ScriptEngine} to implement. *

- * There are two related classes which must be supplied to construct a {@link ScriptContext}. + * There are at least two (and optionally a third) related classes which must be defined. *

- * The FactoryType is a factory class for constructing instances of a script. The - * {@link ScriptService} returns an instance of FactoryType when compiling a script. This class - * must be stateless so it is cacheable by the {@link ScriptService}. It must have an abstract method - * named {@code newInstance} which {@link ScriptEngine} implementations will define. - *

- * The InstanceType is a class returned by the {@code newInstance} method of the - * FactoryType. It is an instance of a script and may be stateful. Instances of + * The InstanceType is a class which users of the script api call to execute a script. It + * may be stateful. Instances of * the InstanceType may be executed multiple times by a caller with different arguments. This * class must have an abstract method named {@code execute} which {@link ScriptEngine} implementations * will define. + *

+ * The FactoryType is a factory class returned by the {@link ScriptService} when compiling + * a script. This class must be stateless so it is cacheable by the {@link ScriptService}. It must + * have one of the following: + *

+ * <ul>
+ *     <li>An abstract method named {@code newInstance} which returns an instance of InstanceType</li>
+ *     <li>An abstract method named {@code newFactory} which returns an instance of StatefulFactoryType</li>
+ * </ul>
+ *

+ * The StatefulFactoryType is an optional class which allows a stateful factory from the + * stateless factory type required by the {@link ScriptService}. If defined, the StatefulFactoryType + * must have a method named {@code newInstance} which returns an instance of InstanceType. */ public final class ScriptContext { /** A unique identifier for this context. */ public final String name; - /** A factory class for constructing instances of a script. */ + /** A factory class for constructing script or stateful factory instances. */ public final Class factoryClazz; + /** A factory class for construct script instances. */ + public final Class statefulFactoryClazz; + /** A class that is an instance of a script. */ public final Class instanceClazz; @@ -55,20 +65,38 @@ public final class ScriptContext { public ScriptContext(String name, Class factoryClazz) { this.name = name; this.factoryClazz = factoryClazz; - Method newInstanceMethod = null; - for (Method method : factoryClazz.getMethods()) { - if (method.getName().equals("newInstance")) { - if (newInstanceMethod != null) { - throw new IllegalArgumentException("Cannot have multiple newInstance methods on FactoryType class [" - + factoryClazz.getName() + "] for script context [" + name + "]"); - } - newInstanceMethod = method; + Method newInstanceMethod = findMethod("FactoryType", factoryClazz, "newInstance"); + Method newFactoryMethod = findMethod("FactoryType", factoryClazz, "newFactory"); + if (newFactoryMethod != null) { + assert newInstanceMethod == null; + statefulFactoryClazz = newFactoryMethod.getReturnType(); + newInstanceMethod = findMethod("StatefulFactoryType", statefulFactoryClazz, "newInstance"); + if (newInstanceMethod == null) { + throw new IllegalArgumentException("Could not find method newInstance StatefulFactoryType class [" + + statefulFactoryClazz.getName() + "] for script context [" + name + "]"); } - } - if (newInstanceMethod == null) { - throw new IllegalArgumentException("Could not find method newInstance on FactoryType class [" + } else if (newInstanceMethod != null) { + assert newFactoryMethod == null; + statefulFactoryClazz = null; + } else { + throw new IllegalArgumentException("Could not find method newInstance or method newFactory on FactoryType class [" + factoryClazz.getName() + "] for script context [" + name + "]"); } instanceClazz = newInstanceMethod.getReturnType(); } + + /** Returns a method with the given name, or throws an exception if multiple are found. 
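To make the InstanceType / FactoryType contract described in the ScriptContext Javadoc above concrete, here is a minimal hypothetical context. The NumberScript class and the "number" context name are invented for illustration and are not part of this change:

```java
import org.elasticsearch.script.ScriptContext;

// Hypothetical InstanceType/FactoryType pair wired into a ScriptContext.
// The ScriptContext constructor discovers newInstance reflectively and infers
// instanceClazz from its return type, as shown in the constructor above.
public abstract class NumberScript {

    /** InstanceType: possibly stateful, this is what callers actually execute. */
    public abstract double execute();

    /** FactoryType: must be stateless so the ScriptService can cache it. */
    public interface Factory {
        NumberScript newInstance();
    }

    public static final ScriptContext<Factory> CONTEXT = new ScriptContext<>("number", Factory.class);
}
```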
*/ + private Method findMethod(String type, Class clazz, String methodName) { + Method foundMethod = null; + for (Method method : clazz.getMethods()) { + if (method.getName().equals(methodName)) { + if (foundMethod != null) { + throw new IllegalArgumentException("Cannot have multiple " + methodName + " methods on " + type + " class [" + + clazz.getName() + "] for script context [" + name + "]"); + } + foundMethod = method; + } + } + return foundMethod; + } } diff --git a/core/src/main/java/org/elasticsearch/script/ScriptMetaData.java b/core/src/main/java/org/elasticsearch/script/ScriptMetaData.java index f69302ce014..63b5e2e46ab 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptMetaData.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptMetaData.java @@ -336,7 +336,7 @@ public final class ScriptMetaData implements MetaData.Custom, Writeable, ToXCont throw new IllegalArgumentException("illegal stored script id [" + id + "], does not contain lang"); } else { source = new StoredScriptSource(in); - source = new StoredScriptSource(id.substring(0, split), source.getCode(), Collections.emptyMap()); + source = new StoredScriptSource(id.substring(0, split), source.getSource(), Collections.emptyMap()); } // Version 5.3+ can just be parsed normally using StoredScriptSource. } else { diff --git a/core/src/main/java/org/elasticsearch/script/ScriptModule.java b/core/src/main/java/org/elasticsearch/script/ScriptModule.java index cc098c14359..ffec23568f4 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -19,7 +19,6 @@ package org.elasticsearch.script; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -45,7 +44,8 @@ public class ScriptModule { ExecutableScript.CONTEXT, ExecutableScript.AGGS_CONTEXT, ExecutableScript.UPDATE_CONTEXT, - ExecutableScript.INGEST_CONTEXT + ExecutableScript.INGEST_CONTEXT, + TemplateScript.CONTEXT ).collect(Collectors.toMap(c -> c.name, Function.identity())); } diff --git a/core/src/main/java/org/elasticsearch/script/ScriptService.java b/core/src/main/java/org/elasticsearch/script/ScriptService.java index 2634cbda8dd..a64d13c43ca 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java @@ -45,7 +45,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.template.CompiledTemplate; import java.io.Closeable; import java.io.IOException; @@ -257,7 +256,7 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust // the script has been updated since the last compilation StoredScriptSource source = getScriptFromClusterState(id, lang); lang = source.getLang(); - idOrCode = source.getCode(); + idOrCode = source.getSource(); options = source.getOptions(); } @@ -330,12 +329,6 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust } } - /** Compiles a template. Note this will be moved to a separate TemplateService in the future. 
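For a context that needs per-use state, the optional newFactory indirection takes the place of newInstance on the factory class; the findMethod helper above locates whichever of the two is present. Again a hypothetical sketch, with all names invented:

```java
import org.elasticsearch.script.ScriptContext;

// Hypothetical use of the optional StatefulFactoryType: the stateless FactoryType
// exposes newFactory, and its return type (the stateful factory) exposes newInstance.
public abstract class CollectorScript {

    public abstract void execute();

    /** StatefulFactoryType: may hold per-search state. */
    public interface LeafFactory {
        CollectorScript newInstance();
    }

    /** FactoryType: stateless and cacheable by the ScriptService. */
    public interface Factory {
        LeafFactory newFactory(Object perSearchState);
    }

    public static final ScriptContext<Factory> CONTEXT = new ScriptContext<>("collector", Factory.class);
}
```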
*/ - public CompiledTemplate compileTemplate(Script script, ScriptContext scriptContext) { - ExecutableScript.Factory factory = compile(script, scriptContext); - return params -> (String) factory.newInstance(params).run(); - } - /** * Check whether there have been too many compilations within the last minute, throwing a circuit breaking exception if so. * This is a variant of the token bucket algorithm: https://en.wikipedia.org/wiki/Token_bucket @@ -431,14 +424,12 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust } else if (isAnyContextEnabled() == false) { throw new IllegalArgumentException( "cannot put [" + ScriptType.STORED + "] script, no script contexts are enabled"); - } else { - // TODO: executable context here is just a placeholder, replace with optional context name passed into PUT stored script req - Object compiled = scriptEngine.compile(request.id(), source.getCode(), ExecutableScript.CONTEXT, Collections.emptyMap()); - - if (compiled == null) { - throw new IllegalArgumentException("failed to parse/compile stored script [" + request.id() + "]" + - (source.getCode() == null ? "" : " using code [" + source.getCode() + "]")); + } else if (request.context() != null) { + ScriptContext context = contexts.get(request.context()); + if (context == null) { + throw new IllegalArgumentException("Unknown context [" + request.context() + "]"); } + scriptEngine.compile(request.id(), source.getSource(), context, Collections.emptyMap()); } } catch (ScriptException good) { throw good; diff --git a/core/src/main/java/org/elasticsearch/script/ScriptType.java b/core/src/main/java/org/elasticsearch/script/ScriptType.java index c076ccfd88c..2fdf283c57f 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptType.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptType.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import java.io.IOException; +import java.util.Locale; /** * ScriptType represents the way a script is stored and retrieved from the {@link ScriptService}. @@ -40,7 +41,7 @@ public enum ScriptType implements Writeable { * (Groovy and others), but can be overridden by the specific {@link ScriptEngine} * if the language is naturally secure (Painless, Mustache, and Expressions). */ - INLINE ( 0 , new ParseField("inline") , false ), + INLINE ( 0 , new ParseField("source", "inline") , false ), /** * STORED scripts are saved as part of the {@link org.elasticsearch.cluster.ClusterState} @@ -49,7 +50,7 @@ public enum ScriptType implements Writeable { * (Groovy and others), but can be overridden by the specific {@link ScriptEngine} * if the language is naturally secure (Painless, Mustache, and Expressions). */ - STORED ( 1 , new ParseField("stored", "id") , false ); + STORED ( 1 , new ParseField("id", "stored") , false ); /** * Reads an int from the input stream and converts it to a {@link ScriptType}. @@ -101,7 +102,7 @@ public enum ScriptType implements Writeable { * @return The unique name for this {@link ScriptType} based on the {@link ParseField}. 
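With the ParseField changes above, "source" becomes the preferred key for inline scripts and "id" for stored scripts, while "inline" and "stored" remain accepted for backwards compatibility. Note that ScriptType.getName() now derives from the enum constant, so INLINE still reports "inline" even though its preferred parse field is "source". A hedged sketch of building an inline script programmatically, assuming the existing four-argument Script constructor; the script body and parameter are invented:

```java
import java.util.Collections;

import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;

class ScriptNamingExample {
    // Inline script: Script#toXContent now writes the body under "source"
    // (previously "inline"). Stored scripts are written under "id" instead.
    static final Script INLINE = new Script(ScriptType.INLINE, "painless",
            "doc['popularity'].value * params.multiplier",
            Collections.singletonMap("multiplier", 100.0));
}
```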
*/ public String getName() { - return parseField.getPreferredName(); + return name().toLowerCase(Locale.ROOT); } /** diff --git a/core/src/main/java/org/elasticsearch/script/SearchScript.java b/core/src/main/java/org/elasticsearch/script/SearchScript.java index bbee2910c88..4c50147b22c 100644 --- a/core/src/main/java/org/elasticsearch/script/SearchScript.java +++ b/core/src/main/java/org/elasticsearch/script/SearchScript.java @@ -19,30 +19,146 @@ package org.elasticsearch.script; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Scorer; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.lucene.ScorerAware; +import org.elasticsearch.search.lookup.LeafDocLookup; +import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; import java.util.Map; /** - * A search script. + * A generic script used for per document use cases. + * + * Using a {@link SearchScript} works as follows: + *

+ * <ol>
+ *     <li>Construct a {@link Factory} using {@link ScriptService#compile(Script, ScriptContext)}</li>
+ *     <li>Construct a {@link LeafFactory} for an index using {@link Factory#newFactory(Map, SearchLookup)}</li>
+ *     <li>Construct a {@link SearchScript} for a Lucene segment using {@link LeafFactory#newInstance(LeafReaderContext)}</li>
+ *     <li>Call {@link #setDocument(int)} to indicate which document in the segment the script should be run for next</li>
+ *     <li>Call one of the {@code run} methods: {@link #run()}, {@link #runAsDouble()}, or {@link #runAsLong()}</li>
+ * </ol>
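The lifecycle listed above, written out as a straight-line sketch. It assumes a ScriptService, a compiled Script, a SearchLookup and an IndexReader are already available and only illustrates the call sequence:

```java
import java.io.IOException;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.lookup.SearchLookup;

class SearchScriptLifecycle {
    /** Runs the script once per segment, against the first document of each segment. */
    static double runOnEachSegment(ScriptService scriptService, Script script,
                                   SearchLookup lookup, IndexReader reader) throws IOException {
        SearchScript.Factory factory = scriptService.compile(script, SearchScript.CONTEXT);    // step 1
        SearchScript.LeafFactory leafFactory = factory.newFactory(script.getParams(), lookup); // step 2
        double last = 0;
        for (LeafReaderContext leaf : reader.leaves()) {
            SearchScript searchScript = leafFactory.newInstance(leaf);                         // step 3
            searchScript.setDocument(0);                                                       // step 4
            last = searchScript.runAsDouble();                                                 // step 5
        }
        return last;
    }
}
```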
*/ -public interface SearchScript { +public abstract class SearchScript implements ScorerAware, ExecutableScript { - LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException; + /** The generic runtime parameters for the script. */ + private final Map params; - /** - * Indicates if document scores may be needed by this {@link SearchScript}. - * - * @return {@code true} if scores are needed. - */ - boolean needsScores(); + /** A lookup for the index this script will operate on. */ + private final SearchLookup lookup; - interface Factory { - SearchScript newInstance(Map params, SearchLookup lookup); + /** A leaf lookup for the bound segment this script will operate on. */ + private final LeafReaderContext leafContext; + + /** A leaf lookup for the bound segment this script will operate on. */ + private final LeafSearchLookup leafLookup; + + /** A scorer that will return the score for the current document when the script is run. */ + private Scorer scorer; + + public SearchScript(Map params, SearchLookup lookup, LeafReaderContext leafContext) { + this.params = params; + this.lookup = lookup; + this.leafContext = leafContext; + // TODO: remove leniency when painless does not implement SearchScript for executable script cases + this.leafLookup = leafContext == null ? null : lookup.getLeafSearchLookup(leafContext); } - ScriptContext CONTEXT = new ScriptContext<>("search", Factory.class); + /** Return the parameters for this script. */ + public Map getParams() { + return params; + } + + /** The leaf lookup for the Lucene segment this script was created for. */ + protected final LeafSearchLookup getLeafLookup() { + return leafLookup; + } + + /** The leaf context for the Lucene segment this script was created for. */ + protected final LeafReaderContext getLeafContext() { + return leafContext; + } + + /** The doc lookup for the Lucene segment this script was created for. */ + public final LeafDocLookup getDoc() { + // TODO: remove leniency when painless does not implement SearchScript for executable script cases + return leafLookup == null ? null : leafLookup.doc(); + } + + /** Set the current document to run the script on next. */ + public void setDocument(int docid) { + // TODO: remove leniency when painless does not implement SearchScript for executable script cases + if (leafLookup != null) { + leafLookup.setDocument(docid); + } + } + + @Override + public void setScorer(Scorer scorer) { + this.scorer = scorer; + } + + /** Return the score of the current document. */ + public double getScore() { + // TODO: remove leniency when painless does not implement SearchScript for executable script cases + if (scorer == null) { + return 0.0d; + } + try { + return scorer.score(); + } catch (IOException e) { + throw new ElasticsearchException("couldn't lookup score", e); + } + } + + /** + * Sets per-document aggregation {@code _value}. + *

+ * The default implementation just calls {@code setNextVar("_value", value)} but + * some engines might want to handle this differently for better performance. + *

+ * @param value per-document value, typically a String, Long, or Double + */ + public void setNextAggregationValue(Object value) { + setNextVar("_value", value); + } + + @Override + public void setNextVar(String field, Object value) {} + + /** Return the result as a long. This is used by aggregation scripts over long fields. */ + public long runAsLong() { + throw new UnsupportedOperationException("runAsLong is not implemented"); + } + + @Override + public Object run() { + return runAsDouble(); + } + + /** Return the result as a double. This is the main use case of search script, used for document scoring. */ + public abstract double runAsDouble(); + + /** A factory to construct {@link SearchScript} instances. */ + public interface LeafFactory { + SearchScript newInstance(LeafReaderContext ctx) throws IOException; + /** + * Indicates if document scores may be needed by this {@link SearchScript}. + * + * @return {@code true} if scores are needed. + */ + boolean needsScores(); + } + + /** A factory to construct stateful {@link SearchScript} factories for a specific index. */ + public interface Factory { + LeafFactory newFactory(Map params, SearchLookup lookup); + } + + /** The context used to compile {@link SearchScript} factories. */ + public static final ScriptContext CONTEXT = new ScriptContext<>("search", Factory.class); // TODO: remove aggs context when it has its own interface - ScriptContext AGGS_CONTEXT = new ScriptContext<>("aggs", Factory.class); + public static final ScriptContext AGGS_CONTEXT = new ScriptContext<>("aggs", Factory.class); } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/script/StoredScriptSource.java b/core/src/main/java/org/elasticsearch/script/StoredScriptSource.java index 7236e6eab39..4c71b05a508 100644 --- a/core/src/main/java/org/elasticsearch/script/StoredScriptSource.java +++ b/core/src/main/java/org/elasticsearch/script/StoredScriptSource.java @@ -70,9 +70,9 @@ public class StoredScriptSource extends AbstractDiffable imp public static final ParseField LANG_PARSE_FIELD = new ParseField("lang"); /** - * Standard {@link ParseField} for code on the inner level. + * Standard {@link ParseField} for source on the inner level. */ - public static final ParseField CODE_PARSE_FIELD = new ParseField("code"); + public static final ParseField SOURCE_PARSE_FIELD = new ParseField("source", "code"); /** * Standard {@link ParseField} for options on the inner level. @@ -85,7 +85,7 @@ public class StoredScriptSource extends AbstractDiffable imp */ private static final class Builder { private String lang; - private String code; + private String source; private Map options; private Builder() { @@ -99,19 +99,19 @@ public class StoredScriptSource extends AbstractDiffable imp /** * Since stored scripts can accept templates rather than just scripts, they must also be able - * to handle template parsing, hence the need for custom parsing code. Templates can + * to handle template parsing, hence the need for custom parsing source. Templates can * consist of either an {@link String} or a JSON object. If a JSON object is discovered * then the content type option must also be saved as a compiler option. 
*/ - private void setCode(XContentParser parser) { + private void setSource(XContentParser parser) { try { if (parser.currentToken() == Token.START_OBJECT) { //this is really for search templates, that need to be converted to json format XContentBuilder builder = XContentFactory.jsonBuilder(); - code = builder.copyCurrentStructure(parser).string(); + source = builder.copyCurrentStructure(parser).string(); options.put(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType()); } else { - code = parser.text(); + source = parser.text(); } } catch (IOException exception) { throw new UncheckedIOException(exception); @@ -136,17 +136,17 @@ public class StoredScriptSource extends AbstractDiffable imp throw new IllegalArgumentException("lang cannot be empty"); } - if (code == null) { - throw new IllegalArgumentException("must specify code for stored script"); - } else if (code.isEmpty()) { - throw new IllegalArgumentException("code cannot be empty"); + if (source == null) { + throw new IllegalArgumentException("must specify source for stored script"); + } else if (source.isEmpty()) { + throw new IllegalArgumentException("source cannot be empty"); } if (options.size() > 1 || options.size() == 1 && options.get(Script.CONTENT_TYPE_OPTION) == null) { throw new IllegalArgumentException("illegal compiler options [" + options + "] specified"); } - return new StoredScriptSource(lang, code, options); + return new StoredScriptSource(lang, source, options); } } @@ -155,7 +155,7 @@ public class StoredScriptSource extends AbstractDiffable imp static { // Defines the fields necessary to parse a Script as XContent using an ObjectParser. PARSER.declareString(Builder::setLang, LANG_PARSE_FIELD); - PARSER.declareField(Builder::setCode, parser -> parser, CODE_PARSE_FIELD, ValueType.OBJECT_OR_STRING); + PARSER.declareField(Builder::setSource, parser -> parser, SOURCE_PARSE_FIELD, ValueType.OBJECT_OR_STRING); PARSER.declareField(Builder::setOptions, XContentParser::mapStrings, OPTIONS_PARSE_FIELD, ValueType.OBJECT); } @@ -174,13 +174,13 @@ public class StoredScriptSource extends AbstractDiffable imp * the stored script namespaces. * * The complex script format using the new stored script namespace - * where lang and code are required but options is optional: + * where lang and source are required but options is optional: * * {@code * { * "script" : { * "lang" : "", - * "code" : "", + * "source" : "", * "options" : { * "option0" : "", * "option1" : "", @@ -195,7 +195,23 @@ public class StoredScriptSource extends AbstractDiffable imp * { * "script": { * "lang" : "painless", - * "code" : "return Math.log(doc.popularity) * params.multiplier" + * "source" : "return Math.log(doc.popularity) * params.multiplier" + * } + * } + * } + * + * The use of "source" may also be substituted with "code" for backcompat with 5.3 to 5.5 format. For example: + * + * {@code + * { + * "script" : { + * "lang" : "", + * "code" : "", + * "options" : { + * "option0" : "", + * "option1" : "", + * ... + * } * } * } * } @@ -219,7 +235,7 @@ public class StoredScriptSource extends AbstractDiffable imp * } * * Note that templates can be handled as both strings and complex JSON objects. - * Also templates may be part of the 'code' parameter in a script. The Parser + * Also templates may be part of the 'source' parameter in a script. The Parser * can handle this case as well. 
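With the rename, a stored script body is expressed through "source" (the old "code" key is still read for documents written by the 5.3 to 5.5 format). A small sketch using the three-argument constructor that appears later in this hunk; the script body is invented:

```java
import java.util.Collections;

import org.elasticsearch.script.StoredScriptSource;

class StoredScriptSourceExample {
    // Equivalent of storing { "script": { "lang": "painless", "source": "..." } }
    static final StoredScriptSource EXAMPLE = new StoredScriptSource(
            "painless",
            "Math.log(doc['popularity'].value) * params.multiplier",
            Collections.emptyMap());          // no compiler options

    static String body() {
        return EXAMPLE.getSource();           // previously exposed as getCode()
    }
}
```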
* * @param lang An optional parameter to allow for use of the deprecated stored @@ -267,7 +283,7 @@ public class StoredScriptSource extends AbstractDiffable imp } } else { - throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "], expected [{, ]"); + throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "], expected [{, ]"); } } else { if (lang == null) { @@ -306,7 +322,7 @@ public class StoredScriptSource extends AbstractDiffable imp * { * "script" : { * "lang" : "", - * "code" : "", + * "source" : "", * "options" : { * "option0" : "", * "option1" : "", @@ -316,7 +332,7 @@ public class StoredScriptSource extends AbstractDiffable imp * } * } * - * Note that the "code" parameter can also handle template parsing including from + * Note that the "source" parameter can also handle template parsing including from * a complex JSON object. */ public static StoredScriptSource fromXContent(XContentParser parser) throws IOException { @@ -333,66 +349,66 @@ public class StoredScriptSource extends AbstractDiffable imp } private final String lang; - private final String code; + private final String source; private final Map options; /** * Constructor for use with {@link GetStoredScriptResponse} * to support the deprecated stored script namespace. */ - public StoredScriptSource(String code) { + public StoredScriptSource(String source) { this.lang = null; - this.code = Objects.requireNonNull(code); + this.source = Objects.requireNonNull(source); this.options = null; } /** * Standard StoredScriptSource constructor. * @param lang The language to compile the script with. Must not be {@code null}. - * @param code The source code to compile with. Must not be {@code null}. + * @param source The source source to compile with. Must not be {@code null}. * @param options Compiler options to be compiled with. Must not be {@code null}, * use an empty {@link Map} to represent no options. */ - public StoredScriptSource(String lang, String code, Map options) { + public StoredScriptSource(String lang, String source, Map options) { this.lang = Objects.requireNonNull(lang); - this.code = Objects.requireNonNull(code); + this.source = Objects.requireNonNull(source); this.options = Collections.unmodifiableMap(Objects.requireNonNull(options)); } /** * Reads a {@link StoredScriptSource} from a stream. Version 5.3+ will read - * all of the lang, code, and options parameters. For versions prior to 5.3, - * only the code parameter will be read in as a bytes reference. + * all of the lang, source, and options parameters. For versions prior to 5.3, + * only the source parameter will be read in as a bytes reference. */ public StoredScriptSource(StreamInput in) throws IOException { if (in.getVersion().onOrAfter(Version.V_5_3_0)) { this.lang = in.readString(); - this.code = in.readString(); + this.source = in.readString(); @SuppressWarnings("unchecked") Map options = (Map)(Map)in.readMap(); this.options = options; } else { this.lang = null; - this.code = in.readBytesReference().utf8ToString(); + this.source = in.readBytesReference().utf8ToString(); this.options = null; } } /** * Writes a {@link StoredScriptSource} to a stream. Version 5.3+ will write - * all of the lang, code, and options parameters. For versions prior to 5.3, - * only the code parameter will be read in as a bytes reference. + * all of the lang, source, and options parameters. For versions prior to 5.3, + * only the source parameter will be read in as a bytes reference. 
*/ @Override public void writeTo(StreamOutput out) throws IOException { if (out.getVersion().onOrAfter(Version.V_5_3_0)) { out.writeString(lang); - out.writeString(code); + out.writeString(source); @SuppressWarnings("unchecked") Map options = (Map)(Map)this.options; out.writeMap(options); } else { - out.writeBytesReference(new BytesArray(code)); + out.writeBytesReference(new BytesArray(source)); } } @@ -403,7 +419,7 @@ public class StoredScriptSource extends AbstractDiffable imp * { * "script" : { * "lang" : "", - * "code" : "", + * "source" : "", * "options" : { * "option0" : "", * "option1" : "", @@ -413,13 +429,13 @@ public class StoredScriptSource extends AbstractDiffable imp * } * } * - * Note that the 'code' parameter can also handle templates written as complex JSON. + * Note that the 'source' parameter can also handle templates written as complex JSON. */ @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(LANG_PARSE_FIELD.getPreferredName(), lang); - builder.field(CODE_PARSE_FIELD.getPreferredName(), code); + builder.field(SOURCE_PARSE_FIELD.getPreferredName(), source); builder.field(OPTIONS_PARSE_FIELD.getPreferredName(), options); builder.endObject(); @@ -434,10 +450,10 @@ public class StoredScriptSource extends AbstractDiffable imp } /** - * @return The code used for compiling this script. + * @return The source used for compiling this script. */ - public String getCode() { - return code; + public String getSource() { + return source; } /** @@ -455,7 +471,7 @@ public class StoredScriptSource extends AbstractDiffable imp StoredScriptSource that = (StoredScriptSource)o; if (lang != null ? !lang.equals(that.lang) : that.lang != null) return false; - if (code != null ? !code.equals(that.code) : that.code != null) return false; + if (source != null ? !source.equals(that.source) : that.source != null) return false; return options != null ? options.equals(that.options) : that.options == null; } @@ -463,7 +479,7 @@ public class StoredScriptSource extends AbstractDiffable imp @Override public int hashCode() { int result = lang != null ? lang.hashCode() : 0; - result = 31 * result + (code != null ? code.hashCode() : 0); + result = 31 * result + (source != null ? source.hashCode() : 0); result = 31 * result + (options != null ? options.hashCode() : 0); return result; } @@ -472,7 +488,7 @@ public class StoredScriptSource extends AbstractDiffable imp public String toString() { return "StoredScriptSource{" + "lang='" + lang + '\'' + - ", code='" + code + '\'' + + ", source='" + source + '\'' + ", options=" + options + '}'; } diff --git a/core/src/main/java/org/elasticsearch/template/CompiledTemplate.java b/core/src/main/java/org/elasticsearch/script/TemplateScript.java similarity index 57% rename from core/src/main/java/org/elasticsearch/template/CompiledTemplate.java rename to core/src/main/java/org/elasticsearch/script/TemplateScript.java index d03993583c4..c053cf2b509 100644 --- a/core/src/main/java/org/elasticsearch/template/CompiledTemplate.java +++ b/core/src/main/java/org/elasticsearch/script/TemplateScript.java @@ -17,15 +17,33 @@ * under the License. */ -package org.elasticsearch.template; +package org.elasticsearch.script; import java.util.Map; /** - * A template that may be executed. + * A string template rendered as a script. 
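TemplateScript, defined just below, replaces CompiledTemplate: compiling against TemplateScript.CONTEXT yields a Factory, and each parameter map gets its own instance whose execute() returns the rendered string. A hedged usage sketch; the mustache source and parameter are invented and a ScriptService is assumed to be in scope:

```java
import java.util.Collections;
import java.util.Map;

import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.script.TemplateScript;

class TemplateScriptExample {
    static String render(ScriptService scriptService) {
        Script template = new Script(ScriptType.INLINE, "mustache",
                "{\"match\":{\"title\":\"{{title}}\"}}", Collections.emptyMap());
        TemplateScript.Factory factory = scriptService.compile(template, TemplateScript.CONTEXT);
        Map<String, Object> params = Collections.singletonMap("title", "elasticsearch");
        return factory.newInstance(params).execute();   // the rendered string
    }
}
```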
*/ -public interface CompiledTemplate { +public abstract class TemplateScript { + private final Map params; + + public TemplateScript(Map params) { + this.params = params; + } + + /** Return the parameters for this script. */ + public Map getParams() { + return params; + } + + public static final String[] PARAMETERS = {}; /** Run a template and return the resulting string, encoded in utf8 bytes. */ - String run(Map params); + public abstract String execute(); + + public interface Factory { + TemplateScript newInstance(Map params); + } + + public static final ScriptContext CONTEXT = new ScriptContext<>("template", Factory.class); } diff --git a/core/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/DefaultSearchContext.java index 45b89675454..8e0536adfb4 100644 --- a/core/src/main/java/org/elasticsearch/search/DefaultSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/DefaultSearchContext.java @@ -114,6 +114,7 @@ final class DefaultSearchContext extends SearchContext { private SortAndFormats sort; private Float minimumScore; private boolean trackScores = false; // when sorting, track scores as well... + private boolean trackTotalHits = true; private FieldDoc searchAfter; private CollapseContext collapse; private boolean lowLevelCancellation; @@ -548,6 +549,17 @@ final class DefaultSearchContext extends SearchContext { return this.trackScores; } + @Override + public SearchContext trackTotalHits(boolean trackTotalHits) { + this.trackTotalHits = trackTotalHits; + return this; + } + + @Override + public boolean trackTotalHits() { + return trackTotalHits; + } + @Override public SearchContext searchAfter(FieldDoc searchAfter) { this.searchAfter = searchAfter; diff --git a/core/src/main/java/org/elasticsearch/search/SearchHit.java b/core/src/main/java/org/elasticsearch/search/SearchHit.java index 6172f974b14..81cba7d8db7 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/core/src/main/java/org/elasticsearch/search/SearchHit.java @@ -41,6 +41,7 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.shard.ShardId; @@ -65,8 +66,8 @@ import static org.elasticsearch.common.lucene.Lucene.writeExplanation; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName; import static org.elasticsearch.common.xcontent.XContentParserUtils.parseStoredFieldsValue; -import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField; import static org.elasticsearch.search.fetch.subphase.highlight.HighlightField.readHighlightField; /** @@ -482,7 +483,7 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable, Void> MAP_PARSER = new ObjectParser<>("innerHitsParser", HashMap::new); + private static ObjectParser, Void> MAP_PARSER = new ObjectParser<>("innerHitParser", true, HashMap::new); static { 
declareInnerHitsParseFields(MAP_PARSER); @@ -614,7 +615,10 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable innerHits = new HashMap<>(); while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation); - innerHits.put(parser.currentName(), SearchHits.fromXContent(parser)); + String name = parser.currentName(); + ensureExpectedToken(Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); + ensureFieldName(parser, parser.nextToken(), SearchHits.Fields.HITS); + innerHits.put(name, SearchHits.fromXContent(parser)); ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation); } return innerHits; @@ -649,7 +653,7 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable PARSER = new ConstructingObjectParser<>( - "nested_identity", + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("nested_identity", true, ctorArgs -> new NestedIdentity((String) ctorArgs[0], (int) ctorArgs[1], (NestedIdentity) ctorArgs[2])); static { PARSER.declareString(constructorArg(), new ParseField(FIELD)); diff --git a/core/src/main/java/org/elasticsearch/search/SearchHits.java b/core/src/main/java/org/elasticsearch/search/SearchHits.java index 0b49ba8ec12..650ac7ea804 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchHits.java +++ b/core/src/main/java/org/elasticsearch/search/SearchHits.java @@ -19,6 +19,7 @@ package org.elasticsearch.search; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; @@ -34,7 +35,6 @@ import java.util.List; import java.util.Objects; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField; public final class SearchHits implements Streamable, ToXContent, Iterable { @@ -148,19 +148,21 @@ public final class SearchHits implements Streamable, ToXContent, Iterable= 0; + out.writeBoolean(hasTotalHits); + } else { + assert totalHits >= 0; + hasTotalHits = true; + } + if (hasTotalHits) { + out.writeVLong(totalHits); + } out.writeFloat(maxScore); out.writeVInt(hits.length); if (hits.length > 0) { diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index fea834c02ac..87d937c0995 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -52,7 +52,6 @@ import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.query.MoreLikeThisQueryBuilder; import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.NestedQueryBuilder; -import org.elasticsearch.index.query.ParentIdQueryBuilder; import org.elasticsearch.index.query.PrefixQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; @@ -230,7 +229,6 @@ import org.elasticsearch.search.fetch.subphase.highlight.FastVectorHighlighter; import org.elasticsearch.search.fetch.subphase.highlight.HighlightPhase; import org.elasticsearch.search.fetch.subphase.highlight.Highlighter; import org.elasticsearch.search.fetch.subphase.highlight.PlainHighlighter; 
-import org.elasticsearch.search.fetch.subphase.highlight.PostingsHighlighter; import org.elasticsearch.search.fetch.subphase.highlight.UnifiedHighlighter; import org.elasticsearch.search.rescore.QueryRescorerBuilder; import org.elasticsearch.search.rescore.RescoreBuilder; @@ -575,7 +573,6 @@ public class SearchModule { NamedRegistry highlighters = new NamedRegistry<>("highlighter"); highlighters.register("fvh", new FastVectorHighlighter(settings)); highlighters.register("plain", new PlainHighlighter()); - highlighters.register("postings", new PostingsHighlighter()); highlighters.register("unified", new UnifiedHighlighter()); highlighters.extractAndRegister(plugins, SearchPlugin::getHighlighters); @@ -743,7 +740,6 @@ public class SearchModule { registerQuery(new QuerySpec<>(GeoPolygonQueryBuilder.NAME, GeoPolygonQueryBuilder::new, GeoPolygonQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(ExistsQueryBuilder.NAME, ExistsQueryBuilder::new, ExistsQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(MatchNoneQueryBuilder.NAME, MatchNoneQueryBuilder::new, MatchNoneQueryBuilder::fromXContent)); - registerQuery(new QuerySpec<>(ParentIdQueryBuilder.NAME, ParentIdQueryBuilder::new, ParentIdQueryBuilder::fromXContent)); if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { registerQuery(new QuerySpec<>(GeoShapeQueryBuilder.NAME, GeoShapeQueryBuilder::new, GeoShapeQueryBuilder::fromXContent)); diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java b/core/src/main/java/org/elasticsearch/search/SearchService.java index fef20f44f52..7e691e441b5 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -85,6 +85,7 @@ import org.elasticsearch.search.suggest.completion.CompletionSuggestion; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool.Cancellable; import org.elasticsearch.threadpool.ThreadPool.Names; +import org.elasticsearch.transport.TransportRequest; import java.io.IOException; import java.util.Collections; @@ -309,7 +310,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv } public ScrollQuerySearchResult executeQueryPhase(InternalScrollSearchRequest request, SearchTask task) { - final SearchContext context = findContext(request.id()); + final SearchContext context = findContext(request.id(), request); SearchOperationListener operationListener = context.indexShard().getSearchOperationListener(); context.incRef(); try { @@ -333,7 +334,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv } public QuerySearchResult executeQueryPhase(QuerySearchRequest request, SearchTask task) { - final SearchContext context = findContext(request.id()); + final SearchContext context = findContext(request.id(), request); context.setTask(task); IndexShard indexShard = context.indexShard(); SearchOperationListener operationListener = indexShard.getSearchOperationListener(); @@ -374,7 +375,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv } public ScrollQueryFetchSearchResult executeFetchPhase(InternalScrollSearchRequest request, SearchTask task) { - final SearchContext context = findContext(request.id()); + final SearchContext context = findContext(request.id(), request); context.incRef(); try { context.setTask(task); @@ -405,7 +406,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv } 
public FetchSearchResult executeFetchPhase(ShardFetchRequest request, SearchTask task) { - final SearchContext context = findContext(request.id()); + final SearchContext context = findContext(request.id(), request); final SearchOperationListener operationListener = context.indexShard().getSearchOperationListener(); context.incRef(); try { @@ -435,7 +436,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv } } - private SearchContext findContext(long id) throws SearchContextMissingException { + private SearchContext findContext(long id, TransportRequest request) throws SearchContextMissingException { SearchContext context = activeContexts.get(id); if (context == null) { throw new SearchContextMissingException(id); @@ -443,7 +444,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv SearchOperationListener operationListener = context.indexShard().getSearchOperationListener(); try { - operationListener.validateSearchContext(context); + operationListener.validateSearchContext(context, request); return context; } catch (Exception e) { processFailure(context, e); @@ -634,6 +635,10 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv } } context.trackScores(source.trackScores()); + if (source.trackTotalHits() == false && context.scrollContext() != null) { + throw new SearchContextException(context, "disabling [track_total_hits] is not allowed in a scroll context"); + } + context.trackTotalHits(source.trackTotalHits()); if (source.minScore() != null) { context.minimumScore(source.minScore()); } @@ -689,7 +694,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv if (source.scriptFields() != null) { for (org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField field : source.scriptFields()) { SearchScript.Factory factory = scriptService.compile(field.script(), SearchScript.CONTEXT); - SearchScript searchScript = factory.newInstance(field.script().getParams(), context.lookup()); + SearchScript.LeafFactory searchScript = factory.newFactory(field.script().getParams(), context.lookup()); context.scriptFields().add(new ScriptField(field.fieldName(), searchScript, field.ignoreFailure())); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java index a9af38ef6e0..18cc2ffc2f6 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java @@ -109,26 +109,30 @@ public abstract class AggregatorBase extends Aggregator { } /** - * Increment the number of bytes that have been allocated to service this request - * and potentially trigger a {@link CircuitBreakingException}. The number of bytes - * allocated is automatically decremented with the circuit breaker service on - * closure of this aggregator. + * Increment or decrement the number of bytes that have been allocated to service + * this request and potentially trigger a {@link CircuitBreakingException}. The + * number of bytes allocated is automatically decremented with the circuit breaker + * service on closure of this aggregator. + * If memory has been returned, decrement it without tripping the breaker. * For performance reasons subclasses should not call this millions of times * each with small increments and instead batch up into larger allocations. 
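/*
 * Editor's note (illustrative sketch): parseSource above now copies trackTotalHits from the
 * request source into the context and rejects scrolls that disable it. The snippet below shows
 * the request shape the new guard refuses; SearchSourceBuilder#trackTotalHits(boolean) is
 * assumed to be the matching setter added elsewhere in this change (only the getter is read in
 * the hunk above).
 */
SearchSourceBuilder sourceWithoutTotalHits() {
    SearchSourceBuilder source = new SearchSourceBuilder();
    source.query(QueryBuilders.matchAllQuery());
    source.trackTotalHits(false);   // fine for a plain search...
    // ...but combined with a scroll it now fails with:
    // SearchContextException: disabling [track_total_hits] is not allowed in a scroll context
    return source;
}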
* - * @param bytesAllocated the number of additional bytes allocated + * @param bytes the number of bytes to register or negative to deregister the bytes * @return the cumulative size in bytes allocated by this aggregator to service this request */ - protected long addRequestCircuitBreakerBytes(long bytesAllocated) { - try { + protected long addRequestCircuitBreakerBytes(long bytes) { + // Only use the potential to circuit break if bytes are being incremented + if (bytes > 0) { this.breakerService .getBreaker(CircuitBreaker.REQUEST) - .addEstimateBytesAndMaybeBreak(bytesAllocated, ""); - this.requestBytesUsed += bytesAllocated; - return requestBytesUsed; - } catch (CircuitBreakingException cbe) { - throw cbe; - } + .addEstimateBytesAndMaybeBreak(bytes, ""); + } else { + this.breakerService + .getBreaker(CircuitBreaker.REQUEST) + .addWithoutBreaking(bytes); + } + this.requestBytesUsed += bytes; + return requestBytesUsed; } /** * Most aggregators don't need scores, make sure to extend this method if diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/BucketOrder.java b/core/src/main/java/org/elasticsearch/search/aggregations/BucketOrder.java index 482a90f0852..d1e57e12321 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/BucketOrder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/BucketOrder.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.search.aggregations; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContentObject; @@ -124,4 +125,9 @@ public abstract class BucketOrder implements ToXContentObject, Writeable { public void writeTo(StreamOutput out) throws IOException { InternalOrder.Streams.writeOrder(this, out); } + + @Override + public String toString() { + return Strings.toString(this); + } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java index 8f8ed143ca3..6732db9cecf 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java @@ -172,7 +172,7 @@ public class AdjacencyMatrixAggregator extends BucketsAggregator { // no need to provide deleted docs to the filter final Bits[] bits = new Bits[filters.length + totalNumIntersections]; for (int i = 0; i < filters.length; ++i) { - bits[i] = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), filters[i].scorer(ctx)); + bits[i] = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), filters[i].scorerSupplier(ctx)); } // Add extra Bits for intersections int pos = filters.length; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregator.java index 5313bdade80..46a9049711f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregator.java @@ -56,7 +56,7 @@ public class FilterAggregator extends SingleBucketAggregator { public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws 
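/*
 * Editor's note (illustrative sketch): with the rewrite above, addRequestCircuitBreakerBytes
 * accepts negative deltas; only positive deltas go through addEstimateBytesAndMaybeBreak and can
 * trip the request breaker, while negative deltas are returned via addWithoutBreaking. A caller
 * inside an aggregator would typically batch changes as sketched below; the field and method
 * names here are hypothetical.
 */
private long reportedBytes = 0;

void onEstimatedSizeChanged(long newEstimateBytes) {
    long delta = newEstimateBytes - reportedBytes;
    // Batch small changes, as the javadoc above asks, instead of millions of tiny updates.
    if (Math.abs(delta) > MEMORY_GROWTH_REPORTING_INTERVAL_BYTES) {
        addRequestCircuitBreakerBytes(delta);   // may throw CircuitBreakingException when delta > 0
        reportedBytes = newEstimateBytes;
    }
}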
IOException { // no need to provide deleted docs to the filter - final Bits bits = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), filter.scorer(ctx)); + final Bits bits = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), filter.scorerSupplier(ctx)); return new LeafBucketCollectorBase(sub, null) { @Override public void collect(int doc, long bucket) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java index a225d5f92f0..8e65ca4fbe3 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filters/FiltersAggregator.java @@ -144,7 +144,7 @@ public class FiltersAggregator extends BucketsAggregator { // no need to provide deleted docs to the filter final Bits[] bits = new Bits[filters.length]; for (int i = 0; i < filters.length; ++i) { - bits[i] = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), filters[i].scorer(ctx)); + bits[i] = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), filters[i].scorerSupplier(ctx)); } return new LeafBucketCollectorBase(sub, null) { @Override diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java index 0592ccf7cfc..7be81c08120 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java @@ -33,6 +33,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; +import java.util.Arrays; import java.util.List; import java.util.Map; @@ -114,7 +115,7 @@ public class SamplerAggregator extends SingleBucketAggregator { return mode; } } - throw new IllegalArgumentException("Unknown `execution_hint`: [" + value + "], expected any of " + values()); + throw new IllegalArgumentException("Unknown `execution_hint`: [" + value + "], expected any of " + Arrays.toString(values())); } private final ParseField parseField; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/GlobalOrdinalsSignificantTermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/GlobalOrdinalsSignificantTermsAggregator.java index 98effdcfd54..50f114b35b5 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/GlobalOrdinalsSignificantTermsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/GlobalOrdinalsSignificantTermsAggregator.java @@ -53,22 +53,28 @@ public class GlobalOrdinalsSignificantTermsAggregator extends GlobalOrdinalsStri protected final SignificantTermsAggregatorFactory termsAggFactory; private final SignificanceHeuristic significanceHeuristic; - public GlobalOrdinalsSignificantTermsAggregator(String name, AggregatorFactories factories, - ValuesSource.Bytes.WithOrdinals.FieldData valuesSource, DocValueFormat format, - BucketCountThresholds bucketCountThresholds, IncludeExclude.OrdinalsFilter includeExclude, - SearchContext context, Aggregator parent, - SignificanceHeuristic significanceHeuristic, SignificantTermsAggregatorFactory termsAggFactory, - List 
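/*
 * Editor's note (illustrative sketch): the filter-based aggregators above now hand
 * Lucene.asSequentialAccessBits a ScorerSupplier rather than an eagerly built Scorer, which
 * presumably lets the scorer be materialized only if the bits are actually consulted. The call
 * shape after this change is simply:
 *
 *   Bits bits = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), filter.scorerSupplier(ctx));
 */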
pipelineAggregators, Map metaData) throws IOException { - + public GlobalOrdinalsSignificantTermsAggregator(String name, + AggregatorFactories factories, + ValuesSource.Bytes.WithOrdinals.FieldData valuesSource, + DocValueFormat format, + BucketCountThresholds bucketCountThresholds, + IncludeExclude.OrdinalsFilter includeExclude, + SearchContext context, + Aggregator parent, + boolean forceRemapGlobalOrds, + SignificanceHeuristic significanceHeuristic, + SignificantTermsAggregatorFactory termsAggFactory, + List pipelineAggregators, + Map metaData) throws IOException { super(name, factories, valuesSource, null, format, bucketCountThresholds, includeExclude, context, parent, - SubAggCollectionMode.DEPTH_FIRST, false, pipelineAggregators, metaData); + forceRemapGlobalOrds, SubAggCollectionMode.DEPTH_FIRST, false, pipelineAggregators, metaData); this.significanceHeuristic = significanceHeuristic; this.termsAggFactory = termsAggFactory; + this.numCollectedDocs = 0; } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { return new LeafBucketCollectorBase(super.getLeafCollector(ctx, sub), null) { @Override public void collect(int doc, long bucket) throws IOException { @@ -78,18 +84,17 @@ public class GlobalOrdinalsSignificantTermsAggregator extends GlobalOrdinalsStri }; } - @Override public SignificantStringTerms buildAggregation(long owningBucketOrdinal) throws IOException { assert owningBucketOrdinal == 0; - if (globalOrds == null) { // no context in this reader + if (valueCount == 0) { // no context in this reader return buildEmptyAggregation(); } final int size; if (bucketCountThresholds.getMinDocCount() == 0) { // if minDocCount == 0 then we can end up with more buckets then maxBucketOrd() returns - size = (int) Math.min(globalOrds.getValueCount(), bucketCountThresholds.getShardSize()); + size = (int) Math.min(valueCount, bucketCountThresholds.getShardSize()); } else { size = (int) Math.min(maxBucketOrd(), bucketCountThresholds.getShardSize()); } @@ -98,7 +103,7 @@ public class GlobalOrdinalsSignificantTermsAggregator extends GlobalOrdinalsStri BucketSignificancePriorityQueue ordered = new BucketSignificancePriorityQueue<>(size); SignificantStringTerms.Bucket spare = null; - for (long globalTermOrd = 0; globalTermOrd < globalOrds.getValueCount(); ++globalTermOrd) { + for (long globalTermOrd = 0; globalTermOrd < valueCount; ++globalTermOrd) { if (includeExclude != null && !acceptedGlobalOrdinals.get(globalTermOrd)) { continue; } @@ -115,7 +120,7 @@ public class GlobalOrdinalsSignificantTermsAggregator extends GlobalOrdinalsStri spare = new SignificantStringTerms.Bucket(new BytesRef(), 0, 0, 0, 0, null, format); } spare.bucketOrd = bucketOrd; - copy(globalOrds.lookupOrd(globalTermOrd), spare.termBytes); + copy(lookupGlobalOrd.apply(globalTermOrd), spare.termBytes); spare.subsetDf = bucketDocCount; spare.subsetSize = subsetSize; spare.supersetDf = termsAggFactory.getBackgroundFrequency(spare.termBytes); @@ -148,63 +153,13 @@ public class GlobalOrdinalsSignificantTermsAggregator extends GlobalOrdinalsStri IndexReader topReader = searcher.getIndexReader(); int supersetSize = topReader.numDocs(); return new SignificantStringTerms(name, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getMinDocCount(), - pipelineAggregators(), metaData(), format, 0, supersetSize, significanceHeuristic, 
emptyList()); + pipelineAggregators(), metaData(), format, numCollectedDocs, supersetSize, significanceHeuristic, emptyList()); } @Override protected void doClose() { + super.doClose(); Releasables.close(termsAggFactory); } - - public static class WithHash extends GlobalOrdinalsSignificantTermsAggregator { - - private final LongHash bucketOrds; - - public WithHash(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals.FieldData valuesSource, - DocValueFormat format, BucketCountThresholds bucketCountThresholds, IncludeExclude.OrdinalsFilter includeExclude, - SearchContext context, Aggregator parent, SignificanceHeuristic significanceHeuristic, - SignificantTermsAggregatorFactory termsAggFactory, List pipelineAggregators, - Map metaData) throws IOException { - super(name, factories, valuesSource, format, bucketCountThresholds, includeExclude, context, parent, significanceHeuristic, - termsAggFactory, pipelineAggregators, metaData); - bucketOrds = new LongHash(1, context.bigArrays()); - } - - @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { - return new LeafBucketCollectorBase(super.getLeafCollector(ctx, sub), null) { - @Override - public void collect(int doc, long bucket) throws IOException { - assert bucket == 0; - numCollectedDocs++; - if (globalOrds.advanceExact(doc)) { - for (long globalOrd = globalOrds.nextOrd(); - globalOrd != SortedSetDocValues.NO_MORE_ORDS; - globalOrd = globalOrds.nextOrd()) { - long bucketOrd = bucketOrds.add(globalOrd); - if (bucketOrd < 0) { - bucketOrd = -1 - bucketOrd; - collectExistingBucket(sub, doc, bucketOrd); - } else { - collectBucket(sub, doc, bucketOrd); - } - } - } - } - }; - } - - @Override - protected long getBucketOrd(long termOrd) { - return bucketOrds.find(termOrd); - } - - @Override - protected void doClose() { - Releasables.close(termsAggFactory, bucketOrds); - } - } - } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/InternalMappedSignificantTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/InternalMappedSignificantTerms.java index da346a8a1e4..e1bd4defd3e 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/InternalMappedSignificantTerms.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/InternalMappedSignificantTerms.java @@ -128,6 +128,7 @@ public abstract class InternalMappedSignificantTerms< @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.field(CommonFields.DOC_COUNT.getPreferredName(), subsetSize); + builder.field(BG_COUNT, supersetSize); builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (Bucket bucket : buckets) { //There is a condition (presumably when only one shard has a bucket?) 
where reduce is not called diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/ParsedSignificantLongTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/ParsedSignificantLongTerms.java index 2d512632f50..9592d80c776 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/ParsedSignificantLongTerms.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/ParsedSignificantLongTerms.java @@ -39,9 +39,7 @@ public class ParsedSignificantLongTerms extends ParsedSignificantTerms { } public static ParsedSignificantLongTerms fromXContent(XContentParser parser, String name) throws IOException { - ParsedSignificantLongTerms aggregation = PARSER.parse(parser, null); - aggregation.setName(name); - return aggregation; + return parseSignificantTermsXContent(() -> PARSER.parse(parser, null), name); } public static class ParsedBucket extends ParsedSignificantTerms.ParsedBucket { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/ParsedSignificantStringTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/ParsedSignificantStringTerms.java index fb1c7728e0c..008a5a28e5d 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/ParsedSignificantStringTerms.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/ParsedSignificantStringTerms.java @@ -40,9 +40,7 @@ public class ParsedSignificantStringTerms extends ParsedSignificantTerms { } public static ParsedSignificantStringTerms fromXContent(XContentParser parser, String name) throws IOException { - ParsedSignificantStringTerms aggregation = PARSER.parse(parser, null); - aggregation.setName(name); - return aggregation; + return parseSignificantTermsXContent(() -> PARSER.parse(parser, null), name); } public static class ParsedBucket extends ParsedSignificantTerms.ParsedBucket { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/ParsedSignificantTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/ParsedSignificantTerms.java index 56be0aa6071..8991ca09932 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/ParsedSignificantTerms.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/ParsedSignificantTerms.java @@ -21,6 +21,8 @@ package org.elasticsearch.search.aggregations.bucket.significant; import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.CheckedFunction; +import org.elasticsearch.common.CheckedSupplier; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -42,11 +44,16 @@ public abstract class ParsedSignificantTerms extends ParsedMultiBucketAggregatio private Map bucketMap; protected long subsetSize; + protected long supersetSize; protected long getSubsetSize() { return subsetSize; } + protected long getSupersetSize() { + return supersetSize; + } + @Override public List getBuckets() { return buckets; @@ -68,6 +75,7 @@ public abstract class ParsedSignificantTerms extends ParsedMultiBucketAggregatio @Override protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { 
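/*
 * Editor's note (illustrative sketch): with the BG_COUNT additions above and in
 * ParsedSignificantTerms below, a significant_terms / significant_text response now reports the
 * background set size next to the foreground doc_count, roughly:
 *
 *   "tags" : {
 *     "doc_count" : 47,        // subset (foreground) size
 *     "bg_count"  : 101192,    // superset (background) size, newly emitted
 *     "buckets"   : [ ... ]
 *   }
 *
 * The numbers are made up; only the field names come from the patch.
 */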
builder.field(CommonFields.DOC_COUNT.getPreferredName(), subsetSize); + builder.field(InternalMappedSignificantTerms.BG_COUNT, supersetSize); builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (SignificantTerms.Bucket bucket : buckets) { bucket.toXContent(builder, params); @@ -76,16 +84,31 @@ public abstract class ParsedSignificantTerms extends ParsedMultiBucketAggregatio return builder; } + static T parseSignificantTermsXContent(final CheckedSupplier aggregationSupplier, + final String name) throws IOException { + T aggregation = aggregationSupplier.get(); + aggregation.setName(name); + for (ParsedBucket bucket : aggregation.buckets) { + bucket.subsetSize = aggregation.subsetSize; + bucket.supersetSize = aggregation.supersetSize; + } + return aggregation; + } + static void declareParsedSignificantTermsFields(final ObjectParser objectParser, final CheckedFunction bucketParser) { declareMultiBucketAggregationFields(objectParser, bucketParser::apply, bucketParser::apply); objectParser.declareLong((parsedTerms, value) -> parsedTerms.subsetSize = value , CommonFields.DOC_COUNT); + objectParser.declareLong((parsedTerms, value) -> parsedTerms.supersetSize = value , + new ParseField(InternalMappedSignificantTerms.BG_COUNT)); } public abstract static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements SignificantTerms.Bucket { protected long subsetDf; + protected long subsetSize; protected long supersetDf; + protected long supersetSize; protected double score; @Override @@ -110,12 +133,12 @@ public abstract class ParsedSignificantTerms extends ParsedMultiBucketAggregatio @Override public long getSupersetSize() { - throw new UnsupportedOperationException(); + return supersetSize; } @Override public long getSubsetSize() { - throw new UnsupportedOperationException(); + return subsetSize; } @Override diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTerms.java index 8c0da8b890e..61cb4a9ca0a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTerms.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTerms.java @@ -29,17 +29,39 @@ public interface SignificantTerms extends MultiBucketsAggregation, Iterable config, IncludeExclude includeExclude, - String executionHint, QueryBuilder filterBuilder, TermsAggregator.BucketCountThresholds bucketCountThresholds, - SignificanceHeuristic significanceHeuristic, SearchContext context, AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, Map metaData) throws IOException { + public SignificantTermsAggregatorFactory(String name, + ValuesSourceConfig config, + IncludeExclude includeExclude, + String executionHint, + QueryBuilder filterBuilder, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + SignificanceHeuristic significanceHeuristic, + SearchContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metaData) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metaData); this.includeExclude = includeExclude; this.executionHint = executionHint; @@ -246,44 +254,71 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac MAP(new ParseField("map")) { @Override - Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource, 
DocValueFormat format, - TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude, - SearchContext aggregationContext, Aggregator parent, SignificanceHeuristic significanceHeuristic, - SignificantTermsAggregatorFactory termsAggregatorFactory, List pipelineAggregators, - Map metaData) throws IOException { + Aggregator create(String name, + AggregatorFactories factories, + ValuesSource valuesSource, + DocValueFormat format, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + IncludeExclude includeExclude, + SearchContext aggregationContext, + Aggregator parent, + SignificanceHeuristic significanceHeuristic, + SignificantTermsAggregatorFactory termsAggregatorFactory, + List pipelineAggregators, + Map metaData) throws IOException { + final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter(format); return new SignificantStringTermsAggregator(name, factories, valuesSource, format, bucketCountThresholds, filter, aggregationContext, parent, significanceHeuristic, termsAggregatorFactory, pipelineAggregators, metaData); + } }, GLOBAL_ORDINALS(new ParseField("global_ordinals")) { @Override - Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource, DocValueFormat format, - TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude, - SearchContext aggregationContext, Aggregator parent, SignificanceHeuristic significanceHeuristic, - SignificantTermsAggregatorFactory termsAggregatorFactory, List pipelineAggregators, - Map metaData) throws IOException { + Aggregator create(String name, + AggregatorFactories factories, + ValuesSource valuesSource, + DocValueFormat format, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + IncludeExclude includeExclude, + SearchContext aggregationContext, + Aggregator parent, + SignificanceHeuristic significanceHeuristic, + SignificantTermsAggregatorFactory termsAggregatorFactory, + List pipelineAggregators, + Map metaData) throws IOException { + final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? 
null : includeExclude.convertToOrdinalsFilter(format); return new GlobalOrdinalsSignificantTermsAggregator(name, factories, (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, format, bucketCountThresholds, filter, - aggregationContext, parent, significanceHeuristic, termsAggregatorFactory, pipelineAggregators, metaData); + aggregationContext, parent, false, significanceHeuristic, termsAggregatorFactory, pipelineAggregators, metaData); + } }, GLOBAL_ORDINALS_HASH(new ParseField("global_ordinals_hash")) { @Override - Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource, DocValueFormat format, - TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude, - SearchContext aggregationContext, Aggregator parent, SignificanceHeuristic significanceHeuristic, - SignificantTermsAggregatorFactory termsAggregatorFactory, List pipelineAggregators, - Map metaData) throws IOException { + Aggregator create(String name, + AggregatorFactories factories, + ValuesSource valuesSource, + DocValueFormat format, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + IncludeExclude includeExclude, + SearchContext aggregationContext, + Aggregator parent, + SignificanceHeuristic significanceHeuristic, + SignificantTermsAggregatorFactory termsAggregatorFactory, + List pipelineAggregators, + Map metaData) throws IOException { + final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(format); - return new GlobalOrdinalsSignificantTermsAggregator.WithHash(name, factories, - (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, format, bucketCountThresholds, filter, - aggregationContext, parent, significanceHeuristic, termsAggregatorFactory, pipelineAggregators, metaData); + return new GlobalOrdinalsSignificantTermsAggregator(name, factories, + (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, format, bucketCountThresholds, filter, aggregationContext, parent, + true, significanceHeuristic, termsAggregatorFactory, pipelineAggregators, metaData); + } }; @@ -293,7 +328,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac return mode; } } - throw new IllegalArgumentException("Unknown `execution_hint`: [" + value + "], expected any of " + values()); + throw new IllegalArgumentException("Unknown `execution_hint`: [" + value + "], expected any of " + Arrays.toString(values())); } private final ParseField parseField; @@ -302,11 +337,18 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac this.parseField = parseField; } - abstract Aggregator create(String name, AggregatorFactories factories, ValuesSource valuesSource, DocValueFormat format, - TermsAggregator.BucketCountThresholds bucketCountThresholds, IncludeExclude includeExclude, - SearchContext aggregationContext, Aggregator parent, SignificanceHeuristic significanceHeuristic, - SignificantTermsAggregatorFactory termsAggregatorFactory, List pipelineAggregators, - Map metaData) throws IOException; + abstract Aggregator create(String name, + AggregatorFactories factories, + ValuesSource valuesSource, + DocValueFormat format, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + IncludeExclude includeExclude, + SearchContext aggregationContext, + Aggregator parent, + SignificanceHeuristic significanceHeuristic, + SignificantTermsAggregatorFactory termsAggregatorFactory, + List pipelineAggregators, + Map metaData) throws IOException; @Override public 
String toString() { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregator.java index c7539a4ca02..8b58678b676 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregator.java @@ -113,45 +113,40 @@ public class SignificantTextAggregator extends BucketsAggregator { } } - private void processTokenStream(int doc, long bucket, TokenStream ts, String fieldText) throws IOException{ + private void processTokenStream(int doc, long bucket, TokenStream ts, BytesRefHash inDocTerms, String fieldText) + throws IOException{ if (dupSequenceSpotter != null) { ts = new DeDuplicatingTokenFilter(ts, dupSequenceSpotter); } CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class); ts.reset(); try { - //Assume tokens will average 5 bytes in length to size number of tokens - BytesRefHash inDocTerms = new BytesRefHash(1+(fieldText.length()/5), context.bigArrays()); - - try{ - while (ts.incrementToken()) { - if (dupSequenceSpotter != null) { - long newTrieSize = dupSequenceSpotter.getEstimatedSizeInBytes(); - long growth = newTrieSize - lastTrieSize; - // Only update the circuitbreaker after - if (growth > MEMORY_GROWTH_REPORTING_INTERVAL_BYTES) { - addRequestCircuitBreakerBytes(growth); - lastTrieSize = newTrieSize; - } + while (ts.incrementToken()) { + if (dupSequenceSpotter != null) { + long newTrieSize = dupSequenceSpotter.getEstimatedSizeInBytes(); + long growth = newTrieSize - lastTrieSize; + // Only update the circuitbreaker after + if (growth > MEMORY_GROWTH_REPORTING_INTERVAL_BYTES) { + addRequestCircuitBreakerBytes(growth); + lastTrieSize = newTrieSize; } - previous.clear(); - previous.copyChars(termAtt); - BytesRef bytes = previous.get(); - if (inDocTerms.add(bytes) >= 0) { - if (includeExclude == null || includeExclude.accept(bytes)) { - long bucketOrdinal = bucketOrds.add(bytes); - if (bucketOrdinal < 0) { // already seen - bucketOrdinal = -1 - bucketOrdinal; - collectExistingBucket(sub, doc, bucketOrdinal); - } else { - collectBucket(sub, doc, bucketOrdinal); - } + } + previous.clear(); + previous.copyChars(termAtt); + BytesRef bytes = previous.get(); + if (inDocTerms.add(bytes) >= 0) { + if (includeExclude == null || includeExclude.accept(bytes)) { + long bucketOrdinal = bucketOrds.add(bytes); + if (bucketOrdinal < 0) { // already seen + bucketOrdinal = -1 - bucketOrdinal; + collectExistingBucket(sub, doc, bucketOrdinal); + } else { + collectBucket(sub, doc, bucketOrdinal); } } } - } finally{ - Releasables.close(inDocTerms); } + } finally{ ts.close(); } @@ -166,23 +161,28 @@ public class SignificantTextAggregator extends BucketsAggregator { SourceLookup sourceLookup = context.lookup().source(); sourceLookup.setSegmentAndDocument(ctx, doc); + BytesRefHash inDocTerms = new BytesRefHash(256, context.bigArrays()); - for (String sourceField : sourceFieldNames) { - List textsToHighlight = sourceLookup.extractRawValues(sourceField); - textsToHighlight = textsToHighlight.stream().map(obj -> { - if (obj instanceof BytesRef) { - return fieldType.valueForDisplay(obj).toString(); - } else { - return obj; - } - }).collect(Collectors.toList()); - - Analyzer analyzer = fieldType.indexAnalyzer(); - for (Object fieldValue : textsToHighlight) { - String fieldText = 
fieldValue.toString(); - TokenStream ts = analyzer.tokenStream(indexedFieldName, fieldText); - processTokenStream(doc, bucket, ts, fieldText); - } + try { + for (String sourceField : sourceFieldNames) { + List textsToHighlight = sourceLookup.extractRawValues(sourceField); + textsToHighlight = textsToHighlight.stream().map(obj -> { + if (obj instanceof BytesRef) { + return fieldType.valueForDisplay(obj).toString(); + } else { + return obj; + } + }).collect(Collectors.toList()); + + Analyzer analyzer = fieldType.indexAnalyzer(); + for (Object fieldValue : textsToHighlight) { + String fieldText = fieldValue.toString(); + TokenStream ts = analyzer.tokenStream(indexedFieldName, fieldText); + processTokenStream(doc, bucket, ts, inDocTerms, fieldText); + } + } + } finally{ + Releasables.close(inDocTerms); } } }; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java index e1fb72288bb..99f3193b564 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/heuristics/ScriptHeuristic.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; @@ -97,8 +98,9 @@ public class ScriptHeuristic extends SignificanceHeuristic { @Override public SignificanceHeuristic rewrite(SearchContext context) { - return new ExecutableScriptHeuristic(script, - context.getQueryShardContext().getExecutableScript(script, ExecutableScript.AGGS_CONTEXT)); + QueryShardContext shardContext = context.getQueryShardContext(); + ExecutableScript.Factory compiledScript = shardContext.getScriptService().compile(script, ExecutableScript.AGGS_CONTEXT); + return new ExecutableScriptHeuristic(script, compiledScript.newInstance(script.getParams())); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTermsAggregator.java index 0ae42abd9a4..098f523e514 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTermsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTermsAggregator.java @@ -25,9 +25,9 @@ import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; import org.elasticsearch.search.internal.SearchContext; @@ -39,7 +39,7 @@ import 
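/*
 * Editor's note (illustrative sketch): the SignificantTextAggregator refactor above hoists the
 * BytesRefHash out of processTokenStream so a single hash de-duplicates terms across all source
 * fields of one document, and releases it in a finally block. The general lifecycle for such
 * BigArrays-backed structures is sketched below with hypothetical names.
 */
void collectDocTerms(SearchContext context) throws IOException {
    BytesRefHash seenTerms = new BytesRefHash(256, context.bigArrays());
    try {
        // ... call seenTerms.add(termBytes) for each analyzed token; a non-negative return value
        // means the term is new for this document and should be counted exactly once.
    } finally {
        Releasables.close(seenTerms);   // hand the backing memory back to BigArrays
    }
}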
java.util.stream.Collectors; public class DoubleTermsAggregator extends LongTermsAggregator { - public DoubleTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, DocValueFormat format, + DoubleTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, DocValueFormat format, BucketOrder order, BucketCountThresholds bucketCountThresholds, SearchContext aggregationContext, Aggregator parent, SubAggCollectionMode collectionMode, boolean showTermDocCountError, IncludeExclude.LongFilter longFilter, List pipelineAggregators, Map metaData) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java index 33bbc370c6e..d9c60da7acc 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations.bucket.terms; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedSetDocValues; @@ -52,6 +53,8 @@ import java.util.Arrays; import java.util.List; import java.util.Map; +import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; + /** * An aggregator of string values that relies on global ordinals in order to build buckets. */ @@ -66,67 +69,104 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr // first defined one. // So currently for each instance of this aggregator the acceptedglobalValues will be computed, this is unnecessary // especially if this agg is on a second layer or deeper. 
- protected LongBitSet acceptedGlobalOrdinals; + protected final LongBitSet acceptedGlobalOrdinals; + protected final long valueCount; + protected final GlobalOrdLookupFunction lookupGlobalOrd; - protected SortedSetDocValues globalOrds; + private final LongHash bucketOrds; - public GlobalOrdinalsStringTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals valuesSource, - BucketOrder order, DocValueFormat format, BucketCountThresholds bucketCountThresholds, - IncludeExclude.OrdinalsFilter includeExclude, SearchContext context, Aggregator parent, - SubAggCollectionMode collectionMode, boolean showTermDocCountError, List pipelineAggregators, - Map metaData) throws IOException { + public interface GlobalOrdLookupFunction { + BytesRef apply(long ord) throws IOException; + } + + public GlobalOrdinalsStringTermsAggregator(String name, AggregatorFactories factories, + ValuesSource.Bytes.WithOrdinals valuesSource, + BucketOrder order, + DocValueFormat format, + BucketCountThresholds bucketCountThresholds, + IncludeExclude.OrdinalsFilter includeExclude, + SearchContext context, + Aggregator parent, + boolean forceRemapGlobalOrds, + SubAggCollectionMode collectionMode, + boolean showTermDocCountError, + List pipelineAggregators, + Map metaData) throws IOException { super(name, factories, context, parent, order, format, bucketCountThresholds, collectionMode, showTermDocCountError, pipelineAggregators, metaData); this.valuesSource = valuesSource; this.includeExclude = includeExclude; + final IndexReader reader = context.searcher().getIndexReader(); + final SortedSetDocValues values = reader.leaves().size() > 0 ? + valuesSource.globalOrdinalsValues(context.searcher().getIndexReader().leaves().get(0)) : DocValues.emptySortedSet(); + this.valueCount = values.getValueCount(); + this.lookupGlobalOrd = values::lookupOrd; + this.acceptedGlobalOrdinals = includeExclude != null ? includeExclude.acceptedGlobalOrdinals(values) : null; + + /** + * Remap global ords to dense bucket ordinals if any sub-aggregator cannot be deferred. + * Sub-aggregators expect dense buckets and allocate memories based on this assumption. + * Deferred aggregators are safe because the selected ordinals are remapped when the buckets + * are replayed. + */ + boolean remapGlobalOrds = forceRemapGlobalOrds || Arrays.stream(subAggregators).anyMatch((a) -> shouldDefer(a) == false); + this.bucketOrds = remapGlobalOrds ? new LongHash(1, context.bigArrays()) : null; } - protected long getBucketOrd(long termOrd) { - return termOrd; + + boolean remapGlobalOrds() { + return bucketOrds != null; + } + + protected final long getBucketOrd(long globalOrd) { + return bucketOrds == null ? globalOrd : bucketOrds.find(globalOrd); + } + + private void collectGlobalOrd(int doc, long globalOrd, LeafBucketCollector sub) throws IOException { + if (bucketOrds == null) { + collectExistingBucket(sub, doc, globalOrd); + } else { + long bucketOrd = bucketOrds.add(globalOrd); + if (bucketOrd < 0) { + bucketOrd = -1 - bucketOrd; + collectExistingBucket(sub, doc, bucketOrd); + } else { + collectBucket(sub, doc, bucketOrd); + } + } + } + + private SortedSetDocValues getGlobalOrds(LeafReaderContext ctx) throws IOException { + return acceptedGlobalOrdinals == null ? 
+ valuesSource.globalOrdinalsValues(ctx) : new FilteredOrdinals(valuesSource.globalOrdinalsValues(ctx), acceptedGlobalOrdinals); } @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final LeafBucketCollector sub) throws IOException { - - globalOrds = valuesSource.globalOrdinalsValues(ctx); - - if (acceptedGlobalOrdinals == null && includeExclude != null) { - acceptedGlobalOrdinals = includeExclude.acceptedGlobalOrdinals(globalOrds); + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { + final SortedSetDocValues globalOrds = getGlobalOrds(ctx); + if (bucketOrds == null) { + grow(globalOrds.getValueCount()); } - - if (acceptedGlobalOrdinals != null) { - globalOrds = new FilteredOrdinals(globalOrds, acceptedGlobalOrdinals); - } - - return newCollector(globalOrds, sub); - } - - protected LeafBucketCollector newCollector(final SortedSetDocValues ords, - final LeafBucketCollector sub) { - grow(ords.getValueCount()); - final SortedDocValues singleValues = DocValues.unwrapSingleton(ords); + final SortedDocValues singleValues = DocValues.unwrapSingleton(globalOrds); if (singleValues != null) { - return new LeafBucketCollectorBase(sub, ords) { + return new LeafBucketCollectorBase(sub, globalOrds) { @Override public void collect(int doc, long bucket) throws IOException { assert bucket == 0; if (singleValues.advanceExact(doc)) { final int ord = singleValues.ordValue(); - collectExistingBucket(sub, doc, ord); + collectGlobalOrd(doc, ord, sub); } } }; } else { - return new LeafBucketCollectorBase(sub, ords) { + return new LeafBucketCollectorBase(sub, globalOrds) { @Override public void collect(int doc, long bucket) throws IOException { assert bucket == 0; - if (ords.advanceExact(doc)) { - for (long globalOrd = ords.nextOrd(); - globalOrd != SortedSetDocValues.NO_MORE_ORDS; - globalOrd = ords.nextOrd()) { - collectExistingBucket(sub, doc, globalOrd); + if (globalOrds.advanceExact(doc)) { + for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + collectGlobalOrd(doc, globalOrd, sub); } } } @@ -145,21 +185,21 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr @Override public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException { - if (globalOrds == null) { // no context in this reader + if (valueCount == 0) { // no context in this reader return buildEmptyAggregation(); } final int size; if (bucketCountThresholds.getMinDocCount() == 0) { // if minDocCount == 0 then we can end up with more buckets then maxBucketOrd() returns - size = (int) Math.min(globalOrds.getValueCount(), bucketCountThresholds.getShardSize()); + size = (int) Math.min(valueCount, bucketCountThresholds.getShardSize()); } else { size = (int) Math.min(maxBucketOrd(), bucketCountThresholds.getShardSize()); } long otherDocCount = 0; BucketPriorityQueue ordered = new BucketPriorityQueue<>(size, order.comparator(this)); OrdBucket spare = new OrdBucket(-1, 0, null, showTermDocCountError, 0); - for (long globalTermOrd = 0; globalTermOrd < globalOrds.getValueCount(); ++globalTermOrd) { + for (long globalTermOrd = 0; globalTermOrd < valueCount; ++globalTermOrd) { if (includeExclude != null && !acceptedGlobalOrdinals.get(globalTermOrd)) { continue; } @@ -184,10 +224,10 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr final StringTerms.Bucket[] list = new StringTerms.Bucket[ordered.size()]; long 
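/*
 * Editor's note (illustrative sketch): after this rewrite the aggregator keeps raw global
 * ordinals as bucket ordinals only when every sub-aggregator is deferred; otherwise (or when
 * forceRemapGlobalOrds is set, as the _hash execution hint now does) it remaps them into a dense
 * LongHash, because non-deferred sub-aggregators size their per-bucket state from the bucket
 * ordinals they observe. The helper below restates the dense path of collectGlobalOrd above.
 */
long denseBucketOrd(LongHash bucketOrds, long globalOrd) {
    long ord = bucketOrds.add(globalOrd);
    // LongHash.add returns -1 - existingOrd when the key has been seen before.
    return ord < 0 ? -1 - ord : ord;
}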
survivingBucketOrds[] = new long[ordered.size()]; for (int i = ordered.size() - 1; i >= 0; --i) { - final OrdBucket bucket = (OrdBucket) ordered.pop(); + final OrdBucket bucket = ordered.pop(); survivingBucketOrds[i] = bucket.bucketOrd; BytesRef scratch = new BytesRef(); - copy(globalOrds.lookupOrd(bucket.globalOrd), scratch); + copy(lookupGlobalOrd.apply(bucket.globalOrd), scratch); list[i] = new StringTerms.Bucket(scratch, bucket.docCount, null, showTermDocCountError, 0, format); list[i].bucketOrd = bucket.bucketOrd; otherDocCount -= list[i].docCount; @@ -254,76 +294,9 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr } } - /** - * Variant of {@link GlobalOrdinalsStringTermsAggregator} that rebases hashes in order to make them dense. Might be - * useful in case few hashes are visited. - */ - public static class WithHash extends GlobalOrdinalsStringTermsAggregator { - - private final LongHash bucketOrds; - - public WithHash(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals valuesSource, BucketOrder order, - DocValueFormat format, BucketCountThresholds bucketCountThresholds, IncludeExclude.OrdinalsFilter includeExclude, - SearchContext context, Aggregator parent, SubAggCollectionMode collectionMode, - boolean showTermDocCountError, List pipelineAggregators, Map metaData) - throws IOException { - super(name, factories, valuesSource, order, format, bucketCountThresholds, includeExclude, context, parent, collectionMode, - showTermDocCountError, pipelineAggregators, metaData); - bucketOrds = new LongHash(1, context.bigArrays()); - } - - @Override - protected LeafBucketCollector newCollector(final SortedSetDocValues ords, - final LeafBucketCollector sub) { - final SortedDocValues singleValues = DocValues.unwrapSingleton(ords); - if (singleValues != null) { - return new LeafBucketCollectorBase(sub, ords) { - @Override - public void collect(int doc, long bucket) throws IOException { - if (singleValues.advanceExact(doc)) { - final int globalOrd = singleValues.ordValue(); - long bucketOrd = bucketOrds.add(globalOrd); - if (bucketOrd < 0) { - bucketOrd = -1 - bucketOrd; - collectExistingBucket(sub, doc, bucketOrd); - } else { - collectBucket(sub, doc, bucketOrd); - } - } - } - }; - } else { - return new LeafBucketCollectorBase(sub, ords) { - @Override - public void collect(int doc, long bucket) throws IOException { - if (ords.advanceExact(doc)) { - for (long globalOrd = ords.nextOrd(); - globalOrd != SortedSetDocValues.NO_MORE_ORDS; - globalOrd = ords.nextOrd()) { - long bucketOrd = bucketOrds.add(globalOrd); - if (bucketOrd < 0) { - bucketOrd = -1 - bucketOrd; - collectExistingBucket(sub, doc, bucketOrd); - } else { - collectBucket(sub, doc, bucketOrd); - } - } - } - } - }; - } - } - - @Override - protected long getBucketOrd(long termOrd) { - return bucketOrds.find(termOrd); - } - - @Override - protected void doClose() { - Releasables.close(bucketOrds); - } - + @Override + protected void doClose() { + Releasables.close(bucketOrds); } /** @@ -331,32 +304,44 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr * instead of on the fly for each match.This is beneficial for low cardinality fields, because it can reduce * the amount of look-ups significantly. 
*/ - public static class LowCardinality extends GlobalOrdinalsStringTermsAggregator { + static class LowCardinality extends GlobalOrdinalsStringTermsAggregator { private IntArray segmentDocCounts; - + private SortedSetDocValues globalOrds; private SortedSetDocValues segmentOrds; - public LowCardinality(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals valuesSource, - BucketOrder order, DocValueFormat format, - BucketCountThresholds bucketCountThresholds, SearchContext context, Aggregator parent, - SubAggCollectionMode collectionMode, boolean showTermDocCountError, List pipelineAggregators, - Map metaData) throws IOException { - super(name, factories, valuesSource, order, format, bucketCountThresholds, null, context, parent, collectionMode, - showTermDocCountError, pipelineAggregators, metaData); + LowCardinality(String name, + AggregatorFactories factories, + ValuesSource.Bytes.WithOrdinals valuesSource, + BucketOrder order, + DocValueFormat format, + BucketCountThresholds bucketCountThresholds, + SearchContext context, + Aggregator parent, + boolean forceDenseMode, + SubAggCollectionMode collectionMode, + boolean showTermDocCountError, + List pipelineAggregators, + Map metaData) throws IOException { + super(name, factories, valuesSource, order, format, bucketCountThresholds, null, + context, parent, forceDenseMode, collectionMode, showTermDocCountError, pipelineAggregators, metaData); assert factories == null || factories.countAggregators() == 0; this.segmentDocCounts = context.bigArrays().newIntArray(1, true); } - // bucketOrd is ord + 1 to avoid a branch to deal with the missing ord @Override - protected LeafBucketCollector newCollector(final SortedSetDocValues ords, - LeafBucketCollector sub) { - segmentDocCounts = context.bigArrays().grow(segmentDocCounts, 1 + ords.getValueCount()); + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, + final LeafBucketCollector sub) throws IOException { + if (segmentOrds != null) { + mapSegmentCountsToGlobalCounts(); + } + globalOrds = valuesSource.globalOrdinalsValues(ctx); + segmentOrds = valuesSource.ordinalsValues(ctx); + segmentDocCounts = context.bigArrays().grow(segmentDocCounts, 1 + segmentOrds.getValueCount()); assert sub == LeafBucketCollector.NO_OP_COLLECTOR; - final SortedDocValues singleValues = DocValues.unwrapSingleton(ords); + final SortedDocValues singleValues = DocValues.unwrapSingleton(segmentOrds); if (singleValues != null) { - return new LeafBucketCollectorBase(sub, ords) { + return new LeafBucketCollectorBase(sub, segmentOrds) { @Override public void collect(int doc, long bucket) throws IOException { assert bucket == 0; @@ -367,14 +352,12 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr } }; } else { - return new LeafBucketCollectorBase(sub, ords) { + return new LeafBucketCollectorBase(sub, segmentOrds) { @Override public void collect(int doc, long bucket) throws IOException { assert bucket == 0; - if (ords.advanceExact(doc)) { - for (long segmentOrd = ords.nextOrd(); - segmentOrd != SortedSetDocValues.NO_MORE_ORDS; - segmentOrd = ords.nextOrd()) { + if (segmentOrds.advanceExact(doc)) { + for (long segmentOrd = segmentOrds.nextOrd(); segmentOrd != NO_MORE_ORDS; segmentOrd = segmentOrds.nextOrd()) { segmentDocCounts.increment(segmentOrd + 1, 1); } } @@ -383,18 +366,6 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr } } - @Override - public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, - final 
LeafBucketCollector sub) throws IOException { - if (segmentOrds != null) { - mapSegmentCountsToGlobalCounts(); - } - - globalOrds = valuesSource.globalOrdinalsValues(ctx); - segmentOrds = valuesSource.ordinalsValues(ctx); - return newCollector(segmentOrds, sub); - } - @Override protected void doPostCollection() { if (segmentOrds != null) { @@ -426,7 +397,8 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr } final long ord = i - 1; // remember we do +1 when counting final long globalOrd = mapping == null ? ord : mapping.getGlobalOrd(ord); - incrementBucketDocCount(globalOrd, inc); + long bucketOrd = getBucketOrd(globalOrd); + incrementBucketDocCount(bucketOrd, inc); } } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java index 6161f7912a8..064850c4413 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java @@ -27,14 +27,14 @@ import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalOrder; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; import org.elasticsearch.search.aggregations.bucket.terms.support.BucketPriorityQueue; import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.BucketOrder; -import org.elasticsearch.search.aggregations.InternalOrder; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.internal.SearchContext; @@ -153,7 +153,7 @@ public class StringTermsAggregator extends AbstractStringTermsAggregator { final StringTerms.Bucket[] list = new StringTerms.Bucket[ordered.size()]; long survivingBucketOrds[] = new long[ordered.size()]; for (int i = ordered.size() - 1; i >= 0; --i) { - final StringTerms.Bucket bucket = (StringTerms.Bucket) ordered.pop(); + final StringTerms.Bucket bucket = ordered.pop(); survivingBucketOrds[i] = bucket.bucketOrd; list[i] = bucket; otherDocCount -= bucket.docCount; @@ -163,7 +163,7 @@ public class StringTermsAggregator extends AbstractStringTermsAggregator { // Now build the aggs for (int i = 0; i < list.length; i++) { - final StringTerms.Bucket bucket = (StringTerms.Bucket)list[i]; + final StringTerms.Bucket bucket = list[i]; bucket.termBytes = BytesRef.deepCopyOf(bucket.termBytes); bucket.aggregations = bucketAggregations(bucket.bucketOrd); bucket.docCountError = 0; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java index 9a06dfe66f5..086bbd9f022 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java +++ 
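/*
 * Editor's note (illustrative sketch): LowCardinality counts per segment ordinal and folds the
 * counts into buckets once a segment is exhausted. With the remapping change the fold now goes
 * segment ord -> global ord -> bucket ord, as mapSegmentCountsToGlobalCounts above shows and the
 * simplified restatement below mirrors; segmentToGlobal stands in for the OrdinalMap lookup.
 */
void foldSegmentCounts(IntArray segmentDocCounts, java.util.function.LongUnaryOperator segmentToGlobal) {
    for (long i = 1; i < segmentDocCounts.size(); i++) {
        int inc = segmentDocCounts.get(i);
        if (inc == 0) {
            continue;
        }
        long globalOrd = segmentToGlobal.applyAsLong(i - 1);   // counts are stored at ord + 1
        incrementBucketDocCount(getBucketOrd(globalOrd), inc); // map to the dense bucket ordinal
    }
}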
b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java @@ -27,20 +27,21 @@ import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalOrder; import org.elasticsearch.search.aggregations.NonCollectingAggregator; import org.elasticsearch.search.aggregations.bucket.BucketUtils; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds; import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.BucketOrder; -import org.elasticsearch.search.aggregations.InternalOrder; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; +import java.util.Arrays; import java.util.List; import java.util.Map; @@ -53,10 +54,18 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory config, BucketOrder order, - IncludeExclude includeExclude, String executionHint, SubAggCollectionMode collectMode, - TermsAggregator.BucketCountThresholds bucketCountThresholds, boolean showTermDocCountError, SearchContext context, - AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, Map metaData) throws IOException { + TermsAggregatorFactory(String name, + ValuesSourceConfig config, + BucketOrder order, + IncludeExclude includeExclude, + String executionHint, + SubAggCollectionMode collectMode, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + boolean showTermDocCountError, + SearchContext context, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metaData) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metaData); this.order = order; this.includeExclude = includeExclude; @@ -225,14 +234,24 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory pipelineAggregators, Map metaData) - throws IOException { + Aggregator create(String name, + AggregatorFactories factories, + ValuesSource valuesSource, + BucketOrder order, + DocValueFormat format, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + IncludeExclude includeExclude, + SearchContext context, + Aggregator parent, + SubAggCollectionMode subAggCollectMode, + boolean showTermDocCountError, + List pipelineAggregators, + Map metaData) throws IOException { + final IncludeExclude.StringFilter filter = includeExclude == null ? 
null : includeExclude.convertToStringFilter(format); return new StringTermsAggregator(name, factories, valuesSource, order, format, bucketCountThresholds, filter, context, parent, subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData); + } @Override @@ -244,15 +263,24 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory pipelineAggregators, Map metaData) - throws IOException { + Aggregator create(String name, + AggregatorFactories factories, + ValuesSource valuesSource, + BucketOrder order, + DocValueFormat format, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + IncludeExclude includeExclude, + SearchContext context, Aggregator parent, + SubAggCollectionMode subAggCollectMode, + boolean showTermDocCountError, + List pipelineAggregators, + Map metaData) throws IOException { + final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(format); return new GlobalOrdinalsStringTermsAggregator(name, factories, (ValuesSource.Bytes.WithOrdinals) valuesSource, order, - format, bucketCountThresholds, filter, context, parent, subAggCollectMode, showTermDocCountError, + format, bucketCountThresholds, filter, context, parent, false, subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData); + } @Override @@ -264,15 +292,25 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory pipelineAggregators, Map metaData) - throws IOException { + Aggregator create(String name, + AggregatorFactories factories, + ValuesSource valuesSource, + BucketOrder order, + DocValueFormat format, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + IncludeExclude includeExclude, + SearchContext context, + Aggregator parent, + SubAggCollectionMode subAggCollectMode, + boolean showTermDocCountError, + List pipelineAggregators, + Map metaData) throws IOException { + final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? 
null : includeExclude.convertToOrdinalsFilter(format); - return new GlobalOrdinalsStringTermsAggregator.WithHash(name, factories, (ValuesSource.Bytes.WithOrdinals) valuesSource, - order, format, bucketCountThresholds, filter, context, parent, subAggCollectMode, showTermDocCountError, - pipelineAggregators, metaData); + return new GlobalOrdinalsStringTermsAggregator(name, factories, (ValuesSource.Bytes.WithOrdinals) valuesSource, + order, format, bucketCountThresholds, filter, context, parent, true, subAggCollectMode, + showTermDocCountError, pipelineAggregators, metaData); + } @Override @@ -283,21 +321,31 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory pipelineAggregators, Map metaData) - throws IOException { + Aggregator create(String name, + AggregatorFactories factories, + ValuesSource valuesSource, + BucketOrder order, + DocValueFormat format, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + IncludeExclude includeExclude, + SearchContext context, + Aggregator parent, + SubAggCollectionMode subAggCollectMode, + boolean showTermDocCountError, + List pipelineAggregators, + Map metaData) throws IOException { + if (includeExclude != null || factories.countAggregators() > 0 - // we need the FieldData impl to be able to extract the - // segment to global ord mapping + // we need the FieldData impl to be able to extract the + // segment to global ord mapping || valuesSource.getClass() != ValuesSource.Bytes.FieldData.class) { return GLOBAL_ORDINALS.create(name, factories, valuesSource, order, format, bucketCountThresholds, includeExclude, context, parent, subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData); } return new GlobalOrdinalsStringTermsAggregator.LowCardinality(name, factories, (ValuesSource.Bytes.WithOrdinals) valuesSource, order, format, bucketCountThresholds, context, parent, - subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData); + false, subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData); + } @Override @@ -312,7 +360,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory pipelineAggregators, Map metaData) - throws IOException; + abstract Aggregator create(String name, + AggregatorFactories factories, + ValuesSource valuesSource, + BucketOrder order, + DocValueFormat format, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + IncludeExclude includeExclude, + SearchContext context, + Aggregator parent, + SubAggCollectionMode subAggCollectMode, + boolean showTermDocCountError, + List pipelineAggregators, + Map metaData) throws IOException; abstract boolean needsGlobalOrdinals(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java index 46e371a3dfe..dd0785b2d70 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java @@ -233,16 +233,14 @@ public class IncludeExclude implements Writeable, ToXContent { } public abstract static class OrdinalsFilter { - public abstract LongBitSet acceptedGlobalOrdinals(SortedSetDocValues globalOrdinals) - throws IOException; + public abstract LongBitSet acceptedGlobalOrdinals(SortedSetDocValues globalOrdinals) throws IOException; } class PartitionedOrdinalsFilter extends OrdinalsFilter { @Override - 
public LongBitSet acceptedGlobalOrdinals(SortedSetDocValues globalOrdinals) - throws IOException { + public LongBitSet acceptedGlobalOrdinals(SortedSetDocValues globalOrdinals) throws IOException { final long numOrds = globalOrdinals.getValueCount(); final LongBitSet acceptedGlobalOrdinals = new LongBitSet(numOrds); final TermsEnum termEnum = globalOrdinals.termsEnum(); @@ -271,8 +269,7 @@ public class IncludeExclude implements Writeable, ToXContent { * */ @Override - public LongBitSet acceptedGlobalOrdinals(SortedSetDocValues globalOrdinals) - throws IOException { + public LongBitSet acceptedGlobalOrdinals(SortedSetDocValues globalOrdinals) throws IOException { LongBitSet acceptedGlobalOrdinals = new LongBitSet(globalOrdinals.getValueCount()); TermsEnum globalTermsEnum; Terms globalTerms = new DocValuesTerms(globalOrdinals); @@ -297,8 +294,7 @@ public class IncludeExclude implements Writeable, ToXContent { } @Override - public LongBitSet acceptedGlobalOrdinals(SortedSetDocValues globalOrdinals) - throws IOException { + public LongBitSet acceptedGlobalOrdinals(SortedSetDocValues globalOrdinals) throws IOException { LongBitSet acceptedGlobalOrdinals = new LongBitSet(globalOrdinals.getValueCount()); if (includeValues != null) { for (BytesRef term : includeValues) { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java index 34c06ec3c55..4753884e437 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java @@ -184,22 +184,21 @@ public class ScriptedMetricAggregationBuilder extends AbstractAggregationBuilder Builder subfactoriesBuilder) throws IOException { QueryShardContext queryShardContext = context.getQueryShardContext(); - Function, ExecutableScript> executableInitScript; + ExecutableScript.Factory executableInitScript; if (initScript != null) { - executableInitScript = queryShardContext.getLazyExecutableScript(initScript, ExecutableScript.AGGS_CONTEXT); + executableInitScript = queryShardContext.getScriptService().compile(initScript, ExecutableScript.AGGS_CONTEXT); } else { - executableInitScript = (p) -> null; + executableInitScript = p -> null; } - Function, SearchScript> searchMapScript = - queryShardContext.getLazySearchScript(mapScript, SearchScript.AGGS_CONTEXT); - Function, ExecutableScript> executableCombineScript; + SearchScript.Factory searchMapScript = queryShardContext.getScriptService().compile(mapScript, SearchScript.AGGS_CONTEXT); + ExecutableScript.Factory executableCombineScript; if (combineScript != null) { - executableCombineScript = queryShardContext.getLazyExecutableScript(combineScript, ExecutableScript.AGGS_CONTEXT); + executableCombineScript = queryShardContext.getScriptService().compile(combineScript, ExecutableScript.AGGS_CONTEXT); } else { - executableCombineScript = (p) -> null; + executableCombineScript = p -> null; } return new ScriptedMetricAggregatorFactory(name, searchMapScript, executableInitScript, executableCombineScript, reduceScript, - params, context, parent, subfactoriesBuilder, metaData); + params, queryShardContext.lookup(), context, parent, subfactoriesBuilder, metaData); } diff --git 
a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java index cee7b3402f3..bebe9f892b6 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.metrics.scripted; import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.script.ExecutableScript; -import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.aggregations.Aggregator; @@ -38,12 +37,12 @@ import java.util.Map; public class ScriptedMetricAggregator extends MetricsAggregator { - private final SearchScript mapScript; + private final SearchScript.LeafFactory mapScript; private final ExecutableScript combineScript; private final Script reduceScript; private Map params; - protected ScriptedMetricAggregator(String name, SearchScript mapScript, ExecutableScript combineScript, + protected ScriptedMetricAggregator(String name, SearchScript.LeafFactory mapScript, ExecutableScript combineScript, Script reduceScript, Map params, SearchContext context, Aggregator parent, List pipelineAggregators, Map metaData) throws IOException { @@ -62,7 +61,7 @@ public class ScriptedMetricAggregator extends MetricsAggregator { @Override public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { - final LeafSearchScript leafMapScript = mapScript.getLeafSearchScript(ctx); + final SearchScript leafMapScript = mapScript.newInstance(ctx); return new LeafBucketCollectorBase(sub, leafMapScript) { @Override public void collect(int doc, long bucket) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java index bac2becc8e4..2d9e02d08cb 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java @@ -28,6 +28,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; import java.util.ArrayList; @@ -38,21 +39,23 @@ import java.util.function.Function; public class ScriptedMetricAggregatorFactory extends AggregatorFactory { - private final Function, SearchScript> mapScript; - private final Function, ExecutableScript> combineScript; + private final SearchScript.Factory mapScript; + private final ExecutableScript.Factory combineScript; private final Script reduceScript; private final Map params; - private final Function, ExecutableScript> initScript; + private final SearchLookup lookup; + private final ExecutableScript.Factory initScript; - public ScriptedMetricAggregatorFactory(String name, Function, SearchScript> mapScript, - Function, 
ExecutableScript> initScript, Function, ExecutableScript> combineScript, - Script reduceScript, Map params, SearchContext context, AggregatorFactory parent, - AggregatorFactories.Builder subFactories, Map metaData) throws IOException { + public ScriptedMetricAggregatorFactory(String name, SearchScript.Factory mapScript, ExecutableScript.Factory initScript, + ExecutableScript.Factory combineScript, Script reduceScript, Map params, + SearchLookup lookup, SearchContext context, AggregatorFactory parent, + AggregatorFactories.Builder subFactories, Map metaData) throws IOException { super(name, context, parent, subFactories, metaData); this.mapScript = mapScript; this.initScript = initScript; this.combineScript = combineScript; this.reduceScript = reduceScript; + this.lookup = lookup; this.params = params; } @@ -70,9 +73,9 @@ public class ScriptedMetricAggregatorFactory extends AggregatorFactory()); } - final ExecutableScript initScript = this.initScript.apply(params); - final SearchScript mapScript = this.mapScript.apply(params); - final ExecutableScript combineScript = this.combineScript.apply(params); + final ExecutableScript initScript = this.initScript.newInstance(params); + final SearchScript.LeafFactory mapScript = this.mapScript.newFactory(params, lookup); + final ExecutableScript combineScript = this.combineScript.newInstance(params); final Script reduceScript = deepCopyScript(this.reduceScript, context); if (initScript != null) { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java index 94467add511..2739427cfe9 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.SearchScript; @@ -533,8 +534,9 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder fields = new ArrayList<>(); if (scriptFields != null) { for (ScriptField field : scriptFields) { - SearchScript searchScript = context.getQueryShardContext().getSearchScript(field.script(), - SearchScript.CONTEXT); + QueryShardContext shardContext = context.getQueryShardContext(); + SearchScript.Factory factory = shardContext.getScriptService().compile(field.script(), SearchScript.CONTEXT); + SearchScript.LeafFactory searchScript = factory.newFactory(field.script().getParams(), shardContext.lookup()); fields.add(new org.elasticsearch.search.fetch.subphase.ScriptFieldsContext.ScriptField( field.fieldName(), searchScript, field.ignoreFailure())); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java index 49422995c95..196f7cca473 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java +++ 
b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/movavg/MovAvgPipelineAggregator.java @@ -161,7 +161,7 @@ public class MovAvgPipelineAggregator extends PipelineAggregator { }).collect(Collectors.toList()); aggs.add(new InternalSimpleValue(name(), predictions[i], formatter, new ArrayList(), metaData())); - Bucket newBucket = factory.createBucket(newKey, 0, new InternalAggregations(aggs)); + Bucket newBucket = factory.createBucket(newKey, bucket.getDocCount(), new InternalAggregations(aggs)); // Overwrite the existing bucket with the new version newBuckets.set(lastValidPosition + i + 1, newBucket); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java index a8aaa259408..5fca34beff2 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java @@ -40,7 +40,6 @@ import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.SortingBinaryDocValues; import org.elasticsearch.index.fielddata.SortingNumericDoubleValues; -import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.aggregations.support.ValuesSource.WithScript.BytesValues; import org.elasticsearch.search.aggregations.support.values.ScriptBytesValues; @@ -162,15 +161,15 @@ public abstract class ValuesSource { public static class Script extends Bytes { - private final SearchScript script; + private final SearchScript.LeafFactory script; - public Script(SearchScript script) { + public Script(SearchScript.LeafFactory script) { this.script = script; } @Override public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException { - return new ScriptBytesValues(script.getLeafSearchScript(context)); + return new ScriptBytesValues(script.newInstance(context)); } @Override @@ -233,9 +232,9 @@ public abstract class ValuesSource { public static class WithScript extends Numeric { private final Numeric delegate; - private final SearchScript script; + private final SearchScript.LeafFactory script; - public WithScript(Numeric delegate, SearchScript script) { + public WithScript(Numeric delegate, SearchScript.LeafFactory script) { this.delegate = delegate; this.script = script; } @@ -252,25 +251,25 @@ public abstract class ValuesSource { @Override public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException { - return new ValuesSource.WithScript.BytesValues(delegate.bytesValues(context), script.getLeafSearchScript(context)); + return new ValuesSource.WithScript.BytesValues(delegate.bytesValues(context), script.newInstance(context)); } @Override public SortedNumericDocValues longValues(LeafReaderContext context) throws IOException { - return new LongValues(delegate.longValues(context), script.getLeafSearchScript(context)); + return new LongValues(delegate.longValues(context), script.newInstance(context)); } @Override public SortedNumericDoubleValues doubleValues(LeafReaderContext context) throws IOException { - return new DoubleValues(delegate.doubleValues(context), script.getLeafSearchScript(context)); + return new DoubleValues(delegate.doubleValues(context), script.newInstance(context)); } static class LongValues extends AbstractSortingNumericDocValues implements 
ScorerAware { private final SortedNumericDocValues longValues; - private final LeafSearchScript script; + private final SearchScript script; - LongValues(SortedNumericDocValues values, LeafSearchScript script) { + LongValues(SortedNumericDocValues values, SearchScript script) { this.longValues = values; this.script = script; } @@ -299,9 +298,9 @@ public abstract class ValuesSource { static class DoubleValues extends SortingNumericDoubleValues implements ScorerAware { private final SortedNumericDoubleValues doubleValues; - private final LeafSearchScript script; + private final SearchScript script; - DoubleValues(SortedNumericDoubleValues values, LeafSearchScript script) { + DoubleValues(SortedNumericDoubleValues values, SearchScript script) { this.doubleValues = values; this.script = script; } @@ -358,10 +357,10 @@ public abstract class ValuesSource { } public static class Script extends Numeric { - private final SearchScript script; + private final SearchScript.LeafFactory script; private final ValueType scriptValueType; - public Script(SearchScript script, ValueType scriptValueType) { + public Script(SearchScript.LeafFactory script, ValueType scriptValueType) { this.script = script; this.scriptValueType = scriptValueType; } @@ -373,17 +372,17 @@ public abstract class ValuesSource { @Override public SortedNumericDocValues longValues(LeafReaderContext context) throws IOException { - return new ScriptLongValues(script.getLeafSearchScript(context)); + return new ScriptLongValues(script.newInstance(context)); } @Override public SortedNumericDoubleValues doubleValues(LeafReaderContext context) throws IOException { - return new ScriptDoubleValues(script.getLeafSearchScript(context)); + return new ScriptDoubleValues(script.newInstance(context)); } @Override public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException { - return new ScriptBytesValues(script.getLeafSearchScript(context)); + return new ScriptBytesValues(script.newInstance(context)); } @Override @@ -398,9 +397,9 @@ public abstract class ValuesSource { public static class WithScript extends Bytes { private final ValuesSource delegate; - private final SearchScript script; + private final SearchScript.LeafFactory script; - public WithScript(ValuesSource delegate, SearchScript script) { + public WithScript(ValuesSource delegate, SearchScript.LeafFactory script) { this.delegate = delegate; this.script = script; } @@ -412,15 +411,15 @@ public abstract class ValuesSource { @Override public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException { - return new BytesValues(delegate.bytesValues(context), script.getLeafSearchScript(context)); + return new BytesValues(delegate.bytesValues(context), script.newInstance(context)); } static class BytesValues extends SortingBinaryDocValues implements ScorerAware { private final SortedBinaryDocValues bytesValues; - private final LeafSearchScript script; + private final SearchScript script; - BytesValues(SortedBinaryDocValues bytesValues, LeafSearchScript script) { + BytesValues(SortedBinaryDocValues bytesValues, SearchScript script) { this.bytesValues = bytesValues; this.script = script; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java index 6404cae0e4c..d8c2167c16c 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java +++ 
b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java @@ -116,11 +116,12 @@ public class ValuesSourceConfig { return config; } - private static SearchScript createScript(Script script, QueryShardContext context) { + private static SearchScript.LeafFactory createScript(Script script, QueryShardContext context) { if (script == null) { return null; } else { - return context.getSearchScript(script, SearchScript.AGGS_CONTEXT); + SearchScript.Factory factory = context.getScriptService().compile(script, SearchScript.AGGS_CONTEXT); + return factory.newFactory(script.getParams(), context.lookup()); } } @@ -137,7 +138,7 @@ public class ValuesSourceConfig { private final ValuesSourceType valueSourceType; private FieldContext fieldContext; - private SearchScript script; + private SearchScript.LeafFactory script; private ValueType scriptValueType; private boolean unmapped = false; private DocValueFormat format = DocValueFormat.RAW; @@ -156,7 +157,7 @@ public class ValuesSourceConfig { return fieldContext; } - public SearchScript script() { + public SearchScript.LeafFactory script() { return script; } @@ -173,7 +174,7 @@ public class ValuesSourceConfig { return this; } - public ValuesSourceConfig script(SearchScript script) { + public ValuesSourceConfig script(SearchScript.LeafFactory script) { this.script = script; return this; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptBytesValues.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptBytesValues.java index 78685ed0e82..38950325daa 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptBytesValues.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptBytesValues.java @@ -22,7 +22,7 @@ import org.apache.lucene.search.Scorer; import org.elasticsearch.common.lucene.ScorerAware; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortingBinaryDocValues; -import org.elasticsearch.script.LeafSearchScript; +import org.elasticsearch.script.SearchScript; import java.io.IOException; import java.lang.reflect.Array; @@ -33,9 +33,9 @@ import java.util.Collection; */ public class ScriptBytesValues extends SortingBinaryDocValues implements ScorerAware { - private final LeafSearchScript script; + private final SearchScript script; - public ScriptBytesValues(LeafSearchScript script) { + public ScriptBytesValues(SearchScript script) { super(); this.script = script; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptDoubleValues.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptDoubleValues.java index 619ffde0a1e..ac3c8f682ba 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptDoubleValues.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptDoubleValues.java @@ -21,7 +21,7 @@ package org.elasticsearch.search.aggregations.support.values; import org.apache.lucene.search.Scorer; import org.elasticsearch.common.lucene.ScorerAware; import org.elasticsearch.index.fielddata.SortingNumericDoubleValues; -import org.elasticsearch.script.LeafSearchScript; +import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.joda.time.ReadableInstant; @@ -34,9 +34,9 @@ import java.util.Collection; */ public class ScriptDoubleValues 
extends SortingNumericDoubleValues implements ScorerAware { - final LeafSearchScript script; + final SearchScript script; - public ScriptDoubleValues(LeafSearchScript script) { + public ScriptDoubleValues(SearchScript script) { super(); this.script = script; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptLongValues.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptLongValues.java index 6247e96c7ec..818a9d9fd8d 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptLongValues.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptLongValues.java @@ -22,7 +22,7 @@ import org.apache.lucene.search.Scorer; import org.apache.lucene.util.LongValues; import org.elasticsearch.common.lucene.ScorerAware; import org.elasticsearch.index.fielddata.AbstractSortingNumericDocValues; -import org.elasticsearch.script.LeafSearchScript; +import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.joda.time.ReadableInstant; @@ -36,9 +36,9 @@ import java.util.Iterator; */ public class ScriptLongValues extends AbstractSortingNumericDocValues implements ScorerAware { - final LeafSearchScript script; + final SearchScript script; - public ScriptLongValues(LeafSearchScript script) { + public ScriptLongValues(SearchScript script) { super(); this.script = script; } diff --git a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 684350076b8..be39ce3698c 100644 --- a/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -92,6 +92,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ public static final ParseField IGNORE_FAILURE_FIELD = new ParseField("ignore_failure"); public static final ParseField SORT_FIELD = new ParseField("sort"); public static final ParseField TRACK_SCORES_FIELD = new ParseField("track_scores"); + public static final ParseField TRACK_TOTAL_HITS_FIELD = new ParseField("track_total_hits"); public static final ParseField INDICES_BOOST_FIELD = new ParseField("indices_boost"); public static final ParseField AGGREGATIONS_FIELD = new ParseField("aggregations"); public static final ParseField AGGS_FIELD = new ParseField("aggs"); @@ -142,6 +143,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ private boolean trackScores = false; + private boolean trackTotalHits = true; + private SearchAfterBuilder searchAfterBuilder; private SliceBuilder sliceBuilder; @@ -224,6 +227,11 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ if (in.getVersion().onOrAfter(Version.V_5_3_0)) { collapse = in.readOptionalWriteable(CollapseBuilder::new); } + if (in.getVersion().onOrAfter(Version.V_6_0_0_alpha3)) { + trackTotalHits = in.readBoolean(); + } else { + trackTotalHits = true; + } } @Override @@ -275,6 +283,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ if (out.getVersion().onOrAfter(Version.V_5_3_0)) { out.writeOptionalWriteable(collapse); } + if (out.getVersion().onOrAfter(Version.V_6_0_0_alpha3)) { + out.writeBoolean(trackTotalHits); + } } /** @@ -489,6 +500,17 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ return 
trackScores; } + /** + * Indicates if the total hit count for the query should be tracked. + */ + public boolean trackTotalHits() { + return trackTotalHits; + } + + public SearchSourceBuilder trackTotalHits(boolean trackTotalHits) { + this.trackTotalHits = trackTotalHits; + return this; + } /** * The sort values that indicates which docs this request should "search after". @@ -926,6 +948,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ rewrittenBuilder.terminateAfter = terminateAfter; rewrittenBuilder.timeout = timeout; rewrittenBuilder.trackScores = trackScores; + rewrittenBuilder.trackTotalHits = trackTotalHits; rewrittenBuilder.version = version; rewrittenBuilder.collapse = collapse; return rewrittenBuilder; @@ -964,6 +987,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ explain = parser.booleanValue(); } else if (TRACK_SCORES_FIELD.match(currentFieldName)) { trackScores = parser.booleanValue(); + } else if (TRACK_TOTAL_HITS_FIELD.match(currentFieldName)) { + trackTotalHits = parser.booleanValue(); } else if (_SOURCE_FIELD.match(currentFieldName)) { fetchSourceContext = FetchSourceContext.fromXContent(context.parser()); } else if (STORED_FIELDS_FIELD.match(currentFieldName)) { @@ -1174,6 +1199,10 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ builder.field(TRACK_SCORES_FIELD.getPreferredName(), true); } + if (trackTotalHits == false) { + builder.field(TRACK_TOTAL_HITS_FIELD.getPreferredName(), false); + } + if (searchAfterBuilder != null) { builder.array(SEARCH_AFTER.getPreferredName(), searchAfterBuilder.getSortValues()); } @@ -1433,7 +1462,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ return Objects.hash(aggregations, explain, fetchSourceContext, docValueFields, storedFieldsContext, from, highlightBuilder, indexBoosts, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields, size, sorts, searchAfterBuilder, sliceBuilder, stats, suggestBuilder, terminateAfter, timeout, trackScores, version, - profile, extBuilders, collapse); + profile, extBuilders, collapse, trackTotalHits); } @Override @@ -1470,6 +1499,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ && Objects.equals(version, other.version) && Objects.equals(profile, other.profile) && Objects.equals(extBuilders, other.extBuilders) - && Objects.equals(collapse, other.collapse); + && Objects.equals(collapse, other.collapse) + && Objects.equals(trackTotalHits, other.trackTotalHits); } } diff --git a/core/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java b/core/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java index cb1587cd7d9..9e329da9b00 100644 --- a/core/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java +++ b/core/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java @@ -56,7 +56,7 @@ public class CollapseContext { return innerHits; } - public CollapsingTopDocsCollector createTopDocs(Sort sort, int topN, boolean trackMaxScore) throws IOException { + public CollapsingTopDocsCollector createTopDocs(Sort sort, int topN, boolean trackMaxScore) { if (fieldType instanceof KeywordFieldMapper.KeywordFieldType) { return CollapsingTopDocsCollector.createKeyword(fieldType.name(), sort, topN, trackMaxScore); } else if (fieldType instanceof NumberFieldMapper.NumberFieldType) { diff --git 
a/core/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesFetchSubPhase.java index e38090ee4d8..c28e07ff455 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesFetchSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesFetchSubPhase.java @@ -22,7 +22,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.search.Query; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; import org.elasticsearch.ExceptionsHelper; @@ -78,8 +78,8 @@ public final class MatchedQueriesFetchSubPhase implements FetchSubPhase { LeafReaderContext ctx = indexReader.leaves().get(readerIndex); docBase = ctx.docBase; // scorers can be costly to create, so reuse them across docs of the same segment - Scorer scorer = weight.scorer(ctx); - matchingDocs = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), scorer); + ScorerSupplier scorerSupplier = weight.scorerSupplier(ctx); + matchingDocs = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), scorerSupplier); } if (matchingDocs.get(hit.docId() - docBase)) { matchedQueries[i].add(name); diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsContext.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsContext.java index 79bacd7f938..9e43b0bdd32 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsContext.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsContext.java @@ -28,10 +28,10 @@ public class ScriptFieldsContext { public static class ScriptField { private final String name; - private final SearchScript script; + private final SearchScript.LeafFactory script; private final boolean ignoreException; - public ScriptField(String name, SearchScript script, boolean ignoreException) { + public ScriptField(String name, SearchScript.LeafFactory script, boolean ignoreException) { this.name = name; this.script = script; this.ignoreException = ignoreException; @@ -41,7 +41,7 @@ public class ScriptFieldsContext { return name; } - public SearchScript script() { + public SearchScript.LeafFactory script() { return this.script; } diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsFetchSubPhase.java index 6bed20e6b3e..61c1c802de8 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsFetchSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsFetchSubPhase.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.search.fetch.subphase; -import org.elasticsearch.script.LeafSearchScript; +import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.internal.SearchContext; @@ -40,9 +40,9 @@ public final class ScriptFieldsFetchSubPhase implements FetchSubPhase { for (ScriptFieldsContext.ScriptField scriptField : context.scriptFields().fields()) { /* Because this is called once per document we end up creating new ScriptDocValues for every document which is important because * 
the values inside ScriptDocValues might be reused for different documents (Dates do this). */ - LeafSearchScript leafScript; + SearchScript leafScript; try { - leafScript = scriptField.script().getLeafSearchScript(hitContext.readerContext()); + leafScript = scriptField.script().newInstance(hitContext.readerContext()); } catch (IOException e1) { throw new IllegalStateException("Failed to load script", e1); } diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java index e5db6639ad8..e6e50cc37e6 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java @@ -262,8 +262,8 @@ public abstract class AbstractHighlighterBuilderplain, fvh and postings. - * The default option selected is dependent on the mappings defined for your index. + * are unified, plain and fvh. + * Defaults to unified. * Details of the different highlighter types are covered in the reference guide. */ @SuppressWarnings("unchecked") diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java index b1d557e851a..c08eea2e588 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FastVectorHighlighter.java @@ -50,7 +50,6 @@ import java.util.Locale; import java.util.Map; public class FastVectorHighlighter implements Highlighter { - private static final BoundaryScanner DEFAULT_SIMPLE_BOUNDARY_SCANNER = new SimpleBoundaryScanner(); private static final BoundaryScanner DEFAULT_SENTENCE_BOUNDARY_SCANNER = new BreakIteratorBoundaryScanner(BreakIterator.getSentenceInstance(Locale.ROOT)); diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java index 701b981e0f0..6b9121b8f7b 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java @@ -39,8 +39,6 @@ import java.util.List; import java.util.Map; public class HighlightPhase extends AbstractComponent implements FetchSubPhase { - private static final List STANDARD_HIGHLIGHTERS_BY_PRECEDENCE = Arrays.asList("fvh", "postings", "plain"); - private final Map highlighters; public HighlightPhase(Settings settings, Map highlighters) { @@ -94,13 +92,7 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { } String highlighterType = field.fieldOptions().highlighterType(); if (highlighterType == null) { - for(String highlighterCandidate : STANDARD_HIGHLIGHTERS_BY_PRECEDENCE) { - if (highlighters.get(highlighterCandidate).canHighlight(fieldMapper)) { - highlighterType = highlighterCandidate; - break; - } - } - assert highlighterType != null; + highlighterType = "unified"; } Highlighter highlighter = highlighters.get(highlighterType); if (highlighter == null) { diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java 
b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java index 4a6e991b9a3..b241a686a24 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java @@ -35,7 +35,7 @@ import static java.util.Collections.singleton; public final class HighlightUtils { - //U+2029 PARAGRAPH SEPARATOR (PS): each value holds a discrete passage for highlighting (postings highlighter) + //U+2029 PARAGRAPH SEPARATOR (PS): each value holds a discrete passage for highlighting (unified highlighter) public static final char PARAGRAPH_SEPARATOR = 8233; public static final char NULL_SEPARATOR = '\u0000'; diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java index deb1464b703..c7943367d31 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java @@ -49,7 +49,6 @@ import java.util.List; import java.util.Map; public class PlainHighlighter implements Highlighter { - private static final String CACHE_KEY = "highlight-plain"; @Override diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PostingsHighlighter.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PostingsHighlighter.java deleted file mode 100644 index 34997912feb..00000000000 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PostingsHighlighter.java +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.search.fetch.subphase.highlight; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.highlight.Encoder; -import org.apache.lucene.search.postingshighlight.CustomPassageFormatter; -import org.apache.lucene.search.postingshighlight.CustomPostingsHighlighter; -import org.apache.lucene.search.postingshighlight.CustomSeparatorBreakIterator; -import org.apache.lucene.search.highlight.Snippet; -import org.apache.lucene.util.CollectionUtil; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.text.Text; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.search.fetch.FetchPhaseExecutionException; -import org.elasticsearch.search.fetch.FetchSubPhase; -import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.fetch.subphase.highlight.HighlightUtils.Encoders; - -import java.io.IOException; -import java.text.BreakIterator; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Locale; -import java.util.Map; - -public class PostingsHighlighter implements Highlighter { - - private static final String CACHE_KEY = "highlight-postings"; - - @Override - public HighlightField highlight(HighlighterContext highlighterContext) { - - FieldMapper fieldMapper = highlighterContext.mapper; - SearchContextHighlight.Field field = highlighterContext.field; - if (canHighlight(fieldMapper) == false) { - throw new IllegalArgumentException("the field [" + highlighterContext.fieldName - + "] should be indexed with positions and offsets in the postings list to be used with postings highlighter"); - } - - SearchContext context = highlighterContext.context; - FetchSubPhase.HitContext hitContext = highlighterContext.hitContext; - - if (!hitContext.cache().containsKey(CACHE_KEY)) { - hitContext.cache().put(CACHE_KEY, new HighlighterEntry()); - } - - HighlighterEntry highlighterEntry = (HighlighterEntry) hitContext.cache().get(CACHE_KEY); - MapperHighlighterEntry mapperHighlighterEntry = highlighterEntry.mappers.get(fieldMapper); - - if (mapperHighlighterEntry == null) { - Encoder encoder = field.fieldOptions().encoder().equals("html") ? 
Encoders.HTML : Encoders.DEFAULT; - CustomPassageFormatter passageFormatter = new CustomPassageFormatter( - field.fieldOptions().preTags()[0], field.fieldOptions().postTags()[0], encoder); - mapperHighlighterEntry = new MapperHighlighterEntry(passageFormatter); - } - - List snippets = new ArrayList<>(); - int numberOfFragments; - try { - Analyzer analyzer = context.mapperService().documentMapper(hitContext.hit().getType()).mappers().indexAnalyzer(); - List fieldValues = HighlightUtils.loadFieldValues(field, fieldMapper, context, hitContext); - CustomPostingsHighlighter highlighter; - if (field.fieldOptions().numberOfFragments() == 0) { - //we use a control char to separate values, which is the only char that the custom break iterator breaks the text on, - //so we don't lose the distinction between the different values of a field and we get back a snippet per value - String fieldValue = mergeFieldValues(fieldValues, HighlightUtils.NULL_SEPARATOR); - CustomSeparatorBreakIterator breakIterator = new CustomSeparatorBreakIterator(HighlightUtils.NULL_SEPARATOR); - highlighter = new CustomPostingsHighlighter(analyzer, mapperHighlighterEntry.passageFormatter, breakIterator, - fieldValue, field.fieldOptions().noMatchSize() > 0); - numberOfFragments = fieldValues.size(); //we are highlighting the whole content, one snippet per value - } else { - //using paragraph separator we make sure that each field value holds a discrete passage for highlighting - String fieldValue = mergeFieldValues(fieldValues, HighlightUtils.PARAGRAPH_SEPARATOR); - highlighter = new CustomPostingsHighlighter(analyzer, mapperHighlighterEntry.passageFormatter, - fieldValue, field.fieldOptions().noMatchSize() > 0); - numberOfFragments = field.fieldOptions().numberOfFragments(); - } - - IndexSearcher searcher = new IndexSearcher(hitContext.reader()); - Snippet[] fieldSnippets = highlighter.highlightField(fieldMapper.fieldType().name(), highlighterContext.query, searcher, - hitContext.docId(), numberOfFragments); - for (Snippet fieldSnippet : fieldSnippets) { - if (Strings.hasText(fieldSnippet.getText())) { - snippets.add(fieldSnippet); - } - } - - } catch(IOException e) { - throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + highlighterContext.fieldName + "]", e); - } - - snippets = filterSnippets(snippets, field.fieldOptions().numberOfFragments()); - - if (field.fieldOptions().scoreOrdered()) { - //let's sort the snippets by score if needed - CollectionUtil.introSort(snippets, new Comparator() { - @Override - public int compare(Snippet o1, Snippet o2) { - return (int) Math.signum(o2.getScore() - o1.getScore()); - } - }); - } - - String[] fragments = new String[snippets.size()]; - for (int i = 0; i < fragments.length; i++) { - fragments[i] = snippets.get(i).getText(); - } - - if (fragments.length > 0) { - return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments)); - } - - return null; - } - - @Override - public boolean canHighlight(FieldMapper fieldMapper) { - return fieldMapper.fieldType().indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS; - } - - static String mergeFieldValues(List fieldValues, char valuesSeparator) { - //postings highlighter accepts all values in a single string, as offsets etc. 
need to match with content - //loaded from stored fields, we merge all values using a proper separator - String rawValue = Strings.collectionToDelimitedString(fieldValues, String.valueOf(valuesSeparator)); - return rawValue.substring(0, Math.min(rawValue.length(), Integer.MAX_VALUE - 1)); - } - - static List filterSnippets(List snippets, int numberOfFragments) { - - //We need to filter the snippets as due to no_match_size we could have - //either highlighted snippets or non highlighted ones and we don't want to mix those up - List filteredSnippets = new ArrayList<>(snippets.size()); - for (Snippet snippet : snippets) { - if (snippet.isHighlighted()) { - filteredSnippets.add(snippet); - } - } - - //if there's at least one highlighted snippet, we return all the highlighted ones - //otherwise we return the first non highlighted one if available - if (filteredSnippets.size() == 0) { - if (snippets.size() > 0) { - Snippet snippet = snippets.get(0); - //if we tried highlighting the whole content using whole break iterator (as number_of_fragments was 0) - //we need to return the first sentence of the content rather than the whole content - if (numberOfFragments == 0) { - BreakIterator bi = BreakIterator.getSentenceInstance(Locale.ROOT); - String text = snippet.getText(); - bi.setText(text); - int next = bi.next(); - if (next != BreakIterator.DONE) { - String newText = text.substring(0, next).trim(); - snippet = new Snippet(newText, snippet.getScore(), snippet.isHighlighted()); - } - } - filteredSnippets.add(snippet); - } - } - - return filteredSnippets; - } - - static class HighlighterEntry { - Map mappers = new HashMap<>(); - } - - static class MapperHighlighterEntry { - final CustomPassageFormatter passageFormatter; - - private MapperHighlighterEntry(CustomPassageFormatter passageFormatter) { - this.passageFormatter = passageFormatter; - } - } -} diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java index d3a94d0411b..684c7ddbddd 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java @@ -21,7 +21,7 @@ package org.elasticsearch.search.fetch.subphase.highlight; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.highlight.Encoder; -import org.apache.lucene.search.highlight.Snippet; +import org.apache.lucene.search.uhighlight.Snippet; import org.apache.lucene.search.uhighlight.BoundedBreakIteratorScanner; import org.apache.lucene.search.uhighlight.CustomPassageFormatter; import org.apache.lucene.search.uhighlight.CustomUnifiedHighlighter; @@ -44,8 +44,6 @@ import java.util.Map; import java.util.stream.Collectors; import static org.apache.lucene.search.uhighlight.CustomUnifiedHighlighter.MULTIVAL_SEP_CHAR; -import static org.elasticsearch.search.fetch.subphase.highlight.PostingsHighlighter.filterSnippets; -import static org.elasticsearch.search.fetch.subphase.highlight.PostingsHighlighter.mergeFieldValues; public class UnifiedHighlighter implements Highlighter { private static final String CACHE_KEY = "highlight-unified"; @@ -174,6 +172,49 @@ public class UnifiedHighlighter implements Highlighter { } } + private static List filterSnippets(List snippets, int numberOfFragments) { + + //We need to filter the snippets as due to no_match_size 
we could have + //either highlighted snippets or non highlighted ones and we don't want to mix those up + List filteredSnippets = new ArrayList<>(snippets.size()); + for (Snippet snippet : snippets) { + if (snippet.isHighlighted()) { + filteredSnippets.add(snippet); + } + } + + //if there's at least one highlighted snippet, we return all the highlighted ones + //otherwise we return the first non highlighted one if available + if (filteredSnippets.size() == 0) { + if (snippets.size() > 0) { + Snippet snippet = snippets.get(0); + //if we tried highlighting the whole content using whole break iterator (as number_of_fragments was 0) + //we need to return the first sentence of the content rather than the whole content + if (numberOfFragments == 0) { + BreakIterator bi = BreakIterator.getSentenceInstance(Locale.ROOT); + String text = snippet.getText(); + bi.setText(text); + int next = bi.next(); + if (next != BreakIterator.DONE) { + String newText = text.substring(0, next).trim(); + snippet = new Snippet(newText, snippet.getScore(), snippet.isHighlighted()); + } + } + filteredSnippets.add(snippet); + } + } + + return filteredSnippets; + } + + private static String mergeFieldValues(List fieldValues, char valuesSeparator) { + //postings highlighter accepts all values in a single string, as offsets etc. need to match with content + //loaded from stored fields, we merge all values using a proper separator + String rawValue = Strings.collectionToDelimitedString(fieldValues, String.valueOf(valuesSeparator)); + return rawValue.substring(0, Math.min(rawValue.length(), Integer.MAX_VALUE - 1)); + } + + private static class HighlighterEntry { Map mappers = new HashMap<>(); } diff --git a/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index d3b1951846c..9f2df13592b 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -33,6 +33,7 @@ import org.apache.lucene.search.Weight; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.search.dfs.AggregatedDfs; +import org.elasticsearch.search.profile.Timer; import org.elasticsearch.search.profile.query.ProfileWeight; import org.elasticsearch.search.profile.query.QueryProfileBreakdown; import org.elasticsearch.search.profile.query.QueryProfiler; @@ -116,12 +117,13 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { // each invocation so that it can build an internal representation of the query // tree QueryProfileBreakdown profile = profiler.getQueryBreakdown(query); - profile.startTime(QueryTimingType.CREATE_WEIGHT); + Timer timer = profile.getTimer(QueryTimingType.CREATE_WEIGHT); + timer.start(); final Weight weight; try { weight = super.createWeight(query, needsScores, boost); } finally { - profile.stopAndRecordTime(); + timer.stop(); profiler.pollLastElement(); } return new ProfileWeight(query, weight, profile); diff --git a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java index fadf979d911..5e5108c3225 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/FilteredSearchContext.java @@ -321,6 +321,16 @@ public abstract class 
FilteredSearchContext extends SearchContext { return in.trackScores(); } + @Override + public SearchContext trackTotalHits(boolean trackTotalHits) { + return in.trackTotalHits(trackTotalHits); + } + + @Override + public boolean trackTotalHits() { + return in.trackTotalHits(); + } + @Override public SearchContext searchAfter(FieldDoc searchAfter) { return in.searchAfter(searchAfter); diff --git a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java index ebb2157d981..d3c281ac23b 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -240,6 +240,13 @@ public abstract class SearchContext extends AbstractRefCounted implements Releas public abstract boolean trackScores(); + public abstract SearchContext trackTotalHits(boolean trackTotalHits); + + /** + * Indicates if the total hit count for the query should be tracked. Defaults to true + */ + public abstract boolean trackTotalHits(); + public abstract SearchContext searchAfter(FieldDoc searchAfter); public abstract FieldDoc searchAfter(); diff --git a/core/src/main/java/org/elasticsearch/search/profile/AbstractProfileBreakdown.java b/core/src/main/java/org/elasticsearch/search/profile/AbstractProfileBreakdown.java index 3698ea07da1..1ca23b44734 100644 --- a/core/src/main/java/org/elasticsearch/search/profile/AbstractProfileBreakdown.java +++ b/core/src/main/java/org/elasticsearch/search/profile/AbstractProfileBreakdown.java @@ -33,77 +33,29 @@ public abstract class AbstractProfileBreakdown> { /** * The accumulated timings for this query node */ - private final long[] timings; - - private final long[] counts; - - /** Scratch to store the current timing type. */ - private T currentTimingType; - - /** - * The temporary scratch space for holding start-times - */ - private long scratch; - - private T[] timingTypes; + private final Timer[] timings; + private final T[] timingTypes; /** Sole constructor. */ - public AbstractProfileBreakdown(T[] timingTypes) { - this.timingTypes = timingTypes; - timings = new long[timingTypes.length]; - counts = new long[timingTypes.length]; + public AbstractProfileBreakdown(Class clazz) { + this.timingTypes = clazz.getEnumConstants(); + timings = new Timer[timingTypes.length]; + for (int i = 0; i < timings.length; ++i) { + timings[i] = new Timer(); + } } - /** - * Begin timing a query for a specific Timing context - * @param timing The timing context being profiled - */ - public void startTime(T timing) { - assert currentTimingType == null; - assert scratch == 0; - counts[timing.ordinal()] += 1; - currentTimingType = timing; - scratch = System.nanoTime(); - } - - /** - * Halt the timing process and save the elapsed time. - * startTime() must be called for a particular context prior to calling - * stopAndRecordTime(), otherwise the elapsed time will be negative and - * nonsensical - * - * @return The elapsed time - */ - public long stopAndRecordTime() { - long time = Math.max(1, System.nanoTime() - scratch); - timings[currentTimingType.ordinal()] += time; - currentTimingType = null; - scratch = 0L; - return time; + public Timer getTimer(T timing) { + return timings[timing.ordinal()]; } /** Convert this record to a map from timingType to times. 
*/ public Map toTimingMap() { Map map = new HashMap<>(); for (T timingType : timingTypes) { - map.put(timingType.toString(), timings[timingType.ordinal()]); - map.put(timingType.toString() + "_count", counts[timingType.ordinal()]); + map.put(timingType.toString(), timings[timingType.ordinal()].getTiming()); + map.put(timingType.toString() + "_count", timings[timingType.ordinal()].getCount()); } return Collections.unmodifiableMap(map); } - - /** - * Add other's timings into this breakdown - * @param other Another Breakdown to merge with this one - */ - public void merge(AbstractProfileBreakdown other) { - assert(timings.length == other.timings.length); - for (int i = 0; i < timings.length; ++i) { - timings[i] += other.timings[i]; - } - assert(counts.length == other.counts.length); - for (int i = 0; i < counts.length; ++i) { - counts[i] += other.counts[i]; - } - } } diff --git a/core/src/main/java/org/elasticsearch/search/profile/ProfileResult.java b/core/src/main/java/org/elasticsearch/search/profile/ProfileResult.java index 3b1bfe3c27a..16a2f8c8ebf 100644 --- a/core/src/main/java/org/elasticsearch/search/profile/ProfileResult.java +++ b/core/src/main/java/org/elasticsearch/search/profile/ProfileResult.java @@ -37,7 +37,6 @@ import java.util.Objects; import java.util.concurrent.TimeUnit; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField; /** * This class is the internal representation of a profiled Query, corresponding @@ -50,12 +49,12 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknown */ public final class ProfileResult implements Writeable, ToXContentObject { - private static final ParseField TYPE = new ParseField("type"); - private static final ParseField DESCRIPTION = new ParseField("description"); - private static final ParseField NODE_TIME = new ParseField("time"); - private static final ParseField NODE_TIME_RAW = new ParseField("time_in_nanos"); - private static final ParseField CHILDREN = new ParseField("children"); - private static final ParseField BREAKDOWN = new ParseField("breakdown"); + static final ParseField TYPE = new ParseField("type"); + static final ParseField DESCRIPTION = new ParseField("description"); + static final ParseField NODE_TIME = new ParseField("time"); + static final ParseField NODE_TIME_RAW = new ParseField("time_in_nanos"); + static final ParseField CHILDREN = new ParseField("children"); + static final ParseField BREAKDOWN = new ParseField("breakdown"); private final String type; private final String description; @@ -188,7 +187,7 @@ public final class ProfileResult implements Writeable, ToXContentObject { // skip, total time is calculate by adding up 'timings' values in ProfileResult ctor parser.longValue(); } else { - throwUnknownField(currentFieldName, parser.getTokenLocation()); + parser.skipChildren(); } } else if (token == XContentParser.Token.START_OBJECT) { if (BREAKDOWN.match(currentFieldName)) { @@ -200,7 +199,7 @@ public final class ProfileResult implements Writeable, ToXContentObject { timings.put(name, value); } } else { - throwUnknownField(currentFieldName, parser.getTokenLocation()); + parser.skipChildren(); } } else if (token == XContentParser.Token.START_ARRAY) { if (CHILDREN.match(currentFieldName)) { @@ -208,7 +207,7 @@ public final class ProfileResult implements Writeable, ToXContentObject { children.add(ProfileResult.fromXContent(parser)); } } else { - 
throwUnknownField(currentFieldName, parser.getTokenLocation()); + parser.skipChildren(); } } } diff --git a/core/src/main/java/org/elasticsearch/search/profile/SearchProfileShardResults.java b/core/src/main/java/org/elasticsearch/search/profile/SearchProfileShardResults.java index eb3017bd1e7..b7fa39c42f3 100644 --- a/core/src/main/java/org/elasticsearch/search/profile/SearchProfileShardResults.java +++ b/core/src/main/java/org/elasticsearch/search/profile/SearchProfileShardResults.java @@ -39,9 +39,6 @@ import java.util.Map; import java.util.TreeSet; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName; -import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField; -import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken; /** * A container class to hold all the profile results across all shards. Internally @@ -111,12 +108,19 @@ public final class SearchProfileShardResults implements Writeable, ToXContent{ XContentParser.Token token = parser.currentToken(); ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation); Map searchProfileResults = new HashMap<>(); - ensureFieldName(parser, parser.nextToken(), SHARDS_FIELD); - ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser::getTokenLocation); - while((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - parseSearchProfileResultsEntry(parser, searchProfileResults); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.START_ARRAY) { + if (SHARDS_FIELD.equals(parser.currentName())) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + parseSearchProfileResultsEntry(parser, searchProfileResults); + } + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_OBJECT) { + parser.skipChildren(); + } } - ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation); return new SearchProfileShardResults(searchProfileResults); } @@ -135,7 +139,7 @@ public final class SearchProfileShardResults implements Writeable, ToXContent{ if (ID_FIELD.equals(currentFieldName)) { id = parser.text(); } else { - throwUnknownField(currentFieldName, parser.getTokenLocation()); + parser.skipChildren(); } } else if (token == XContentParser.Token.START_ARRAY) { if (SEARCHES_FIELD.equals(currentFieldName)) { @@ -145,10 +149,10 @@ public final class SearchProfileShardResults implements Writeable, ToXContent{ } else if (AggregationProfileShardResult.AGGREGATIONS.equals(currentFieldName)) { aggProfileShardResult = AggregationProfileShardResult.fromXContent(parser); } else { - throwUnknownField(currentFieldName, parser.getTokenLocation()); + parser.skipChildren(); } } else { - throwUnknownToken(token, parser.getTokenLocation()); + parser.skipChildren(); } } searchProfileResults.put(id, new ProfileShardResult(queryProfileResults, aggProfileShardResult)); diff --git a/core/src/main/java/org/elasticsearch/search/profile/Timer.java b/core/src/main/java/org/elasticsearch/search/profile/Timer.java new file mode 100644 index 00000000000..41763552a6d --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/profile/Timer.java @@ -0,0 +1,70 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.profile; + +/** Helps measure how much time is spent running some methods. + * The {@link #start()} and {@link #stop()} methods should typically be called + * in a try/finally clause with {@link #start()} being called right before the + * try block and {@link #stop()} being called at the beginning of the finally + * block: + *
+ *  timer.start();
+ *  try {
+ *    // code to time
+ *  } finally {
+ *    timer.stop();
+ *  }
+ *  
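// Editor's sketch, not part of this change: the start/stop pattern described in the
// javadoc above as a runnable example. It assumes the Timer class added in this diff
// is on the classpath; TimerUsageSketch is a hypothetical name.
import org.elasticsearch.search.profile.Timer;

public class TimerUsageSketch {
    public static void main(String[] args) throws Exception {
        Timer timer = new Timer();
        timer.start();
        try {
            Thread.sleep(5); // the work being measured
        } finally {
            timer.stop();
        }
        // getCount() and getTiming() may only be read once the timer is stopped,
        // otherwise they throw IllegalStateException.
        System.out.println("calls=" + timer.getCount() + ", nanos=" + timer.getTiming());
    }
}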
+ */ +// TODO: do not time every single call as discussed in https://github.com/elastic/elasticsearch/issues/24799 +public final class Timer { + + private long timing, count, start; + + /** Start the timer. */ + public void start() { + assert start == 0 : "#start call misses a matching #stop call"; + count++; + start = System.nanoTime(); + } + + /** Stop the timer. */ + public void stop() { + timing += Math.max(System.nanoTime() - start, 1L); + start = 0; + } + + /** Return the number of times that {@link #start()} has been called. */ + public long getCount() { + if (start != 0) { + throw new IllegalStateException("#start call misses a matching #stop call"); + } + return count; + } + + /** Return an approximation of the total time spend between consecutive calls of #start and #stop. */ + public long getTiming() { + if (start != 0) { + throw new IllegalStateException("#start call misses a matching #stop call"); + } + return timing; + } + +} diff --git a/core/src/main/java/org/elasticsearch/search/profile/aggregation/AggregationProfileBreakdown.java b/core/src/main/java/org/elasticsearch/search/profile/aggregation/AggregationProfileBreakdown.java index b4cb1efe5d3..84a525bf907 100644 --- a/core/src/main/java/org/elasticsearch/search/profile/aggregation/AggregationProfileBreakdown.java +++ b/core/src/main/java/org/elasticsearch/search/profile/aggregation/AggregationProfileBreakdown.java @@ -24,7 +24,7 @@ import org.elasticsearch.search.profile.AbstractProfileBreakdown; public class AggregationProfileBreakdown extends AbstractProfileBreakdown { public AggregationProfileBreakdown() { - super(AggregationTimingType.values()); + super(AggregationTimingType.class); } } diff --git a/core/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingAggregator.java b/core/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingAggregator.java index 63928334b14..d96fbe0d866 100644 --- a/core/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingAggregator.java @@ -24,6 +24,7 @@ import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.profile.Timer; import java.io.IOException; @@ -70,9 +71,14 @@ public class ProfilingAggregator extends Aggregator { @Override public InternalAggregation buildAggregation(long bucket) throws IOException { - profileBreakdown.startTime(AggregationTimingType.BUILD_AGGREGATION); - InternalAggregation result = delegate.buildAggregation(bucket); - profileBreakdown.stopAndRecordTime(); + Timer timer = profileBreakdown.getTimer(AggregationTimingType.BUILD_AGGREGATION); + timer.start(); + InternalAggregation result; + try { + result = delegate.buildAggregation(bucket); + } finally { + timer.stop(); + } return result; } @@ -89,9 +95,13 @@ public class ProfilingAggregator extends Aggregator { @Override public void preCollection() throws IOException { this.profileBreakdown = profiler.getQueryBreakdown(delegate); - profileBreakdown.startTime(AggregationTimingType.INITIALIZE); - delegate.preCollection(); - profileBreakdown.stopAndRecordTime(); + Timer timer = profileBreakdown.getTimer(AggregationTimingType.INITIALIZE); + timer.start(); + try { + delegate.preCollection(); + } finally { + timer.stop(); + } profiler.pollLastElement(); } diff --git 
a/core/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingLeafBucketCollector.java b/core/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingLeafBucketCollector.java index addf910bc56..4db67967dcb 100644 --- a/core/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingLeafBucketCollector.java +++ b/core/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingLeafBucketCollector.java @@ -21,24 +21,28 @@ package org.elasticsearch.search.profile.aggregation; import org.apache.lucene.search.Scorer; import org.elasticsearch.search.aggregations.LeafBucketCollector; +import org.elasticsearch.search.profile.Timer; import java.io.IOException; public class ProfilingLeafBucketCollector extends LeafBucketCollector { private LeafBucketCollector delegate; - private AggregationProfileBreakdown profileBreakdown; + private Timer collectTimer; public ProfilingLeafBucketCollector(LeafBucketCollector delegate, AggregationProfileBreakdown profileBreakdown) { this.delegate = delegate; - this.profileBreakdown = profileBreakdown; + this.collectTimer = profileBreakdown.getTimer(AggregationTimingType.COLLECT); } @Override public void collect(int doc, long bucket) throws IOException { - profileBreakdown.startTime(AggregationTimingType.COLLECT); - delegate.collect(doc, bucket); - profileBreakdown.stopAndRecordTime(); + collectTimer.start(); + try { + delegate.collect(doc, bucket); + } finally { + collectTimer.stop(); + } } @Override diff --git a/core/src/main/java/org/elasticsearch/search/profile/query/CollectorResult.java b/core/src/main/java/org/elasticsearch/search/profile/query/CollectorResult.java index 1fa56bde7fe..0d4ae0384ba 100644 --- a/core/src/main/java/org/elasticsearch/search/profile/query/CollectorResult.java +++ b/core/src/main/java/org/elasticsearch/search/profile/query/CollectorResult.java @@ -34,8 +34,6 @@ import java.util.List; import java.util.concurrent.TimeUnit; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField; -import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownToken; /** * Public interface and serialization container for profiled timings of the @@ -181,7 +179,7 @@ public class CollectorResult implements ToXContentObject, Writeable { } else if (TIME_NANOS.match(currentFieldName)) { time = parser.longValue(); } else { - throwUnknownField(currentFieldName, parser.getTokenLocation()); + parser.skipChildren(); } } else if (token == XContentParser.Token.START_ARRAY) { if (CHILDREN.match(currentFieldName)) { @@ -189,10 +187,10 @@ public class CollectorResult implements ToXContentObject, Writeable { children.add(CollectorResult.fromXContent(parser)); } } else { - throwUnknownField(currentFieldName, parser.getTokenLocation()); + parser.skipChildren(); } } else { - throwUnknownToken(token, parser.getTokenLocation()); + parser.skipChildren(); } } return new CollectorResult(name, reason, time, children); diff --git a/core/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java b/core/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java index fa76608eb89..e475bb6b7d9 100644 --- a/core/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java +++ b/core/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java @@ -23,6 +23,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Scorer; import 
org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; +import org.elasticsearch.search.profile.Timer; import java.io.IOException; import java.util.Collection; @@ -35,13 +36,16 @@ final class ProfileScorer extends Scorer { private final Scorer scorer; private ProfileWeight profileWeight; - private final QueryProfileBreakdown profile; + private final Timer scoreTimer, nextDocTimer, advanceTimer, matchTimer; ProfileScorer(ProfileWeight w, Scorer scorer, QueryProfileBreakdown profile) throws IOException { super(w); this.scorer = scorer; this.profileWeight = w; - this.profile = profile; + scoreTimer = profile.getTimer(QueryTimingType.SCORE); + nextDocTimer = profile.getTimer(QueryTimingType.NEXT_DOC); + advanceTimer = profile.getTimer(QueryTimingType.ADVANCE); + matchTimer = profile.getTimer(QueryTimingType.MATCH); } @Override @@ -51,11 +55,11 @@ final class ProfileScorer extends Scorer { @Override public float score() throws IOException { - profile.startTime(QueryTimingType.SCORE); + scoreTimer.start(); try { return scorer.score(); } finally { - profile.stopAndRecordTime(); + scoreTimer.stop(); } } @@ -81,21 +85,21 @@ final class ProfileScorer extends Scorer { @Override public int advance(int target) throws IOException { - profile.startTime(QueryTimingType.ADVANCE); + advanceTimer.start(); try { return in.advance(target); } finally { - profile.stopAndRecordTime(); + advanceTimer.stop(); } } @Override public int nextDoc() throws IOException { - profile.startTime(QueryTimingType.NEXT_DOC); + nextDocTimer.start(); try { return in.nextDoc(); } finally { - profile.stopAndRecordTime(); + nextDocTimer.stop(); } } @@ -122,21 +126,21 @@ final class ProfileScorer extends Scorer { @Override public int advance(int target) throws IOException { - profile.startTime(QueryTimingType.ADVANCE); + advanceTimer.start(); try { return inApproximation.advance(target); } finally { - profile.stopAndRecordTime(); + advanceTimer.stop(); } } @Override public int nextDoc() throws IOException { - profile.startTime(QueryTimingType.NEXT_DOC); + nextDocTimer.start(); try { return inApproximation.nextDoc(); } finally { - profile.stopAndRecordTime(); + nextDocTimer.stop(); } } @@ -153,11 +157,11 @@ final class ProfileScorer extends Scorer { return new TwoPhaseIterator(approximation) { @Override public boolean matches() throws IOException { - profile.startTime(QueryTimingType.MATCH); + matchTimer.start(); try { return in.matches(); } finally { - profile.stopAndRecordTime(); + matchTimer.stop(); } } diff --git a/core/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java b/core/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java index 716f68da86f..4361267bfe6 100644 --- a/core/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java +++ b/core/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java @@ -26,6 +26,7 @@ import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; +import org.elasticsearch.search.profile.Timer; import java.io.IOException; import java.util.Set; @@ -48,12 +49,13 @@ public final class ProfileWeight extends Weight { @Override public Scorer scorer(LeafReaderContext context) throws IOException { - profile.startTime(QueryTimingType.BUILD_SCORER); + Timer timer = profile.getTimer(QueryTimingType.BUILD_SCORER); + timer.start(); final Scorer subQueryScorer; try { subQueryScorer = subQueryWeight.scorer(context); } 
finally { - profile.stopAndRecordTime(); + timer.stop(); } if (subQueryScorer == null) { return null; diff --git a/core/src/main/java/org/elasticsearch/search/profile/query/QueryProfileBreakdown.java b/core/src/main/java/org/elasticsearch/search/profile/query/QueryProfileBreakdown.java index d0608eb01af..21ec507dbaf 100644 --- a/core/src/main/java/org/elasticsearch/search/profile/query/QueryProfileBreakdown.java +++ b/core/src/main/java/org/elasticsearch/search/profile/query/QueryProfileBreakdown.java @@ -30,6 +30,6 @@ public final class QueryProfileBreakdown extends AbstractProfileBreakdownmaxCountHits docs have been collected. + */ +public class EarlyTerminatingCollector extends FilterCollector { + private final int maxCountHits; + private int numCollected; + private boolean terminatedEarly = false; + + EarlyTerminatingCollector(final Collector delegate, int maxCountHits) { + super(delegate); + this.maxCountHits = maxCountHits; + } + + @Override + public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException { + if (numCollected >= maxCountHits) { + throw new CollectionTerminatedException(); + } + return new FilterLeafCollector(super.getLeafCollector(context)) { + @Override + public void collect(int doc) throws IOException { + super.collect(doc); + if (++numCollected >= maxCountHits) { + terminatedEarly = true; + throw new CollectionTerminatedException(); + } + }; + }; + } + + public boolean terminatedEarly() { + return terminatedEarly; + } +} diff --git a/core/src/main/java/org/elasticsearch/search/query/QueryCollectorContext.java b/core/src/main/java/org/elasticsearch/search/query/QueryCollectorContext.java new file mode 100644 index 00000000000..eaaf07ce305 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/query/QueryCollectorContext.java @@ -0,0 +1,275 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
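// Editor's sketch, not part of this change: exercising the EarlyTerminatingCollector
// added above against a tiny in-memory index. The collector's constructor is
// package-private, so the sketch assumes it lives in org.elasticsearch.search.query
// with the Elasticsearch core sources and Lucene on the classpath; the class name is hypothetical.
package org.elasticsearch.search.query;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.TotalHitCountCollector;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class EarlyTerminationSketch {
    public static void main(String[] args) throws Exception {
        try (Directory dir = new RAMDirectory()) {
            try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
                for (int i = 0; i < 10; i++) {
                    writer.addDocument(new Document()); // ten empty documents
                }
            }
            try (DirectoryReader reader = DirectoryReader.open(dir)) {
                IndexSearcher searcher = new IndexSearcher(reader);
                TotalHitCountCollector counter = new TotalHitCountCollector();
                // Stop collecting once three documents have been seen; the thrown
                // CollectionTerminatedException is swallowed by IndexSearcher per segment.
                EarlyTerminatingCollector early = new EarlyTerminatingCollector(counter, 3);
                searcher.search(new MatchAllDocsQuery(), early);
                System.out.println("terminatedEarly=" + early.terminatedEarly()
                        + ", counted=" + counter.getTotalHits());
            }
        }
    }
}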
+ */ + +package org.elasticsearch.search.query; + +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.search.Collector; +import org.apache.lucene.search.EarlyTerminatingSortingCollector; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MultiCollector; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.TimeLimitingCollector; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TotalHitCountCollector; +import org.apache.lucene.search.Weight; +import org.apache.lucene.util.Counter; +import org.elasticsearch.common.inject.Provider; +import org.elasticsearch.common.lucene.MinimumScoreCollector; +import org.elasticsearch.common.lucene.search.FilteredCollector; +import org.elasticsearch.search.profile.query.InternalProfileCollector; +import org.elasticsearch.tasks.TaskCancelledException; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.function.BooleanSupplier; +import java.util.function.IntSupplier; + +import static org.elasticsearch.search.profile.query.CollectorResult.REASON_SEARCH_CANCELLED; +import static org.elasticsearch.search.profile.query.CollectorResult.REASON_SEARCH_MIN_SCORE; +import static org.elasticsearch.search.profile.query.CollectorResult.REASON_SEARCH_MULTI; +import static org.elasticsearch.search.profile.query.CollectorResult.REASON_SEARCH_POST_FILTER; +import static org.elasticsearch.search.profile.query.CollectorResult.REASON_SEARCH_TERMINATE_AFTER_COUNT; +import static org.elasticsearch.search.profile.query.CollectorResult.REASON_SEARCH_TIMEOUT; +import static org.elasticsearch.search.query.TopDocsCollectorContext.shortcutTotalHitCount; + +abstract class QueryCollectorContext { + private String profilerName; + + QueryCollectorContext(String profilerName) { + this.profilerName = profilerName; + } + + /** + * Creates a collector that delegates documents to the provided in collector. + * @param in The delegate collector + */ + abstract Collector create(Collector in) throws IOException; + + /** + * Wraps this collector with a profiler + */ + protected InternalProfileCollector createWithProfiler(InternalProfileCollector in) throws IOException { + final Collector collector = create(in); + return new InternalProfileCollector(collector, profilerName, in != null ? Collections.singletonList(in) : Collections.emptyList()); + } + + /** + * A value of false indicates that the underlying collector can infer + * its results directly from the context (search is not needed). + * Default to true (search is needed). + */ + boolean shouldCollect() { + return true; + } + + /** + * Post-process result after search execution. 
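// Editor's sketch, not part of this change: what a minimal QueryCollectorContext
// implementation can look like, following the create()/postProcess() contract described
// above. Names are hypothetical; being package-private API, the sketch assumes it sits in
// org.elasticsearch.search.query next to the Elasticsearch core sources.
package org.elasticsearch.search.query;

import java.io.IOException;

import org.apache.lucene.search.Collector;
import org.apache.lucene.search.MultiCollector;
import org.apache.lucene.search.TotalHitCountCollector;

final class CountingCollectorContext extends QueryCollectorContext {
    private final TotalHitCountCollector counter = new TotalHitCountCollector();

    CountingCollectorContext() {
        super("count_sketch"); // hypothetical profiler name
    }

    @Override
    Collector create(Collector in) throws IOException {
        // Wrap the collector built by the previous contexts in the chain so that both
        // the delegate and the hit counter see every collected document. The first
        // context in the chain receives null.
        return in == null ? counter : MultiCollector.wrap(in, counter);
    }

    @Override
    void postProcess(QuerySearchResult result, boolean hasCollected) throws IOException {
        // Runs after the search, e.g. to copy counter.getTotalHits() into the shard
        // result, the way the terminate-after context sets terminatedEarly.
    }
}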
+ * + * @param result The query search result to populate + * @param hasCollected True if search was executed + */ + void postProcess(QuerySearchResult result, boolean hasCollected) throws IOException {} + + /** + * Creates the collector tree from the provided collectors + * @param collectors Ordered list of collector context + */ + static Collector createQueryCollector(List collectors) throws IOException { + Collector collector = null; + for (QueryCollectorContext ctx : collectors) { + collector = ctx.create(collector); + } + return collector; + } + + /** + * Creates the collector tree from the provided collectors and wraps each collector with a profiler + * @param collectors Ordered list of collector context + */ + static InternalProfileCollector createQueryCollectorWithProfiler(List collectors) throws IOException { + InternalProfileCollector collector = null; + for (QueryCollectorContext ctx : collectors) { + collector = ctx.createWithProfiler(collector); + } + return collector; + } + + /** + * Filters documents with a query score greater than minScore + * @param minScore The minimum score filter + */ + static QueryCollectorContext createMinScoreCollectorContext(float minScore) { + return new QueryCollectorContext(REASON_SEARCH_MIN_SCORE) { + @Override + Collector create(Collector in) { + return new MinimumScoreCollector(in, minScore); + } + }; + } + + /** + * Filters documents based on the provided query + */ + static QueryCollectorContext createFilteredCollectorContext(IndexSearcher searcher, Query query) { + return new QueryCollectorContext(REASON_SEARCH_POST_FILTER) { + @Override + Collector create(Collector in ) throws IOException { + final Weight filterWeight = searcher.createNormalizedWeight(query, false); + return new FilteredCollector(in, filterWeight); + } + }; + } + + /** + * Creates a multi collector from the provided subs + */ + static QueryCollectorContext createMultiCollectorContext(Collection subs) { + return new QueryCollectorContext(REASON_SEARCH_MULTI) { + @Override + Collector create(Collector in) throws IOException { + List subCollectors = new ArrayList<> (); + subCollectors.add(in); + subCollectors.addAll(subs); + return MultiCollector.wrap(subCollectors); + } + + @Override + protected InternalProfileCollector createWithProfiler(InternalProfileCollector in) throws IOException { + final List subCollectors = new ArrayList<> (); + subCollectors.add(in); + if (subs.stream().anyMatch((col) -> col instanceof InternalProfileCollector == false)) { + throw new IllegalArgumentException("non-profiling collector"); + } + for (Collector collector : subs) { + subCollectors.add((InternalProfileCollector) collector); + } + final Collector collector = MultiCollector.wrap(subCollectors); + return new InternalProfileCollector(collector, REASON_SEARCH_MULTI, subCollectors); + } + }; + } + + /** + * Creates a time limiting collector limiting the collection to timeOutMillisms. 
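// Editor's sketch, not part of this change: how Lucene's TimeLimitingCollector, used by
// the timeout context above, is wired to a clock. Elasticsearch supplies its own Counter
// (searchContext.timeEstimateCounter()); this standalone example uses Lucene's TimerThread
// instead. Names and the 500 ms budget are hypothetical.
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.TimeLimitingCollector;
import org.apache.lucene.search.TotalHitCountCollector;
import org.apache.lucene.util.Counter;

public class TimeoutCollectorSketch {
    public static void main(String[] args) {
        Counter clock = Counter.newCounter(true);
        // Something must advance the clock; Lucene ships a TimerThread for that.
        TimeLimitingCollector.TimerThread ticker = new TimeLimitingCollector.TimerThread(clock);
        ticker.setDaemon(true);
        ticker.start();

        Collector inner = new TotalHitCountCollector();
        // Throws TimeLimitingCollector.TimeExceededException during collection once roughly
        // 500 ms worth of clock ticks have elapsed.
        Collector limited = new TimeLimitingCollector(inner, clock, 500);
        System.out.println("wrapped collector: " + limited);

        ticker.stopTimer();
    }
}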
+ */ + static QueryCollectorContext createTimeoutCollectorContext(Counter timeEstimate, long timeoutMillis) { + return new QueryCollectorContext(REASON_SEARCH_TIMEOUT) { + @Override + Collector create(Collector in) throws IOException { + return new TimeLimitingCollector(in, timeEstimate, timeoutMillis); + } + + @Override + boolean shouldCollect() { + return false; + } + }; + } + + /** + * Creates a collector that throws {@link TaskCancelledException} if the search is cancelled + */ + static QueryCollectorContext createCancellableCollectorContext(Provider cancelled, boolean lowLevelCancellation) { + return new QueryCollectorContext(REASON_SEARCH_CANCELLED) { + @Override + Collector create(Collector in) throws IOException { + return new CancellableCollector(cancelled, lowLevelCancellation, in); + } + + @Override + boolean shouldCollect() { + return false; + } + }; + } + + /** + * Creates collector limiting the collection to the first numHits documents + */ + static QueryCollectorContext createEarlyTerminationCollectorContext(int numHits) { + return new QueryCollectorContext(REASON_SEARCH_TERMINATE_AFTER_COUNT) { + private EarlyTerminatingCollector collector; + + @Override + Collector create(Collector in) throws IOException { + assert collector == null; + this.collector = new EarlyTerminatingCollector(in, numHits); + return collector; + } + + @Override + void postProcess(QuerySearchResult result, boolean hasCollected) throws IOException { + if (hasCollected && collector.terminatedEarly()) { + result.terminatedEarly(true); + } + } + }; + } + + /** + * Creates a sorting termination collector limiting the collection to the first numHits per segment. + * The total hit count matching the query is also computed if trackTotalHits is true. + */ + static QueryCollectorContext createEarlySortingTerminationCollectorContext(IndexReader reader, + Query query, + Sort indexSort, + int numHits, + boolean trackTotalHits, + boolean shouldCollect) { + return new QueryCollectorContext(REASON_SEARCH_TERMINATE_AFTER_COUNT) { + private BooleanSupplier terminatedEarlySupplier; + private IntSupplier countSupplier = null; + + @Override + Collector create(Collector in) throws IOException { + EarlyTerminatingSortingCollector sortingCollector = new EarlyTerminatingSortingCollector(in, indexSort, numHits); + terminatedEarlySupplier = sortingCollector::terminatedEarly; + Collector collector = sortingCollector; + if (trackTotalHits) { + int count = shouldCollect ? 
-1 : shortcutTotalHitCount(reader, query); + if (count == -1) { + TotalHitCountCollector countCollector = new TotalHitCountCollector(); + collector = MultiCollector.wrap(sortingCollector, countCollector); + this.countSupplier = countCollector::getTotalHits; + } else { + this.countSupplier = () -> count; + } + } + return collector; + } + + @Override + void postProcess(QuerySearchResult result, boolean hasCollected) throws IOException { + if (terminatedEarlySupplier.getAsBoolean()) { + result.terminatedEarly(true); + } + if (countSupplier != null) { + final TopDocs topDocs = result.topDocs(); + topDocs.totalHits = countSupplier.getAsInt(); + result.topDocs(topDocs, result.sortValueFormats()); + } + } + }; + } +} diff --git a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 272c57fe980..82e572a180e 100644 --- a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -19,35 +19,25 @@ package org.elasticsearch.search.query; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.Term; +import org.apache.lucene.index.IndexReader; import org.apache.lucene.queries.MinDocQuery; +import org.apache.lucene.queries.SearchAfterSortedDocQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Collector; import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.EarlyTerminatingSortingCollector; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; -import org.apache.lucene.search.MultiCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; -import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TimeLimitingCollector; import org.apache.lucene.search.TopDocs; -import org.apache.lucene.search.TopDocsCollector; -import org.apache.lucene.search.TopFieldCollector; -import org.apache.lucene.search.TopScoreDocCollector; -import org.apache.lucene.search.TotalHitCountCollector; -import org.apache.lucene.search.Weight; -import org.apache.lucene.search.grouping.CollapsingTopDocsCollector; -import org.elasticsearch.action.search.SearchType; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.lucene.Lucene; -import org.elasticsearch.common.lucene.MinimumScoreCollector; -import org.elasticsearch.common.lucene.search.FilteredCollector; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.search.collapse.CollapseContext; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchPhase; import org.elasticsearch.search.SearchService; @@ -56,18 +46,21 @@ import org.elasticsearch.search.internal.ScrollContext; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.profile.ProfileShardResult; import org.elasticsearch.search.profile.SearchProfileShardResults; -import org.elasticsearch.search.profile.query.CollectorResult; import org.elasticsearch.search.profile.query.InternalProfileCollector; import org.elasticsearch.search.rescore.RescorePhase; -import org.elasticsearch.search.rescore.RescoreSearchContext; import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.suggest.SuggestPhase; -import java.util.AbstractList; 
-import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.concurrent.Callable; +import java.util.LinkedList; + +import static org.elasticsearch.search.query.QueryCollectorContext.createCancellableCollectorContext; +import static org.elasticsearch.search.query.QueryCollectorContext.createEarlySortingTerminationCollectorContext; +import static org.elasticsearch.search.query.QueryCollectorContext.createEarlyTerminationCollectorContext; +import static org.elasticsearch.search.query.QueryCollectorContext.createFilteredCollectorContext; +import static org.elasticsearch.search.query.QueryCollectorContext.createMinScoreCollectorContext; +import static org.elasticsearch.search.query.QueryCollectorContext.createMultiCollectorContext; +import static org.elasticsearch.search.query.QueryCollectorContext.createTimeoutCollectorContext; +import static org.elasticsearch.search.query.TopDocsCollectorContext.createTopDocsCollectorContext; /** * Query phase of a search request, used to run the query and get back from each shard information about the matching documents @@ -104,8 +97,9 @@ public class QueryPhase implements SearchPhase { // request, preProcess is called on the DFS phase phase, this is why we pre-process them // here to make sure it happens during the QUERY phase aggregationPhase.preProcess(searchContext); - - boolean rescore = execute(searchContext, searchContext.searcher()); + Sort indexSort = searchContext.mapperService().getIndexSettings().getIndexSortConfig() + .buildIndexSort(searchContext.mapperService()::fullName, searchContext.fieldData()::getForField); + boolean rescore = execute(searchContext, searchContext.searcher(), indexSort); if (rescore) { // only if we do a regular search rescorePhase.execute(searchContext); @@ -120,298 +114,164 @@ public class QueryPhase implements SearchPhase { } } - private static boolean returnsDocsInOrder(Query query, SortAndFormats sf) { + /** + * In a package-private method so that it can be tested without having to + * wire everything (mapperService, etc.) + * @return whether the rescoring phase should be executed + */ + static boolean execute(SearchContext searchContext, final IndexSearcher searcher, @Nullable Sort indexSort) throws QueryPhaseExecutionException { + QuerySearchResult queryResult = searchContext.queryResult(); + queryResult.searchTimedOut(false); + + try { + queryResult.from(searchContext.from()); + queryResult.size(searchContext.size()); + Query query = searchContext.query(); + assert query == searcher.rewrite(query); // already rewritten + + final ScrollContext scrollContext = searchContext.scrollContext(); + if (scrollContext != null) { + if (scrollContext.totalHits == -1) { + // first round + assert scrollContext.lastEmittedDoc == null; + // there is not much that we can optimize here since we want to collect all + // documents in order to get the total number of hits + + } else { + final ScoreDoc after = scrollContext.lastEmittedDoc; + if (returnsDocsInOrder(query, searchContext.sort())) { + // now this gets interesting: since we sort in index-order, we can directly + // skip to the desired doc + if (after != null) { + BooleanQuery bq = new BooleanQuery.Builder() + .add(query, BooleanClause.Occur.MUST) + .add(new MinDocQuery(after.doc + 1), BooleanClause.Occur.FILTER) + .build(); + query = bq; + } + // ... 
and stop collecting after ${size} matches + searchContext.terminateAfter(searchContext.size()); + searchContext.trackTotalHits(false); + } else if (canEarlyTerminate(indexSort, searchContext)) { + // now this gets interesting: since the index sort matches the search sort, we can directly + // skip to the desired doc + if (after != null) { + BooleanQuery bq = new BooleanQuery.Builder() + .add(query, BooleanClause.Occur.MUST) + .add(new SearchAfterSortedDocQuery(indexSort, (FieldDoc) after), BooleanClause.Occur.FILTER) + .build(); + query = bq; + } + searchContext.trackTotalHits(false); + } + } + } + + final LinkedList collectors = new LinkedList<>(); + if (searchContext.parsedPostFilter() != null) { + // add post filters before aggregations + // it will only be applied to top hits + collectors.add(createFilteredCollectorContext(searcher, searchContext.parsedPostFilter().query())); + } + if (searchContext.queryCollectors().isEmpty() == false) { + // plug in additional collectors, like aggregations + collectors.add(createMultiCollectorContext(searchContext.queryCollectors().values())); + } + if (searchContext.minimumScore() != null) { + // apply the minimum score after multi collector so we filter aggs as well + collectors.add(createMinScoreCollectorContext(searchContext.minimumScore())); + } + if (searchContext.terminateAfter() != SearchContext.DEFAULT_TERMINATE_AFTER) { + // apply terminate after after all filters collectors + collectors.add(createEarlyTerminationCollectorContext(searchContext.terminateAfter())); + } + + boolean timeoutSet = scrollContext == null && searchContext.timeout() != null && + searchContext.timeout().equals(SearchService.NO_TIMEOUT) == false; + if (timeoutSet) { + // TODO: change to use our own counter that uses the scheduler in ThreadPool + // throws TimeLimitingCollector.TimeExceededException when timeout has reached + collectors.add(createTimeoutCollectorContext(searchContext.timeEstimateCounter(), searchContext.timeout().millis())); + } + // add cancellable + collectors.add(createCancellableCollectorContext(searchContext.getTask()::isCancelled, searchContext.lowLevelCancellation())); + + final IndexReader reader = searcher.getIndexReader(); + final boolean doProfile = searchContext.getProfilers() != null; + // create the top docs collector last when the other collectors are known + final TopDocsCollectorContext topDocsFactory = createTopDocsCollectorContext(searchContext, reader, + collectors.stream().anyMatch(QueryCollectorContext::shouldCollect)); + final boolean shouldCollect = topDocsFactory.shouldCollect(); + + if (topDocsFactory.numHits() > 0 && + (scrollContext == null || scrollContext.totalHits != -1) && + canEarlyTerminate(indexSort, searchContext)) { + // top docs collection can be early terminated based on index sort + // add the collector context first so we don't early terminate aggs but only top docs + collectors.addFirst(createEarlySortingTerminationCollectorContext(reader, searchContext.query(), indexSort, + topDocsFactory.numHits(), searchContext.trackTotalHits(), shouldCollect)); + } + // add the top docs collector, the first collector context in the chain + collectors.addFirst(topDocsFactory); + + final Collector queryCollector; + if (doProfile) { + InternalProfileCollector profileCollector = QueryCollectorContext.createQueryCollectorWithProfiler(collectors); + searchContext.getProfilers().getCurrentQueryProfiler().setCollector(profileCollector); + queryCollector = profileCollector; + } else { + queryCollector = 
QueryCollectorContext.createQueryCollector(collectors); + } + + try { + if (shouldCollect) { + searcher.search(query, queryCollector); + } + } catch (TimeLimitingCollector.TimeExceededException e) { + assert timeoutSet : "TimeExceededException thrown even though timeout wasn't set"; + queryResult.searchTimedOut(true); + } finally { + searchContext.clearReleasables(SearchContext.Lifetime.COLLECTION); + } + + final QuerySearchResult result = searchContext.queryResult(); + for (QueryCollectorContext ctx : collectors) { + ctx.postProcess(result, shouldCollect); + } + if (searchContext.getProfilers() != null) { + ProfileShardResult shardResults = SearchProfileShardResults.buildShardResults(searchContext.getProfilers()); + result.profileResults(shardResults); + } + return topDocsFactory.shouldRescore(); + } catch (Exception e) { + throw new QueryPhaseExecutionException(searchContext, "Failed to execute main query", e); + } + } + + /** + * Returns true if the provided query returns docs in index order (internal doc ids). + * @param query The query to execute + * @param sf The query sort + */ + static boolean returnsDocsInOrder(Query query, SortAndFormats sf) { if (sf == null || Sort.RELEVANCE.equals(sf.sort)) { // sort by score // queries that return constant scores will return docs in index // order since Lucene tie-breaks on the doc id return query.getClass() == ConstantScoreQuery.class - || query.getClass() == MatchAllDocsQuery.class; + || query.getClass() == MatchAllDocsQuery.class; } else { return Sort.INDEXORDER.equals(sf.sort); } } /** - * In a package-private method so that it can be tested without having to - * wire everything (mapperService, etc.) - * @return whether the rescoring phase should be executed + * Returns true if the provided searchContext can early terminate based on indexSort + * @param indexSort The index sort specification + * @param context The search context for the request */ - static boolean execute(SearchContext searchContext, final IndexSearcher searcher) throws QueryPhaseExecutionException { - QuerySearchResult queryResult = searchContext.queryResult(); - queryResult.searchTimedOut(false); - - final boolean doProfile = searchContext.getProfilers() != null; - boolean rescore = false; - try { - queryResult.from(searchContext.from()); - queryResult.size(searchContext.size()); - - Query query = searchContext.query(); - - final int totalNumDocs = searcher.getIndexReader().numDocs(); - int numDocs = Math.min(searchContext.from() + searchContext.size(), totalNumDocs); - - Collector collector; - Callable topDocsCallable; - DocValueFormat[] sortValueFormats = new DocValueFormat[0]; - - assert query == searcher.rewrite(query); // already rewritten - - if (searchContext.size() == 0) { // no matter what the value of from is - final TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector(); - collector = totalHitCountCollector; - if (searchContext.getProfilers() != null) { - collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_COUNT, Collections.emptyList()); - } - topDocsCallable = () -> new TopDocs(totalHitCountCollector.getTotalHits(), Lucene.EMPTY_SCORE_DOCS, 0); - } else { - // Perhaps have a dedicated scroll phase? 
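// Editor's sketch, not part of this change: the scroll fast path used by the new execute()
// above. When hits come back in index order, the next scroll round only needs documents
// after the last emitted one, so the original query is AND-ed with a MinDocQuery filter.
// ScrollSkipSketch is a hypothetical name; MinDocQuery ships with the Elasticsearch core sources.
import org.apache.lucene.queries.MinDocQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;

public class ScrollSkipSketch {
    static Query skipPastLastEmitted(Query original, ScoreDoc lastEmittedDoc) {
        // Keep the original query for scoring, filter to doc ids beyond the last emitted one.
        return new BooleanQuery.Builder()
                .add(original, BooleanClause.Occur.MUST)
                .add(new MinDocQuery(lastEmittedDoc.doc + 1), BooleanClause.Occur.FILTER)
                .build();
    }

    public static void main(String[] args) {
        ScoreDoc lastEmitted = new ScoreDoc(41, 1.0f);
        System.out.println(skipPastLastEmitted(new MatchAllDocsQuery(), lastEmitted));
    }
}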
- final ScrollContext scrollContext = searchContext.scrollContext(); - assert (scrollContext != null) == (searchContext.request().scroll() != null); - final Collector topDocsCollector; - ScoreDoc after = null; - if (searchContext.request().scroll() != null) { - numDocs = Math.min(searchContext.size(), totalNumDocs); - after = scrollContext.lastEmittedDoc; - - if (returnsDocsInOrder(query, searchContext.sort())) { - if (scrollContext.totalHits == -1) { - // first round - assert scrollContext.lastEmittedDoc == null; - // there is not much that we can optimize here since we want to collect all - // documents in order to get the total number of hits - } else { - // now this gets interesting: since we sort in index-order, we can directly - // skip to the desired doc and stop collecting after ${size} matches - if (scrollContext.lastEmittedDoc != null) { - BooleanQuery bq = new BooleanQuery.Builder() - .add(query, BooleanClause.Occur.MUST) - .add(new MinDocQuery(after.doc + 1), BooleanClause.Occur.FILTER) - .build(); - query = bq; - } - searchContext.terminateAfter(numDocs); - } - } - } else { - after = searchContext.searchAfter(); - } - if (totalNumDocs == 0) { - // top collectors don't like a size of 0 - numDocs = 1; - } - assert numDocs > 0; - if (searchContext.collapse() == null) { - if (searchContext.sort() != null) { - SortAndFormats sf = searchContext.sort(); - topDocsCollector = TopFieldCollector.create(sf.sort, numDocs, - (FieldDoc) after, true, searchContext.trackScores(), searchContext.trackScores()); - sortValueFormats = sf.formats; - } else { - rescore = !searchContext.rescore().isEmpty(); - for (RescoreSearchContext rescoreContext : searchContext.rescore()) { - numDocs = Math.max(rescoreContext.window(), numDocs); - } - topDocsCollector = TopScoreDocCollector.create(numDocs, after); - } - } else { - Sort sort = Sort.RELEVANCE; - if (searchContext.sort() != null) { - sort = searchContext.sort().sort; - } - CollapseContext collapse = searchContext.collapse(); - topDocsCollector = collapse.createTopDocs(sort, numDocs, searchContext.trackScores()); - if (searchContext.sort() == null) { - sortValueFormats = new DocValueFormat[] {DocValueFormat.RAW}; - } else { - sortValueFormats = searchContext.sort().formats; - } - } - collector = topDocsCollector; - if (doProfile) { - collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_TOP_HITS, Collections.emptyList()); - } - topDocsCallable = () -> { - final TopDocs topDocs; - if (topDocsCollector instanceof TopDocsCollector) { - topDocs = ((TopDocsCollector) topDocsCollector).topDocs(); - } else if (topDocsCollector instanceof CollapsingTopDocsCollector) { - topDocs = ((CollapsingTopDocsCollector) topDocsCollector).getTopDocs(); - } else { - throw new IllegalStateException("Unknown top docs collector " + topDocsCollector.getClass().getName()); - } - if (scrollContext != null) { - if (scrollContext.totalHits == -1) { - // first round - scrollContext.totalHits = topDocs.totalHits; - scrollContext.maxScore = topDocs.getMaxScore(); - } else { - // subsequent round: the total number of hits and - // the maximum score were computed on the first round - topDocs.totalHits = scrollContext.totalHits; - topDocs.setMaxScore(scrollContext.maxScore); - } - if (searchContext.request().numberOfShards() == 1) { - // if we fetch the document in the same roundtrip, we already know the last emitted doc - if (topDocs.scoreDocs.length > 0) { - // set the last emitted doc - scrollContext.lastEmittedDoc = 
topDocs.scoreDocs[topDocs.scoreDocs.length - 1]; - } - } - } - return topDocs; - }; - } - - final boolean terminateAfterSet = searchContext.terminateAfter() != SearchContext.DEFAULT_TERMINATE_AFTER; - if (terminateAfterSet) { - final Collector child = collector; - // throws Lucene.EarlyTerminationException when given count is reached - collector = Lucene.wrapCountBasedEarlyTerminatingCollector(collector, searchContext.terminateAfter()); - if (doProfile) { - collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_TERMINATE_AFTER_COUNT, - Collections.singletonList((InternalProfileCollector) child)); - } - } - - if (searchContext.parsedPostFilter() != null) { - final Collector child = collector; - // this will only get applied to the actual search collector and not - // to any scoped collectors, also, it will only be applied to the main collector - // since that is where the filter should only work - final Weight filterWeight = searcher.createNormalizedWeight(searchContext.parsedPostFilter().query(), false); - collector = new FilteredCollector(collector, filterWeight); - if (doProfile) { - collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_POST_FILTER, - Collections.singletonList((InternalProfileCollector) child)); - } - } - - // plug in additional collectors, like aggregations - final List subCollectors = new ArrayList<>(); - subCollectors.add(collector); - subCollectors.addAll(searchContext.queryCollectors().values()); - collector = MultiCollector.wrap(subCollectors); - if (doProfile && collector instanceof InternalProfileCollector == false) { - // When there is a single collector to wrap, MultiCollector returns it - // directly, so only wrap in the case that there are several sub collectors - final List children = new AbstractList() { - @Override - public InternalProfileCollector get(int index) { - return (InternalProfileCollector) subCollectors.get(index); - } - @Override - public int size() { - return subCollectors.size(); - } - }; - collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_MULTI, children); - } - - // apply the minimum score after multi collector so we filter aggs as well - if (searchContext.minimumScore() != null) { - final Collector child = collector; - collector = new MinimumScoreCollector(collector, searchContext.minimumScore()); - if (doProfile) { - collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_MIN_SCORE, - Collections.singletonList((InternalProfileCollector) child)); - } - } - - if (collector.getClass() == TotalHitCountCollector.class) { - // Optimize counts in simple cases to return in constant time - // instead of using a collector - while (true) { - // remove wrappers that don't matter for counts - // this is necessary so that we don't only optimize match_all - // queries but also match_all queries that are nested in - // a constant_score query - if (query instanceof ConstantScoreQuery) { - query = ((ConstantScoreQuery) query).getQuery(); - } else { - break; - } - } - - if (query.getClass() == MatchAllDocsQuery.class) { - collector = null; - topDocsCallable = new Callable() { - @Override - public TopDocs call() throws Exception { - int count = searcher.getIndexReader().numDocs(); - return new TopDocs(count, Lucene.EMPTY_SCORE_DOCS, 0); - } - }; - } else if (query.getClass() == TermQuery.class && searcher.getIndexReader().hasDeletions() == false) { - final Term term = ((TermQuery) query).getTerm(); - collector = null; - topDocsCallable = new 
Callable() { - @Override - public TopDocs call() throws Exception { - int count = 0; - for (LeafReaderContext context : searcher.getIndexReader().leaves()) { - count += context.reader().docFreq(term); - } - return new TopDocs(count, Lucene.EMPTY_SCORE_DOCS, 0); - } - }; - } - } - - final boolean timeoutSet = searchContext.timeout() != null && !searchContext.timeout().equals(SearchService.NO_TIMEOUT); - if (timeoutSet && collector != null) { // collector might be null if no collection is actually needed - final Collector child = collector; - // TODO: change to use our own counter that uses the scheduler in ThreadPool - // throws TimeLimitingCollector.TimeExceededException when timeout has reached - collector = Lucene.wrapTimeLimitingCollector(collector, searchContext.timeEstimateCounter(), searchContext.timeout().millis()); - if (doProfile) { - collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_TIMEOUT, - Collections.singletonList((InternalProfileCollector) child)); - } - } - - if (collector != null) { - final Collector child = collector; - collector = new CancellableCollector(searchContext.getTask()::isCancelled, searchContext.lowLevelCancellation(), collector); - if (doProfile) { - collector = new InternalProfileCollector(collector, CollectorResult.REASON_SEARCH_CANCELLED, - Collections.singletonList((InternalProfileCollector) child)); - } - } - - try { - if (collector != null) { - if (doProfile) { - searchContext.getProfilers().getCurrentQueryProfiler().setCollector((InternalProfileCollector) collector); - } - searcher.search(query, collector); - } - } catch (TimeLimitingCollector.TimeExceededException e) { - assert timeoutSet : "TimeExceededException thrown even though timeout wasn't set"; - queryResult.searchTimedOut(true); - } catch (Lucene.EarlyTerminationException e) { - assert terminateAfterSet : "EarlyTerminationException thrown even though terminateAfter wasn't set"; - queryResult.terminatedEarly(true); - } finally { - searchContext.clearReleasables(SearchContext.Lifetime.COLLECTION); - } - if (terminateAfterSet && queryResult.terminatedEarly() == null) { - queryResult.terminatedEarly(false); - } - - queryResult.topDocs(topDocsCallable.call(), sortValueFormats); - - if (searchContext.getProfilers() != null) { - ProfileShardResult shardResults = SearchProfileShardResults - .buildShardResults(searchContext.getProfilers()); - searchContext.queryResult().profileResults(shardResults); - } - - return rescore; - - } catch (Exception e) { - throw new QueryPhaseExecutionException(searchContext, "Failed to execute main query", e); - } + static boolean canEarlyTerminate(Sort indexSort, SearchContext context) { + final Sort sort = context.sort() == null ? Sort.RELEVANCE : context.sort().sort; + return indexSort != null && EarlyTerminatingSortingCollector.canEarlyTerminate(sort, indexSort); } } diff --git a/core/src/main/java/org/elasticsearch/search/query/TopDocsCollectorContext.java b/core/src/main/java/org/elasticsearch/search/query/TopDocsCollectorContext.java new file mode 100644 index 00000000000..93c2aa17de6 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/query/TopDocsCollectorContext.java @@ -0,0 +1,306 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
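// Editor's sketch, not part of this change: the compatibility check behind canEarlyTerminate()
// above. Sorted-index early termination is only legal when the search sort is a prefix of the
// index sort, which is what Lucene's EarlyTerminatingSortingCollector.canEarlyTerminate verifies.
// Field names here are hypothetical.
import org.apache.lucene.search.EarlyTerminatingSortingCollector;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;

public class IndexSortCheckSketch {
    public static void main(String[] args) {
        Sort indexSort = new Sort(
                new SortField("timestamp", SortField.Type.LONG, true),
                new SortField("id", SortField.Type.STRING));
        Sort matchingSearchSort = new Sort(new SortField("timestamp", SortField.Type.LONG, true));
        Sort otherSearchSort = new Sort(new SortField("price", SortField.Type.DOUBLE));

        // Prefix of the index sort: early termination is possible.
        System.out.println(EarlyTerminatingSortingCollector.canEarlyTerminate(matchingSearchSort, indexSort));
        // Unrelated sort field: early termination is not possible.
        System.out.println(EarlyTerminatingSortingCollector.canEarlyTerminate(otherSearchSort, indexSort));
    }
}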
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.query; + +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.Collector; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TopDocsCollector; +import org.apache.lucene.search.TopFieldCollector; +import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TotalHitCountCollector; +import org.apache.lucene.search.grouping.CollapsingTopDocsCollector; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.collapse.CollapseContext; +import org.elasticsearch.search.internal.ScrollContext; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.rescore.RescoreSearchContext; +import org.elasticsearch.search.sort.SortAndFormats; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.search.profile.query.CollectorResult.REASON_SEARCH_COUNT; +import static org.elasticsearch.search.profile.query.CollectorResult.REASON_SEARCH_TOP_HITS; + +/** + * A {@link QueryCollectorContext} that creates top docs collector + */ +abstract class TopDocsCollectorContext extends QueryCollectorContext { + protected final int numHits; + + TopDocsCollectorContext(String profilerName, int numHits) { + super(profilerName); + this.numHits = numHits; + } + + /** + * Returns the number of top docs to retrieve + */ + final int numHits() { + return numHits; + } + + /** + * Returns true if the top docs should be re-scored after initial search + */ + boolean shouldRescore() { + return false; + } + + static class TotalHitCountCollectorContext extends TopDocsCollectorContext { + private final TotalHitCountCollector collector; + private final int hitCount; + + /** + * Ctr + * @param reader The index reader + * @param query The query to execute + * @param shouldCollect True if any previous collector context in the chain forces the search to be executed, false otherwise + */ + private TotalHitCountCollectorContext(IndexReader reader, Query query, boolean shouldCollect) throws IOException { + super(REASON_SEARCH_COUNT, 0); + this.collector = new TotalHitCountCollector(); + // implicit total hit counts are valid only when there is no filter collector in the chain + // so we check the shortcut only if shouldCollect is true + this.hitCount = shouldCollect ? 
-1 : shortcutTotalHitCount(reader, query); + } + + @Override + boolean shouldCollect() { + return hitCount == -1; + } + + Collector create(Collector in) { + assert in == null; + return collector; + } + + @Override + void postProcess(QuerySearchResult result, boolean hasCollected) { + final int totalHitCount; + if (hasCollected) { + totalHitCount = collector.getTotalHits(); + } else { + assert hitCount != -1; + totalHitCount = hitCount; + } + result.topDocs(new TopDocs(totalHitCount, Lucene.EMPTY_SCORE_DOCS, 0), null); + } + } + + static class CollapsingTopDocsCollectorContext extends TopDocsCollectorContext { + private final DocValueFormat[] sortFmt; + private final CollapsingTopDocsCollector topDocsCollector; + + /** + * Ctr + * @param collapseContext The collapsing context + * @param sortAndFormats The query sort + * @param numHits The number of collapsed top hits to retrieve. + * @param trackMaxScore True if max score should be tracked + */ + private CollapsingTopDocsCollectorContext(CollapseContext collapseContext, + @Nullable SortAndFormats sortAndFormats, + int numHits, + boolean trackMaxScore) { + super(REASON_SEARCH_TOP_HITS, numHits); + assert numHits > 0; + assert collapseContext != null; + Sort sort = sortAndFormats == null ? Sort.RELEVANCE : sortAndFormats.sort; + this.sortFmt = sortAndFormats == null ? new DocValueFormat[] { DocValueFormat.RAW } : sortAndFormats.formats; + this.topDocsCollector = collapseContext.createTopDocs(sort, numHits, trackMaxScore); + } + + @Override + Collector create(Collector in) throws IOException { + assert in == null; + return topDocsCollector; + } + + @Override + void postProcess(QuerySearchResult result, boolean hasCollected) throws IOException { + assert hasCollected; + result.topDocs(topDocsCollector.getTopDocs(), sortFmt); + } + } + + abstract static class SimpleTopDocsCollectorContext extends TopDocsCollectorContext { + private final @Nullable SortAndFormats sortAndFormats; + private final TopDocsCollector topDocsCollector; + + /** + * Ctr + * @param sortAndFormats The query sort + * @param numHits The number of top hits to retrieve + * @param searchAfter The doc this request should "search after" + * @param trackMaxScore True if max score should be tracked + */ + private SimpleTopDocsCollectorContext(@Nullable SortAndFormats sortAndFormats, + @Nullable ScoreDoc searchAfter, + int numHits, + boolean trackMaxScore) throws IOException { + super(REASON_SEARCH_TOP_HITS, numHits); + this.sortAndFormats = sortAndFormats; + if (sortAndFormats == null) { + this.topDocsCollector = TopScoreDocCollector.create(numHits, searchAfter); + } else { + this.topDocsCollector = TopFieldCollector.create(sortAndFormats.sort, numHits, + (FieldDoc) searchAfter, true, trackMaxScore, trackMaxScore); + } + } + + @Override + Collector create(Collector in) { + assert in == null; + return topDocsCollector; + } + + @Override + void postProcess(QuerySearchResult result, boolean hasCollected) throws IOException { + assert hasCollected; + final TopDocs topDocs = topDocsCollector.topDocs(); + result.topDocs(topDocs, sortAndFormats == null ? 
null : sortAndFormats.formats); + } + } + + static class ScrollingTopDocsCollectorContext extends SimpleTopDocsCollectorContext { + private final ScrollContext scrollContext; + private final int numberOfShards; + + private ScrollingTopDocsCollectorContext(ScrollContext scrollContext, + @Nullable SortAndFormats sortAndFormats, + int numHits, + boolean trackMaxScore, + int numberOfShards) throws IOException { + super(sortAndFormats, scrollContext.lastEmittedDoc, numHits, trackMaxScore); + this.scrollContext = Objects.requireNonNull(scrollContext); + this.numberOfShards = numberOfShards; + } + + @Override + void postProcess(QuerySearchResult result, boolean hasCollected) throws IOException { + super.postProcess(result, hasCollected); + final TopDocs topDocs = result.topDocs(); + if (scrollContext.totalHits == -1) { + // first round + scrollContext.totalHits = topDocs.totalHits; + scrollContext.maxScore = topDocs.getMaxScore(); + } else { + // subsequent round: the total number of hits and + // the maximum score were computed on the first round + topDocs.totalHits = scrollContext.totalHits; + topDocs.setMaxScore(scrollContext.maxScore); + } + if (numberOfShards == 1) { + // if we fetch the document in the same roundtrip, we already know the last emitted doc + if (topDocs.scoreDocs.length > 0) { + // set the last emitted doc + scrollContext.lastEmittedDoc = topDocs.scoreDocs[topDocs.scoreDocs.length - 1]; + } + } + result.topDocs(topDocs, result.sortValueFormats()); + } + } + + /** + * Returns query total hit count if the query is a {@link MatchAllDocsQuery} + * or a {@link TermQuery} and the reader has no deletions, + * -1 otherwise. + */ + static int shortcutTotalHitCount(IndexReader reader, Query query) throws IOException { + while (true) { + // remove wrappers that don't matter for counts + // this is necessary so that we don't only optimize match_all + // queries but also match_all queries that are nested in + // a constant_score query + if (query instanceof ConstantScoreQuery) { + query = ((ConstantScoreQuery) query).getQuery(); + } else if (query instanceof BoostQuery) { + query = ((BoostQuery) query).getQuery(); + } else { + break; + } + } + if (query.getClass() == MatchAllDocsQuery.class) { + return reader.numDocs(); + } else if (query.getClass() == TermQuery.class && reader.hasDeletions() == false) { + final Term term = ((TermQuery) query).getTerm(); + int count = 0; + for (LeafReaderContext context : reader.leaves()) { + count += context.reader().docFreq(term); + } + return count; + } else { + return -1; + } + } + + /** + * Creates a {@link TopDocsCollectorContext} from the provided searchContext + */ + static TopDocsCollectorContext createTopDocsCollectorContext(SearchContext searchContext, + IndexReader reader, + boolean shouldCollect) throws IOException { + final Query query = searchContext.query(); + // top collectors don't like a size of 0 + final int totalNumDocs = Math.max(1, reader.numDocs()); + if (searchContext.size() == 0) { + // no matter what the value of from is + return new TotalHitCountCollectorContext(reader, query, shouldCollect); + } else if (searchContext.scrollContext() != null) { + // no matter what the value of from is + int numDocs = Math.min(searchContext.size(), totalNumDocs); + return new ScrollingTopDocsCollectorContext(searchContext.scrollContext(), + searchContext.sort(), numDocs, searchContext.trackScores(), searchContext.numberOfShards()); + } else if (searchContext.collapse() != null) { + int numDocs = Math.min(searchContext.from() + 
searchContext.size(), totalNumDocs); + return new CollapsingTopDocsCollectorContext(searchContext.collapse(), + searchContext.sort(), numDocs, searchContext.trackScores()); + } else { + int numDocs = Math.min(searchContext.from() + searchContext.size(), totalNumDocs); + final boolean rescore = searchContext.rescore().isEmpty() == false; + for (RescoreSearchContext rescoreContext : searchContext.rescore()) { + numDocs = Math.max(numDocs, rescoreContext.window()); + } + return new SimpleTopDocsCollectorContext(searchContext.sort(), + searchContext.searchAfter(), + numDocs, + searchContext.trackScores()) { + @Override + boolean shouldRescore() { + return rescore; + } + }; + } + } +} diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index 4ac8b023d7f..749d2d13ba4 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -45,9 +45,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardException; -import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.MultiValueMode; @@ -242,7 +240,8 @@ public class ScriptSortBuilder extends SortBuilder { @Override public SortFieldAndFormat build(QueryShardContext context) throws IOException { - final SearchScript searchScript = context.getSearchScript(script, SearchScript.CONTEXT); + final SearchScript.Factory factory = context.getScriptService().compile(script, SearchScript.CONTEXT); + final SearchScript.LeafFactory searchScript = factory.newFactory(script.getParams(), context.lookup()); MultiValueMode valueMode = null; if (sortMode != null) { @@ -258,10 +257,10 @@ public class ScriptSortBuilder extends SortBuilder { switch (type) { case STRING: fieldComparatorSource = new BytesRefFieldComparatorSource(null, null, valueMode, nested) { - LeafSearchScript leafScript; + SearchScript leafScript; @Override protected SortedBinaryDocValues getValues(LeafReaderContext context) throws IOException { - leafScript = searchScript.getLeafSearchScript(context); + leafScript = searchScript.newInstance(context); final BinaryDocValues values = new AbstractBinaryDocValues() { final BytesRefBuilder spare = new BytesRefBuilder(); @Override @@ -285,10 +284,10 @@ public class ScriptSortBuilder extends SortBuilder { break; case NUMBER: fieldComparatorSource = new DoubleValuesComparatorSource(null, Double.MAX_VALUE, valueMode, nested) { - LeafSearchScript leafScript; + SearchScript leafScript; @Override protected SortedNumericDoubleValues getValues(LeafReaderContext context) throws IOException { - leafScript = searchScript.getLeafSearchScript(context); + leafScript = searchScript.newInstance(context); final NumericDoubleValues values = new NumericDoubleValues() { @Override public boolean advanceExact(int doc) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryContextMapping.java index 38e31ec92a4..92091d8b697 100644 --- 
a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryContextMapping.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/CategoryContextMapping.java @@ -25,6 +25,7 @@ import org.elasticsearch.Version; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.query.QueryParseContext; @@ -136,10 +137,14 @@ public class CategoryContextMapping extends ContextMapping IndexableField[] fields = document.getFields(fieldName); values = new HashSet<>(fields.length); for (IndexableField field : fields) { - values.add(field.stringValue()); + if (field.fieldType() instanceof KeywordFieldMapper.KeywordFieldType) { + values.add(field.binaryValue().utf8ToString()); + } else { + values.add(field.stringValue()); + } } } - return (values == null) ? Collections.emptySet() : values; + return (values == null) ? Collections.emptySet() : values; } @Override diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java index 9e9e2c66f79..2ab60b0d8ec 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java @@ -37,6 +37,7 @@ import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.TemplateScript; import org.elasticsearch.search.suggest.Suggest.Suggestion; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry; import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option; @@ -103,20 +104,19 @@ public final class PhraseSuggester extends Suggester { response.addTerm(resultEntry); final BytesRefBuilder byteSpare = new BytesRefBuilder(); - final Function, ExecutableScript> collateScript = suggestion.getCollateQueryScript(); - final boolean collatePrune = (collateScript != null) && suggestion.collatePrune(); + final TemplateScript.Factory scriptFactory = suggestion.getCollateQueryScript(); + final boolean collatePrune = (scriptFactory != null) && suggestion.collatePrune(); for (int i = 0; i < checkerResult.corrections.length; i++) { Correction correction = checkerResult.corrections[i]; spare.copyUTF8Bytes(correction.join(SEPARATOR, byteSpare, null, null)); boolean collateMatch = true; - if (collateScript != null) { + if (scriptFactory != null) { // Checks if the template query collateScript yields any documents // from the index for a correction, collateMatch is updated final Map vars = suggestion.getCollateScriptParams(); vars.put(SUGGESTION_TEMPLATE_VAR_NAME, spare.toString()); QueryShardContext shardContext = suggestion.getShardContext(); - final ExecutableScript executable = collateScript.apply(vars); - final String querySource = (String) executable.run(); + final String querySource = scriptFactory.newInstance(vars).execute(); try (XContentParser parser = XContentFactory.xContent(querySource).createParser(shardContext.getXContentRegistry(), querySource)) { QueryBuilder innerQueryBuilder = 
shardContext.newParseContext(parser).parseInnerQueryBuilder(); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java index 1194488e506..d59d1e1c8d3 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilder.java @@ -41,6 +41,7 @@ import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptType; +import org.elasticsearch.script.TemplateScript; import org.elasticsearch.search.suggest.SuggestionBuilder; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator; @@ -630,9 +631,8 @@ public class PhraseSuggestionBuilder extends SuggestionBuilder, ExecutableScript> compiledScript = context.getLazyExecutableScript(this.collateQuery, - ExecutableScript.CONTEXT); - suggestionContext.setCollateQueryScript(compiledScript); + TemplateScript.Factory scriptFactory = context.getScriptService().compile(this.collateQuery, TemplateScript.CONTEXT); + suggestionContext.setCollateQueryScript(scriptFactory); if (this.collateParams != null) { suggestionContext.setCollateScriptParams(this.collateParams); } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java index 31dcc22bec0..22ac3c26a18 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java @@ -24,6 +24,7 @@ import org.apache.lucene.index.Terms; import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.TemplateScript; import org.elasticsearch.search.suggest.DirectSpellcheckerSettings; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; @@ -53,7 +54,7 @@ class PhraseSuggestionContext extends SuggestionContext { private boolean requireUnigram = DEFAULT_REQUIRE_UNIGRAM; private BytesRef preTag; private BytesRef postTag; - private Function, ExecutableScript> collateQueryScript; + private TemplateScript.Factory scriptFactory; private boolean prune = DEFAULT_COLLATE_PRUNE; private List generators = new ArrayList<>(); private Map collateScriptParams = new HashMap<>(1); @@ -193,12 +194,12 @@ class PhraseSuggestionContext extends SuggestionContext { return postTag; } - Function, ExecutableScript> getCollateQueryScript() { - return collateQueryScript; + TemplateScript.Factory getCollateQueryScript() { + return scriptFactory; } - void setCollateQueryScript( Function, ExecutableScript> collateQueryScript) { - this.collateQueryScript = collateQueryScript; + void setCollateQueryScript(TemplateScript.Factory scriptFactory) { + this.scriptFactory = scriptFactory; } Map getCollateScriptParams() { diff --git a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 5f930ad4859..03c7eb3a4af 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ 
b/core/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -745,9 +745,6 @@ public class SnapshotsService extends AbstractLifecycleComponent implements Clus logger.warn("failed to clean up abandoned snapshot {} in INIT state", snapshot.snapshot()); } }, updatedSnapshot.getRepositoryStateId(), false); - } else if (snapshot.state() == State.SUCCESS && newMaster) { - // Finalize the snapshot - endSnapshot(snapshot); } } if (changed) { diff --git a/core/src/main/java/org/elasticsearch/transport/CompressibleBytesOutputStream.java b/core/src/main/java/org/elasticsearch/transport/CompressibleBytesOutputStream.java new file mode 100644 index 00000000000..8e5d5b027be --- /dev/null +++ b/core/src/main/java/org/elasticsearch/transport/CompressibleBytesOutputStream.java @@ -0,0 +1,109 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.transport; + +import org.apache.lucene.util.IOUtils; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compress.CompressorFactory; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.io.stream.BytesStream; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lease.Releasable; + +import java.io.IOException; +import java.util.zip.DeflaterOutputStream; + +/** + * This class exists to provide a stream with optional compression. This is useful as using compression + * requires that the underlying {@link DeflaterOutputStream} be closed to write EOS bytes. However, the + * {@link BytesStream} should not be closed yet, as we have not used the bytes. This class handles these + * intricacies. + * + * {@link CompressibleBytesOutputStream#materializeBytes()} should be called when all the bytes have been + * written to this stream. If compression is enabled, the proper EOS bytes will be written at that point. + * The underlying {@link BytesReference} will be returned. + * + * {@link CompressibleBytesOutputStream#close()} should be called when the bytes are no longer needed and + * can be safely released. + */ +final class CompressibleBytesOutputStream extends StreamOutput implements Releasable { + + private final StreamOutput stream; + private final BytesStream bytesStreamOutput; + private final boolean shouldCompress; + + CompressibleBytesOutputStream(BytesStream bytesStreamOutput, boolean shouldCompress) throws IOException { + this.bytesStreamOutput = bytesStreamOutput; + this.shouldCompress = shouldCompress; + if (shouldCompress) { + this.stream = CompressorFactory.COMPRESSOR.streamOutput(Streams.flushOnCloseStream(bytesStreamOutput)); + } else { + this.stream = bytesStreamOutput; + } + } + + /** + * This method ensures that compression is complete and returns the underlying bytes. 
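As a self-contained illustration of the pattern this class implements, here is a rough JDK-only sketch; the class name is invented, and the real implementation works on releasable byte streams rather than a ByteArrayOutputStream:

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;
    import java.util.zip.DeflaterOutputStream;

    // Writes go through an optional deflater; "materializing" finishes the
    // compressed stream so the EOS marker is present, while the buffer holding
    // the bytes remains readable afterwards.
    final class CompressibleBufferSketch {
        private final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        private final OutputStream out;
        private final boolean compress;

        CompressibleBufferSketch(boolean compress) {
            this.compress = compress;
            this.out = compress ? new DeflaterOutputStream(buffer) : buffer;
        }

        void write(byte[] bytes) throws IOException {
            out.write(bytes);
        }

        byte[] materialize() throws IOException {
            if (compress) {
                // finishes compression and writes the end-of-stream marker;
                // ByteArrayOutputStream.close() is a no-op so the buffer stays
                // usable (the real class shields its stream with flushOnCloseStream)
                out.close();
            }
            return buffer.toByteArray();
        }
    }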
+ * + * @return bytes underlying the stream + * @throws IOException if an exception occurs when writing or flushing + */ + BytesReference materializeBytes() throws IOException { + // If we are using compression the stream needs to be closed to ensure that EOS marker bytes are written. + // The actual ReleasableBytesStreamOutput will not be closed yet as it is wrapped in flushOnCloseStream when + // passed to the deflater stream. + if (shouldCompress) { + stream.close(); + } + + return bytesStreamOutput.bytes(); + } + + @Override + public void writeByte(byte b) throws IOException { + stream.write(b); + } + + @Override + public void writeBytes(byte[] b, int offset, int length) throws IOException { + stream.writeBytes(b, offset, length); + } + + @Override + public void flush() throws IOException { + stream.flush(); + } + + @Override + public void close() { + if (stream == bytesStreamOutput) { + assert shouldCompress == false : "If the streams are the same we should not be compressing"; + IOUtils.closeWhileHandlingException(stream); + } else { + assert shouldCompress : "If the streams are different we should be compressing"; + IOUtils.closeWhileHandlingException(stream, bytesStreamOutput); + } + } + + @Override + public void reset() throws IOException { + stream.reset(); + } +} diff --git a/core/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java b/core/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java index 2b16c26931b..59da9bee7ef 100644 --- a/core/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java +++ b/core/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java @@ -61,8 +61,10 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.Semaphore; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Collectors; +import java.util.stream.Stream; /** * Represents a connection to a single remote cluster. In contrast to a local cluster a remote cluster is not joined such that the @@ -206,6 +208,53 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo }); } + /** + * Collects all nodes on the connected cluster and returns / passes a nodeID to {@link DiscoveryNode} lookup function + * that returns null if the node ID is not found. 
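Further down in this patch, RemoteClusterService#collectNodes combines one such per-cluster lookup into a single (clusterAlias, nodeId) resolver. A stripped-down sketch of that composition, using String in place of DiscoveryNode so the example stands alone (the class name is invented):

    import java.util.Map;
    import java.util.function.BiFunction;
    import java.util.function.Function;

    // Each cluster contributes a nodeId -> node lookup; the combined function
    // resolves (clusterAlias, nodeId) and yields null when either is unknown.
    final class ClusterNodeLookupSketch {
        static BiFunction<String, String, String> combine(Map<String, Function<String, String>> perCluster) {
            Function<String, String> nullFunction = nodeId -> null;
            return (clusterAlias, nodeId) ->
                perCluster.getOrDefault(clusterAlias, nullFunction).apply(nodeId);
        }
    }

In the sketch an unknown cluster alias simply resolves to null; the real service additionally rejects unknown cluster names up front with an IllegalArgumentException.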
+ */ + void collectNodes(ActionListener> listener) { + Runnable runnable = () -> { + final ClusterStateRequest request = new ClusterStateRequest(); + request.clear(); + request.nodes(true); + request.local(true); // run this on the node that gets the request it's as good as any other + final DiscoveryNode node = nodeSupplier.get(); + transportService.sendRequest(node, ClusterStateAction.NAME, request, TransportRequestOptions.EMPTY, + new TransportResponseHandler() { + @Override + public ClusterStateResponse newInstance() { + return new ClusterStateResponse(); + } + + @Override + public void handleResponse(ClusterStateResponse response) { + DiscoveryNodes nodes = response.getState().nodes(); + listener.onResponse(nodes::get); + } + + @Override + public void handleException(TransportException exp) { + listener.onFailure(exp); + } + + @Override + public String executor() { + return ThreadPool.Names.SAME; + } + }); + }; + if (connectedNodes.isEmpty()) { + // just in case if we are not connected for some reason we try to connect and if we fail we have to notify the listener + // this will cause some back pressure on the search end and eventually will cause rejections but that's fine + // we can't proceed with a search on a cluster level. + // in the future we might want to just skip the remote nodes in such a case but that can already be implemented on the + // caller end since they provide the listener. + ensureConnected(ActionListener.wrap((x) -> runnable.run(), listener::onFailure)); + } else { + runnable.run(); + } + } + /** * Returns a connection to the remote cluster. This connection might be a proxy connection that redirects internally to the * given node. diff --git a/core/src/main/java/org/elasticsearch/transport/RemoteClusterService.java b/core/src/main/java/org/elasticsearch/transport/RemoteClusterService.java index 621713c8ab1..c4b64e860b2 100644 --- a/core/src/main/java/org/elasticsearch/transport/RemoteClusterService.java +++ b/core/src/main/java/org/elasticsearch/transport/RemoteClusterService.java @@ -18,17 +18,12 @@ */ package org.elasticsearch.transport; -import org.apache.logging.log4j.util.Supplier; import org.apache.lucene.util.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.OriginalIndices; -import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsGroup; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequest; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchShardIterator; import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.PlainActionFuture; @@ -40,15 +35,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.CountDown; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.search.internal.AliasFilter; import java.io.Closeable; import java.io.IOException; import java.net.InetSocketAddress; -import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -59,6 +49,7 @@ import 
java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiFunction; import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Collectors; @@ -346,4 +337,44 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl } } } + + /** + * Collects all nodes of the given clusters and returns / passes a (clusterAlias, nodeId) to {@link DiscoveryNode} + * function on success. + */ + public void collectNodes(Set clusters, ActionListener> listener) { + Map remoteClusters = this.remoteClusters; + for (String cluster : clusters) { + if (remoteClusters.containsKey(cluster) == false) { + listener.onFailure(new IllegalArgumentException("no such remote cluster: [" + cluster + "]")); + return; + } + } + + final Map> clusterMap = new HashMap<>(); + CountDown countDown = new CountDown(clusters.size()); + Function nullFunction = s -> null; + for (final String cluster : clusters) { + RemoteClusterConnection connection = remoteClusters.get(cluster); + connection.collectNodes(new ActionListener>() { + @Override + public void onResponse(Function nodeLookup) { + synchronized (clusterMap) { + clusterMap.put(cluster, nodeLookup); + } + if (countDown.countDown()) { + listener.onResponse((clusterAlias, nodeId) + -> clusterMap.getOrDefault(clusterAlias, nullFunction).apply(nodeId)); + } + } + + @Override + public void onFailure(Exception e) { + if (countDown.fastForward()) { // we need to check if it's true since we could have multiple failures + listener.onFailure(e); + } + } + }); + } + } } diff --git a/core/src/main/java/org/elasticsearch/transport/TcpTransport.java b/core/src/main/java/org/elasticsearch/transport/TcpTransport.java index de083ead10e..22aced389f8 100644 --- a/core/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/core/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -41,7 +41,6 @@ import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.compress.Compressor; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.compress.NotCompressedException; -import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -67,6 +66,7 @@ import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.KeyedLock; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.rest.RestStatus; @@ -86,7 +86,6 @@ import java.util.Collections; import java.util.EnumMap; import java.util.HashMap; import java.util.HashSet; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; @@ -113,6 +112,7 @@ import static org.elasticsearch.common.settings.Setting.timeSetting; import static org.elasticsearch.common.transport.NetworkExceptionHelper.isCloseConnectionException; import static org.elasticsearch.common.transport.NetworkExceptionHelper.isConnectException; import static 
org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentMap; +import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentSet; public abstract class TcpTransport extends AbstractLifecycleComponent implements Transport { @@ -160,6 +160,8 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i protected volatile TransportServiceAdapter transportServiceAdapter; // node id to actual channel protected final ConcurrentMap connectedNodes = newConcurrentMap(); + private final Set openConnections = newConcurrentSet(); + protected final Map> serverChannels = newConcurrentMap(); protected final ConcurrentMap profileBoundAddresses = newConcurrentMap(); @@ -358,9 +360,8 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i private final DiscoveryNode node; private final AtomicBoolean closed = new AtomicBoolean(false); private final Version version; - private final Consumer onClose; - public NodeChannels(DiscoveryNode node, Channel[] channels, ConnectionProfile connectionProfile, Consumer onClose) { + public NodeChannels(DiscoveryNode node, Channel[] channels, ConnectionProfile connectionProfile) { this.node = node; this.channels = channels; assert channels.length == connectionProfile.getNumConnections() : "expected channels size to be == " @@ -371,7 +372,6 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i typeMapping.put(type, handle); } version = node.getVersion(); - this.onClose = onClose; } NodeChannels(NodeChannels channels, Version handshakeVersion) { @@ -379,7 +379,6 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i this.channels = channels.channels; this.typeMapping = channels.typeMapping; this.version = handshakeVersion; - this.onClose = channels.onClose; } @Override @@ -414,7 +413,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i try { closeChannels(Arrays.stream(channels).filter(Objects::nonNull).collect(Collectors.toList())); } finally { - onClose.accept(this); + onNodeChannelsClosed(this); } } } @@ -456,27 +455,28 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i if (nodeChannels != null) { return; } + boolean success = false; try { - try { - nodeChannels = openConnection(node, connectionProfile); - connectionValidator.accept(nodeChannels, connectionProfile); - } catch (Exception e) { - logger.trace( - (Supplier) () -> new ParameterizedMessage( - "failed to connect to [{}], cleaning dangling connections", node), e); - IOUtils.closeWhileHandlingException(nodeChannels); - throw e; - } + nodeChannels = openConnection(node, connectionProfile); + connectionValidator.accept(nodeChannels, connectionProfile); // we acquire a connection lock, so no way there is an existing connection connectedNodes.put(node, nodeChannels); if (logger.isDebugEnabled()) { logger.debug("connected to node [{}]", node); } transportServiceAdapter.onNodeConnected(node); + success = true; } catch (ConnectTransportException e) { throw e; } catch (Exception e) { throw new ConnectTransportException(node, "general node connection failure", e); + } finally { + if (success == false) { // close the connection if there is a failure + logger.trace( + (Supplier) () -> new ParameterizedMessage( + "failed to connect to [{}], cleaning dangling connections", node)); + IOUtils.closeWhileHandlingException(nodeChannels); + } } } } finally { @@ -519,7 +519,20 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i try { ensureOpen(); 
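The rewritten connect path above drops the nested try/catch in favour of a single success flag that is only set once the connection has been opened, validated and registered; anything short of that is cleaned up in the finally block. The shape of that pattern as a generic, hypothetical helper (not part of the transport code):

    import java.io.Closeable;
    import java.io.IOException;
    import java.util.concurrent.Callable;
    import java.util.function.Consumer;

    // Open a resource, hand it to a registration step, and close it again if
    // anything fails before ownership has been transferred to the caller.
    final class OpenAndRegisterSketch {
        static <T extends Closeable> T open(Callable<T> opener, Consumer<T> register) throws Exception {
            T resource = null;
            boolean success = false;
            try {
                resource = opener.call();
                register.accept(resource);   // e.g. validation; may throw
                success = true;
                return resource;
            } finally {
                if (success == false && resource != null) {
                    try {
                        resource.close();
                    } catch (IOException suppressed) {
                        // keep the original failure as the one that propagates
                    }
                }
            }
        }
    }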
try { - nodeChannels = connectToChannels(node, connectionProfile); + AtomicBoolean runOnce = new AtomicBoolean(false); + Consumer onClose = c -> { + assert isOpen(c) == false : "channel is still open when onClose is called"; + try { + onChannelClosed(c); + } finally { + // we only need to disconnect from the nodes once since all other channels + // will also try to run this we protect it from running multiple times. + if (runOnce.compareAndSet(false, true)) { + disconnectFromNodeChannel(c, "channel closed"); + } + } + }; + nodeChannels = connectToChannels(node, connectionProfile, onClose); final Channel channel = nodeChannels.getChannels().get(0); // one channel is guaranteed by the connection profile final TimeValue connectTimeout = connectionProfile.getConnectTimeout() == null ? defaultConnectionProfile.getConnectTimeout() : @@ -527,8 +540,9 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i final TimeValue handshakeTimeout = connectionProfile.getHandshakeTimeout() == null ? connectTimeout : connectionProfile.getHandshakeTimeout(); final Version version = executeHandshake(node, channel, handshakeTimeout); - transportServiceAdapter.onConnectionOpened(nodeChannels); nodeChannels = new NodeChannels(nodeChannels, version); // clone the channels - we now have the correct version + transportServiceAdapter.onConnectionOpened(nodeChannels); + openConnections.add(nodeChannels); success = true; return nodeChannels; } catch (ConnectTransportException e) { @@ -581,24 +595,37 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i /** * Disconnects from a node if a channel is found as part of that nodes channels. */ - protected final void disconnectFromNodeChannel(final Channel channel, final Exception failure) { + protected final void disconnectFromNodeChannel(final Channel channel, final String reason) { threadPool.generic().execute(() -> { try { - try { + if (isOpen(channel)) { closeChannels(Collections.singletonList(channel)); - } finally { - for (DiscoveryNode node : connectedNodes.keySet()) { - if (disconnectFromNode(node, channel, ExceptionsHelper.detailedMessage(failure))) { - // if we managed to find this channel and disconnect from it, then break, no need to check on - // the rest of the nodes - break; - } - } } } catch (IOException e) { logger.warn("failed to close channel", e); } finally { - onChannelClosed(channel); + outer: + { + for (Map.Entry entry : connectedNodes.entrySet()) { + if (disconnectFromNode(entry.getKey(), channel, reason)) { + // if we managed to find this channel and disconnect from it, then break, no need to check on + // the rest of the nodes + // #onNodeChannelsClosed will remove it.. + assert openConnections.contains(entry.getValue()) == false : "NodeChannel#close should remove the connetion"; + // we can only be connected and published to a single node with one connection. 
So if disconnectFromNode + // returns true we can safely break out from here since we cleaned up everything needed + break outer; + } + } + // now if we haven't found the right connection in the connected nodes we have to go through the open connections + // it might be that the channel belongs to a connection that is not published + for (NodeChannels channels : openConnections) { + if (channels.hasChannel(channel)) { + IOUtils.closeWhileHandlingException(channels); + break; + } + } + } } }); } @@ -902,12 +929,11 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i "Error closing serverChannel for profile [{}]", entry.getKey()), e); } } - - for (Iterator it = connectedNodes.values().iterator(); it.hasNext();) { - NodeChannels nodeChannels = it.next(); - it.remove(); - IOUtils.closeWhileHandlingException(nodeChannels); - } + // we are holding a write lock so nobody modifies the connectedNodes / openConnections map - it's safe to first close + // all instances and then clear them maps + IOUtils.closeWhileHandlingException(Iterables.concat(connectedNodes.values(), openConnections)); + openConnections.clear(); + connectedNodes.clear(); stopInternal(); } finally { globalLock.writeLock().unlock(); @@ -924,11 +950,13 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i } protected void onException(Channel channel, Exception e) { + String reason = ExceptionsHelper.detailedMessage(e); if (!lifecycle.started()) { // just close and ignore - we are already stopped and just need to make sure we release all resources - disconnectFromNodeChannel(channel, e); + disconnectFromNodeChannel(channel, reason); return; } + if (isCloseConnectionException(e)) { logger.trace( (Supplier) () -> new ParameterizedMessage( @@ -936,15 +964,15 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i channel), e); // close the channel, which will cause a node to be disconnected if relevant - disconnectFromNodeChannel(channel, e); + disconnectFromNodeChannel(channel, reason); } else if (isConnectException(e)) { logger.trace((Supplier) () -> new ParameterizedMessage("connect exception caught on transport layer [{}]", channel), e); // close the channel as safe measure, which will cause a node to be disconnected if relevant - disconnectFromNodeChannel(channel, e); + disconnectFromNodeChannel(channel, reason); } else if (e instanceof BindException) { logger.trace((Supplier) () -> new ParameterizedMessage("bind exception caught on transport layer [{}]", channel), e); // close the channel as safe measure, which will cause a node to be disconnected if relevant - disconnectFromNodeChannel(channel, e); + disconnectFromNodeChannel(channel, reason); } else if (e instanceof CancelledKeyException) { logger.trace( (Supplier) () -> new ParameterizedMessage( @@ -952,7 +980,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i channel), e); // close the channel as safe measure, which will cause a node to be disconnected if relevant - disconnectFromNodeChannel(channel, e); + disconnectFromNodeChannel(channel, reason); } else if (e instanceof TcpTransport.HttpOnTransportException) { // in case we are able to return data, serialize the exception content and sent it back to the client if (isOpen(channel)) { @@ -982,7 +1010,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i logger.warn( (Supplier) () -> new ParameterizedMessage("exception caught on transport layer [{}], closing connection", channel), e); // close the channel, which will 
cause a node to be disconnected if relevant - disconnectFromNodeChannel(channel, e); + disconnectFromNodeChannel(channel, reason); } } @@ -1013,7 +1041,8 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i */ protected abstract void sendMessage(Channel channel, BytesReference reference, ActionListener listener); - protected abstract NodeChannels connectToChannels(DiscoveryNode node, ConnectionProfile connectionProfile) throws IOException; + protected abstract NodeChannels connectToChannels(DiscoveryNode node, ConnectionProfile connectionProfile, + Consumer onChannelClose) throws IOException; /** * Called to tear down internal resources @@ -1031,16 +1060,18 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i if (compress) { options = TransportRequestOptions.builder(options).withCompress(true).build(); } + + // only compress if asked and the request is not bytes. Otherwise only + // the header part is compressed, and the "body" can't be extracted as compressed + final boolean compressMessage = options.compress() && canCompress(request); + status = TransportStatus.setRequest(status); ReleasableBytesStreamOutput bStream = new ReleasableBytesStreamOutput(bigArrays); + final CompressibleBytesOutputStream stream = new CompressibleBytesOutputStream(bStream, compressMessage); boolean addedReleaseListener = false; - StreamOutput stream = Streams.flushOnCloseStream(bStream); try { - // only compress if asked, and, the request is not bytes, since then only - // the header part is compressed, and the "body" can't be extracted as compressed - if (options.compress() && canCompress(request)) { + if (compressMessage) { status = TransportStatus.setCompress(status); - stream = CompressorFactory.COMPRESSOR.streamOutput(stream); } // we pick the smallest of the 2, to support both backward and forward compatibility @@ -1051,18 +1082,16 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i stream.setVersion(version); threadPool.getThreadContext().writeTo(stream); stream.writeString(action); - BytesReference message = buildMessage(requestId, status, node.getVersion(), request, stream, bStream); + BytesReference message = buildMessage(requestId, status, node.getVersion(), request, stream); final TransportRequestOptions finalOptions = options; - final StreamOutput finalStream = stream; // this might be called in a different thread - SendListener onRequestSent = new SendListener( - () -> IOUtils.closeWhileHandlingException(finalStream, bStream), + SendListener onRequestSent = new SendListener(stream, () -> transportServiceAdapter.onRequestSent(node, requestId, action, request, finalOptions)); internalSendMessage(targetChannel, message, onRequestSent); addedReleaseListener = true; } finally { if (!addedReleaseListener) { - IOUtils.close(stream, bStream); + IOUtils.close(stream); } } } @@ -1125,27 +1154,25 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i } status = TransportStatus.setResponse(status); // TODO share some code with sendRequest ReleasableBytesStreamOutput bStream = new ReleasableBytesStreamOutput(bigArrays); + CompressibleBytesOutputStream stream = new CompressibleBytesOutputStream(bStream, options.compress()); boolean addedReleaseListener = false; - StreamOutput stream = Streams.flushOnCloseStream(bStream); try { if (options.compress()) { status = TransportStatus.setCompress(status); - stream = CompressorFactory.COMPRESSOR.streamOutput(stream); } threadPool.getThreadContext().writeTo(stream); 
stream.setVersion(nodeVersion); - BytesReference reference = buildMessage(requestId, status, nodeVersion, response, stream, bStream); + BytesReference reference = buildMessage(requestId, status, nodeVersion, response, stream); final TransportResponseOptions finalOptions = options; - final StreamOutput finalStream = stream; // this might be called in a different thread - SendListener listener = new SendListener(() -> IOUtils.closeWhileHandlingException(finalStream, bStream), + SendListener listener = new SendListener(stream, () -> transportServiceAdapter.onResponseSent(requestId, action, response, finalOptions)); internalSendMessage(channel, reference, listener); addedReleaseListener = true; } finally { if (!addedReleaseListener) { - IOUtils.close(stream, bStream); + IOUtils.close(stream); } } } @@ -1173,8 +1200,8 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i /** * Serializes the given message into a bytes representation */ - private BytesReference buildMessage(long requestId, byte status, Version nodeVersion, TransportMessage message, StreamOutput stream, - ReleasableBytesStreamOutput writtenBytes) throws IOException { + private BytesReference buildMessage(long requestId, byte status, Version nodeVersion, TransportMessage message, + CompressibleBytesOutputStream stream) throws IOException { final BytesReference zeroCopyBuffer; if (message instanceof BytesTransportRequest) { // what a shitty optimization - we should use a direct send method instead BytesTransportRequest bRequest = (BytesTransportRequest) message; @@ -1185,12 +1212,12 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i message.writeTo(stream); zeroCopyBuffer = BytesArray.EMPTY; } - // we have to close the stream here - flush is not enough since we might be compressing the content - // and if we do that the close method will write some marker bytes (EOS marker) and otherwise - // we barf on the decompressing end when we read past EOF on purpose in the #validateRequest method. - // this might be a problem in deflate after all but it's important to close it for now. - stream.close(); - final BytesReference messageBody = writtenBytes.bytes(); + // we have to call materializeBytes() here before accessing the bytes. A CompressibleBytesOutputStream + // might be implementing compression. And materializeBytes() ensures that some marker bytes (EOS marker) + // are written. Otherwise we barf on the decompressing end when we read past EOF on purpose in the + // #validateRequest method. this might be a problem in deflate after all but it's important to write + // the marker bytes. + final BytesReference messageBody = stream.materializeBytes(); final BytesReference header = buildHeader(requestId, status, stream.getVersion(), messageBody.length() + zeroCopyBuffer.length()); return new CompositeBytesReference(header, messageBody, zeroCopyBuffer); } @@ -1610,7 +1637,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i /** * Called once the channel is closed for instance due to a disconnect or a closed socket etc. 
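The buildMessage comment above, about reading past EOF on the decompressing side, is easy to reproduce with plain JDK streams. A minimal sketch of the failure mode when the deflate end-of-stream marker has not been written (names invented, not the transport code):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.EOFException;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.util.zip.DeflaterOutputStream;
    import java.util.zip.InflaterInputStream;

    final class MissingEosMarkerSketch {
        static void demo() throws IOException {
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            DeflaterOutputStream deflater = new DeflaterOutputStream(buffer);
            deflater.write("payload".getBytes(StandardCharsets.UTF_8));
            deflater.flush();   // flushing alone never writes the end-of-stream marker

            try (InflaterInputStream in =
                     new InflaterInputStream(new ByteArrayInputStream(buffer.toByteArray()))) {
                byte[] scratch = new byte[64];
                while (in.read(scratch) != -1) {
                    // never completes: the compressed stream was not finished
                }
            } catch (EOFException expected) {
                // the "barf on the decompressing end" case the comment refers to
            }

            deflater.close();   // writes the end marker; the bytes are now complete
            try (InflaterInputStream in =
                     new InflaterInputStream(new ByteArrayInputStream(buffer.toByteArray()))) {
                byte[] scratch = new byte[64];
                while (in.read(scratch) != -1) {
                    // drains cleanly once the end marker is present
                }
            }
        }
    }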
*/ - protected final void onChannelClosed(Channel channel) { + private void onChannelClosed(Channel channel) { final Optional first = pendingHandshakes.entrySet().stream() .filter((entry) -> entry.getValue().channel == channel).map((e) -> e.getKey()).findFirst(); if (first.isPresent()) { @@ -1658,4 +1685,20 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i Releasables.close(optionalReleasable, transportAdaptorCallback::run); } } + + private void onNodeChannelsClosed(NodeChannels channels) { + // don't assert here since the channel / connection might not have been registered yet + final boolean remove = openConnections.remove(channels); + if (remove) { + transportServiceAdapter.onConnectionClosed(channels); + } + } + + final int getNumOpenConnections() { + return openConnections.size(); + } + + final int getNumConnectedNodes() { + return connectedNodes.size(); + } } diff --git a/core/src/main/java/org/elasticsearch/transport/TransportActionProxy.java b/core/src/main/java/org/elasticsearch/transport/TransportActionProxy.java index 502ab51e0fa..5259fca507e 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportActionProxy.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportActionProxy.java @@ -158,4 +158,18 @@ public final class TransportActionProxy { } return request; } + + /** + * Returns true iff the given action is a proxy action + */ + public static boolean isProxyAction(String action) { + return action.startsWith(PROXY_ACTION_PREFIX); + } + + /** + * Returns true iff the given request is a proxy request + */ + public static boolean isProxyRequest(TransportRequest request) { + return request instanceof ProxyRequest; + } } diff --git a/core/src/main/java/org/elasticsearch/usage/UsageService.java b/core/src/main/java/org/elasticsearch/usage/UsageService.java new file mode 100644 index 00000000000..9f742cca9d9 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/usage/UsageService.java @@ -0,0 +1,105 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.usage; + +import org.elasticsearch.action.admin.cluster.node.usage.NodeUsage; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.BaseRestHandler; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * A service to monitor usage of Elasticsearch features. + */ +public class UsageService extends AbstractComponent { + + private final List handlers; + private final long sinceTime; + + @Inject + public UsageService(Settings settings) { + super(settings); + this.handlers = new ArrayList<>(); + this.sinceTime = System.currentTimeMillis(); + } + + /** + * Add a REST handler to this service. + * + * @param handler + * the {@link BaseRestHandler} to add to the usage service. + */ + public void addRestHandler(BaseRestHandler handler) { + handlers.add(handler); + } + + /** + * Get the current usage statistics for this node. + * + * @param localNode + * the {@link DiscoveryNode} for this node + * @param restActions + * whether to include rest action usage in the returned + * statistics + * @return the {@link NodeUsage} representing the usage statistics for this + * node + */ + public NodeUsage getUsageStats(DiscoveryNode localNode, boolean restActions) { + Map restUsageMap; + if (restActions) { + restUsageMap = new HashMap<>(); + handlers.forEach(handler -> { + long usageCount = handler.getUsageCount(); + if (usageCount > 0) { + restUsageMap.put(handler.getName(), usageCount); + } + }); + } else { + restUsageMap = null; + } + return new NodeUsage(localNode, System.currentTimeMillis(), sinceTime, restUsageMap); + } + +} diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy index 9ac1909063d..97e14b6994a 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy @@ -64,7 +64,7 @@ grant codeBase "${codebase.mocksocket-1.1.jar}" { }; -grant codeBase "${codebase.rest-6.0.0-alpha2-SNAPSHOT.jar}" { +grant codeBase "${codebase.rest-6.0.0-alpha3-SNAPSHOT.jar}" { // rest makes socket connections for rest tests permission java.net.SocketPermission "*", "connect"; }; diff --git a/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java b/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java index b0477169567..8f96936e43b 100644 --- a/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java +++ b/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java @@ -54,58 +54,6 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; public class BlendedTermQueryTests extends ESTestCase { - public void testBooleanQuery() throws IOException { - Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); - String[] firstNames = new String[]{ - "simon", "paul" - }; - String[] surNames = new String[]{ - "willnauer", "simon" - }; - for (int i = 0; i < surNames.length; i++) { - Document d = new Document(); - d.add(new TextField("id", Integer.toString(i), 
Field.Store.YES)); - d.add(new TextField("firstname", firstNames[i], Field.Store.NO)); - d.add(new TextField("surname", surNames[i], Field.Store.NO)); - w.addDocument(d); - } - int iters = scaledRandomIntBetween(25, 100); - for (int j = 0; j < iters; j++) { - Document d = new Document(); - d.add(new TextField("id", Integer.toString(firstNames.length + j), Field.Store.YES)); - d.add(new TextField("firstname", rarely() ? "some_other_name" : - "simon the sorcerer", Field.Store.NO)); // make sure length-norm is the tie-breaker - d.add(new TextField("surname", "bogus", Field.Store.NO)); - w.addDocument(d); - } - w.commit(); - DirectoryReader reader = DirectoryReader.open(w); - IndexSearcher searcher = setSimilarity(newSearcher(reader)); - - { - Term[] terms = new Term[]{new Term("firstname", "simon"), new Term("surname", "simon")}; - BlendedTermQuery query = BlendedTermQuery.booleanBlendedQuery(terms); - TopDocs search = searcher.search(query, 3); - ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(3, scoreDocs.length); - assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue()); - } - { - BooleanQuery.Builder query = new BooleanQuery.Builder(); - query.add(new TermQuery(new Term("firstname", "simon")), BooleanClause.Occur.SHOULD); - query.add(new TermQuery(new Term("surname", "simon")), BooleanClause.Occur.SHOULD); - TopDocs search = searcher.search(query.build(), 1); - ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(1), reader.document(scoreDocs[0].doc).getField("id").stringValue()); - - } - reader.close(); - w.close(); - dir.close(); - - } - public void testDismaxQuery() throws IOException { Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); @@ -183,12 +131,11 @@ public class BlendedTermQueryTests extends ESTestCase { } String term = TestUtil.randomRealisticUnicodeString(random(), 1, 10); Term[] terms = toTerms(fields, term); - boolean useBoolean = random().nextBoolean(); float tieBreaker = random().nextFloat(); - BlendedTermQuery query = useBoolean ? BlendedTermQuery.booleanBlendedQuery(terms) : BlendedTermQuery.dismaxBlendedQuery(terms, tieBreaker); + BlendedTermQuery query = BlendedTermQuery.dismaxBlendedQuery(terms, tieBreaker); QueryUtils.check(query); terms = toTerms(fields, term); - BlendedTermQuery query2 = useBoolean ? BlendedTermQuery.booleanBlendedQuery(terms) : BlendedTermQuery.dismaxBlendedQuery(terms, tieBreaker); + BlendedTermQuery query2 = BlendedTermQuery.dismaxBlendedQuery(terms, tieBreaker); assertEquals(query, query2); } } @@ -217,8 +164,7 @@ public class BlendedTermQueryTests extends ESTestCase { terms.add(new Term(TestUtil.randomRealisticUnicodeString(random(), 1, 10), TestUtil.randomRealisticUnicodeString(random(), 1, 10))); } - BlendedTermQuery blendedTermQuery = random().nextBoolean() ? 
BlendedTermQuery.dismaxBlendedQuery(terms.toArray(new Term[0]), random().nextFloat()) : - BlendedTermQuery.booleanBlendedQuery(terms.toArray(new Term[0])); + BlendedTermQuery blendedTermQuery = BlendedTermQuery.dismaxBlendedQuery(terms.toArray(new Term[0]), random().nextFloat()); Set extracted = new HashSet<>(); IndexSearcher searcher = new IndexSearcher(new MultiReader()); searcher.createNormalizedWeight(blendedTermQuery, false).extractTerms(extracted); diff --git a/core/src/test/java/org/apache/lucene/queries/SearchAfterSortedDocQueryTests.java b/core/src/test/java/org/apache/lucene/queries/SearchAfterSortedDocQueryTests.java new file mode 100644 index 00000000000..25c5ff6fa21 --- /dev/null +++ b/core/src/test/java/org/apache/lucene/queries/SearchAfterSortedDocQueryTests.java @@ -0,0 +1,130 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.lucene.queries; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.SortedDocValuesField; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.index.ReaderUtil; +import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.QueryUtils; +import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.SortField; +import org.apache.lucene.search.SortedNumericSortField; +import org.apache.lucene.search.SortedSetSortField; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.FixedBitSet; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; + +public class SearchAfterSortedDocQueryTests extends ESTestCase { + + public void testBasics() { + Sort sort1 = new Sort( + new SortedNumericSortField("field1", SortField.Type.INT), + new SortedSetSortField("field2", false) + ); + Sort sort2 = new Sort( + new SortedNumericSortField("field1", SortField.Type.INT), + new SortedSetSortField("field3", false) + ); + FieldDoc fieldDoc1 = new FieldDoc(0, 0f, new Object[]{5, new BytesRef("foo")}); + FieldDoc fieldDoc2 = new FieldDoc(0, 0f, new Object[]{5, new BytesRef("foo")}); + + SearchAfterSortedDocQuery query1 = new SearchAfterSortedDocQuery(sort1, fieldDoc1); + SearchAfterSortedDocQuery query2 = new SearchAfterSortedDocQuery(sort1, fieldDoc2); + SearchAfterSortedDocQuery query3 = new SearchAfterSortedDocQuery(sort2, 
fieldDoc2); + QueryUtils.check(query1); + QueryUtils.checkEqual(query1, query2); + QueryUtils.checkUnequal(query1, query3); + } + + public void testInvalidSort() { + Sort sort = new Sort(new SortedNumericSortField("field1", SortField.Type.INT)); + FieldDoc fieldDoc = new FieldDoc(0, 0f, new Object[] {4, 5}); + IllegalArgumentException ex = + expectThrows(IllegalArgumentException.class, () -> new SearchAfterSortedDocQuery(sort, fieldDoc)); + assertThat(ex.getMessage(), equalTo("after doc has 2 value(s) but sort has 1.")); + } + + public void testRandom() throws IOException { + final int numDocs = randomIntBetween(100, 200); + final Document doc = new Document(); + final Directory dir = newDirectory(); + Sort sort = new Sort( + new SortedNumericSortField("number1", SortField.Type.INT, randomBoolean()), + new SortField("string", SortField.Type.STRING, randomBoolean()) + ); + final IndexWriterConfig config = new IndexWriterConfig(); + config.setIndexSort(sort); + final RandomIndexWriter w = new RandomIndexWriter(random(), dir, config); + for (int i = 0; i < numDocs; ++i) { + int rand = randomIntBetween(0, 10); + doc.add(new SortedNumericDocValuesField("number", rand)); + doc.add(new SortedDocValuesField("string", new BytesRef(randomAlphaOfLength(randomIntBetween(5, 50))))); + w.addDocument(doc); + doc.clear(); + if (rarely()) { + w.commit(); + } + } + final IndexReader reader = w.getReader(); + final IndexSearcher searcher = newSearcher(reader); + + int step = randomIntBetween(1, 10); + FixedBitSet bitSet = new FixedBitSet(numDocs); + TopDocs topDocs = null; + for (int i = 0; i < numDocs;) { + if (topDocs != null) { + FieldDoc after = (FieldDoc) topDocs.scoreDocs[topDocs.scoreDocs.length - 1]; + topDocs = searcher.search(new SearchAfterSortedDocQuery(sort, after), step, sort); + } else { + topDocs = searcher.search(new MatchAllDocsQuery(), step, sort); + } + i += step; + for (ScoreDoc topDoc : topDocs.scoreDocs) { + int readerIndex = ReaderUtil.subIndex(topDoc.doc, reader.leaves()); + final LeafReaderContext leafReaderContext = reader.leaves().get(readerIndex); + int docRebase = topDoc.doc - leafReaderContext.docBase; + if (leafReaderContext.reader().hasDeletions()) { + assertTrue(leafReaderContext.reader().getLiveDocs().get(docRebase)); + } + assertFalse(bitSet.get(topDoc.doc)); + bitSet.set(topDoc.doc); + } + } + assertThat(bitSet.cardinality(), equalTo(reader.numDocs())); + w.close(); + reader.close(); + dir.close(); + } +} diff --git a/core/src/test/java/org/apache/lucene/search/postingshighlight/CustomPassageFormatterTests.java b/core/src/test/java/org/apache/lucene/search/postingshighlight/CustomPassageFormatterTests.java deleted file mode 100644 index 2d43a1ca64e..00000000000 --- a/core/src/test/java/org/apache/lucene/search/postingshighlight/CustomPassageFormatterTests.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.lucene.search.postingshighlight; - -import org.apache.lucene.search.highlight.Snippet; -import org.apache.lucene.search.highlight.DefaultEncoder; -import org.apache.lucene.search.highlight.SimpleHTMLEncoder; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.test.ESTestCase; - -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.notNullValue; - - -public class CustomPassageFormatterTests extends ESTestCase { - public void testSimpleFormat() { - String content = "This is a really cool highlighter. Postings highlighter gives nice snippets back. No matches here."; - - CustomPassageFormatter passageFormatter = new CustomPassageFormatter("", "", new DefaultEncoder()); - - Passage[] passages = new Passage[3]; - String match = "highlighter"; - BytesRef matchBytesRef = new BytesRef(match); - - Passage passage1 = new Passage(); - int start = content.indexOf(match); - int end = start + match.length(); - passage1.startOffset = 0; - passage1.endOffset = end + 2; //lets include the whitespace at the end to make sure we trim it - passage1.addMatch(start, end, matchBytesRef); - passages[0] = passage1; - - Passage passage2 = new Passage(); - start = content.lastIndexOf(match); - end = start + match.length(); - passage2.startOffset = passage1.endOffset; - passage2.endOffset = end + 26; - passage2.addMatch(start, end, matchBytesRef); - passages[1] = passage2; - - Passage passage3 = new Passage(); - passage3.startOffset = passage2.endOffset; - passage3.endOffset = content.length(); - passages[2] = passage3; - - Snippet[] fragments = passageFormatter.format(passages, content); - assertThat(fragments, notNullValue()); - assertThat(fragments.length, equalTo(3)); - assertThat(fragments[0].getText(), equalTo("This is a really cool highlighter.")); - assertThat(fragments[0].isHighlighted(), equalTo(true)); - assertThat(fragments[1].getText(), equalTo("Postings highlighter gives nice snippets back.")); - assertThat(fragments[1].isHighlighted(), equalTo(true)); - assertThat(fragments[2].getText(), equalTo("No matches here.")); - assertThat(fragments[2].isHighlighted(), equalTo(false)); - } - - public void testHtmlEncodeFormat() { - String content = "This is a really cool highlighter. 
Postings highlighter gives nice snippets back."; - - CustomPassageFormatter passageFormatter = new CustomPassageFormatter("", "", new SimpleHTMLEncoder()); - - Passage[] passages = new Passage[2]; - String match = "highlighter"; - BytesRef matchBytesRef = new BytesRef(match); - - Passage passage1 = new Passage(); - int start = content.indexOf(match); - int end = start + match.length(); - passage1.startOffset = 0; - passage1.endOffset = end + 6; //lets include the whitespace at the end to make sure we trim it - passage1.addMatch(start, end, matchBytesRef); - passages[0] = passage1; - - Passage passage2 = new Passage(); - start = content.lastIndexOf(match); - end = start + match.length(); - passage2.startOffset = passage1.endOffset; - passage2.endOffset = content.length(); - passage2.addMatch(start, end, matchBytesRef); - passages[1] = passage2; - - Snippet[] fragments = passageFormatter.format(passages, content); - assertThat(fragments, notNullValue()); - assertThat(fragments.length, equalTo(2)); - assertThat(fragments[0].getText(), equalTo("<b>This is a really cool highlighter.</b>")); - assertThat(fragments[1].getText(), equalTo("Postings highlighter gives nice snippets back.")); - } -} diff --git a/core/src/test/java/org/apache/lucene/search/postingshighlight/CustomPostingsHighlighterTests.java b/core/src/test/java/org/apache/lucene/search/postingshighlight/CustomPostingsHighlighterTests.java deleted file mode 100644 index 315e38d12fe..00000000000 --- a/core/src/test/java/org/apache/lucene/search/postingshighlight/CustomPostingsHighlighterTests.java +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.lucene.search.postingshighlight; - -import org.apache.lucene.analysis.MockAnalyzer; -import org.apache.lucene.document.Document; -import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; -import org.apache.lucene.document.TextField; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.RandomIndexWriter; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.highlight.Snippet; -import org.apache.lucene.search.Sort; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.TopDocs; -import org.apache.lucene.search.highlight.DefaultEncoder; -import org.apache.lucene.store.Directory; -import org.elasticsearch.search.fetch.subphase.highlight.HighlightUtils; -import org.elasticsearch.test.ESTestCase; - -import static org.hamcrest.CoreMatchers.equalTo; - -public class CustomPostingsHighlighterTests extends ESTestCase { - public void testCustomPostingsHighlighter() throws Exception { - Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); - iwc.setMergePolicy(newLogMergePolicy()); - RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); - - FieldType offsetsType = new FieldType(TextField.TYPE_STORED); - offsetsType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); - - //good position but only one match - final String firstValue = "This is a test. Just a test1 highlighting from postings highlighter."; - Field body = new Field("body", "", offsetsType); - Document doc = new Document(); - doc.add(body); - body.setStringValue(firstValue); - - //two matches, not the best snippet due to its length though - final String secondValue = "This is the second highlighting value to perform highlighting on a longer text that gets scored lower."; - Field body2 = new Field("body", "", offsetsType); - doc.add(body2); - body2.setStringValue(secondValue); - - //two matches and short, will be scored highest - final String thirdValue = "This is highlighting the third short highlighting value."; - Field body3 = new Field("body", "", offsetsType); - doc.add(body3); - body3.setStringValue(thirdValue); - - //one match, same as first but at the end, will be scored lower due to its position - final String fourthValue = "Just a test4 highlighting from postings highlighter."; - Field body4 = new Field("body", "", offsetsType); - doc.add(body4); - body4.setStringValue(fourthValue); - - iw.addDocument(doc); - - IndexReader ir = iw.getReader(); - iw.close(); - - String firstHlValue = "Just a test1 highlighting from postings highlighter."; - String secondHlValue = "This is the second highlighting value to perform highlighting on a longer text that gets scored lower."; - String thirdHlValue = "This is highlighting the third short highlighting value."; - String fourthHlValue = "Just a test4 highlighting from postings highlighter."; - - IndexSearcher searcher = newSearcher(ir); - Query query = new TermQuery(new Term("body", "highlighting")); - - TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); - assertThat(topDocs.totalHits, equalTo(1)); - - int docId = topDocs.scoreDocs[0].doc; - - String fieldValue = firstValue + HighlightUtils.PARAGRAPH_SEPARATOR + secondValue + HighlightUtils.PARAGRAPH_SEPARATOR + thirdValue + 
HighlightUtils.PARAGRAPH_SEPARATOR + fourthValue; - - CustomPostingsHighlighter highlighter = new CustomPostingsHighlighter(null, new CustomPassageFormatter("", "", new DefaultEncoder()), fieldValue, false); - Snippet[] snippets = highlighter.highlightField("body", query, searcher, docId, 5); - - assertThat(snippets.length, equalTo(4)); - - assertThat(snippets[0].getText(), equalTo(firstHlValue)); - assertThat(snippets[1].getText(), equalTo(secondHlValue)); - assertThat(snippets[2].getText(), equalTo(thirdHlValue)); - assertThat(snippets[3].getText(), equalTo(fourthHlValue)); - - ir.close(); - dir.close(); - } - - public void testNoMatchSize() throws Exception { - Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); - iwc.setMergePolicy(newLogMergePolicy()); - RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); - - FieldType offsetsType = new FieldType(TextField.TYPE_STORED); - offsetsType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); - Field body = new Field("body", "", offsetsType); - Field none = new Field("none", "", offsetsType); - Document doc = new Document(); - doc.add(body); - doc.add(none); - - String firstValue = "This is a test. Just a test highlighting from postings. Feel free to ignore."; - body.setStringValue(firstValue); - none.setStringValue(firstValue); - iw.addDocument(doc); - - IndexReader ir = iw.getReader(); - iw.close(); - - Query query = new TermQuery(new Term("none", "highlighting")); - - IndexSearcher searcher = newSearcher(ir); - TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); - assertThat(topDocs.totalHits, equalTo(1)); - int docId = topDocs.scoreDocs[0].doc; - - CustomPassageFormatter passageFormatter = new CustomPassageFormatter("", "", new DefaultEncoder()); - - CustomPostingsHighlighter highlighter = new CustomPostingsHighlighter(null, passageFormatter, firstValue, false); - Snippet[] snippets = highlighter.highlightField("body", query, searcher, docId, 5); - assertThat(snippets.length, equalTo(0)); - - highlighter = new CustomPostingsHighlighter(null, passageFormatter, firstValue, true); - snippets = highlighter.highlightField("body", query, searcher, docId, 5); - assertThat(snippets.length, equalTo(1)); - assertThat(snippets[0].getText(), equalTo("This is a test.")); - - ir.close(); - dir.close(); - } -} diff --git a/core/src/test/java/org/apache/lucene/search/postingshighlight/CustomSeparatorBreakIteratorTests.java b/core/src/test/java/org/apache/lucene/search/postingshighlight/CustomSeparatorBreakIteratorTests.java deleted file mode 100644 index 17aeb869c1a..00000000000 --- a/core/src/test/java/org/apache/lucene/search/postingshighlight/CustomSeparatorBreakIteratorTests.java +++ /dev/null @@ -1,178 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.lucene.search.postingshighlight; - -import org.elasticsearch.search.fetch.subphase.highlight.HighlightUtils; -import org.elasticsearch.test.ESTestCase; - -import java.text.BreakIterator; -import java.text.CharacterIterator; -import java.text.StringCharacterIterator; -import java.util.Locale; - -import static org.hamcrest.CoreMatchers.equalTo; - -public class CustomSeparatorBreakIteratorTests extends ESTestCase { - public void testBreakOnCustomSeparator() throws Exception { - Character separator = randomSeparator(); - BreakIterator bi = new CustomSeparatorBreakIterator(separator); - String source = "this" + separator + "is" + separator + "the" + separator + "first" + separator + "sentence"; - bi.setText(source); - assertThat(bi.current(), equalTo(0)); - assertThat(bi.first(), equalTo(0)); - assertThat(source.substring(bi.current(), bi.next()), equalTo("this" + separator)); - assertThat(source.substring(bi.current(), bi.next()), equalTo("is" + separator)); - assertThat(source.substring(bi.current(), bi.next()), equalTo("the" + separator)); - assertThat(source.substring(bi.current(), bi.next()), equalTo("first" + separator)); - assertThat(source.substring(bi.current(), bi.next()), equalTo("sentence")); - assertThat(bi.next(), equalTo(BreakIterator.DONE)); - - assertThat(bi.last(), equalTo(source.length())); - int current = bi.current(); - assertThat(source.substring(bi.previous(), current), equalTo("sentence")); - current = bi.current(); - assertThat(source.substring(bi.previous(), current), equalTo("first" + separator)); - current = bi.current(); - assertThat(source.substring(bi.previous(), current), equalTo("the" + separator)); - current = bi.current(); - assertThat(source.substring(bi.previous(), current), equalTo("is" + separator)); - current = bi.current(); - assertThat(source.substring(bi.previous(), current), equalTo("this" + separator)); - assertThat(bi.previous(), equalTo(BreakIterator.DONE)); - assertThat(bi.current(), equalTo(0)); - - assertThat(source.substring(0, bi.following(9)), equalTo("this" + separator + "is" + separator + "the" + separator)); - - assertThat(source.substring(0, bi.preceding(9)), equalTo("this" + separator + "is" + separator)); - - assertThat(bi.first(), equalTo(0)); - assertThat(source.substring(0, bi.next(3)), equalTo("this" + separator + "is" + separator + "the" + separator)); - } - - public void testSingleSentences() throws Exception { - BreakIterator expected = BreakIterator.getSentenceInstance(Locale.ROOT); - BreakIterator actual = new CustomSeparatorBreakIterator(randomSeparator()); - assertSameBreaks("a", expected, actual); - assertSameBreaks("ab", expected, actual); - assertSameBreaks("abc", expected, actual); - assertSameBreaks("", expected, actual); - } - - public void testSliceEnd() throws Exception { - BreakIterator expected = BreakIterator.getSentenceInstance(Locale.ROOT); - BreakIterator actual = new CustomSeparatorBreakIterator(randomSeparator()); - assertSameBreaks("a000", 0, 1, expected, actual); - assertSameBreaks("ab000", 0, 1, expected, actual); - assertSameBreaks("abc000", 0, 1, expected, actual); - assertSameBreaks("000", 0, 0, expected, actual); - } - - public void testSliceStart() throws Exception { - BreakIterator expected = BreakIterator.getSentenceInstance(Locale.ROOT); - BreakIterator actual = new CustomSeparatorBreakIterator(randomSeparator()); - assertSameBreaks("000a", 3, 1, expected, actual); - 
assertSameBreaks("000ab", 3, 2, expected, actual); - assertSameBreaks("000abc", 3, 3, expected, actual); - assertSameBreaks("000", 3, 0, expected, actual); - } - - public void testSliceMiddle() throws Exception { - BreakIterator expected = BreakIterator.getSentenceInstance(Locale.ROOT); - BreakIterator actual = new CustomSeparatorBreakIterator(randomSeparator()); - assertSameBreaks("000a000", 3, 1, expected, actual); - assertSameBreaks("000ab000", 3, 2, expected, actual); - assertSameBreaks("000abc000", 3, 3, expected, actual); - assertSameBreaks("000000", 3, 0, expected, actual); - } - - /** the current position must be ignored, initial position is always first() */ - public void testFirstPosition() throws Exception { - BreakIterator expected = BreakIterator.getSentenceInstance(Locale.ROOT); - BreakIterator actual = new CustomSeparatorBreakIterator(randomSeparator()); - assertSameBreaks("000ab000", 3, 2, 4, expected, actual); - } - - private static char randomSeparator() { - return randomFrom(' ', HighlightUtils.NULL_SEPARATOR, HighlightUtils.PARAGRAPH_SEPARATOR); - } - - private static void assertSameBreaks(String text, BreakIterator expected, BreakIterator actual) { - assertSameBreaks(new StringCharacterIterator(text), - new StringCharacterIterator(text), - expected, - actual); - } - - private static void assertSameBreaks(String text, int offset, int length, BreakIterator expected, BreakIterator actual) { - assertSameBreaks(text, offset, length, offset, expected, actual); - } - - private static void assertSameBreaks(String text, int offset, int length, int current, BreakIterator expected, BreakIterator actual) { - assertSameBreaks(new StringCharacterIterator(text, offset, offset + length, current), - new StringCharacterIterator(text, offset, offset + length, current), - expected, - actual); - } - - /** Asserts that two breakiterators break the text the same way */ - private static void assertSameBreaks(CharacterIterator one, CharacterIterator two, BreakIterator expected, BreakIterator actual) { - expected.setText(one); - actual.setText(two); - - assertEquals(expected.current(), actual.current()); - - // next() - int v = expected.current(); - while (v != BreakIterator.DONE) { - assertEquals(v = expected.next(), actual.next()); - assertEquals(expected.current(), actual.current()); - } - - // first() - assertEquals(expected.first(), actual.first()); - assertEquals(expected.current(), actual.current()); - // last() - assertEquals(expected.last(), actual.last()); - assertEquals(expected.current(), actual.current()); - - // previous() - v = expected.current(); - while (v != BreakIterator.DONE) { - assertEquals(v = expected.previous(), actual.previous()); - assertEquals(expected.current(), actual.current()); - } - - // following() - for (int i = one.getBeginIndex(); i <= one.getEndIndex(); i++) { - expected.first(); - actual.first(); - assertEquals(expected.following(i), actual.following(i)); - assertEquals(expected.current(), actual.current()); - } - - // preceding() - for (int i = one.getBeginIndex(); i <= one.getEndIndex(); i++) { - expected.last(); - actual.last(); - assertEquals(expected.preceding(i), actual.preceding(i)); - assertEquals(expected.current(), actual.current()); - } - } -} diff --git a/core/src/test/java/org/apache/lucene/search/uhighlight/CustomPassageFormatterTests.java b/core/src/test/java/org/apache/lucene/search/uhighlight/CustomPassageFormatterTests.java index 4e664c3e241..0b8bccb784f 100644 --- 
a/core/src/test/java/org/apache/lucene/search/uhighlight/CustomPassageFormatterTests.java +++ b/core/src/test/java/org/apache/lucene/search/uhighlight/CustomPassageFormatterTests.java @@ -19,7 +19,6 @@ package org.apache.lucene.search.uhighlight; -import org.apache.lucene.search.highlight.Snippet; import org.apache.lucene.search.highlight.DefaultEncoder; import org.apache.lucene.search.highlight.SimpleHTMLEncoder; import org.apache.lucene.util.BytesRef; diff --git a/core/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java b/core/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java index 23e867d2573..eec611146a6 100644 --- a/core/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java +++ b/core/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java @@ -41,7 +41,6 @@ import org.apache.lucene.search.Sort; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.highlight.DefaultEncoder; -import org.apache.lucene.search.highlight.Snippet; import org.apache.lucene.store.Directory; import org.elasticsearch.common.Strings; import org.elasticsearch.common.lucene.all.AllTermQuery; @@ -121,6 +120,19 @@ public class CustomUnifiedHighlighterTests extends ESTestCase { BreakIterator.getSentenceInstance(Locale.ROOT), 100, inputs); } + public void testMultiPhrasePrefixQuerySingleTerm() throws Exception { + final String[] inputs = { + "The quick brown fox." + }; + final String[] outputs = { + "The quick brown fox." + }; + MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery(); + query.add(new Term("text", "bro")); + assertHighlightOneDoc("text", inputs, new StandardAnalyzer(), query, Locale.ROOT, + BreakIterator.getSentenceInstance(Locale.ROOT), 0, outputs); + } + public void testMultiPhrasePrefixQuery() throws Exception { final String[] inputs = { "The quick brown fox." 
diff --git a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 764a6d3b351..84664eaeea3 100644 --- a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -62,10 +62,10 @@ import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.shard.IllegalIndexShardStateException; import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.shard.TranslogRecoveryPerformer; import org.elasticsearch.indices.IndexTemplateMissingException; import org.elasticsearch.indices.InvalidIndexTemplateException; import org.elasticsearch.indices.recovery.RecoverFilesRecoveryException; +import org.elasticsearch.indices.recovery.RecoveryTarget; import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.admin.indices.AliasesNotFoundException; @@ -334,22 +334,6 @@ public class ExceptionSerializationTests extends ESTestCase { assertTrue(ex.getCause() instanceof NullPointerException); } - public void testBatchOperationException() throws IOException { - ShardId id = new ShardId("foo", "_na_", 1); - TranslogRecoveryPerformer.BatchOperationException ex = serialize( - new TranslogRecoveryPerformer.BatchOperationException(id, "batched the fucker", 666, null)); - assertEquals(ex.getShardId(), id); - assertEquals(666, ex.completedOperations()); - assertEquals("batched the fucker", ex.getMessage()); - assertNull(ex.getCause()); - - ex = serialize(new TranslogRecoveryPerformer.BatchOperationException(null, "batched the fucker", -1, new NullPointerException())); - assertNull(ex.getShardId()); - assertEquals(-1, ex.completedOperations()); - assertEquals("batched the fucker", ex.getMessage()); - assertTrue(ex.getCause() instanceof NullPointerException); - } - public void testInvalidIndexTemplateException() throws IOException { InvalidIndexTemplateException ex = serialize(new InvalidIndexTemplateException("foo", "bar")); assertEquals(ex.getMessage(), "index_template [foo] invalid, cause [bar]"); @@ -702,7 +686,7 @@ public class ExceptionSerializationTests extends ESTestCase { ids.put(23, org.elasticsearch.index.shard.IndexShardStartedException.class); ids.put(24, org.elasticsearch.search.SearchContextMissingException.class); ids.put(25, org.elasticsearch.script.GeneralScriptException.class); - ids.put(26, org.elasticsearch.index.shard.TranslogRecoveryPerformer.BatchOperationException.class); + ids.put(26, null); ids.put(27, org.elasticsearch.snapshots.SnapshotCreationException.class); ids.put(28, org.elasticsearch.index.engine.DeleteFailedEngineException.class); //deprecated in 6.0 ids.put(29, org.elasticsearch.index.engine.DocumentMissingException.class); @@ -736,7 +720,7 @@ public class ExceptionSerializationTests extends ESTestCase { ids.put(57, org.elasticsearch.indices.IndexTemplateMissingException.class); ids.put(58, org.elasticsearch.transport.SendRequestTransportException.class); ids.put(59, org.elasticsearch.common.util.concurrent.EsRejectedExecutionException.class); - ids.put(60, org.elasticsearch.common.lucene.Lucene.EarlyTerminationException.class); + ids.put(60, null); // EarlyTerminationException was removed in 6.0 ids.put(61, null); // RoutingValidationException was removed in 5.0 ids.put(62, 
org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper.class); ids.put(63, org.elasticsearch.indices.AliasFilterParsingException.class); diff --git a/core/src/test/java/org/elasticsearch/VersionTests.java b/core/src/test/java/org/elasticsearch/VersionTests.java index 3bdec114729..d8cd635f33f 100644 --- a/core/src/test/java/org/elasticsearch/VersionTests.java +++ b/core/src/test/java/org/elasticsearch/VersionTests.java @@ -34,7 +34,7 @@ import java.util.Map; import java.util.Set; import static org.elasticsearch.Version.V_5_3_0; -import static org.elasticsearch.Version.V_6_0_0_alpha2; +import static org.elasticsearch.Version.V_6_0_0_alpha3; import static org.elasticsearch.test.VersionUtils.randomVersion; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.containsString; @@ -46,30 +46,30 @@ import static org.hamcrest.Matchers.sameInstance; public class VersionTests extends ESTestCase { public void testVersionComparison() throws Exception { - assertThat(V_5_3_0.before(V_6_0_0_alpha2), is(true)); + assertThat(V_5_3_0.before(V_6_0_0_alpha3), is(true)); assertThat(V_5_3_0.before(V_5_3_0), is(false)); - assertThat(V_6_0_0_alpha2.before(V_5_3_0), is(false)); + assertThat(V_6_0_0_alpha3.before(V_5_3_0), is(false)); - assertThat(V_5_3_0.onOrBefore(V_6_0_0_alpha2), is(true)); + assertThat(V_5_3_0.onOrBefore(V_6_0_0_alpha3), is(true)); assertThat(V_5_3_0.onOrBefore(V_5_3_0), is(true)); - assertThat(V_6_0_0_alpha2.onOrBefore(V_5_3_0), is(false)); + assertThat(V_6_0_0_alpha3.onOrBefore(V_5_3_0), is(false)); - assertThat(V_5_3_0.after(V_6_0_0_alpha2), is(false)); + assertThat(V_5_3_0.after(V_6_0_0_alpha3), is(false)); assertThat(V_5_3_0.after(V_5_3_0), is(false)); - assertThat(V_6_0_0_alpha2.after(V_5_3_0), is(true)); + assertThat(V_6_0_0_alpha3.after(V_5_3_0), is(true)); - assertThat(V_5_3_0.onOrAfter(V_6_0_0_alpha2), is(false)); + assertThat(V_5_3_0.onOrAfter(V_6_0_0_alpha3), is(false)); assertThat(V_5_3_0.onOrAfter(V_5_3_0), is(true)); - assertThat(V_6_0_0_alpha2.onOrAfter(V_5_3_0), is(true)); + assertThat(V_6_0_0_alpha3.onOrAfter(V_5_3_0), is(true)); assertTrue(Version.fromString("5.0.0-alpha2").onOrAfter(Version.fromString("5.0.0-alpha1"))); assertTrue(Version.fromString("5.0.0").onOrAfter(Version.fromString("5.0.0-beta2"))); assertTrue(Version.fromString("5.0.0-rc1").onOrAfter(Version.fromString("5.0.0-beta24"))); assertTrue(Version.fromString("5.0.0-alpha24").before(Version.fromString("5.0.0-beta0"))); - assertThat(V_5_3_0, is(lessThan(V_6_0_0_alpha2))); + assertThat(V_5_3_0, is(lessThan(V_6_0_0_alpha3))); assertThat(V_5_3_0.compareTo(V_5_3_0), is(0)); - assertThat(V_6_0_0_alpha2, is(greaterThan(V_5_3_0))); + assertThat(V_6_0_0_alpha3, is(greaterThan(V_5_3_0))); } public void testMin() { @@ -97,7 +97,7 @@ public class VersionTests extends ESTestCase { } public void testMinimumIndexCompatibilityVersion() { - assertEquals(Version.V_5_0_0, Version.V_6_0_0_alpha2.minimumIndexCompatibilityVersion()); + assertEquals(Version.V_5_0_0, Version.V_6_0_0_alpha3.minimumIndexCompatibilityVersion()); assertEquals(Version.fromId(2000099), Version.V_5_0_0.minimumIndexCompatibilityVersion()); assertEquals(Version.fromId(2000099), Version.V_5_1_1.minimumIndexCompatibilityVersion()); @@ -157,7 +157,7 @@ public class VersionTests extends ESTestCase { public void testIndexCreatedVersion() { // an actual index has a IndexMetaData.SETTING_INDEX_UUID final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_2, - Version.V_5_2_0, Version.V_6_0_0_alpha2); + 
Version.V_5_2_0, Version.V_6_0_0_alpha3); assertEquals(version, Version.indexCreated(Settings.builder().put(IndexMetaData.SETTING_INDEX_UUID, "foo").put(IndexMetaData.SETTING_VERSION_CREATED, version).build())); } @@ -171,10 +171,10 @@ public class VersionTests extends ESTestCase { // from 6.0 on we are supporting the latest minor of the previous major... this might fail once we add a new version ie. 5.x is // released since we need to bump the supported minor in Version#minimumCompatibilityVersion() Version lastVersion = VersionUtils.getPreviousVersion(Version.V_6_0_0_alpha1); - assertEquals(lastVersion.major, Version.V_6_0_0_alpha2.minimumCompatibilityVersion().major); + assertEquals(lastVersion.major, Version.V_6_0_0_alpha3.minimumCompatibilityVersion().major); assertEquals("did you miss to bump the minor in Version#minimumCompatibilityVersion()", - lastVersion.minor, Version.V_6_0_0_alpha2.minimumCompatibilityVersion().minor); - assertEquals(0, Version.V_6_0_0_alpha2.minimumCompatibilityVersion().revision); + lastVersion.minor, Version.V_6_0_0_alpha3.minimumCompatibilityVersion().minor); + assertEquals(0, Version.V_6_0_0_alpha3.minimumCompatibilityVersion().revision); } public void testToString() { @@ -254,7 +254,7 @@ public class VersionTests extends ESTestCase { final Set unreleasedVersions = new HashSet<>(VersionUtils.allUnreleasedVersions()); Map maxBranchVersions = new HashMap<>(); for (java.lang.reflect.Field field : Version.class.getFields()) { - if (field.getName().matches("_ID(_UNRELEASED)?")) { + if (field.getName().matches("_ID")) { assertTrue(field.getName() + " should be static", Modifier.isStatic(field.getModifiers())); assertTrue(field.getName() + " should be final", Modifier.isFinal(field.getModifiers())); int versionId = (Integer)field.get(Version.class); @@ -293,7 +293,12 @@ public class VersionTests extends ESTestCase { if (maxBranchVersion == null) { maxBranchVersions.put(branchName, v); } else if (v.after(maxBranchVersion)) { - assertFalse("Version " + maxBranchVersion + " cannot be a snapshot because version " + v + " exists", VersionUtils.isSnapshot(maxBranchVersion)); + if (v == Version.CURRENT) { + // Current is weird - it counts as released even though it shouldn't. 
+ continue; + } + assertFalse("Version " + maxBranchVersion + " cannot be a snapshot because version " + v + " exists", + VersionUtils.allUnreleasedVersions().contains(maxBranchVersion)); maxBranchVersions.put(branchName, v); } } diff --git a/core/src/test/java/org/elasticsearch/action/ActionModuleTests.java b/core/src/test/java/org/elasticsearch/action/ActionModuleTests.java index 7c9d5443464..44c22c9e412 100644 --- a/core/src/test/java/org/elasticsearch/action/ActionModuleTests.java +++ b/core/src/test/java/org/elasticsearch/action/ActionModuleTests.java @@ -43,6 +43,7 @@ import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.usage.UsageService; import java.io.IOException; import java.util.List; @@ -110,9 +111,10 @@ public class ActionModuleTests extends ESTestCase { public void testSetupRestHandlerContainsKnownBuiltin() { SettingsModule settings = new SettingsModule(Settings.EMPTY); + UsageService usageService = new UsageService(settings.getSettings()); ActionModule actionModule = new ActionModule(false, settings.getSettings(), new IndexNameExpressionResolver(Settings.EMPTY), settings.getIndexScopedSettings(), settings.getClusterSettings(), settings.getSettingsFilter(), null, emptyList(), null, - null); + null, usageService); actionModule.initRestHandlers(null); // At this point the easiest way to confirm that a handler is loaded is to try to register another one on top of it and to fail Exception e = expectThrows(IllegalArgumentException.class, () -> @@ -132,9 +134,10 @@ public class ActionModuleTests extends ESTestCase { SettingsModule settings = new SettingsModule(Settings.EMPTY); ThreadPool threadPool = new TestThreadPool(getTestName()); try { + UsageService usageService = new UsageService(settings.getSettings()); ActionModule actionModule = new ActionModule(false, settings.getSettings(), new IndexNameExpressionResolver(Settings.EMPTY), settings.getIndexScopedSettings(), settings.getClusterSettings(), settings.getSettingsFilter(), threadPool, - singletonList(dupsMainAction), null, null); + singletonList(dupsMainAction), null, null, usageService); Exception e = expectThrows(IllegalArgumentException.class, () -> actionModule.initRestHandlers(null)); assertThat(e.getMessage(), startsWith("Path [/] already has a value [" + RestMainAction.class.getName())); } finally { @@ -163,9 +166,10 @@ public class ActionModuleTests extends ESTestCase { SettingsModule settings = new SettingsModule(Settings.EMPTY); ThreadPool threadPool = new TestThreadPool(getTestName()); try { + UsageService usageService = new UsageService(settings.getSettings()); ActionModule actionModule = new ActionModule(false, settings.getSettings(), new IndexNameExpressionResolver(Settings.EMPTY), settings.getIndexScopedSettings(), settings.getClusterSettings(), settings.getSettingsFilter(), threadPool, - singletonList(registersFakeHandler), null, null); + singletonList(registersFakeHandler), null, null, usageService); actionModule.initRestHandlers(null); // At this point the easiest way to confirm that a handler is loaded is to try to register another one on top of it and to fail Exception e = expectThrows(IllegalArgumentException.class, () -> diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java index 
0cece76425d..fdd5091485b 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java @@ -164,10 +164,6 @@ public abstract class TaskManagerTestCase extends ESTestCase { @Override protected abstract NodeResponse nodeOperation(NodeRequest request); - @Override - protected boolean accumulateExceptions() { - return true; - } } public static class TestNode implements Releasable { diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java index b4ba0354ed7..ec981442b57 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java @@ -313,10 +313,6 @@ public class TestTaskPlugin extends Plugin implements ActionPlugin { throw new UnsupportedOperationException("the task parameter is required"); } - @Override - protected boolean accumulateExceptions() { - return true; - } } public static class TestTaskAction extends Action { @@ -453,10 +449,6 @@ public class TestTaskPlugin extends Plugin implements ActionPlugin { listener.onResponse(new UnblockTestTaskResponse()); } - @Override - protected boolean accumulateExceptions() { - return true; - } } public static class UnblockTestTasksAction extends Action startBlockingTestNodesAction(CountDownLatch checkLatch) throws InterruptedException { diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestTests.java index aec8349ea8d..a60722d3f42 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestTests.java @@ -32,7 +32,8 @@ import java.util.Base64; public class PutStoredScriptRequestTests extends ESTestCase { public void testSerialization() throws IOException { - PutStoredScriptRequest storedScriptRequest = new PutStoredScriptRequest("foo", "bar", new BytesArray("{}"), XContentType.JSON); + PutStoredScriptRequest storedScriptRequest = + new PutStoredScriptRequest("foo", "bar", "context", new BytesArray("{}"), XContentType.JSON); assertEquals(XContentType.JSON, storedScriptRequest.xContentType()); try (BytesStreamOutput output = new BytesStreamOutput()) { @@ -44,6 +45,7 @@ public class PutStoredScriptRequestTests extends ESTestCase { assertEquals(XContentType.JSON, serialized.xContentType()); assertEquals(storedScriptRequest.lang(), serialized.lang()); assertEquals(storedScriptRequest.id(), serialized.id()); + assertEquals(storedScriptRequest.context(), serialized.context()); } } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java index 0e1414bdbef..5dfcd102431 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java @@ -29,18 +29,24 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractCharFilterFactory; import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.IndexAnalyzers; +import org.elasticsearch.index.analysis.PreConfiguredCharFilter; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.mapper.AllFieldMapper; import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; +import org.elasticsearch.indices.analysis.AnalysisModuleTests.AppendCharFilter; import org.elasticsearch.plugins.AnalysisPlugin; +import static org.elasticsearch.plugins.AnalysisPlugin.requriesAnalysisSettings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import java.io.IOException; +import java.io.Reader; import java.util.List; import java.util.Map; @@ -81,10 +87,31 @@ public class TransportAnalyzeActionTests extends ESTestCase { } } + class AppendCharFilterFactory extends AbstractCharFilterFactory { + AppendCharFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { + super(indexSettings, name); + } + + @Override + public Reader create(Reader reader) { + return new AppendCharFilter(reader, "bar"); + } + } + + @Override + public Map> getCharFilters() { + return singletonMap("append", AppendCharFilterFactory::new); + } + @Override public Map> getTokenFilters() { return singletonMap("mock", MockFactory::new); } + + @Override + public List getPreConfiguredCharFilters() { + return singletonList(PreConfiguredCharFilter.singleton("append_foo", false, reader -> new AppendCharFilter(reader, "foo"))); + } }; registry = new AnalysisModule(environment, singletonList(plugin)).getAnalysisRegistry(); indexAnalyzers = registry.build(idxSettings); @@ -96,17 +123,17 @@ public class TransportAnalyzeActionTests extends ESTestCase { public void testNoIndexAnalyzers() throws IOException { // Refer to an analyzer by its type so we get its default configuration AnalyzeRequest request = new AnalyzeRequest(); - request.analyzer("standard"); request.text("the quick brown fox"); + request.analyzer("standard"); AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, null, registry, environment); List tokens = analyze.getTokens(); assertEquals(4, tokens.size()); // Refer to a token filter by its type so we get its default configuration - request.analyzer(null); - request.tokenizer("whitespace"); - request.addTokenFilter("mock"); + request = new AnalyzeRequest(); request.text("the qu1ck brown fox"); + request.tokenizer("standard"); + request.addTokenFilter("mock"); analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, randomBoolean() ? indexAnalyzers : null, registry, environment); tokens = analyze.getTokens(); assertEquals(3, tokens.size()); @@ -114,18 +141,32 @@ public class TransportAnalyzeActionTests extends ESTestCase { assertEquals("brown", tokens.get(1).getTerm()); assertEquals("fox", tokens.get(2).getTerm()); - // Refer to a char filter by its type so we get its default configuration - request.analyzer(null); - request.tokenizer("whitespace"); - request.addCharFilter("html_strip"); - request.addTokenFilter("mock"); - request.text("
<p>the qu1ck brown fox</p>
"); + // We can refer to a pre-configured token filter by its name to get it + request = new AnalyzeRequest(); + request.text("the qu1ck brown fox"); + request.tokenizer("standard"); + request.addCharFilter("append_foo"); analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, randomBoolean() ? indexAnalyzers : null, registry, environment); tokens = analyze.getTokens(); - assertEquals(3, tokens.size()); - assertEquals("qu1ck", tokens.get(0).getTerm()); - assertEquals("brown", tokens.get(1).getTerm()); - assertEquals("fox", tokens.get(2).getTerm()); + assertEquals(4, tokens.size()); + assertEquals("the", tokens.get(0).getTerm()); + assertEquals("qu1ck", tokens.get(1).getTerm()); + assertEquals("brown", tokens.get(2).getTerm()); + assertEquals("foxfoo", tokens.get(3).getTerm()); + + // We can refer to a token filter by its type to get its default configuration + request = new AnalyzeRequest(); + request.text("the qu1ck brown fox"); + request.tokenizer("standard"); + request.addCharFilter("append"); + request.text("the qu1ck brown fox"); + analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, randomBoolean() ? indexAnalyzers : null, registry, environment); + tokens = analyze.getTokens(); + assertEquals(4, tokens.size()); + assertEquals("the", tokens.get(0).getTerm()); + assertEquals("qu1ck", tokens.get(1).getTerm()); + assertEquals("brown", tokens.get(2).getTerm()); + assertEquals("foxbar", tokens.get(3).getTerm()); } public void testFillsAttributes() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java index f461be77e0c..3c55d0df9c1 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java @@ -335,4 +335,12 @@ public class CreateIndexIT extends ESIntegTestCase { assertTrue(createPartitionedIndex.apply(1, 1)); } + + public void testIndexNameInResponse() { + CreateIndexResponse response = prepareCreate("foo") + .setSettings(Settings.builder().build()) + .get(); + + assertEquals("Should have index name in response", "foo", response.index()); + } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponseTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponseTests.java new file mode 100644 index 00000000000..588659335e4 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponseTests.java @@ -0,0 +1,65 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.indices.create; + +import org.elasticsearch.Version; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +public class CreateIndexResponseTests extends ESTestCase { + + public void testSerialization() throws IOException { + CreateIndexResponse response = new CreateIndexResponse(true, true, "foo"); + + try (BytesStreamOutput output = new BytesStreamOutput()) { + response.writeTo(output); + + try (StreamInput in = output.bytes().streamInput()) { + CreateIndexResponse serialized = new CreateIndexResponse(); + serialized.readFrom(in); + assertEquals(response.isShardsAcked(), serialized.isShardsAcked()); + assertEquals(response.isAcknowledged(), serialized.isAcknowledged()); + assertEquals(response.index(), serialized.index()); + } + } + } + + public void testSerializationWithOldVersion() throws IOException { + Version oldVersion = Version.V_5_4_0; + CreateIndexResponse response = new CreateIndexResponse(true, true, "foo"); + + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.setVersion(oldVersion); + response.writeTo(output); + + try (StreamInput in = output.bytes().streamInput()) { + in.setVersion(oldVersion); + CreateIndexResponse serialized = new CreateIndexResponse(); + serialized.readFrom(in); + assertEquals(response.isShardsAcked(), serialized.isShardsAcked()); + assertEquals(response.isAcknowledged(), serialized.isAcknowledged()); + assertNull(serialized.index()); + } + } + } +} diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexIT.java index 2bd13669fee..a34e6bcc0c4 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.indices.get; +import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.get.GetIndexRequest.Feature; import org.elasticsearch.cluster.metadata.AliasMetaData; @@ -281,6 +282,8 @@ public class GetIndexIT extends ESIntegTestCase { private void assertEmptyAliases(GetIndexResponse response) { assertThat(response.aliases(), notNullValue()); - assertThat(response.aliases().isEmpty(), equalTo(true)); + for (final ObjectObjectCursor> entry : response.getAliases()) { + assertTrue(entry.value.isEmpty()); + } } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java index 9d62bd825f3..d33987c92ad 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java @@ -22,6 +22,8 @@ package org.elasticsearch.action.admin.indices.rollover; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest; +import org.elasticsearch.action.admin.indices.stats.CommonStats; +import 
org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.cluster.metadata.AliasAction; import org.elasticsearch.cluster.metadata.AliasMetaData; @@ -40,12 +42,30 @@ import java.util.List; import java.util.Locale; import java.util.Set; +import org.mockito.ArgumentCaptor; import static org.elasticsearch.action.admin.indices.rollover.TransportRolloverAction.evaluateConditions; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + public class TransportRolloverActionTests extends ESTestCase { + public void testDocStatsSelectionFromPrimariesOnly() throws Exception { + long docsInPrimaryShards = 100; + long docsInShards = 200; + + final Condition condition = createTestCondition(); + evaluateConditions(Sets.newHashSet(condition), createMetaData(), createIndecesStatResponse(docsInShards, docsInPrimaryShards)); + final ArgumentCaptor argument = ArgumentCaptor.forClass(Condition.Stats.class); + verify(condition).evaluate(argument.capture()); + + assertEquals(docsInPrimaryShards, argument.getValue().numDocs); + } + public void testEvaluateConditions() throws Exception { MaxDocsCondition maxDocsCondition = new MaxDocsCondition(100L); MaxAgeCondition maxAgeCondition = new MaxAgeCondition(TimeValue.timeValueHours(2)); @@ -190,4 +210,37 @@ public class TransportRolloverActionTests extends ESTestCase { assertThat(createIndexRequest.index(), equalTo(rolloverIndex)); assertThat(createIndexRequest.cause(), equalTo("rollover_index")); } + + private IndicesStatsResponse createIndecesStatResponse(long totalDocs, long primaryDocs) { + final CommonStats primaryStats = mock(CommonStats.class); + when(primaryStats.getDocs()).thenReturn(new DocsStats(primaryDocs, 0)); + + final CommonStats totalStats = mock(CommonStats.class); + when(totalStats.getDocs()).thenReturn(new DocsStats(totalDocs, 0)); + + final IndicesStatsResponse response = mock(IndicesStatsResponse.class); + when(response.getPrimaries()).thenReturn(primaryStats); + when(response.getTotal()).thenReturn(totalStats); + + return response; + } + + private IndexMetaData createMetaData() { + final Settings settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .build(); + return IndexMetaData.builder(randomAlphaOfLength(10)) + .creationDate(System.currentTimeMillis() - TimeValue.timeValueHours(3).getMillis()) + .settings(settings) + .build(); + } + + private Condition createTestCondition() { + final Condition condition = mock(Condition.class); + when(condition.evaluate(any())).thenReturn(new Condition.Result(condition, true)); + return condition; + } } diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index 76810056485..170b0f143a3 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -177,6 +177,31 @@ public class BulkRequestTests extends ESTestCase { assertThat(bulkRequest.numberOfActions(), equalTo(9)); } + public void testBulkEmptyObject() throws 
Exception { + String bulkIndexAction = "{ \"index\":{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\"1\"} }\r\n"; + String bulkIndexSource = "{ \"field1\" : \"value1\" }\r\n"; + String emptyObject = "{}\r\n"; + StringBuilder bulk = new StringBuilder(); + int emptyLine; + if (randomBoolean()) { + bulk.append(emptyObject); + emptyLine = 1; + } else { + int actions = randomIntBetween(1, 10); + int emptyAction = randomIntBetween(1, actions); + emptyLine = emptyAction * 2 - 1; + for (int i = 1; i <= actions; i++) { + bulk.append(i == emptyAction ? emptyObject : bulkIndexAction); + bulk.append(randomBoolean() ? emptyObject : bulkIndexSource); + } + } + String bulkAction = bulk.toString(); + BulkRequest bulkRequest = new BulkRequest(); + IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, + () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON)); + assertThat(exc.getMessage(), containsString("Malformed action/metadata line [" + emptyLine + "], expected FIELD_NAME but found [END_OBJECT]")); + } + // issue 7361 public void testBulkRequestWithRefresh() throws Exception { BulkRequest bulkRequest = new BulkRequest(); diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java index dd8f92bb2e5..5da1451a138 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkResponseTests.java @@ -95,7 +95,7 @@ public class BulkResponseTests extends ESTestCase { assertNull(parser.nextToken()); } - assertEquals(took, parsedBulkResponse.getTookInMillis()); + assertEquals(took, parsedBulkResponse.getTook().getMillis()); assertEquals(ingestTook, parsedBulkResponse.getIngestTookInMillis()); assertEquals(expectedBulkItems.length, parsedBulkResponse.getItems().length); diff --git a/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java b/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java index 5ab9e1ea535..1d6b77fc747 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java @@ -118,7 +118,7 @@ public class TransportBulkActionIndicesThatCannotBeCreatedTests extends ESTestCa @Override void createIndex(String index, TimeValue timeout, ActionListener listener) { // If we try to create an index just immediately assume it worked - listener.onResponse(new CreateIndexResponse(true, true) {}); + listener.onResponse(new CreateIndexResponse(true, true, index) {}); } }; action.doExecute(null, bulkRequest, null); diff --git a/core/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java b/core/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java index 39a4bb2feca..aa7f613a176 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java @@ -541,11 +541,13 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { itemRequests[0] = itemRequest; BulkShardRequest bulkShardRequest = new BulkShardRequest( shard.shardId(), RefreshPolicy.NONE, itemRequests); + bulkShardRequest.primaryTerm(randomIntBetween(1, 
(int) shard.getPrimaryTerm())); TransportShardBulkAction.performOnReplica(bulkShardRequest, shard); ArgumentCaptor noOp = ArgumentCaptor.forClass(Engine.NoOp.class); verify(shard, times(1)).markSeqNoAsNoOp(noOp.capture()); final Engine.NoOp noOpValue = noOp.getValue(); assertThat(noOpValue.seqNo(), equalTo(1L)); + assertThat(noOpValue.primaryTerm(), equalTo(bulkShardRequest.primaryTerm())); assertThat(noOpValue.reason(), containsString(failureMessage)); closeShards(shard); } diff --git a/core/src/test/java/org/elasticsearch/action/main/MainResponseTests.java b/core/src/test/java/org/elasticsearch/action/main/MainResponseTests.java index 4a602c11003..effd52acc9d 100644 --- a/core/src/test/java/org/elasticsearch/action/main/MainResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/main/MainResponseTests.java @@ -36,6 +36,7 @@ import java.util.Date; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; +import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; public class MainResponseTests extends ESTestCase { @@ -55,8 +56,10 @@ public class MainResponseTests extends ESTestCase { XContentType xContentType = randomFrom(XContentType.values()); boolean humanReadable = randomBoolean(); BytesReference originalBytes = toShuffledXContent(mainResponse, xContentType, ToXContent.EMPTY_PARAMS, humanReadable); + // we add a few random fields to check that parser is lenient on new fields + BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, null, random()); MainResponse parsed; - try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { + try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) { parsed = MainResponse.fromXContent(parser); assertNull(parser.nextToken()); } diff --git a/core/src/test/java/org/elasticsearch/action/search/ClearScrollControllerTests.java b/core/src/test/java/org/elasticsearch/action/search/ClearScrollControllerTests.java new file mode 100644 index 00000000000..5037ffe03f9 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/search/ClearScrollControllerTests.java @@ -0,0 +1,256 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.action.search; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.SearchPhaseResult; +import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.transport.NodeNotConnectedException; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportResponse; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicInteger; + +public class ClearScrollControllerTests extends ESTestCase { + + public void testClearAll() throws IOException, InterruptedException { + DiscoveryNode node1 = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Version.CURRENT); + DiscoveryNode node2 = new DiscoveryNode("node_2", buildNewFakeTransportAddress(), Version.CURRENT); + DiscoveryNode node3 = new DiscoveryNode("node_3", buildNewFakeTransportAddress(), Version.CURRENT); + DiscoveryNodes nodes = DiscoveryNodes.builder().add(node1).add(node2).add(node3).build(); + CountDownLatch latch = new CountDownLatch(1); + ActionListener listener = new ActionListener() { + @Override + public void onResponse(ClearScrollResponse clearScrollResponse) { + try { + assertEquals(3, clearScrollResponse.getNumFreed()); + assertTrue(clearScrollResponse.isSucceeded()); + } finally { + latch.countDown(); + } + } + + @Override + public void onFailure(Exception e) { + try { + throw new AssertionError(e); + } finally { + latch.countDown(); + } + } + }; + List nodesInvoked = new CopyOnWriteArrayList<>(); + SearchTransportService searchTransportService = new SearchTransportService(Settings.EMPTY, null) { + @Override + public void sendClearAllScrollContexts(Transport.Connection connection, ActionListener listener) { + nodesInvoked.add(connection.getNode()); + Thread t = new Thread(() -> listener.onResponse(TransportResponse.Empty.INSTANCE)); // response is unused + t.start(); + } + + @Override + Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); + clearScrollRequest.scrollIds(Arrays.asList("_all")); + ClearScrollController controller = new ClearScrollController(clearScrollRequest, listener, + nodes, logger, searchTransportService); + controller.run(); + latch.await(); + assertEquals(3, nodesInvoked.size()); + Collections.sort(nodesInvoked, Comparator.comparing(DiscoveryNode::getId)); + assertEquals(nodesInvoked, Arrays.asList(node1, node2, node3)); + } + + + public void testClearScrollIds() throws IOException, InterruptedException { + DiscoveryNode node1 = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Version.CURRENT); + DiscoveryNode node2 = new DiscoveryNode("node_2", buildNewFakeTransportAddress(), Version.CURRENT); + DiscoveryNode node3 = new DiscoveryNode("node_3", buildNewFakeTransportAddress(), Version.CURRENT); + AtomicArray array = new AtomicArray<>(3); + 
SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult1 = new SearchAsyncActionTests.TestSearchPhaseResult(1, node1); + testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), null, null)); + SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult2 = new SearchAsyncActionTests.TestSearchPhaseResult(12, node2); + testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), null, null)); + SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult3 = new SearchAsyncActionTests.TestSearchPhaseResult(42, node3); + testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null, null)); + array.setOnce(0, testSearchPhaseResult1); + array.setOnce(1, testSearchPhaseResult2); + array.setOnce(2, testSearchPhaseResult3); + AtomicInteger numFreed = new AtomicInteger(0); + String scrollId = TransportSearchHelper.buildScrollId(array); + DiscoveryNodes nodes = DiscoveryNodes.builder().add(node1).add(node2).add(node3).build(); + CountDownLatch latch = new CountDownLatch(1); + ActionListener listener = new ActionListener() { + @Override + public void onResponse(ClearScrollResponse clearScrollResponse) { + try { + assertEquals(numFreed.get(), clearScrollResponse.getNumFreed()); + assertTrue(clearScrollResponse.isSucceeded()); + } finally { + latch.countDown(); + } + + } + + @Override + public void onFailure(Exception e) { + try { + throw new AssertionError(e); + } finally { + latch.countDown(); + } + } + }; + List nodesInvoked = new CopyOnWriteArrayList<>(); + SearchTransportService searchTransportService = new SearchTransportService(Settings.EMPTY, null) { + + @Override + public void sendFreeContext(Transport.Connection connection, long contextId, + ActionListener listener) { + nodesInvoked.add(connection.getNode()); + boolean freed = randomBoolean(); + if (freed) { + numFreed.incrementAndGet(); + } + Thread t = new Thread(() -> listener.onResponse(new SearchFreeContextResponse(freed))); + t.start(); + } + + @Override + Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + }; + ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); + clearScrollRequest.scrollIds(Arrays.asList(scrollId)); + ClearScrollController controller = new ClearScrollController(clearScrollRequest, listener, + nodes, logger, searchTransportService); + controller.run(); + latch.await(); + assertEquals(3, nodesInvoked.size()); + Collections.sort(nodesInvoked, Comparator.comparing(DiscoveryNode::getId)); + assertEquals(nodesInvoked, Arrays.asList(node1, node2, node3)); + } + + public void testClearScrollIdsWithFailure() throws IOException, InterruptedException { + DiscoveryNode node1 = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Version.CURRENT); + DiscoveryNode node2 = new DiscoveryNode("node_2", buildNewFakeTransportAddress(), Version.CURRENT); + DiscoveryNode node3 = new DiscoveryNode("node_3", buildNewFakeTransportAddress(), Version.CURRENT); + AtomicArray array = new AtomicArray<>(3); + SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult1 = new SearchAsyncActionTests.TestSearchPhaseResult(1, node1); + testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), null, null)); + SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult2 = new 
SearchAsyncActionTests.TestSearchPhaseResult(12, node2); + testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), null, null)); + SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult3 = new SearchAsyncActionTests.TestSearchPhaseResult(42, node3); + testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null, null)); + array.setOnce(0, testSearchPhaseResult1); + array.setOnce(1, testSearchPhaseResult2); + array.setOnce(2, testSearchPhaseResult3); + AtomicInteger numFreed = new AtomicInteger(0); + AtomicInteger numFailures = new AtomicInteger(0); + AtomicInteger numConnectionFailures = new AtomicInteger(0); + String scrollId = TransportSearchHelper.buildScrollId(array); + DiscoveryNodes nodes = DiscoveryNodes.builder().add(node1).add(node2).add(node3).build(); + CountDownLatch latch = new CountDownLatch(1); + + ActionListener listener = new ActionListener() { + @Override + public void onResponse(ClearScrollResponse clearScrollResponse) { + try { + assertEquals(numFreed.get(), clearScrollResponse.getNumFreed()); + if (numFailures.get() > 0) { + assertFalse(clearScrollResponse.isSucceeded()); + } else { + assertTrue(clearScrollResponse.isSucceeded()); + } + + } finally { + latch.countDown(); + } + + } + + @Override + public void onFailure(Exception e) { + try { + throw new AssertionError(e); + } finally { + latch.countDown(); + } + } + }; + List nodesInvoked = new CopyOnWriteArrayList<>(); + SearchTransportService searchTransportService = new SearchTransportService(Settings.EMPTY, null) { + + @Override + public void sendFreeContext(Transport.Connection connection, long contextId, + ActionListener listener) { + nodesInvoked.add(connection.getNode()); + boolean freed = randomBoolean(); + boolean fail = randomBoolean(); + Thread t = new Thread(() -> { + if (fail) { + numFailures.incrementAndGet(); + listener.onFailure(new IllegalArgumentException("boom")); + } else { + if (freed) { + numFreed.incrementAndGet(); + } + listener.onResponse(new SearchFreeContextResponse(freed)); + } + }); + t.start(); + } + + @Override + Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + if (randomBoolean()) { + numFailures.incrementAndGet(); + numConnectionFailures.incrementAndGet(); + throw new NodeNotConnectedException(node, "boom"); + } + return new SearchAsyncActionTests.MockConnection(node); + } + }; + ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); + clearScrollRequest.scrollIds(Arrays.asList(scrollId)); + ClearScrollController controller = new ClearScrollController(clearScrollRequest, listener, + nodes, logger, searchTransportService); + controller.run(); + latch.await(); + assertEquals(3 - numConnectionFailures.get(), nodesInvoked.size()); + } +} diff --git a/core/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java b/core/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java index a85f4892933..fef4cff6a4e 100644 --- a/core/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java @@ -108,13 +108,12 @@ public class ExpandSearchPhaseTests extends ESTestCase { Collections.singletonMap("someField", new SearchHitField("someField", Collections.singletonList(collapseValue))))}, 1, 1.0F); InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1); - 
SearchResponse response = mockSearchPhaseContext.buildSearchResponse(internalSearchResponse, null); AtomicReference reference = new AtomicReference<>(); - ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, response, r -> + ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, internalSearchResponse, (r) -> new SearchPhase("test") { @Override public void run() throws IOException { - reference.set(r); + reference.set(mockSearchPhaseContext.buildSearchResponse(r, null)); } } ); @@ -123,7 +122,6 @@ public class ExpandSearchPhaseTests extends ESTestCase { mockSearchPhaseContext.assertNoFailure(); assertNotNull(reference.get()); SearchResponse theResponse = reference.get(); - assertSame(theResponse, response); assertEquals(numInnerHits, theResponse.getHits().getHits()[0].getInnerHits().size()); for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) { @@ -167,13 +165,12 @@ public class ExpandSearchPhaseTests extends ESTestCase { Collections.singletonMap("someField", new SearchHitField("someField", Collections.singletonList(collapseValue))))}, 1, 1.0F); InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1); - SearchResponse response = mockSearchPhaseContext.buildSearchResponse(internalSearchResponse, null); AtomicReference reference = new AtomicReference<>(); - ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, response, r -> + ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, internalSearchResponse, r -> new SearchPhase("test") { @Override public void run() throws IOException { - reference.set(r); + reference.set(mockSearchPhaseContext.buildSearchResponse(r, null)); } } ); @@ -201,13 +198,12 @@ public class ExpandSearchPhaseTests extends ESTestCase { new SearchHit(2, "ID2", new Text("type"), Collections.singletonMap("someField", new SearchHitField("someField", Collections.singletonList(null))))}, 1, 1.0F); InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1); - SearchResponse response = mockSearchPhaseContext.buildSearchResponse(internalSearchResponse, null); AtomicReference reference = new AtomicReference<>(); - ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, response, r -> + ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, internalSearchResponse, r -> new SearchPhase("test") { @Override public void run() throws IOException { - reference.set(r); + reference.set(mockSearchPhaseContext.buildSearchResponse(r, null)); } } ); @@ -232,13 +228,12 @@ public class ExpandSearchPhaseTests extends ESTestCase { SearchHits hits = new SearchHits(new SearchHit[0], 1, 1.0f); InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1); - SearchResponse response = mockSearchPhaseContext.buildSearchResponse(internalSearchResponse, null); AtomicReference reference = new AtomicReference<>(); - ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, response, r -> + ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, internalSearchResponse, r -> new SearchPhase("test") { @Override public void run() throws IOException { - reference.set(r); + reference.set(mockSearchPhaseContext.buildSearchResponse(r, null)); } } ); diff --git a/core/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java 
b/core/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index be42455a80a..05c92585472 100644 --- a/core/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.QueryFetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchSearchRequest; +import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.Transport; @@ -66,10 +67,10 @@ public class FetchSearchPhaseTests extends ESTestCase { } FetchSearchPhase phase = new FetchSearchPhase(results, controller, mockSearchPhaseContext, - (searchResponse) -> new SearchPhase("test") { + (searchResponse, scrollId) -> new SearchPhase("test") { @Override public void run() throws IOException { - responseRef.set(searchResponse); + responseRef.set(mockSearchPhaseContext.buildSearchResponse(searchResponse, null)); } }); assertEquals("fetch", phase.getName()); @@ -119,10 +120,10 @@ public class FetchSearchPhaseTests extends ESTestCase { }; mockSearchPhaseContext.searchTransport = searchTransportService; FetchSearchPhase phase = new FetchSearchPhase(results, controller, mockSearchPhaseContext, - (searchResponse) -> new SearchPhase("test") { + (searchResponse, scrollId) -> new SearchPhase("test") { @Override public void run() throws IOException { - responseRef.set(searchResponse); + responseRef.set(mockSearchPhaseContext.buildSearchResponse(searchResponse, null)); } }); assertEquals("fetch", phase.getName()); @@ -173,10 +174,10 @@ public class FetchSearchPhaseTests extends ESTestCase { }; mockSearchPhaseContext.searchTransport = searchTransportService; FetchSearchPhase phase = new FetchSearchPhase(results, controller, mockSearchPhaseContext, - (searchResponse) -> new SearchPhase("test") { + (searchResponse, scrollId) -> new SearchPhase("test") { @Override public void run() throws IOException { - responseRef.set(searchResponse); + responseRef.set(mockSearchPhaseContext.buildSearchResponse(searchResponse, null)); } }); assertEquals("fetch", phase.getName()); @@ -224,10 +225,10 @@ public class FetchSearchPhaseTests extends ESTestCase { mockSearchPhaseContext.searchTransport = searchTransportService; CountDownLatch latch = new CountDownLatch(1); FetchSearchPhase phase = new FetchSearchPhase(results, controller, mockSearchPhaseContext, - (searchResponse) -> new SearchPhase("test") { + (searchResponse, scrollId) -> new SearchPhase("test") { @Override public void run() throws IOException { - responseRef.set(searchResponse); + responseRef.set(mockSearchPhaseContext.buildSearchResponse(searchResponse, null)); latch.countDown(); } }); @@ -290,10 +291,10 @@ public class FetchSearchPhaseTests extends ESTestCase { }; mockSearchPhaseContext.searchTransport = searchTransportService; FetchSearchPhase phase = new FetchSearchPhase(results, controller, mockSearchPhaseContext, - (searchResponse) -> new SearchPhase("test") { + (searchResponse, scrollId) -> new SearchPhase("test") { @Override public void run() throws IOException { - responseRef.set(searchResponse); + responseRef.set(mockSearchPhaseContext.buildSearchResponse(searchResponse, null)); } }); assertEquals("fetch", phase.getName()); @@ -339,10 +340,10 @@ public class FetchSearchPhaseTests 
extends ESTestCase { }; mockSearchPhaseContext.searchTransport = searchTransportService; FetchSearchPhase phase = new FetchSearchPhase(results, controller, mockSearchPhaseContext, - (searchResponse) -> new SearchPhase("test") { + (searchResponse, scrollId) -> new SearchPhase("test") { @Override public void run() throws IOException { - responseRef.set(searchResponse); + responseRef.set(mockSearchPhaseContext.buildSearchResponse(searchResponse, null)); } }); assertEquals("fetch", phase.getName()); @@ -357,5 +358,4 @@ public class FetchSearchPhaseTests extends ESTestCase { assertEquals(1, mockSearchPhaseContext.releasedSearchContexts.size()); assertTrue(mockSearchPhaseContext.releasedSearchContexts.contains(123L)); } - } diff --git a/core/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/core/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index 39890038f2a..878cb7e6126 100644 --- a/core/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -214,7 +214,7 @@ public class SearchAsyncActionTests extends ESTestCase { } } - public final class MockConnection implements Transport.Connection { + public static final class MockConnection implements Transport.Connection { private final DiscoveryNode node; diff --git a/core/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/core/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index c92caef628a..9a1e78e2987 100644 --- a/core/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -148,29 +148,35 @@ public class SearchPhaseControllerTests extends ESTestCase { int nShards = randomIntBetween(1, 20); int queryResultSize = randomBoolean() ? 
0 : randomIntBetween(1, nShards * 2); AtomicArray queryResults = generateQueryResults(nShards, suggestions, queryResultSize, false); - SearchPhaseController.ReducedQueryPhase reducedQueryPhase = searchPhaseController.reducedQueryPhase(queryResults.asList(), false); - AtomicArray searchPhaseResultAtomicArray = generateFetchResults(nShards, reducedQueryPhase.scoreDocs, - reducedQueryPhase.suggest); - InternalSearchResponse mergedResponse = searchPhaseController.merge(false, - reducedQueryPhase, - searchPhaseResultAtomicArray.asList(), searchPhaseResultAtomicArray::get); - int suggestSize = 0; - for (Suggest.Suggestion s : reducedQueryPhase.suggest) { - Stream stream = s.getEntries().stream(); - suggestSize += stream.collect(Collectors.summingInt(e -> e.getOptions().size())); - } - assertThat(suggestSize, lessThanOrEqualTo(maxSuggestSize)); - assertThat(mergedResponse.hits().getHits().length, equalTo(reducedQueryPhase.scoreDocs.length-suggestSize)); - Suggest suggestResult = mergedResponse.suggest(); - for (Suggest.Suggestion suggestion : reducedQueryPhase.suggest) { - assertThat(suggestion, instanceOf(CompletionSuggestion.class)); - if (suggestion.getEntries().get(0).getOptions().size() > 0) { - CompletionSuggestion suggestionResult = suggestResult.getSuggestion(suggestion.getName()); - assertNotNull(suggestionResult); - List options = suggestionResult.getEntries().get(0).getOptions(); - assertThat(options.size(), equalTo(suggestion.getEntries().get(0).getOptions().size())); - for (CompletionSuggestion.Entry.Option option : options) { - assertNotNull(option.getHit()); + for (boolean trackTotalHits : new boolean[] {true, false}) { + SearchPhaseController.ReducedQueryPhase reducedQueryPhase = + searchPhaseController.reducedQueryPhase(queryResults.asList(), false, trackTotalHits); + AtomicArray searchPhaseResultAtomicArray = generateFetchResults(nShards, reducedQueryPhase.scoreDocs, + reducedQueryPhase.suggest); + InternalSearchResponse mergedResponse = searchPhaseController.merge(false, + reducedQueryPhase, + searchPhaseResultAtomicArray.asList(), searchPhaseResultAtomicArray::get); + if (trackTotalHits == false) { + assertThat(mergedResponse.hits.totalHits, equalTo(-1L)); + } + int suggestSize = 0; + for (Suggest.Suggestion s : reducedQueryPhase.suggest) { + Stream stream = s.getEntries().stream(); + suggestSize += stream.collect(Collectors.summingInt(e -> e.getOptions().size())); + } + assertThat(suggestSize, lessThanOrEqualTo(maxSuggestSize)); + assertThat(mergedResponse.hits().getHits().length, equalTo(reducedQueryPhase.scoreDocs.length - suggestSize)); + Suggest suggestResult = mergedResponse.suggest(); + for (Suggest.Suggestion suggestion : reducedQueryPhase.suggest) { + assertThat(suggestion, instanceOf(CompletionSuggestion.class)); + if (suggestion.getEntries().get(0).getOptions().size() > 0) { + CompletionSuggestion suggestionResult = suggestResult.getSuggestion(suggestion.getName()); + assertNotNull(suggestionResult); + List options = suggestionResult.getEntries().get(0).getOptions(); + assertThat(options.size(), equalTo(suggestion.getEntries().get(0).getOptions().size())); + for (CompletionSuggestion.Entry.Option option : options) { + assertNotNull(option.getHit()); + } } } } diff --git a/core/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java b/core/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java index 5cc92cb7d87..c91fd7377a5 100644 --- a/core/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java +++ 
b/core/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java @@ -49,6 +49,7 @@ import java.util.Collections; import java.util.List; import static java.util.Collections.singletonMap; +import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; public class SearchResponseTests extends ESTestCase { @@ -78,31 +79,71 @@ public class SearchResponseTests extends ESTestCase { } private SearchResponse createTestItem(ShardSearchFailure... shardSearchFailures) { - SearchHits hits = SearchHitsTests.createTestItem(); + return createTestItem(false, shardSearchFailures); + } + + /** + * This SearchResponse doesn't include SearchHits, Aggregations, Suggestions, ShardSearchFailures, SearchProfileShardResults + * to make it possible to only test properties of the SearchResponse itself + */ + private SearchResponse createMinimalTestItem() { + return createTestItem(true); + } + + /** + * if minimal is set, don't include search hits, aggregations, suggest etc... to make test simpler + */ + private SearchResponse createTestItem(boolean minimal, ShardSearchFailure... shardSearchFailures) { boolean timedOut = randomBoolean(); Boolean terminatedEarly = randomBoolean() ? null : randomBoolean(); int numReducePhases = randomIntBetween(1, 10); long tookInMillis = randomNonNegativeLong(); int successfulShards = randomInt(); int totalShards = randomInt(); - - InternalAggregations aggregations = aggregationsTests.createTestInstance(); - Suggest suggest = SuggestTests.createTestItem(); - SearchProfileShardResults profileShardResults = SearchProfileShardResultsTests.createTestItem(); - - InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, aggregations, suggest, profileShardResults, + InternalSearchResponse internalSearchResponse; + if (minimal == false) { + SearchHits hits = SearchHitsTests.createTestItem(); + InternalAggregations aggregations = aggregationsTests.createTestInstance(); + Suggest suggest = SuggestTests.createTestItem(); + SearchProfileShardResults profileShardResults = SearchProfileShardResultsTests.createTestItem(); + internalSearchResponse = new InternalSearchResponse(hits, aggregations, suggest, profileShardResults, timedOut, terminatedEarly, numReducePhases); + } else { + internalSearchResponse = InternalSearchResponse.empty(); + } return new SearchResponse(internalSearchResponse, null, totalShards, successfulShards, tookInMillis, shardSearchFailures); } + /** + * the "_shard/total/failures" section makes it impossible to directly + * compare xContent, so we omit it here + */ public void testFromXContent() throws IOException { - // the "_shard/total/failures" section makes if impossible to directly compare xContent, so we omit it here - SearchResponse response = createTestItem(); + doFromXContentTestWithRandomFields(createTestItem(), false); + } + + /** + * This test adds random fields and objects to the xContent rendered out to + * ensure we can parse it back to be forward compatible with additions to + * the xContent. We test this with a "minimal" SearchResponse, adding random + * fields to SearchHits, Aggregations etc... 
is tested in their own tests + */ + public void testFromXContentWithRandomFields() throws IOException { + doFromXContentTestWithRandomFields(createMinimalTestItem(), true); + } + + private void doFromXContentTestWithRandomFields(SearchResponse response, boolean addRandomFields) throws IOException { XContentType xcontentType = randomFrom(XContentType.values()); boolean humanReadable = randomBoolean(); final ToXContent.Params params = new ToXContent.MapParams(singletonMap(RestSearchAction.TYPED_KEYS_PARAM, "true")); BytesReference originalBytes = toShuffledXContent(response, xcontentType, params, humanReadable); - try (XContentParser parser = createParser(xcontentType.xContent(), originalBytes)) { + BytesReference mutated; + if (addRandomFields) { + mutated = insertRandomFields(xcontentType, originalBytes, null, random()); + } else { + mutated = originalBytes; + } + try (XContentParser parser = createParser(xcontentType.xContent(), mutated)) { SearchResponse parsed = SearchResponse.fromXContent(parser); assertToXContentEquivalent(originalBytes, XContentHelper.toXContent(parsed, xcontentType, params, humanReadable), xcontentType); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); diff --git a/core/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java b/core/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java new file mode 100644 index 00000000000..038bb6ca8f6 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java @@ -0,0 +1,459 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.action.search; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.index.Index; +import org.elasticsearch.search.Scroll; +import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.internal.InternalScrollSearchRequest; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.transport.Transport; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.BiFunction; + +public class SearchScrollAsyncActionTests extends ESTestCase { + + public void testSendRequestsToNodes() throws InterruptedException { + + ParsedScrollId scrollId = getParsedScrollId( + new ScrollIdForNode(null, "node1", 1), + new ScrollIdForNode(null, "node2", 2), + new ScrollIdForNode(null, "node3", 17), + new ScrollIdForNode(null, "node1", 0), + new ScrollIdForNode(null, "node3", 0)); + DiscoveryNodes discoveryNodes = DiscoveryNodes.builder() + .add(new DiscoveryNode("node1", buildNewFakeTransportAddress(), Version.CURRENT)) + .add(new DiscoveryNode("node2", buildNewFakeTransportAddress(), Version.CURRENT)) + .add(new DiscoveryNode("node3", buildNewFakeTransportAddress(), Version.CURRENT)).build(); + + AtomicArray results = new AtomicArray<>(scrollId.getContext().length); + SearchScrollRequest request = new SearchScrollRequest(); + request.scroll(new Scroll(TimeValue.timeValueMinutes(1))); + CountDownLatch latch = new CountDownLatch(1); + AtomicInteger movedCounter = new AtomicInteger(0); + SearchScrollAsyncAction action = + new SearchScrollAsyncAction(scrollId, logger, discoveryNodes, dummyListener(), + null, request, null) + { + @Override + protected void executeInitialPhase(Transport.Connection connection, InternalScrollSearchRequest internalRequest, + SearchActionListener searchActionListener) + { + new Thread(() -> { + SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult = + new SearchAsyncActionTests.TestSearchPhaseResult(internalRequest.id(), connection.getNode()); + testSearchPhaseResult.setSearchShardTarget(new SearchShardTarget(connection.getNode().getId(), + new Index("test", "_na_"), 1)); + searchActionListener.onResponse(testSearchPhaseResult); + }).start(); + } + + @Override + protected Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + + @Override + protected SearchPhase moveToNextPhase(BiFunction clusterNodeLookup) { + assertEquals(1, movedCounter.incrementAndGet()); + return new SearchPhase("test") { + @Override + public void run() throws IOException { + latch.countDown(); + } + }; + } + + @Override + protected void onFirstPhaseResult(int shardId, SearchAsyncActionTests.TestSearchPhaseResult result) { + results.setOnce(shardId, result); + } + }; + + action.run(); + latch.await(); + ShardSearchFailure[] shardSearchFailures = action.buildShardFailures(); + assertEquals(0, shardSearchFailures.length); + ScrollIdForNode[] context = scrollId.getContext(); + for (int i = 0; i < results.length(); i++) { + assertNotNull(results.get(i)); + assertEquals(context[i].getScrollId(), 
results.get(i).getRequestId()); + assertEquals(context[i].getNode(), results.get(i).node.getId()); + } + } + + public void testFailNextPhase() throws InterruptedException { + + ParsedScrollId scrollId = getParsedScrollId( + new ScrollIdForNode(null, "node1", 1), + new ScrollIdForNode(null, "node2", 2), + new ScrollIdForNode(null, "node3", 17), + new ScrollIdForNode(null, "node1", 0), + new ScrollIdForNode(null, "node3", 0)); + DiscoveryNodes discoveryNodes = DiscoveryNodes.builder() + .add(new DiscoveryNode("node1", buildNewFakeTransportAddress(), Version.CURRENT)) + .add(new DiscoveryNode("node2", buildNewFakeTransportAddress(), Version.CURRENT)) + .add(new DiscoveryNode("node3", buildNewFakeTransportAddress(), Version.CURRENT)).build(); + + AtomicArray results = new AtomicArray<>(scrollId.getContext().length); + SearchScrollRequest request = new SearchScrollRequest(); + request.scroll(new Scroll(TimeValue.timeValueMinutes(1))); + CountDownLatch latch = new CountDownLatch(1); + AtomicInteger movedCounter = new AtomicInteger(0); + ActionListener listener = new ActionListener() { + @Override + public void onResponse(Object o) { + try { + fail("got a result"); + } finally { + latch.countDown(); + } + } + + @Override + public void onFailure(Exception e) { + try { + assertTrue(e instanceof SearchPhaseExecutionException); + SearchPhaseExecutionException ex = (SearchPhaseExecutionException) e; + assertEquals("BOOM", ex.getCause().getMessage()); + assertEquals("TEST_PHASE", ex.getPhaseName()); + assertEquals("Phase failed", ex.getMessage()); + } finally { + latch.countDown(); + } + } + }; + SearchScrollAsyncAction action = + new SearchScrollAsyncAction(scrollId, logger, discoveryNodes, listener, null, + request, null) { + @Override + protected void executeInitialPhase(Transport.Connection connection, InternalScrollSearchRequest internalRequest, + SearchActionListener searchActionListener) + { + new Thread(() -> { + SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult = + new SearchAsyncActionTests.TestSearchPhaseResult(internalRequest.id(), connection.getNode()); + testSearchPhaseResult.setSearchShardTarget(new SearchShardTarget(connection.getNode().getId(), + new Index("test", "_na_"), 1)); + searchActionListener.onResponse(testSearchPhaseResult); + }).start(); + } + + @Override + protected Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + + @Override + protected SearchPhase moveToNextPhase(BiFunction clusterNodeLookup) { + assertEquals(1, movedCounter.incrementAndGet()); + return new SearchPhase("TEST_PHASE") { + @Override + public void run() throws IOException { + throw new IllegalArgumentException("BOOM"); + } + }; + } + + @Override + protected void onFirstPhaseResult(int shardId, SearchAsyncActionTests.TestSearchPhaseResult result) { + results.setOnce(shardId, result); + } + }; + + action.run(); + latch.await(); + ShardSearchFailure[] shardSearchFailures = action.buildShardFailures(); + assertEquals(0, shardSearchFailures.length); + ScrollIdForNode[] context = scrollId.getContext(); + for (int i = 0; i < results.length(); i++) { + assertNotNull(results.get(i)); + assertEquals(context[i].getScrollId(), results.get(i).getRequestId()); + assertEquals(context[i].getNode(), results.get(i).node.getId()); + } + } + + public void testNodeNotAvailable() throws InterruptedException { + ParsedScrollId scrollId = getParsedScrollId( + new ScrollIdForNode(null, "node1", 1), + new ScrollIdForNode(null, 
"node2", 2), + new ScrollIdForNode(null, "node3", 17), + new ScrollIdForNode(null, "node1", 0), + new ScrollIdForNode(null, "node3", 0)); + // node2 is not available + DiscoveryNodes discoveryNodes = DiscoveryNodes.builder() + .add(new DiscoveryNode("node1", buildNewFakeTransportAddress(), Version.CURRENT)) + .add(new DiscoveryNode("node3", buildNewFakeTransportAddress(), Version.CURRENT)).build(); + + AtomicArray results = new AtomicArray<>(scrollId.getContext().length); + SearchScrollRequest request = new SearchScrollRequest(); + request.scroll(new Scroll(TimeValue.timeValueMinutes(1))); + CountDownLatch latch = new CountDownLatch(1); + AtomicInteger movedCounter = new AtomicInteger(0); + SearchScrollAsyncAction action = + new SearchScrollAsyncAction(scrollId, logger, discoveryNodes, dummyListener() + , null, request, null) + { + @Override + protected void executeInitialPhase(Transport.Connection connection, InternalScrollSearchRequest internalRequest, + SearchActionListener searchActionListener) + { + try { + assertNotEquals("node2 is not available", "node2", connection.getNode().getId()); + } catch (NullPointerException e) { + logger.warn(e); + } + new Thread(() -> { + SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult = + new SearchAsyncActionTests.TestSearchPhaseResult(internalRequest.id(), connection.getNode()); + testSearchPhaseResult.setSearchShardTarget(new SearchShardTarget(connection.getNode().getId(), + new Index("test", "_na_"), 1)); + searchActionListener.onResponse(testSearchPhaseResult); + }).start(); + } + + @Override + protected Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + + @Override + protected SearchPhase moveToNextPhase(BiFunction clusterNodeLookup) { + assertEquals(1, movedCounter.incrementAndGet()); + return new SearchPhase("test") { + @Override + public void run() throws IOException { + latch.countDown(); + } + }; + } + + @Override + protected void onFirstPhaseResult(int shardId, SearchAsyncActionTests.TestSearchPhaseResult result) { + results.setOnce(shardId, result); + } + }; + + action.run(); + latch.await(); + ShardSearchFailure[] shardSearchFailures = action.buildShardFailures(); + assertEquals(1, shardSearchFailures.length); + assertEquals("IllegalStateException[node [node2] is not available]", shardSearchFailures[0].reason()); + + ScrollIdForNode[] context = scrollId.getContext(); + for (int i = 0; i < results.length(); i++) { + if (context[i].getNode().equals("node2")) { + assertNull(results.get(i)); + } else { + assertNotNull(results.get(i)); + assertEquals(context[i].getScrollId(), results.get(i).getRequestId()); + assertEquals(context[i].getNode(), results.get(i).node.getId()); + } + } + } + + public void testShardFailures() throws InterruptedException { + ParsedScrollId scrollId = getParsedScrollId( + new ScrollIdForNode(null, "node1", 1), + new ScrollIdForNode(null, "node2", 2), + new ScrollIdForNode(null, "node3", 17), + new ScrollIdForNode(null, "node1", 0), + new ScrollIdForNode(null, "node3", 0)); + DiscoveryNodes discoveryNodes = DiscoveryNodes.builder() + .add(new DiscoveryNode("node1", buildNewFakeTransportAddress(), Version.CURRENT)) + .add(new DiscoveryNode("node2", buildNewFakeTransportAddress(), Version.CURRENT)) + .add(new DiscoveryNode("node3", buildNewFakeTransportAddress(), Version.CURRENT)).build(); + + AtomicArray results = new AtomicArray<>(scrollId.getContext().length); + SearchScrollRequest request = new 
SearchScrollRequest(); + request.scroll(new Scroll(TimeValue.timeValueMinutes(1))); + CountDownLatch latch = new CountDownLatch(1); + AtomicInteger movedCounter = new AtomicInteger(0); + SearchScrollAsyncAction action = + new SearchScrollAsyncAction(scrollId, logger, discoveryNodes, dummyListener(), + null, request, null) + { + @Override + protected void executeInitialPhase(Transport.Connection connection, InternalScrollSearchRequest internalRequest, + SearchActionListener searchActionListener) + { + new Thread(() -> { + if (internalRequest.id() == 17) { + searchActionListener.onFailure(new IllegalArgumentException("BOOM on shard")); + } else { + SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult = + new SearchAsyncActionTests.TestSearchPhaseResult(internalRequest.id(), connection.getNode()); + testSearchPhaseResult.setSearchShardTarget(new SearchShardTarget(connection.getNode().getId(), + new Index("test", "_na_"), 1)); + searchActionListener.onResponse(testSearchPhaseResult); + } + }).start(); + } + + @Override + protected Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + + @Override + protected SearchPhase moveToNextPhase(BiFunction clusterNodeLookup) { + assertEquals(1, movedCounter.incrementAndGet()); + return new SearchPhase("test") { + @Override + public void run() throws IOException { + latch.countDown(); + } + }; + } + + @Override + protected void onFirstPhaseResult(int shardId, SearchAsyncActionTests.TestSearchPhaseResult result) { + results.setOnce(shardId, result); + } + }; + + action.run(); + latch.await(); + ShardSearchFailure[] shardSearchFailures = action.buildShardFailures(); + assertEquals(1, shardSearchFailures.length); + assertEquals("IllegalArgumentException[BOOM on shard]", shardSearchFailures[0].reason()); + + ScrollIdForNode[] context = scrollId.getContext(); + for (int i = 0; i < results.length(); i++) { + if (context[i].getScrollId() == 17) { + assertNull(results.get(i)); + } else { + assertNotNull(results.get(i)); + assertEquals(context[i].getScrollId(), results.get(i).getRequestId()); + assertEquals(context[i].getNode(), results.get(i).node.getId()); + } + } + } + + public void testAllShardsFailed() throws InterruptedException { + ParsedScrollId scrollId = getParsedScrollId( + new ScrollIdForNode(null, "node1", 1), + new ScrollIdForNode(null, "node2", 2), + new ScrollIdForNode(null, "node3", 17), + new ScrollIdForNode(null, "node1", 0), + new ScrollIdForNode(null, "node3", 0)); + DiscoveryNodes discoveryNodes = DiscoveryNodes.builder() + .add(new DiscoveryNode("node1", buildNewFakeTransportAddress(), Version.CURRENT)) + .add(new DiscoveryNode("node2", buildNewFakeTransportAddress(), Version.CURRENT)) + .add(new DiscoveryNode("node3", buildNewFakeTransportAddress(), Version.CURRENT)).build(); + + AtomicArray results = new AtomicArray<>(scrollId.getContext().length); + SearchScrollRequest request = new SearchScrollRequest(); + request.scroll(new Scroll(TimeValue.timeValueMinutes(1))); + CountDownLatch latch = new CountDownLatch(1); + ActionListener listener = new ActionListener() { + @Override + public void onResponse(Object o) { + try { + fail("got a result"); + } finally { + latch.countDown(); + } + } + + @Override + public void onFailure(Exception e) { + try { + assertTrue(e instanceof SearchPhaseExecutionException); + SearchPhaseExecutionException ex = (SearchPhaseExecutionException) e; + assertEquals("BOOM on shard", ex.getCause().getMessage()); + 
assertEquals("query", ex.getPhaseName()); + assertEquals("all shards failed", ex.getMessage()); + } finally { + latch.countDown(); + } + } + }; + SearchScrollAsyncAction action = + new SearchScrollAsyncAction(scrollId, logger, discoveryNodes, listener, null, + request, null) { + @Override + protected void executeInitialPhase(Transport.Connection connection, InternalScrollSearchRequest internalRequest, + SearchActionListener searchActionListener) + { + new Thread(() -> searchActionListener.onFailure(new IllegalArgumentException("BOOM on shard"))).start(); + } + + @Override + protected Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { + return new SearchAsyncActionTests.MockConnection(node); + } + + @Override + protected SearchPhase moveToNextPhase(BiFunction clusterNodeLookup) { + fail("don't move all shards failed"); + return null; + } + + @Override + protected void onFirstPhaseResult(int shardId, SearchAsyncActionTests.TestSearchPhaseResult result) { + results.setOnce(shardId, result); + } + }; + + action.run(); + latch.await(); + ScrollIdForNode[] context = scrollId.getContext(); + + ShardSearchFailure[] shardSearchFailures = action.buildShardFailures(); + assertEquals(context.length, shardSearchFailures.length); + assertEquals("IllegalArgumentException[BOOM on shard]", shardSearchFailures[0].reason()); + + for (int i = 0; i < results.length(); i++) { + assertNull(results.get(i)); + } + } + + private static ParsedScrollId getParsedScrollId(ScrollIdForNode... idsForNodes) { + List scrollIdForNodes = Arrays.asList(idsForNodes); + Collections.shuffle(scrollIdForNodes, random()); + return new ParsedScrollId("", "test", scrollIdForNodes.toArray(new ScrollIdForNode[0])); + } + + private ActionListener dummyListener() { + return new ActionListener() { + @Override + public void onResponse(SearchResponse response) { + fail("dummy"); + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError(e); + } + }; + } +} diff --git a/core/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java b/core/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java index 3c88551acbf..9a8c0b1feb1 100644 --- a/core/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/ShardSearchFailureTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; public class ShardSearchFailureTests extends ESTestCase { @@ -48,13 +49,31 @@ public class ShardSearchFailureTests extends ESTestCase { } public void testFromXContent() throws IOException { + doFromXContentTestWithRandomFields(false); + } + + /** + * This test adds random fields and objects to the xContent rendered out to + * ensure we can parse it back to be forward compatible with additions to + * the xContent + */ + public void testFromXContentWithRandomFields() throws IOException { + doFromXContentTestWithRandomFields(true); + } + + private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws IOException { ShardSearchFailure response = createTestItem(); XContentType xContentType = randomFrom(XContentType.values()); boolean humanReadable = randomBoolean(); BytesReference originalBytes = toShuffledXContent(response, xContentType, ToXContent.EMPTY_PARAMS, humanReadable); - + BytesReference mutated; + if 
(addRandomFields) { + mutated = insertRandomFields(xContentType, originalBytes, null, random()); + } else { + mutated = originalBytes; + } ShardSearchFailure parsed; - try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { + try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); parsed = ShardSearchFailure.fromXContent(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); @@ -64,8 +83,11 @@ public class ShardSearchFailureTests extends ESTestCase { assertEquals(response.shard().getNodeId(), parsed.shard().getNodeId()); assertEquals(response.shardId(), parsed.shardId()); - // we cannot compare the cause, because it will be wrapped in an outer ElasticSearchException - // best effort: try to check that the original message appears somewhere in the rendered xContent + /** + * we cannot compare the cause, because it will be wrapped in an outer + * ElasticSearchException best effort: try to check that the original + * message appears somewhere in the rendered xContent + */ String originalMsg = response.getCause().getMessage(); assertEquals(parsed.getCause().getMessage(), "Elasticsearch exception [type=parsing_exception, reason=" + originalMsg + "]"); String nestedMsg = response.getCause().getCause().getMessage(); diff --git a/core/src/test/java/org/elasticsearch/action/search/TransportSearchHelperTests.java b/core/src/test/java/org/elasticsearch/action/search/TransportSearchHelperTests.java new file mode 100644 index 00000000000..49d7450096b --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/search/TransportSearchHelperTests.java @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.action.search; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.SearchPhaseResult; +import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +public class TransportSearchHelperTests extends ESTestCase { + + public void testParseScrollId() throws IOException { + AtomicArray array = new AtomicArray<>(3); + DiscoveryNode node1 = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Version.CURRENT); + DiscoveryNode node2 = new DiscoveryNode("node_2", buildNewFakeTransportAddress(), Version.CURRENT); + DiscoveryNode node3 = new DiscoveryNode("node_3", buildNewFakeTransportAddress(), Version.CURRENT); + SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult1 = new SearchAsyncActionTests.TestSearchPhaseResult(1, node1); + testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), "cluster_x", null)); + SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult2 = new SearchAsyncActionTests.TestSearchPhaseResult(12, node2); + testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), "cluster_y", null)); + SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult3 = new SearchAsyncActionTests.TestSearchPhaseResult(42, node3); + testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null, null)); + array.setOnce(0, testSearchPhaseResult1); + array.setOnce(1, testSearchPhaseResult2); + array.setOnce(2, testSearchPhaseResult3); + + + String scrollId = TransportSearchHelper.buildScrollId(array); + ParsedScrollId parseScrollId = TransportSearchHelper.parseScrollId(scrollId); + assertEquals(3, parseScrollId.getContext().length); + assertEquals("node_1", parseScrollId.getContext()[0].getNode()); + assertEquals("cluster_x", parseScrollId.getContext()[0].getClusterAlias()); + assertEquals(1, parseScrollId.getContext()[0].getScrollId()); + + assertEquals("node_2", parseScrollId.getContext()[1].getNode()); + assertEquals("cluster_y", parseScrollId.getContext()[1].getClusterAlias()); + assertEquals(12, parseScrollId.getContext()[1].getScrollId()); + + assertEquals("node_3", parseScrollId.getContext()[2].getNode()); + assertNull(parseScrollId.getContext()[2].getClusterAlias()); + assertEquals(42, parseScrollId.getContext()[2].getScrollId()); + } +} diff --git a/core/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java b/core/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java index c687fc6cabc..84acfa7fecd 100644 --- a/core/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java @@ -32,7 +32,7 @@ public class IndicesOptionsTests extends ESTestCase { int iterations = randomIntBetween(5, 20); for (int i = 0; i < iterations; i++) { IndicesOptions indicesOptions = IndicesOptions.fromOptions( - randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()); + randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()); BytesStreamOutput output = new BytesStreamOutput(); Version outputVersion = randomVersion(random()); 
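The round-trip above serializes the options with a randomly chosen wire version, and the hunk that follows only expects the new seventh flag, ignoreAliases, to survive when that version is 6.0.0-alpha2 or later. A minimal sketch of the version-gated pattern this implies, assuming the usual StreamOutput/StreamInput helpers; the field handling is illustrative only and is not the actual IndicesOptions encoding:

    // Hypothetical helpers: a flag introduced in 6.0.0-alpha2 only goes over the wire
    // when the remote side is new enough to expect it.
    static void writeIgnoreAliases(StreamOutput out, boolean ignoreAliases) throws IOException {
        if (out.getVersion().onOrAfter(Version.V_6_0_0_alpha2)) {
            out.writeBoolean(ignoreAliases);
        }
    }

    static boolean readIgnoreAliases(StreamInput in) throws IOException {
        // older senders never wrote the flag, so it falls back to false,
        // matching the assertFalse branch in the next hunk
        return in.getVersion().onOrAfter(Version.V_6_0_0_alpha2) && in.readBoolean();
    }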
@@ -50,6 +50,12 @@ public class IndicesOptionsTests extends ESTestCase { assertThat(indicesOptions2.forbidClosedIndices(), equalTo(indicesOptions.forbidClosedIndices())); assertThat(indicesOptions2.allowAliasesToMultipleIndices(), equalTo(indicesOptions.allowAliasesToMultipleIndices())); + + if (output.getVersion().onOrAfter(Version.V_6_0_0_alpha2)) { + assertEquals(indicesOptions2.ignoreAliases(), indicesOptions.ignoreAliases()); + } else { + assertFalse(indicesOptions2.ignoreAliases()); + } } } @@ -62,9 +68,11 @@ public class IndicesOptionsTests extends ESTestCase { boolean expandToClosedIndices = randomBoolean(); boolean allowAliasesToMultipleIndices = randomBoolean(); boolean forbidClosedIndices = randomBoolean(); + boolean ignoreAliases = randomBoolean(); + IndicesOptions indicesOptions = IndicesOptions.fromOptions( ignoreUnavailable, allowNoIndices,expandToOpenIndices, expandToClosedIndices, - allowAliasesToMultipleIndices, forbidClosedIndices + allowAliasesToMultipleIndices, forbidClosedIndices, ignoreAliases ); assertThat(indicesOptions.ignoreUnavailable(), equalTo(ignoreUnavailable)); @@ -74,6 +82,7 @@ public class IndicesOptionsTests extends ESTestCase { assertThat(indicesOptions.allowAliasesToMultipleIndices(), equalTo(allowAliasesToMultipleIndices)); assertThat(indicesOptions.allowAliasesToMultipleIndices(), equalTo(allowAliasesToMultipleIndices)); assertThat(indicesOptions.forbidClosedIndices(), equalTo(forbidClosedIndices)); + assertEquals(ignoreAliases, indicesOptions.ignoreAliases()); } } } diff --git a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java index c3ca62616fd..7d471f77f83 100644 --- a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java @@ -57,7 +57,6 @@ import java.util.function.Supplier; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.setState; -import static org.mockito.Mockito.mock; public class TransportNodesActionTests extends ESTestCase { @@ -275,10 +274,6 @@ public class TransportNodesActionTests extends ESTestCase { return new TestNodeResponse(); } - @Override - protected boolean accumulateExceptions() { - return false; - } } private static class DataNodesOnlyTransportNodesAction diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java index 9fcc8c24353..88cf5769a48 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.support.replication; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.UnavailableShardsException; @@ -56,7 +57,9 @@ import java.util.function.Supplier; import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.state; import static 
org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithActivePrimary; import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -191,8 +194,7 @@ public class ReplicationOperationTests extends ESTestCase { assertTrue(primaryFailed.compareAndSet(false, true)); } }; - final TestReplicationOperation op = new TestReplicationOperation(request, primary, listener, replicasProxy, - () -> finalState); + final TestReplicationOperation op = new TestReplicationOperation(request, primary, listener, replicasProxy, () -> finalState); op.execute(); assertThat("request was not processed on primary", request.processedOnPrimary.get(), equalTo(true)); @@ -299,6 +301,53 @@ public class ReplicationOperationTests extends ESTestCase { } } + public void testPrimaryFailureHandlingReplicaResponse() throws Exception { + final String index = "test"; + final ShardId shardId = new ShardId(index, "_na_", 0); + + final Request request = new Request(shardId); + + final ClusterState state = stateWithActivePrimary(index, true, 1, 0); + final IndexMetaData indexMetaData = state.getMetaData().index(index); + final long primaryTerm = indexMetaData.primaryTerm(0); + final ShardRouting primaryRouting = state.getRoutingTable().shardRoutingTable(shardId).primaryShard(); + + final boolean fatal = randomBoolean(); + final AtomicBoolean primaryFailed = new AtomicBoolean(); + final ReplicationOperation.Primary primary = new TestPrimary(primaryRouting, primaryTerm) { + + @Override + public void failShard(String message, Exception exception) { + primaryFailed.set(true); + } + + @Override + public void updateLocalCheckpointForShard(String allocationId, long checkpoint) { + if (primaryRouting.allocationId().getId().equals(allocationId)) { + super.updateLocalCheckpointForShard(allocationId, checkpoint); + } else { + if (fatal) { + throw new NullPointerException(); + } else { + throw new AlreadyClosedException("already closed"); + } + } + } + + }; + + final PlainActionFuture listener = new PlainActionFuture<>(); + final ReplicationOperation.Replicas replicas = new TestReplicaProxy(Collections.emptyMap()); + TestReplicationOperation operation = new TestReplicationOperation(request, primary, listener, replicas, () -> state); + operation.execute(); + + assertThat(primaryFailed.get(), equalTo(fatal)); + final ShardInfo shardInfo = listener.actionGet().getShardInfo(); + assertThat(shardInfo.getFailed(), equalTo(0)); + assertThat(shardInfo.getFailures(), arrayWithSize(0)); + assertThat(shardInfo.getSuccessful(), equalTo(1 + getExpectedReplicas(shardId, state).size())); + } + private Set getExpectedReplicas(ShardId shardId, ClusterState state) { Set expectedReplicas = new HashSet<>(); String localNodeId = state.nodes().getLocalNodeId(); diff --git a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index e872d3d854e..8b389d69d38 100644 --- a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -159,7 +159,7 @@ public class UpdateRequestTests extends ESTestCase { // simple verbose script 
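        // The renamed script key is exercised below: "source" replaces the former "inline", so the
        // verbose form is, for example, {"script": {"source": "script1", "params": {"param1": "value1"}}}.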
request.fromXContent(createParser(XContentFactory.jsonBuilder().startObject() - .startObject("script").field("inline", "script1").endObject() + .startObject("script").field("source", "script1").endObject() .endObject())); script = request.script(); assertThat(script, notNullValue()); @@ -173,7 +173,7 @@ public class UpdateRequestTests extends ESTestCase { request = new UpdateRequest("test", "type", "1"); request.fromXContent(createParser(XContentFactory.jsonBuilder().startObject() .startObject("script") - .field("inline", "script1") + .field("source", "script1") .startObject("params") .field("param1", "value1") .endObject() @@ -195,7 +195,7 @@ public class UpdateRequestTests extends ESTestCase { .startObject("params") .field("param1", "value1") .endObject() - .field("inline", "script1") + .field("source", "script1") .endObject() .endObject())); script = request.script(); @@ -215,7 +215,7 @@ public class UpdateRequestTests extends ESTestCase { .startObject("params") .field("param1", "value1") .endObject() - .field("inline", "script1") + .field("source", "script1") .endObject() .startObject("upsert") .field("field1", "value1") @@ -249,7 +249,7 @@ public class UpdateRequestTests extends ESTestCase { .startObject("params") .field("param1", "value1") .endObject() - .field("inline", "script1") + .field("source", "script1") .endObject().endObject())); script = request.script(); assertThat(script, notNullValue()); diff --git a/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java b/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java index e711117fb6e..9526b5b97e6 100644 --- a/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java +++ b/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java @@ -19,7 +19,7 @@ package org.elasticsearch.aliases; -import org.apache.lucene.search.join.ScoreMode; +import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistResponse; @@ -33,9 +33,11 @@ import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.AliasOrIndex; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.StopWatch; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.action.admin.indices.AliasesNotFoundException; @@ -49,6 +51,7 @@ import org.elasticsearch.test.ESIntegTestCase; import java.util.Arrays; import java.util.HashSet; +import java.util.List; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; @@ -63,7 +66,6 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_ME import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_WRITE; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_READ_ONLY; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static 
org.elasticsearch.index.query.QueryBuilders.rangeQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.test.hamcrest.CollectionAssertions.hasKey; @@ -425,6 +427,23 @@ public class IndexAliasesIT extends ESIntegTestCase { AliasesExistResponse response = admin().indices().prepareAliasesExist(aliases).get(); assertThat(response.exists(), equalTo(false)); + + logger.info("--> creating index [foo_foo] and [bar_bar]"); + assertAcked(prepareCreate("foo_foo")); + assertAcked(prepareCreate("bar_bar")); + ensureGreen(); + + logger.info("--> adding [foo] alias to [foo_foo] and [bar_bar]"); + assertAcked(admin().indices().prepareAliases().addAlias("foo_foo", "foo")); + assertAcked(admin().indices().prepareAliases().addAlias("bar_bar", "foo")); + + assertAcked(admin().indices().prepareAliases().addAliasAction(AliasActions.remove().index("foo*").alias("foo")).execute().get()); + + assertTrue(admin().indices().prepareAliasesExist("foo").get().exists()); + assertFalse(admin().indices().prepareAliasesExist("foo").setIndices("foo_foo").get().exists()); + assertTrue(admin().indices().prepareAliasesExist("foo").setIndices("bar_bar").get().exists()); + expectThrows(IndexNotFoundException.class, () -> admin().indices().prepareAliases() + .addAliasAction(AliasActions.remove().index("foo").alias("foo")).execute().actionGet()); } public void testWaitForAliasCreationMultipleShards() throws Exception { @@ -551,20 +570,24 @@ public class IndexAliasesIT extends ESIntegTestCase { logger.info("--> getting alias1"); GetAliasesResponse getResponse = admin().indices().prepareGetAliases("alias1").get(); assertThat(getResponse, notNullValue()); - assertThat(getResponse.getAliases().size(), equalTo(1)); + assertThat(getResponse.getAliases().size(), equalTo(5)); assertThat(getResponse.getAliases().get("foobar").size(), equalTo(1)); assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue()); assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("alias1")); assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue()); assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue()); assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue()); + assertTrue(getResponse.getAliases().get("test").isEmpty()); + assertTrue(getResponse.getAliases().get("test123").isEmpty()); + assertTrue(getResponse.getAliases().get("foobarbaz").isEmpty()); + assertTrue(getResponse.getAliases().get("bazbar").isEmpty()); AliasesExistResponse existsResponse = admin().indices().prepareAliasesExist("alias1").get(); assertThat(existsResponse.exists(), equalTo(true)); logger.info("--> getting all aliases that start with alias*"); getResponse = admin().indices().prepareGetAliases("alias*").get(); assertThat(getResponse, notNullValue()); - assertThat(getResponse.getAliases().size(), equalTo(1)); + assertThat(getResponse.getAliases().size(), equalTo(5)); assertThat(getResponse.getAliases().get("foobar").size(), equalTo(2)); assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue()); assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("alias1")); @@ -576,6 +599,10 @@ public class IndexAliasesIT extends ESIntegTestCase { assertThat(getResponse.getAliases().get("foobar").get(1).getFilter(), nullValue()); assertThat(getResponse.getAliases().get("foobar").get(1).getIndexRouting(), nullValue()); 
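        // The get-aliases response now carries one entry per targeted index, so indices without a
        // matching alias appear with an empty list instead of being absent; hence the larger size()
        // values and the isEmpty() checks below.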
assertThat(getResponse.getAliases().get("foobar").get(1).getSearchRouting(), nullValue()); + assertTrue(getResponse.getAliases().get("test").isEmpty()); + assertTrue(getResponse.getAliases().get("test123").isEmpty()); + assertTrue(getResponse.getAliases().get("foobarbaz").isEmpty()); + assertTrue(getResponse.getAliases().get("bazbar").isEmpty()); existsResponse = admin().indices().prepareAliasesExist("alias*").get(); assertThat(existsResponse.exists(), equalTo(true)); @@ -660,12 +687,13 @@ public class IndexAliasesIT extends ESIntegTestCase { logger.info("--> getting f* for index *bar"); getResponse = admin().indices().prepareGetAliases("f*").addIndices("*bar").get(); assertThat(getResponse, notNullValue()); - assertThat(getResponse.getAliases().size(), equalTo(1)); + assertThat(getResponse.getAliases().size(), equalTo(2)); assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue()); assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("foo")); assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue()); assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue()); assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue()); + assertTrue(getResponse.getAliases().get("bazbar").isEmpty()); existsResponse = admin().indices().prepareAliasesExist("f*") .addIndices("*bar").get(); assertThat(existsResponse.exists(), equalTo(true)); @@ -674,13 +702,14 @@ public class IndexAliasesIT extends ESIntegTestCase { logger.info("--> getting f* for index *bac"); getResponse = admin().indices().prepareGetAliases("foo").addIndices("*bac").get(); assertThat(getResponse, notNullValue()); - assertThat(getResponse.getAliases().size(), equalTo(1)); + assertThat(getResponse.getAliases().size(), equalTo(2)); assertThat(getResponse.getAliases().get("foobar").size(), equalTo(1)); assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue()); assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("foo")); assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue()); assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue()); assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue()); + assertTrue(getResponse.getAliases().get("bazbar").isEmpty()); existsResponse = admin().indices().prepareAliasesExist("foo") .addIndices("*bac").get(); assertThat(existsResponse.exists(), equalTo(true)); @@ -713,7 +742,9 @@ public class IndexAliasesIT extends ESIntegTestCase { .removeAlias("foobar", "foo")); getResponse = admin().indices().prepareGetAliases("foo").addIndices("foobar").get(); - assertThat(getResponse.getAliases().isEmpty(), equalTo(true)); + for (final ObjectObjectCursor> entry : getResponse.getAliases()) { + assertTrue(entry.value.isEmpty()); + } existsResponse = admin().indices().prepareAliasesExist("foo").addIndices("foobar").get(); assertThat(existsResponse.exists(), equalTo(false)); } @@ -785,6 +816,21 @@ public class IndexAliasesIT extends ESIntegTestCase { } } + public void testAliasesCanBeAddedToIndicesOnly() throws Exception { + logger.info("--> creating index [2017-05-20]"); + assertAcked(prepareCreate("2017-05-20")); + ensureGreen(); + + logger.info("--> adding [week_20] alias to [2017-05-20]"); + assertAcked(admin().indices().prepareAliases().addAlias("2017-05-20", "week_20")); + + IndexNotFoundException infe = expectThrows(IndexNotFoundException.class, () -> 
admin().indices().prepareAliases() + .addAliasAction(AliasActions.add().index("week_20").alias("tmp")).execute().actionGet()); + assertEquals("week_20", infe.getIndex().getName()); + + assertAcked(admin().indices().prepareAliases().addAliasAction(AliasActions.add().index("2017-05-20").alias("tmp")).execute().get()); + } + // Before 2.0 alias filters were parsed at alias creation time, in order // for filters to work correctly ES required that fields mentioned in those // filters exist in the mapping. @@ -864,6 +910,26 @@ public class IndexAliasesIT extends ESIntegTestCase { } } + public void testAliasActionRemoveIndex() throws InterruptedException, ExecutionException { + assertAcked(prepareCreate("foo_foo")); + assertAcked(prepareCreate("bar_bar")); + assertAcked(admin().indices().prepareAliases().addAlias("foo_foo", "foo")); + assertAcked(admin().indices().prepareAliases().addAlias("bar_bar", "foo")); + + expectThrows(IndexNotFoundException.class, + () -> client().admin().indices().prepareAliases().removeIndex("foo").execute().actionGet()); + + assertAcked(client().admin().indices().prepareAliases().removeIndex("foo*").execute().get()); + assertFalse(client().admin().indices().prepareExists("foo_foo").execute().actionGet().isExists()); + assertTrue(admin().indices().prepareAliasesExist("foo").get().exists()); + assertTrue(client().admin().indices().prepareExists("bar_bar").execute().actionGet().isExists()); + assertTrue(admin().indices().prepareAliasesExist("foo").setIndices("bar_bar").get().exists()); + + assertAcked(client().admin().indices().prepareAliases().removeIndex("bar_bar")); + assertFalse(admin().indices().prepareAliasesExist("foo").get().exists()); + assertFalse(client().admin().indices().prepareExists("bar_bar").execute().actionGet().isExists()); + } + public void testRemoveIndexAndReplaceWithAlias() throws InterruptedException, ExecutionException { assertAcked(client().admin().indices().prepareCreate("test")); indexRandom(true, client().prepareIndex("test_2", "test", "test").setSource("test", "test")); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index eb68fe17d9a..21dd76b67e6 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -24,7 +24,6 @@ import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; import org.elasticsearch.Version; import org.elasticsearch.VersionTests; -import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; @@ -53,7 +52,6 @@ import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.OldIndexUtils; @@ -178,7 +176,8 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { public void testAllVersionsTested() throws Exception { SortedSet expectedVersions = new TreeSet<>(); for (Version v : 
VersionUtils.allReleasedVersions()) { - if (VersionUtils.isSnapshot(v)) continue; // snapshots are unreleased, so there is no backcompat yet + // The current version is in the "released" list even though it isn't released for historical reasons + if (v == Version.CURRENT) continue; if (v.isRelease() == false) continue; // no guarantees for prereleases if (v.before(Version.CURRENT.minimumIndexCompatibilityVersion())) continue; // we can only support one major version backward if (v.equals(Version.CURRENT)) continue; // the current version is always compatible with itself @@ -229,7 +228,6 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { // node startup upgradeIndexFolder(); importIndex(indexName); - assertBasicSearchWorks(indexName); assertAllSearchWorks(indexName); assertBasicAggregationWorks(indexName); assertRealtimeGetWorks(indexName); @@ -241,31 +239,6 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { unloadIndex(indexName); } - void assertBasicSearchWorks(String indexName) { - logger.info("--> testing basic search"); - SearchRequestBuilder searchReq = client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()); - SearchResponse searchRsp = searchReq.get(); - ElasticsearchAssertions.assertNoFailures(searchRsp); - long numDocs = searchRsp.getHits().getTotalHits(); - logger.info("Found {} in old index", numDocs); - - logger.info("--> testing basic search with sort"); - searchReq.addSort("long_sort", SortOrder.ASC); - ElasticsearchAssertions.assertNoFailures(searchReq.get()); - - logger.info("--> testing exists filter"); - searchReq = client().prepareSearch(indexName).setQuery(QueryBuilders.existsQuery("string")); - searchRsp = searchReq.get(); - ElasticsearchAssertions.assertNoFailures(searchRsp); - assertEquals(numDocs, searchRsp.getHits().getTotalHits()); - GetSettingsResponse getSettingsResponse = client().admin().indices().prepareGetSettings(indexName).get(); - searchReq = client().prepareSearch(indexName) - .setQuery(QueryBuilders.existsQuery("field.with.dots")); - searchRsp = searchReq.get(); - ElasticsearchAssertions.assertNoFailures(searchRsp); - assertEquals(numDocs, searchRsp.getHits().getTotalHits()); - } - boolean findPayloadBoostInExplanation(Explanation expl) { if (expl.getDescription().startsWith("payloadBoost=") && expl.getValue() != 1f) { return true; diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java index 394f09120d3..9ee8fa654b2 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java @@ -89,7 +89,8 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase { SortedSet expectedVersions = new TreeSet<>(); for (Version v : VersionUtils.allReleasedVersions()) { - if (VersionUtils.isSnapshot(v)) continue; // snapshots are unreleased, so there is no backcompat yet + // The current version is in the "released" list even though it isn't released for historical reasons + if (v == Version.CURRENT) continue; if (v.isRelease() == false) continue; // no guarantees for prereleases if (v.before(Version.CURRENT.minimumIndexCompatibilityVersion())) continue; // we only support versions N and N-1 if (v.equals(Version.CURRENT)) continue; // the current version is always compatible with itself diff --git 
a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index 2143e5e67d4..31e421769c2 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.test.ESTestCase; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; +import java.util.List; import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.hamcrest.Matchers.arrayContaining; @@ -643,6 +644,60 @@ public class IndexNameExpressionResolverTests extends ESTestCase { assertEquals(0, indexNames.length); } + public void testConcreteIndicesWildcardAndAliases() { + MetaData.Builder mdBuilder = MetaData.builder() + .put(indexBuilder("foo_foo").state(State.OPEN).putAlias(AliasMetaData.builder("foo"))) + .put(indexBuilder("bar_bar").state(State.OPEN).putAlias(AliasMetaData.builder("foo"))); + ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build(); + + // when ignoreAliases option is set, concreteIndexNames resolves the provided expressions + // only against the defined indices + IndicesOptions ignoreAliasesOptions = IndicesOptions.fromOptions(false, false, true, false, true, false, true); + + String[] indexNamesIndexWildcard = indexNameExpressionResolver.concreteIndexNames(state, ignoreAliasesOptions, "foo*"); + + assertEquals(1, indexNamesIndexWildcard.length); + assertEquals("foo_foo", indexNamesIndexWildcard[0]); + + indexNamesIndexWildcard = indexNameExpressionResolver.concreteIndexNames(state, ignoreAliasesOptions, "*o"); + + assertEquals(1, indexNamesIndexWildcard.length); + assertEquals("foo_foo", indexNamesIndexWildcard[0]); + + indexNamesIndexWildcard = indexNameExpressionResolver.concreteIndexNames(state, ignoreAliasesOptions, "f*o"); + + assertEquals(1, indexNamesIndexWildcard.length); + assertEquals("foo_foo", indexNamesIndexWildcard[0]); + + IndexNotFoundException infe = expectThrows(IndexNotFoundException.class, + () -> indexNameExpressionResolver.concreteIndexNames(state, ignoreAliasesOptions, "foo")); + assertThat(infe.getIndex().getName(), equalTo("foo")); + + // when ignoreAliases option is not set, concreteIndexNames resolves the provided + // expressions against the defined indices and aliases + IndicesOptions indicesAndAliasesOptions = IndicesOptions.fromOptions(false, false, true, false, true, false, false); + + List indexNames = Arrays.asList(indexNameExpressionResolver.concreteIndexNames(state, indicesAndAliasesOptions, "foo*")); + assertEquals(2, indexNames.size()); + assertTrue(indexNames.contains("foo_foo")); + assertTrue(indexNames.contains("bar_bar")); + + indexNames = Arrays.asList(indexNameExpressionResolver.concreteIndexNames(state, indicesAndAliasesOptions, "*o")); + assertEquals(2, indexNames.size()); + assertTrue(indexNames.contains("foo_foo")); + assertTrue(indexNames.contains("bar_bar")); + + indexNames = Arrays.asList(indexNameExpressionResolver.concreteIndexNames(state, indicesAndAliasesOptions, "f*o")); + assertEquals(2, indexNames.size()); + assertTrue(indexNames.contains("foo_foo")); + assertTrue(indexNames.contains("bar_bar")); + + indexNames = Arrays.asList(indexNameExpressionResolver.concreteIndexNames(state, indicesAndAliasesOptions, "foo")); + assertEquals(2, indexNames.size()); + 
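        // For reference, the positional booleans passed to IndicesOptions.fromOptions(...) in this test
        // are: ignoreUnavailable, allowNoIndices, expandToOpenIndices, expandToClosedIndices,
        // allowAliasesToMultipleIndices, forbidClosedIndices, ignoreAliases; the last one is what
        // distinguishes the two option sets being compared.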
assertTrue(indexNames.contains("foo_foo")); + assertTrue(indexNames.contains("bar_bar")); + } + /** * test resolving _all pattern (null, empty array or "_all") for random IndicesOptions */ diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java index 2778525f7da..3c8b540f45c 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData.State; import org.elasticsearch.test.ESTestCase; import java.util.Arrays; @@ -125,6 +126,59 @@ public class WildcardExpressionResolverTests extends ESTestCase { assertThat(newHashSet(resolver.resolve(context, Arrays.asList("_all"))), equalTo(newHashSet("testXXX", "testXYY", "testYYY"))); } + public void testConcreteIndicesWildcardAndAliases() { + MetaData.Builder mdBuilder = MetaData.builder() + .put(indexBuilder("foo_foo").state(State.OPEN).putAlias(AliasMetaData.builder("foo"))) + .put(indexBuilder("bar_bar").state(State.OPEN).putAlias(AliasMetaData.builder("foo"))); + ClusterState state = ClusterState.builder(new ClusterName("_name")).metaData(mdBuilder).build(); + + // when ignoreAliases option is not set, WildcardExpressionResolver resolves the provided + // expressions against the defined indices and aliases + IndicesOptions indicesAndAliasesOptions = IndicesOptions.fromOptions(false, false, true, false, true, false, false); + IndexNameExpressionResolver.Context indicesAndAliasesContext = new IndexNameExpressionResolver.Context(state, indicesAndAliasesOptions); + + // ignoreAliases option is set, WildcardExpressionResolver resolves the provided expressions + // only against the defined indices + IndicesOptions onlyIndicesOptions = IndicesOptions.fromOptions(false, false, true, false, true, false, true); + IndexNameExpressionResolver.Context onlyIndicesContext = new IndexNameExpressionResolver.Context(state, onlyIndicesOptions); + + assertThat( + IndexNameExpressionResolver.WildcardExpressionResolver + .matches(indicesAndAliasesContext, state.getMetaData(), "*").keySet(), + equalTo(newHashSet("bar_bar", "foo_foo", "foo"))); + assertThat( + IndexNameExpressionResolver.WildcardExpressionResolver + .matches(onlyIndicesContext, state.getMetaData(), "*").keySet(), + equalTo(newHashSet("bar_bar", "foo_foo"))); + + assertThat( + IndexNameExpressionResolver.WildcardExpressionResolver + .matches(indicesAndAliasesContext, state.getMetaData(), "foo*").keySet(), + equalTo(newHashSet("foo", "foo_foo"))); + assertThat( + IndexNameExpressionResolver.WildcardExpressionResolver + .matches(onlyIndicesContext, state.getMetaData(), "foo*").keySet(), + equalTo(newHashSet("foo_foo"))); + + assertThat( + IndexNameExpressionResolver.WildcardExpressionResolver + .matches(indicesAndAliasesContext, state.getMetaData(), "f*o").keySet(), + equalTo(newHashSet("foo", "foo_foo"))); + assertThat( + IndexNameExpressionResolver.WildcardExpressionResolver + .matches(onlyIndicesContext, state.getMetaData(), "f*o").keySet(), + equalTo(newHashSet("foo_foo"))); + + assertThat( + IndexNameExpressionResolver.WildcardExpressionResolver + 
.matches(indicesAndAliasesContext, state.getMetaData(), "foo").keySet(), + equalTo(newHashSet("foo"))); + assertThat( + IndexNameExpressionResolver.WildcardExpressionResolver + .matches(onlyIndicesContext, state.getMetaData(), "foo").keySet(), + equalTo(newHashSet())); + } + private IndexMetaData.Builder indexBuilder(String index) { return IndexMetaData.builder(index).settings(settings(Version.CURRENT).put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)); } diff --git a/core/src/test/java/org/elasticsearch/common/ExponentiallyWeightedMovingAverageTests.java b/core/src/test/java/org/elasticsearch/common/ExponentiallyWeightedMovingAverageTests.java new file mode 100644 index 00000000000..9e50d0afd71 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/ExponentiallyWeightedMovingAverageTests.java @@ -0,0 +1,66 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.lessThan; +import static org.junit.Assert.assertThat; + +/** + * Implements exponentially weighted moving averages (commonly abbreviated EWMA) for a single value. 
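 * The expected values in testEWMA are consistent with the standard EWMA recurrence (assumed
 * here, not quoted from the production class): avg' = alpha * sample + (1 - alpha) * avg.
 * With alpha = 0.5 and an initial average of 10:
 *   add 12 -> 0.5 * 12 + 0.5 * 10 = 11.0
 *   add 10 -> 10.5,  add 15 -> 12.75,  add 13 -> 12.875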
+ */ +public class ExponentiallyWeightedMovingAverageTests extends ESTestCase { + + public void testEWMA() { + final ExponentiallyWeightedMovingAverage ewma = new ExponentiallyWeightedMovingAverage(0.5, 10); + ewma.addValue(12); + assertThat(ewma.getAverage(), equalTo(11.0)); + ewma.addValue(10); + ewma.addValue(15); + ewma.addValue(13); + assertThat(ewma.getAverage(), equalTo(12.875)); + } + + public void testInvalidAlpha() { + try { + ExponentiallyWeightedMovingAverage ewma = new ExponentiallyWeightedMovingAverage(-0.5, 10); + fail("should have failed to create EWMA"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("alpha must be greater or equal to 0 and less than or equal to 1")); + } + + try { + ExponentiallyWeightedMovingAverage ewma = new ExponentiallyWeightedMovingAverage(1.5, 10); + fail("should have failed to create EWMA"); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), equalTo("alpha must be greater or equal to 0 and less than or equal to 1")); + } + } + + public void testConvergingToValue() { + final ExponentiallyWeightedMovingAverage ewma = new ExponentiallyWeightedMovingAverage(0.5, 10000); + for (int i = 0; i < 100000; i++) { + ewma.addValue(1); + } + assertThat(ewma.getAverage(), lessThan(2.0)); + } +} diff --git a/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java index a1132647c7e..753aedea01e 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java @@ -377,7 +377,7 @@ public class LuceneTests extends ESTestCase { Weight termWeight = new TermQuery(new Term("foo", "bar")).createWeight(searcher, false, 1f); assertEquals(1, reader.leaves().size()); LeafReaderContext leafReaderContext = searcher.getIndexReader().leaves().get(0); - Bits bits = Lucene.asSequentialAccessBits(leafReaderContext.reader().maxDoc(), termWeight.scorer(leafReaderContext)); + Bits bits = Lucene.asSequentialAccessBits(leafReaderContext.reader().maxDoc(), termWeight.scorerSupplier(leafReaderContext)); expectThrows(IndexOutOfBoundsException.class, () -> bits.get(-1)); expectThrows(IndexOutOfBoundsException.class, () -> bits.get(leafReaderContext.reader().maxDoc())); diff --git a/core/src/test/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunctionTests.java b/core/src/test/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunctionTests.java index c375be1a328..369129826e0 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunctionTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunctionTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.common.lucene.search.function; import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.script.GeneralScriptException; -import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.SearchScript; import org.elasticsearch.test.ESTestCase; @@ -33,10 +32,15 @@ public class ScriptScoreFunctionTests extends ESTestCase { */ public void testScriptScoresReturnsNaN() throws IOException { // script that always returns NaN - ScoreFunction scoreFunction = new ScriptScoreFunction(mockScript("Double.NaN"), new SearchScript() { + ScoreFunction scoreFunction = new ScriptScoreFunction(mockScript("Double.NaN"), new SearchScript.LeafFactory() { @Override - public LeafSearchScript 
getLeafSearchScript(LeafReaderContext context) throws IOException { - return () -> Double.NaN; + public SearchScript newInstance(LeafReaderContext context) throws IOException { + return new SearchScript(null, null, null) { + @Override + public double runAsDouble() { + return Double.NaN; + } + }; } @Override diff --git a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java index e393fee4f23..81e685e6852 100644 --- a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java +++ b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java @@ -101,6 +101,10 @@ public class NetworkModuleTests extends ModuleTestCase { } @Override public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { return channel -> {}; } + @Override + public String getName() { + return "FakeRestHandler"; + } } static class FakeCatRestHandler extends AbstractCatAction { @@ -115,6 +119,10 @@ public class NetworkModuleTests extends ModuleTestCase { protected Table getTableWithHeader(RestRequest request) { return null; } + @Override + public String getName() { + return "FakeCatRestHandler"; + } } public void testRegisterTransport() { diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java index 80ca8cc275a..9fbad982bdb 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java @@ -38,6 +38,7 @@ import java.util.Set; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasToString; @@ -515,6 +516,39 @@ public class SettingsTests extends ESTestCase { expectThrows(NoSuchElementException.class, () -> prefixIterator.next()); } + public void testSecureSettingsPrefix() { + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("test.prefix.foo", "somethingsecure"); + Settings.Builder builder = Settings.builder(); + builder.setSecureSettings(secureSettings); + Settings settings = builder.build(); + Settings prefixSettings = settings.getByPrefix("test.prefix."); + assertTrue(prefixSettings.names().contains("foo")); + } + + public void testGroupPrefix() { + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("test.key1.foo", "somethingsecure"); + secureSettings.setString("test.key1.bar", "somethingsecure"); + secureSettings.setString("test.key2.foo", "somethingsecure"); + secureSettings.setString("test.key2.bog", "somethingsecure"); + Settings.Builder builder = Settings.builder(); + builder.put("test.key1.baz", "blah1"); + builder.put("test.key1.other", "blah2"); + builder.put("test.key2.baz", "blah3"); + builder.put("test.key2.else", "blah4"); + builder.setSecureSettings(secureSettings); + Settings settings = builder.build(); + Map groups = settings.getGroups("test"); + assertEquals(2, groups.size()); + Settings key1 = groups.get("key1"); + assertNotNull(key1); + assertThat(key1.names(), containsInAnyOrder("foo", "bar", "baz", "other")); + Settings key2 = groups.get("key2"); + assertNotNull(key2); + 
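        // Each group is expected to merge regular and secure settings that share its prefix,
        // which is why key1 and key2 each list entries coming from both sources.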
assertThat(key2.names(), containsInAnyOrder("foo", "bog", "baz", "else")); + } + public void testEmptyFilterMap() { Settings.Builder builder = Settings.builder(); builder.put("a", "a1"); diff --git a/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java b/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java index 301f48f9b04..945dda446ce 100644 --- a/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/BigArraysTests.java @@ -33,6 +33,11 @@ import org.junit.Before; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Arrays; +import java.util.List; +import java.util.function.Function; + +import static org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; public class BigArraysTests extends ESTestCase { @@ -330,22 +335,17 @@ public class BigArraysTests extends ESTestCase { } public void testMaxSizeExceededOnNew() throws Exception { - final int size = scaledRandomIntBetween(5, 1 << 22); - for (String type : Arrays.asList("Byte", "Int", "Long", "Float", "Double", "Object")) { - HierarchyCircuitBreakerService hcbs = new HierarchyCircuitBreakerService( - Settings.builder() - .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), size - 1, ByteSizeUnit.BYTES) - .build(), - new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); - BigArrays bigArrays = new BigArrays(null, hcbs, false).withCircuitBreaking(); - Method create = BigArrays.class.getMethod("new" + type + "Array", long.class); + final long size = scaledRandomIntBetween(5, 1 << 22); + final long maxSize = size - 1; + for (BigArraysHelper bigArraysHelper : bigArrayCreators(maxSize, true)) { try { - create.invoke(bigArrays, size); - fail("expected an exception on " + create); - } catch (InvocationTargetException e) { - assertTrue(e.getCause() instanceof CircuitBreakingException); + bigArraysHelper.arrayAllocator.apply(size); + fail("circuit breaker should trip"); + } catch (CircuitBreakingException e) { + assertEquals(maxSize, e.getByteLimit()); + assertThat(e.getBytesWanted(), greaterThanOrEqualTo(size)); } - assertEquals(0, hcbs.getBreaker(CircuitBreaker.REQUEST).getUsed()); + assertEquals(0, bigArraysHelper.bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed()); } } @@ -354,7 +354,7 @@ public class BigArraysTests extends ESTestCase { final long maxSize = randomIntBetween(1 << 10, 1 << 22); HierarchyCircuitBreakerService hcbs = new HierarchyCircuitBreakerService( Settings.builder() - .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), maxSize, ByteSizeUnit.BYTES) + .put(REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), maxSize, ByteSizeUnit.BYTES) .build(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); BigArrays bigArrays = new BigArrays(null, hcbs, false).withCircuitBreaking(); @@ -377,4 +377,63 @@ public class BigArraysTests extends ESTestCase { } } + public void testEstimatedBytesSameAsActualBytes() throws Exception { + final int maxSize = 1 << scaledRandomIntBetween(15, 22); + final long size = randomIntBetween((1 << 14) + 1, maxSize); + for (final BigArraysHelper bigArraysHelper : bigArrayCreators(maxSize, false)) { + final BigArray bigArray = bigArraysHelper.arrayAllocator.apply(size); + 
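        // The up-front estimateRamBytes(size) figure (presumably what gets charged to the request
        // circuit breaker) should agree with what the allocated array actually reports via ramBytesUsed().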
assertEquals(bigArraysHelper.ramEstimator.apply(size).longValue(), bigArray.ramBytesUsed()); + } + } + + private List bigArrayCreators(final long maxSize, final boolean withBreaking) { + final BigArrays byteBigArrays = newBigArraysInstance(maxSize, withBreaking); + BigArraysHelper byteHelper = new BigArraysHelper(byteBigArrays, + (Long size) -> byteBigArrays.newByteArray(size), + (Long size) -> BigByteArray.estimateRamBytes(size)); + final BigArrays intBigArrays = newBigArraysInstance(maxSize, withBreaking); + BigArraysHelper intHelper = new BigArraysHelper(intBigArrays, + (Long size) -> intBigArrays.newIntArray(size), + (Long size) -> BigIntArray.estimateRamBytes(size)); + final BigArrays longBigArrays = newBigArraysInstance(maxSize, withBreaking); + BigArraysHelper longHelper = new BigArraysHelper(longBigArrays, + (Long size) -> longBigArrays.newLongArray(size), + (Long size) -> BigLongArray.estimateRamBytes(size)); + final BigArrays floatBigArrays = newBigArraysInstance(maxSize, withBreaking); + BigArraysHelper floatHelper = new BigArraysHelper(floatBigArrays, + (Long size) -> floatBigArrays.newFloatArray(size), + (Long size) -> BigFloatArray.estimateRamBytes(size)); + final BigArrays doubleBigArrays = newBigArraysInstance(maxSize, withBreaking); + BigArraysHelper doubleHelper = new BigArraysHelper(doubleBigArrays, + (Long size) -> doubleBigArrays.newDoubleArray(size), + (Long size) -> BigDoubleArray.estimateRamBytes(size)); + final BigArrays objectBigArrays = newBigArraysInstance(maxSize, withBreaking); + BigArraysHelper objectHelper = new BigArraysHelper(objectBigArrays, + (Long size) -> objectBigArrays.newObjectArray(size), + (Long size) -> BigObjectArray.estimateRamBytes(size)); + return Arrays.asList(byteHelper, intHelper, longHelper, floatHelper, doubleHelper, objectHelper); + } + + private BigArrays newBigArraysInstance(final long maxSize, final boolean withBreaking) { + HierarchyCircuitBreakerService hcbs = new HierarchyCircuitBreakerService( + Settings.builder() + .put(REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), maxSize, ByteSizeUnit.BYTES) + .build(), + new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + BigArrays bigArrays = new BigArrays(null, hcbs, false); + return (withBreaking ? 
bigArrays.withCircuitBreaking() : bigArrays); + } + + private static class BigArraysHelper { + final BigArrays bigArrays; + final Function arrayAllocator; + final Function ramEstimator; + + BigArraysHelper(BigArrays bigArrays, Function arrayAllocator, Function ramEstimator) { + this.bigArrays = bigArrays; + this.arrayAllocator = arrayAllocator; + this.ramEstimator = ramEstimator; + } + } + } diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutorTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutorTests.java index 82b67806b79..5365e1bb909 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutorTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutorTests.java @@ -184,6 +184,47 @@ public class QueueResizingEsThreadPoolExecutorTests extends ESTestCase { context.close(); } + public void testExecutionEWMACalculation() throws Exception { + ThreadContext context = new ThreadContext(Settings.EMPTY); + ResizableBlockingQueue queue = + new ResizableBlockingQueue<>(ConcurrentCollections.newBlockingQueue(), + 100); + + QueueResizingEsThreadPoolExecutor executor = + new QueueResizingEsThreadPoolExecutor( + "test-threadpool", 1, 1, 1000, + TimeUnit.MILLISECONDS, queue, 10, 200, fastWrapper(), 10, TimeValue.timeValueMillis(1), + EsExecutors.daemonThreadFactory("queuetest"), new EsAbortPolicy(), context); + executor.prestartAllCoreThreads(); + logger.info("--> executor: {}", executor); + + assertThat((long)executor.getTaskExecutionEWMA(), equalTo(1000000L)); + executeTask(executor, 1); + assertBusy(() -> { + assertThat((long)executor.getTaskExecutionEWMA(), equalTo(700030L)); + }); + executeTask(executor, 1); + assertBusy(() -> { + assertThat((long)executor.getTaskExecutionEWMA(), equalTo(490050L)); + }); + executeTask(executor, 1); + assertBusy(() -> { + assertThat((long)executor.getTaskExecutionEWMA(), equalTo(343065L)); + }); + executeTask(executor, 1); + assertBusy(() -> { + assertThat((long)executor.getTaskExecutionEWMA(), equalTo(240175L)); + }); + executeTask(executor, 1); + assertBusy(() -> { + assertThat((long)executor.getTaskExecutionEWMA(), equalTo(168153L)); + }); + + executor.shutdown(); + executor.awaitTermination(10, TimeUnit.SECONDS); + context.close(); + } + private Function randomBetweenLimitsWrapper(final int minNs, final int maxNs) { return (runnable) -> { return new SettableTimedRunnable(randomIntBetween(minNs, maxNs)); @@ -222,5 +263,10 @@ public class QueueResizingEsThreadPoolExecutorTests extends ESTestCase { public long getTotalNanos() { return timeTaken; } + + @Override + public long getTotalExecutionNanos() { + return timeTaken; + } } } diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java index deb6b536e9d..74436b39378 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -120,7 +120,7 @@ public class IndexModuleTests extends ESTestCase { index = indexSettings.getIndex(); environment = new Environment(settings); emptyAnalysisRegistry = new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(), - emptyMap(), emptyMap()); + emptyMap(), emptyMap(), emptyMap()); threadPool = new TestThreadPool("test"); circuitBreakerService = new 
NoneCircuitBreakerService(); bigArrays = new BigArrays(settings, circuitBreakerService); diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java index 57ef842072a..9303159c265 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java @@ -57,7 +57,7 @@ public class AnalysisRegistryTests extends ESTestCase { private static AnalysisRegistry emptyAnalysisRegistry(Settings settings) { return new AnalysisRegistry(new Environment(settings), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(), - emptyMap()); + emptyMap(), emptyMap()); } private static IndexSettings indexSettingsOfCurrentVersion(Settings.Builder settings) { diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java index a818d9c7178..66b28ec419a 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java @@ -32,6 +32,7 @@ import java.io.IOException; import java.io.Reader; import java.util.List; import java.util.Map; +import java.util.function.Function; import static java.util.Collections.singletonList; import static java.util.Collections.singletonMap; @@ -101,12 +102,12 @@ public class CustomNormalizerTests extends ESTokenStreamTestCase { public void testIllegalCharFilters() throws IOException { Settings settings = Settings.builder() - .putArray("index.analysis.normalizer.my_normalizer.char_filter", "html_strip") + .putArray("index.analysis.normalizer.my_normalizer.char_filter", "mock_forbidden") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings)); - assertEquals("Custom normalizer [my_normalizer] may not use char filter [html_strip]", e.getMessage()); + () -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings, MOCK_ANALYSIS_PLUGIN)); + assertEquals("Custom normalizer [my_normalizer] may not use char filter [mock_forbidden]", e.getMessage()); } private static class MockAnalysisPlugin implements AnalysisPlugin { @@ -115,6 +116,11 @@ public class CustomNormalizerTests extends ESTokenStreamTestCase { return singletonList(PreConfiguredTokenFilter.singleton("mock_forbidden", false, MockLowerCaseFilter::new)); } + @Override + public List getPreConfiguredCharFilters() { + return singletonList(PreConfiguredCharFilter.singleton("mock_forbidden", false, Function.identity())); + } + @Override public Map> getCharFilters() { return singletonMap("mock_char_filter", (indexSettings, env, name, settings) -> { diff --git a/core/src/test/java/org/elasticsearch/index/analysis/PatternAnalyzerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/PatternAnalyzerTests.java index ab0a24d9dd8..d80cbf66c34 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/PatternAnalyzerTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/PatternAnalyzerTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.analysis; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.core.StopAnalyzer; +import org.apache.lucene.util.BytesRef; import 
org.elasticsearch.test.ESTokenStreamTestCase; import java.io.IOException; @@ -111,4 +112,11 @@ public class PatternAnalyzerTests extends ESTokenStreamTestCase { Analyzer a = new PatternAnalyzer(Pattern.compile(","), true, StopAnalyzer.ENGLISH_STOP_WORDS_SET); checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER); } + + public void testNormalize() { + PatternAnalyzer a = new PatternAnalyzer(Pattern.compile("\\s+"), false, null); + assertEquals(new BytesRef("FooBar"), a.normalize("dummy", "FooBar")); + a = new PatternAnalyzer(Pattern.compile("\\s+"), true, null); + assertEquals(new BytesRef("foobar"), a.normalize("dummy", "FooBar")); + } } diff --git a/core/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java b/core/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java new file mode 100644 index 00000000000..d21273a7b03 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/engine/CombinedDeletionPolicyTests.java @@ -0,0 +1,83 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.engine; + +import org.apache.lucene.index.IndexCommit; +import org.apache.lucene.index.SnapshotDeletionPolicy; +import org.elasticsearch.index.translog.Translog; +import org.elasticsearch.index.translog.TranslogDeletionPolicy; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class CombinedDeletionPolicyTests extends ESTestCase { + + public void testPassThrough() throws IOException { + SnapshotDeletionPolicy indexDeletionPolicy = mock(SnapshotDeletionPolicy.class); + CombinedDeletionPolicy combinedDeletionPolicy = new CombinedDeletionPolicy(indexDeletionPolicy, new TranslogDeletionPolicy(), + EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG); + List commitList = new ArrayList<>(); + long count = randomIntBetween(1, 3); + for (int i = 0; i < count; i++) { + commitList.add(mockIndexCommitWithTranslogGen(randomNonNegativeLong())); + } + combinedDeletionPolicy.onInit(commitList); + verify(indexDeletionPolicy, times(1)).onInit(commitList); + combinedDeletionPolicy.onCommit(commitList); + verify(indexDeletionPolicy, times(1)).onCommit(commitList); + } + + public void testSettingMinTranslogGen() throws IOException { + SnapshotDeletionPolicy indexDeletionPolicy = mock(SnapshotDeletionPolicy.class); + final TranslogDeletionPolicy translogDeletionPolicy = mock(TranslogDeletionPolicy.class); + CombinedDeletionPolicy combinedDeletionPolicy = new CombinedDeletionPolicy(indexDeletionPolicy, translogDeletionPolicy, + EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG); + List commitList = new ArrayList<>(); + long count = randomIntBetween(10, 20); + long lastGen = 0; + for (int i = 0; i < count; i++) { + lastGen += randomIntBetween(10, 20000); + commitList.add(mockIndexCommitWithTranslogGen(lastGen)); + } + combinedDeletionPolicy.onInit(commitList); + verify(translogDeletionPolicy, times(1)).setMinTranslogGenerationForRecovery(lastGen); + commitList.clear(); + for (int i = 0; i < count; i++) { + lastGen += randomIntBetween(10, 20000); + commitList.add(mockIndexCommitWithTranslogGen(lastGen)); + } + combinedDeletionPolicy.onCommit(commitList); + verify(translogDeletionPolicy, times(1)).setMinTranslogGenerationForRecovery(lastGen); + } + + IndexCommit mockIndexCommitWithTranslogGen(long gen) throws IOException { + IndexCommit commit = mock(IndexCommit.class); + when(commit.getUserData()).thenReturn(Collections.singletonMap(Translog.TRANSLOG_GENERATION_KEY, Long.toString(gen))); + return commit; + } +} diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 8f97da31a6b..16e746a67f7 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -119,13 +119,14 @@ import org.elasticsearch.index.seqno.SequenceNumbersService; import org.elasticsearch.index.shard.IndexSearcherWrapper; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardUtils; -import org.elasticsearch.index.shard.TranslogRecoveryPerformer; +import org.elasticsearch.index.shard.TranslogOpToEngineOpConverter; import org.elasticsearch.index.similarity.SimilarityService; import 
org.elasticsearch.index.store.DirectoryService; import org.elasticsearch.index.store.DirectoryUtils; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; +import org.elasticsearch.index.translog.TranslogDeletionPolicy; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.test.DummyShardLock; @@ -150,6 +151,7 @@ import java.util.Arrays; import java.util.Base64; import java.util.Collections; import java.util.Comparator; +import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; @@ -165,11 +167,13 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiFunction; +import java.util.function.Function; import java.util.function.LongSupplier; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.LongStream; +import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; import static java.util.Collections.shuffle; import static org.elasticsearch.index.engine.Engine.Operation.Origin.LOCAL_TRANSLOG_RECOVERY; @@ -220,12 +224,12 @@ public class InternalEngineTests extends ESTestCase { codecName = "default"; } defaultSettings = IndexSettingsModule.newIndexSettings("test", Settings.builder() - .put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), "1h") // make sure this doesn't kick in on us - .put(EngineConfig.INDEX_CODEC_SETTING.getKey(), codecName) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexSettings.MAX_REFRESH_LISTENERS_PER_SHARD.getKey(), - between(10, 10 * IndexSettings.MAX_REFRESH_LISTENERS_PER_SHARD.get(Settings.EMPTY))) - .build()); // TODO randomize more settings + .put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), "1h") // make sure this doesn't kick in on us + .put(EngineConfig.INDEX_CODEC_SETTING.getKey(), codecName) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexSettings.MAX_REFRESH_LISTENERS_PER_SHARD.getKey(), + between(10, 10 * IndexSettings.MAX_REFRESH_LISTENERS_PER_SHARD.get(Settings.EMPTY))) + .build()); // TODO randomize more settings threadPool = new TestThreadPool(getClass().getName()); store = createStore(); storeReplica = createStore(); @@ -258,9 +262,9 @@ public class InternalEngineTests extends ESTestCase { public EngineConfig copy(EngineConfig config, EngineConfig.OpenMode openMode, Analyzer analyzer) { return new EngineConfig(openMode, config.getShardId(), config.getThreadPool(), config.getIndexSettings(), config.getWarmer(), config.getStore(), config.getMergePolicy(), analyzer, config.getSimilarity(), - new CodecService(null, logger), config.getEventListener(), config.getTranslogRecoveryPerformer(), config.getQueryCache(), - config.getQueryCachingPolicy(), config.getTranslogConfig(), config.getFlushMergesAfter(), config.getRefreshListeners(), - config.getIndexSort()); + new CodecService(null, logger), config.getEventListener(), config.getQueryCache(), + config.getQueryCachingPolicy(), config.getTranslogConfig(), + config.getFlushMergesAfter(), config.getRefreshListeners(), config.getIndexSort(), config.getTranslogRecoveryRunner()); } @Override @@ -268,14 +272,14 @@ public class InternalEngineTests extends ESTestCase { public void tearDown() throws Exception { super.tearDown(); IOUtils.close( - 
replicaEngine, storeReplica, - engine, store); + replicaEngine, storeReplica, + engine, store); terminate(threadPool); } private static Document testDocumentWithTextField() { - return testDocumentWithTextField("test"); + return testDocumentWithTextField("test"); } private static Document testDocumentWithTextField(String value) { @@ -315,6 +319,7 @@ public class InternalEngineTests extends ESTestCase { protected Store createStore(final Directory directory) throws IOException { return createStore(INDEX_SETTINGS, directory); } + protected Store createStore(final IndexSettings indexSettings, final Directory directory) throws IOException { final DirectoryService directoryService = new DirectoryService(shardId, indexSettings) { @Override @@ -331,17 +336,23 @@ public class InternalEngineTests extends ESTestCase { protected Translog createTranslog(Path translogPath) throws IOException { TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, INDEX_SETTINGS, BigArrays.NON_RECYCLING_INSTANCE); - return new Translog(translogConfig, null, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO); + return new Translog(translogConfig, null, new TranslogDeletionPolicy(), () -> SequenceNumbersService.UNASSIGNED_SEQ_NO); } protected InternalEngine createEngine(Store store, Path translogPath) throws IOException { return createEngine(defaultSettings, store, translogPath, newMergePolicy(), null); } + protected InternalEngine createEngine(Store store, Path translogPath, + Function sequenceNumbersServiceSupplier) throws IOException { + return createEngine(defaultSettings, store, translogPath, newMergePolicy(), null, sequenceNumbersServiceSupplier); + } + protected InternalEngine createEngine(IndexSettings indexSettings, Store store, Path translogPath, MergePolicy mergePolicy) throws IOException { return createEngine(indexSettings, store, translogPath, mergePolicy, null); } + protected InternalEngine createEngine(IndexSettings indexSettings, Store store, Path translogPath, MergePolicy mergePolicy, @Nullable IndexWriterFactory indexWriterFactory) throws IOException { return createEngine(indexSettings, store, translogPath, mergePolicy, indexWriterFactory, null); @@ -353,7 +364,7 @@ public class InternalEngineTests extends ESTestCase { Path translogPath, MergePolicy mergePolicy, @Nullable IndexWriterFactory indexWriterFactory, - @Nullable Supplier sequenceNumbersServiceSupplier) throws IOException { + @Nullable Function sequenceNumbersServiceSupplier) throws IOException { return createEngine(indexSettings, store, translogPath, mergePolicy, indexWriterFactory, sequenceNumbersServiceSupplier, null); } @@ -363,7 +374,7 @@ public class InternalEngineTests extends ESTestCase { Path translogPath, MergePolicy mergePolicy, @Nullable IndexWriterFactory indexWriterFactory, - @Nullable Supplier sequenceNumbersServiceSupplier, + @Nullable Function sequenceNumbersServiceSupplier, @Nullable Sort indexSort) throws IOException { EngineConfig config = config(indexSettings, store, translogPath, mergePolicy, null, indexSort); InternalEngine internalEngine = createInternalEngine(indexWriterFactory, sequenceNumbersServiceSupplier, config); @@ -380,19 +391,19 @@ public class InternalEngineTests extends ESTestCase { } public static InternalEngine createInternalEngine(@Nullable final IndexWriterFactory indexWriterFactory, - @Nullable final Supplier sequenceNumbersServiceSupplier, + @Nullable final Function sequenceNumbersServiceSupplier, final EngineConfig config) { return new InternalEngine(config) { - @Override - IndexWriter 
createWriter(Directory directory, IndexWriterConfig iwc) throws IOException { - return (indexWriterFactory != null) ? - indexWriterFactory.createWriter(directory, iwc) : - super.createWriter(directory, iwc); - } + @Override + IndexWriter createWriter(Directory directory, IndexWriterConfig iwc) throws IOException { + return (indexWriterFactory != null) ? + indexWriterFactory.createWriter(directory, iwc) : + super.createWriter(directory, iwc); + } @Override public SequenceNumbersService seqNoService() { - return (sequenceNumbersServiceSupplier != null) ? sequenceNumbersServiceSupplier.get() : super.seqNoService(); + return (sequenceNumbersServiceSupplier != null) ? sequenceNumbersServiceSupplier.apply(config) : super.seqNoService(); } }; } @@ -422,11 +433,14 @@ public class InternalEngineTests extends ESTestCase { // we don't need to notify anybody in this test } }; + final TranslogHandler handler = new TranslogHandler(xContentRegistry(), IndexSettingsModule.newIndexSettings(shardId.getIndexName(), + indexSettings.getSettings())); + final List refreshListenerList = + refreshListener == null ? emptyList() : Collections.singletonList(refreshListener); EngineConfig config = new EngineConfig(openMode, shardId, threadPool, indexSettings, null, store, mergePolicy, iwc.getAnalyzer(), iwc.getSimilarity(), new CodecService(null, logger), listener, - new TranslogHandler(xContentRegistry(), shardId.getIndexName(), indexSettings.getSettings(), logger), - IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, - TimeValue.timeValueMinutes(5), refreshListener, indexSort); + IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, + TimeValue.timeValueMinutes(5), refreshListenerList, indexSort, handler); return config; } @@ -442,7 +456,7 @@ public class InternalEngineTests extends ESTestCase { public void testSegments() throws Exception { try (Store store = createStore(); - Engine engine = createEngine(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE)) { + Engine engine = createEngine(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE)) { List segments = engine.segments(false); assertThat(segments.isEmpty(), equalTo(true)); assertThat(engine.segmentsStats(false).getCount(), equalTo(0L)); @@ -591,7 +605,7 @@ public class InternalEngineTests extends ESTestCase { public void testSegmentsWithMergeFlag() throws Exception { try (Store store = createStore(); - Engine engine = createEngine(defaultSettings, store, createTempDir(), new TieredMergePolicy())) { + Engine engine = createEngine(defaultSettings, store, createTempDir(), new TieredMergePolicy())) { ParsedDocument doc = testParsedDocument("1", null, testDocument(), B_1, null); Engine.Index index = indexForDoc(doc); engine.index(index); @@ -674,7 +688,7 @@ public class InternalEngineTests extends ESTestCase { public void testSegmentsStatsIncludingFileSizes() throws Exception { try (Store store = createStore(); - Engine engine = createEngine(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE)) { + Engine engine = createEngine(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE)) { assertThat(engine.segmentsStats(true).getFileSizes().size(), equalTo(0)); ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null); @@ -696,25 +710,18 @@ public class InternalEngineTests extends ESTestCase { } public void testCommitStats() throws IOException { - InternalEngine engine = null; - try { - 
this.engine.close(); - - final AtomicLong maxSeqNo = new AtomicLong(SequenceNumbersService.NO_OPS_PERFORMED); - final AtomicLong localCheckpoint = new AtomicLong(SequenceNumbersService.NO_OPS_PERFORMED); - final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbersService.UNASSIGNED_SEQ_NO); - - engine = new InternalEngine(copy(this.engine.config(), this.engine.config().getOpenMode())) { - @Override - public SequenceNumbersService seqNoService() { - return new SequenceNumbersService( - this.config().getShardId(), - this.config().getIndexSettings(), + final AtomicLong maxSeqNo = new AtomicLong(SequenceNumbersService.NO_OPS_PERFORMED); + final AtomicLong localCheckpoint = new AtomicLong(SequenceNumbersService.NO_OPS_PERFORMED); + final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbersService.UNASSIGNED_SEQ_NO); + try ( + Store store = createStore(); + InternalEngine engine = createEngine(store, createTempDir(), (config) -> new SequenceNumbersService( + config.getShardId(), + config.getIndexSettings(), maxSeqNo.get(), localCheckpoint.get(), - globalCheckpoint.get()); - } - }; + globalCheckpoint.get()) + )) { CommitStats stats1 = engine.commitStats(); assertThat(stats1.getGeneration(), greaterThan(0L)); assertThat(stats1.getId(), notNullValue()); @@ -751,8 +758,6 @@ public class InternalEngineTests extends ESTestCase { assertThat(Long.parseLong(stats2.getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)), equalTo(localCheckpoint.get())); assertThat(stats2.getUserData(), hasKey(SequenceNumbers.MAX_SEQ_NO)); assertThat(Long.parseLong(stats2.getUserData().get(SequenceNumbers.MAX_SEQ_NO)), equalTo(maxSeqNo.get())); - } finally { - IOUtils.close(engine); } } @@ -877,26 +882,24 @@ public class InternalEngineTests extends ESTestCase { final int docs = randomIntBetween(1, 4096); final List seqNos = LongStream.range(0, docs).boxed().collect(Collectors.toList()); Randomness.shuffle(seqNos); - engine.close(); Engine initialEngine = null; + Engine recoveringEngine = null; + Store store = createStore(); + final AtomicInteger counter = new AtomicInteger(); try { - final AtomicInteger counter = new AtomicInteger(); - initialEngine = new InternalEngine(copy(engine.config(), EngineConfig.OpenMode.CREATE_INDEX_AND_TRANSLOG)) { - @Override - public SequenceNumbersService seqNoService() { - return new SequenceNumbersService( - engine.shardId, - engine.config().getIndexSettings(), - SequenceNumbersService.NO_OPS_PERFORMED, - SequenceNumbersService.NO_OPS_PERFORMED, - SequenceNumbersService.UNASSIGNED_SEQ_NO) { - @Override - public long generateSeqNo() { - return seqNos.get(counter.getAndIncrement()); - } - }; + initialEngine = createEngine(store, createTempDir(), (config) -> + new SequenceNumbersService( + config.getShardId(), + config.getIndexSettings(), + SequenceNumbersService.NO_OPS_PERFORMED, + SequenceNumbersService.NO_OPS_PERFORMED, + SequenceNumbersService.UNASSIGNED_SEQ_NO) { + @Override + public long generateSeqNo() { + return seqNos.get(counter.getAndIncrement()); + } } - }; + ); for (int i = 0; i < docs; i++) { final String id = Integer.toString(i); final ParsedDocument doc = testParsedDocument(id, null, testDocumentWithTextField(), SOURCE, null); @@ -907,12 +910,7 @@ public class InternalEngineTests extends ESTestCase { initialEngine.flush(); } } - } finally { - IOUtils.close(initialEngine); - } - - Engine recoveringEngine = null; - try { + initialEngine.close(); recoveringEngine = new InternalEngine(copy(initialEngine.config(), EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG)); 
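// Note (added commentary, not part of the original change): replaying the translog below should make
// every indexed doc visible again even though the custom SequenceNumbersService above handed out the
// sequence numbers in shuffled order; the searcher assertion that follows checks exactly that.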
recoveringEngine.recoverFromTranslog(); try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) { @@ -920,9 +918,8 @@ public class InternalEngineTests extends ESTestCase { assertEquals(docs, topDocs.totalHits); } } finally { - IOUtils.close(recoveringEngine); + IOUtils.close(initialEngine, recoveringEngine, store); } - } public void testConcurrentGetAndFlush() throws Exception { @@ -930,7 +927,8 @@ public class InternalEngineTests extends ESTestCase { engine.index(indexForDoc(doc)); final AtomicReference latestGetResult = new AtomicReference<>(); - latestGetResult.set(engine.get(newGet(true, doc))); + final Function searcherFactory = engine::acquireSearcher; + latestGetResult.set(engine.get(newGet(true, doc), searcherFactory)); final AtomicBoolean flushFinished = new AtomicBoolean(false); final CyclicBarrier barrier = new CyclicBarrier(2); Thread getThread = new Thread(() -> { @@ -944,7 +942,7 @@ public class InternalEngineTests extends ESTestCase { if (previousGetResult != null) { previousGetResult.release(); } - latestGetResult.set(engine.get(newGet(true, doc))); + latestGetResult.set(engine.get(newGet(true, doc), searcherFactory)); if (latestGetResult.get().exists() == false) { break; } @@ -964,6 +962,8 @@ public class InternalEngineTests extends ESTestCase { MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0)); searchResult.close(); + final Function searcherFactory = engine::acquireSearcher; + // create a document Document document = testDocumentWithTextField(); document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); @@ -977,12 +977,12 @@ public class InternalEngineTests extends ESTestCase { searchResult.close(); // but, not there non realtime - Engine.GetResult getResult = engine.get(newGet(false, doc)); + Engine.GetResult getResult = engine.get(newGet(false, doc), searcherFactory); assertThat(getResult.exists(), equalTo(false)); getResult.release(); // but, we can still get it (in realtime) - getResult = engine.get(newGet(true, doc)); + getResult = engine.get(newGet(true, doc), searcherFactory); assertThat(getResult.exists(), equalTo(true)); assertThat(getResult.docIdAndVersion(), notNullValue()); getResult.release(); @@ -997,7 +997,7 @@ public class InternalEngineTests extends ESTestCase { searchResult.close(); // also in non realtime - getResult = engine.get(newGet(false, doc)); + getResult = engine.get(newGet(false, doc), searcherFactory); assertThat(getResult.exists(), equalTo(true)); assertThat(getResult.docIdAndVersion(), notNullValue()); getResult.release(); @@ -1017,7 +1017,7 @@ public class InternalEngineTests extends ESTestCase { searchResult.close(); // but, we can still get it (in realtime) - getResult = engine.get(newGet(true, doc)); + getResult = engine.get(newGet(true, doc), searcherFactory); assertThat(getResult.exists(), equalTo(true)); assertThat(getResult.docIdAndVersion(), notNullValue()); getResult.release(); @@ -1042,7 +1042,7 @@ public class InternalEngineTests extends ESTestCase { searchResult.close(); // but, get should not see it (in realtime) - getResult = engine.get(newGet(true, doc)); + getResult = engine.get(newGet(true, doc), searcherFactory); assertThat(getResult.exists(), equalTo(false)); getResult.release(); @@ -1082,7 +1082,7 @@ public class InternalEngineTests extends ESTestCase { engine.flush(); // and, verify get (in real time) - getResult = engine.get(newGet(true, doc)); + getResult = engine.get(newGet(true, doc), 
searcherFactory); assertThat(getResult.exists(), equalTo(true)); assertThat(getResult.docIdAndVersion(), notNullValue()); getResult.release(); @@ -1148,9 +1148,23 @@ public class InternalEngineTests extends ESTestCase { searchResult.close(); } + public void testCommitAdvancesMinTranslogForRecovery() throws IOException { + ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null); + engine.index(indexForDoc(doc)); + engine.flush(); + assertThat(engine.getTranslog().currentFileGeneration(), equalTo(2L)); + assertThat(engine.getTranslog().getDeletionPolicy().getMinTranslogGenerationForRecovery(), equalTo(2L)); + engine.flush(); + assertThat(engine.getTranslog().currentFileGeneration(), equalTo(2L)); + assertThat(engine.getTranslog().getDeletionPolicy().getMinTranslogGenerationForRecovery(), equalTo(2L)); + engine.flush(true, true); + assertThat(engine.getTranslog().currentFileGeneration(), equalTo(3L)); + assertThat(engine.getTranslog().getDeletionPolicy().getMinTranslogGenerationForRecovery(), equalTo(3L)); + } + public void testSyncedFlush() throws IOException { try (Store store = createStore(); - Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), new LogByteSizeMergePolicy(), null))) { + Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), new LogByteSizeMergePolicy(), null))) { final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20); ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null); engine.index(indexForDoc(doc)); @@ -1160,13 +1174,13 @@ public class InternalEngineTests extends ESTestCase { wrongBytes[0] = (byte) ~wrongBytes[0]; Engine.CommitId wrongId = new Engine.CommitId(wrongBytes); assertEquals("should fail to sync flush with wrong id (but no docs)", engine.syncFlush(syncId + "1", wrongId), - Engine.SyncedFlushResult.COMMIT_MISMATCH); + Engine.SyncedFlushResult.COMMIT_MISMATCH); engine.index(indexForDoc(doc)); assertEquals("should fail to sync flush with right id but pending doc", engine.syncFlush(syncId + "2", commitID), - Engine.SyncedFlushResult.PENDING_OPERATIONS); + Engine.SyncedFlushResult.PENDING_OPERATIONS); commitID = engine.flush(); assertEquals("should succeed to flush commit with right id and no pending doc", engine.syncFlush(syncId, commitID), - Engine.SyncedFlushResult.SUCCESS); + Engine.SyncedFlushResult.SUCCESS); assertEquals(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID), syncId); assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId); } @@ -1177,7 +1191,7 @@ public class InternalEngineTests extends ESTestCase { for (int i = 0; i < iters; i++) { try (Store store = createStore(); InternalEngine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), - new LogDocMergePolicy(), null))) { + new LogDocMergePolicy(), null))) { final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20); Engine.Index doc1 = indexForDoc(testParsedDocument("1", null, testDocumentWithTextField(), B_1, null)); engine.index(doc1); @@ -1196,7 +1210,7 @@ public class InternalEngineTests extends ESTestCase { } Engine.CommitId commitID = engine.flush(); assertEquals("should succeed to flush commit with right id and no pending doc", engine.syncFlush(syncId, commitID), - Engine.SyncedFlushResult.SUCCESS); + Engine.SyncedFlushResult.SUCCESS); assertEquals(3, engine.segments(false).size()); engine.forceMerge(forceMergeFlushes, 1, false, false, false); 
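The new testCommitAdvancesMinTranslogForRecovery above asserts that each flush moves the translog deletion policy's minimum generation up to the generation recorded by the latest commit, the same bookkeeping that CombinedDeletionPolicyTests earlier in this change verifies via onInit/onCommit. Below is a minimal, self-contained sketch of that idea; the class and method names are illustrative only (not the actual CombinedDeletionPolicy or TranslogDeletionPolicy API), and the user-data key is assumed to mirror Translog.TRANSLOG_GENERATION_KEY used in the tests.

```java
import java.util.List;
import java.util.Map;

/**
 * Illustrative sketch: after every commit, remember the translog generation
 * that the newest index commit refers to; older generations are no longer
 * needed for recovery and may be trimmed.
 */
final class MinTranslogGenerationSketch {
    // Assumed to match the key the engine writes into commit user data.
    static final String TRANSLOG_GENERATION_KEY = "translog_generation";

    private long minTranslogGenerationForRecovery = 1;

    /** Called with commit user data, oldest commit first, as an IndexDeletionPolicy#onCommit would be. */
    void onCommit(List<Map<String, String>> commitsUserData) {
        Map<String, String> lastCommit = commitsUserData.get(commitsUserData.size() - 1);
        minTranslogGenerationForRecovery = Long.parseLong(lastCommit.get(TRANSLOG_GENERATION_KEY));
    }

    long getMinTranslogGenerationForRecovery() {
        return minTranslogGenerationForRecovery;
    }

    public static void main(String[] args) {
        MinTranslogGenerationSketch policy = new MinTranslogGenerationSketch();
        policy.onCommit(List.of(
                Map.of(TRANSLOG_GENERATION_KEY, "2"),
                Map.of(TRANSLOG_GENERATION_KEY, "5")));
        // Generations below 5 are no longer required for recovery.
        System.out.println(policy.getMinTranslogGenerationForRecovery()); // prints 5
    }
}
```

In the change itself this value lives on the TranslogDeletionPolicy, which is what lets testTranslogCleanUpPostCommitCrash later in this file assert that translog files older than the committed generation are actually deleted.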
@@ -1236,7 +1250,7 @@ public class InternalEngineTests extends ESTestCase { engine.index(indexForDoc(doc)); final Engine.CommitId commitID = engine.flush(); assertEquals("should succeed to flush commit with right id and no pending doc", engine.syncFlush(syncId, commitID), - Engine.SyncedFlushResult.SUCCESS); + Engine.SyncedFlushResult.SUCCESS); assertEquals(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID), syncId); assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId); EngineConfig config = engine.config(); @@ -1259,7 +1273,7 @@ public class InternalEngineTests extends ESTestCase { engine.index(indexForDoc(doc)); final Engine.CommitId commitID = engine.flush(); assertEquals("should succeed to flush commit with right id and no pending doc", engine.syncFlush(syncId, commitID), - Engine.SyncedFlushResult.SUCCESS); + Engine.SyncedFlushResult.SUCCESS); assertEquals(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID), syncId); assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId); doc = testParsedDocument("2", null, testDocumentWithTextField(), new BytesArray("{}"), null); @@ -1295,8 +1309,8 @@ public class InternalEngineTests extends ESTestCase { public void testForceMerge() throws IOException { try (Store store = createStore(); - Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), - new LogByteSizeMergePolicy(), null))) { // use log MP here we test some behavior in ESMP + Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), + new LogByteSizeMergePolicy(), null))) { // use log MP here we test some behavior in ESMP int numDocs = randomIntBetween(10, 100); for (int i = 0; i < numDocs; i++) { ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null); @@ -1410,7 +1424,7 @@ public class InternalEngineTests extends ESTestCase { final Term id = newUid("1"); final int startWithSeqNo; if (partialOldPrimary) { - startWithSeqNo = randomBoolean() ? numOfOps - 1 : randomIntBetween(0, numOfOps - 1); + startWithSeqNo = randomBoolean() ? numOfOps - 1 : randomIntBetween(0, numOfOps - 1); } else { startWithSeqNo = 0; } @@ -1529,7 +1543,8 @@ public class InternalEngineTests extends ESTestCase { } if (randomBoolean()) { engine.refresh("test"); - } if (randomBoolean()) { + } + if (randomBoolean()) { engine.flush(); } firstOp = false; @@ -1586,9 +1601,9 @@ public class InternalEngineTests extends ESTestCase { try { final Engine.Operation op = ops.get(docOffset); if (op instanceof Engine.Index) { - engine.index((Engine.Index)op); + engine.index((Engine.Index) op); } else { - engine.delete((Engine.Delete)op); + engine.delete((Engine.Delete) op); } if ((docOffset + 1) % 4 == 0) { engine.refresh("test"); @@ -1629,7 +1644,7 @@ public class InternalEngineTests extends ESTestCase { final long correctVersion = docDeleted && randomBoolean() ? Versions.MATCH_DELETED : lastOpVersion; logger.info("performing [{}]{}{}", op.operationType().name().charAt(0), - versionConflict ? " (conflict " + conflictingVersion +")" : "", + versionConflict ? " (conflict " + conflictingVersion + ")" : "", versionedOp ? 
" (versioned " + correctVersion + ")" : ""); if (op instanceof Engine.Index) { final Engine.Index index = (Engine.Index) op; @@ -1799,7 +1814,7 @@ public class InternalEngineTests extends ESTestCase { assertOpsOnReplica(replicaOps, replicaEngine, true); final int opsOnPrimary = assertOpsOnPrimary(primaryOps, finalReplicaVersion, deletedOnReplica, replicaEngine); final long currentSeqNo = getSequenceID(replicaEngine, - new Engine.Get(false, "type", lastReplicaOp.uid().text(), lastReplicaOp.uid())).v1(); + new Engine.Get(false, "type", lastReplicaOp.uid().text(), lastReplicaOp.uid())).v1(); try (Searcher searcher = engine.acquireSearcher("test")) { final TotalHitCountCollector collector = new TotalHitCountCollector(); searcher.searcher().search(new MatchAllDocsQuery(), collector); @@ -1854,6 +1869,7 @@ public class InternalEngineTests extends ESTestCase { ParsedDocument doc = testParsedDocument("1", null, testDocument(), bytesArray(""), null); final Term uidTerm = newUid(doc); engine.index(indexForDoc(doc)); + final Function searcherFactory = engine::acquireSearcher; for (int i = 0; i < thread.length; i++) { thread[i] = new Thread(() -> { startGun.countDown(); @@ -1863,7 +1879,7 @@ public class InternalEngineTests extends ESTestCase { throw new AssertionError(e); } for (int op = 0; op < opsPerThread; op++) { - try (Engine.GetResult get = engine.get(new Engine.Get(true, doc.type(), doc.id(), uidTerm))) { + try (Engine.GetResult get = engine.get(new Engine.Get(true, doc.type(), doc.id(), uidTerm), searcherFactory)) { FieldsVisitor visitor = new FieldsVisitor(true); get.docIdAndVersion().context.reader().document(get.docIdAndVersion().docId, visitor); List values = new ArrayList<>(Strings.commaDelimitedListToSet(visitor.source().utf8ToString())); @@ -1905,7 +1921,7 @@ public class InternalEngineTests extends ESTestCase { assertTrue(op.added + " should not exist", exists); } - try (Engine.GetResult get = engine.get(new Engine.Get(true, doc.type(), doc.id(), uidTerm))) { + try (Engine.GetResult get = engine.get(new Engine.Get(true, doc.type(), doc.id(), uidTerm), searcherFactory)) { FieldsVisitor visitor = new FieldsVisitor(true); get.docIdAndVersion().context.reader().document(get.docIdAndVersion().docId, visitor); List values = Arrays.asList(Strings.commaDelimitedListToStringArray(visitor.source().utf8ToString())); @@ -1923,7 +1939,7 @@ public class InternalEngineTests extends ESTestCase { indexResult = engine.index(index); assertFalse(indexResult.isCreated()); - engine.delete(new Engine.Delete(null, "1", newUid(doc))); + engine.delete(new Engine.Delete("doc", "1", newUid(doc))); index = indexForDoc(doc); indexResult = engine.index(index); @@ -2156,11 +2172,11 @@ public class InternalEngineTests extends ESTestCase { final IndexCommit commit = commitRef.getIndexCommit(); Map userData = commit.getUserData(); long localCheckpoint = userData.containsKey(SequenceNumbers.LOCAL_CHECKPOINT_KEY) ? - Long.parseLong(userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) : - SequenceNumbersService.NO_OPS_PERFORMED; + Long.parseLong(userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) : + SequenceNumbersService.NO_OPS_PERFORMED; long maxSeqNo = userData.containsKey(SequenceNumbers.MAX_SEQ_NO) ? 
- Long.parseLong(userData.get(SequenceNumbers.MAX_SEQ_NO)) : - SequenceNumbersService.UNASSIGNED_SEQ_NO; + Long.parseLong(userData.get(SequenceNumbers.MAX_SEQ_NO)) : + SequenceNumbersService.UNASSIGNED_SEQ_NO; // local checkpoint and max seq no shouldn't go backwards assertThat(localCheckpoint, greaterThanOrEqualTo(prevLocalCheckpoint)); assertThat(maxSeqNo, greaterThanOrEqualTo(prevMaxSeqNo)); @@ -2179,7 +2195,7 @@ public class InternalEngineTests extends ESTestCase { FixedBitSet seqNosBitSet = getSeqNosSet(reader, highestSeqNo); for (int i = 0; i <= localCheckpoint; i++) { assertTrue("local checkpoint [" + localCheckpoint + "], _seq_no [" + i + "] should be indexed", - seqNosBitSet.get(i)); + seqNosBitSet.get(i)); } } prevLocalCheckpoint = localCheckpoint; @@ -2255,9 +2271,11 @@ public class InternalEngineTests extends ESTestCase { public void testEnableGcDeletes() throws Exception { try (Store store = createStore(); - Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), newMergePolicy(), null))) { + Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), newMergePolicy(), null))) { engine.config().setEnableGcDeletes(false); + final Function searcherFactory = engine::acquireSearcher; + // Add document Document document = testDocument(); document.add(new TextField("value", "test1", Field.Store.YES)); @@ -2269,7 +2287,7 @@ public class InternalEngineTests extends ESTestCase { engine.delete(new Engine.Delete("test", "1", newUid(doc), SequenceNumbersService.UNASSIGNED_SEQ_NO, 0, 10, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime())); // Get should not find the document - Engine.GetResult getResult = engine.get(newGet(true, doc)); + Engine.GetResult getResult = engine.get(newGet(true, doc), searcherFactory); assertThat(getResult.exists(), equalTo(false)); // Give the gc pruning logic a chance to kick in @@ -2283,7 +2301,7 @@ public class InternalEngineTests extends ESTestCase { engine.delete(new Engine.Delete("test", "2", newUid("2"), SequenceNumbersService.UNASSIGNED_SEQ_NO, 0, 10, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime())); // Get should not find the document (we never indexed uid=2): - getResult = engine.get(new Engine.Get(true, "type", "2", newUid("2"))); + getResult = engine.get(new Engine.Get(true, "type", "2", newUid("2")), searcherFactory); assertThat(getResult.exists(), equalTo(false)); // Try to index uid=1 with a too-old version, should fail: @@ -2293,7 +2311,7 @@ public class InternalEngineTests extends ESTestCase { assertThat(indexResult.getFailure(), instanceOf(VersionConflictEngineException.class)); // Get should still not find the document - getResult = engine.get(newGet(true, doc)); + getResult = engine.get(newGet(true, doc), searcherFactory); assertThat(getResult.exists(), equalTo(false)); // Try to index uid=2 with a too-old version, should fail: @@ -2303,7 +2321,7 @@ public class InternalEngineTests extends ESTestCase { assertThat(indexResult.getFailure(), instanceOf(VersionConflictEngineException.class)); // Get should not find the document - getResult = engine.get(newGet(true, doc)); + getResult = engine.get(newGet(true, doc), searcherFactory); assertThat(getResult.exists(), equalTo(false)); } } @@ -2326,7 +2344,7 @@ public class InternalEngineTests extends ESTestCase { private Engine.Index replicaIndexForDoc(ParsedDocument doc, long version, long seqNo, boolean isRetry) { - return new Engine.Index(newUid(doc), doc, seqNo, 1, version, VersionType.EXTERNAL, + 
return new Engine.Index(newUid(doc), doc, seqNo, 1, version, VersionType.EXTERNAL, Engine.Operation.Origin.REPLICA, System.nanoTime(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, isRetry); } @@ -2469,6 +2487,38 @@ public class InternalEngineTests extends ESTestCase { } } + public void testTranslogCleanUpPostCommitCrash() throws Exception { + try (Store store = createStore()) { + AtomicBoolean throwErrorOnCommit = new AtomicBoolean(); + final Path translogPath = createTempDir(); + try (InternalEngine engine = new InternalEngine(config(defaultSettings, store, translogPath, newMergePolicy(), null, null)) { + @Override + protected void commitIndexWriter(IndexWriter writer, Translog translog, String syncId) throws IOException { + super.commitIndexWriter(writer, translog, syncId); + if (throwErrorOnCommit.get()) { + throw new RuntimeException("power's out"); + } + } + }) { + final ParsedDocument doc1 = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null); + engine.index(indexForDoc(doc1)); + throwErrorOnCommit.set(true); + FlushFailedEngineException e = expectThrows(FlushFailedEngineException.class, engine::flush); + assertThat(e.getCause().getMessage(), equalTo("power's out")); + } + try (InternalEngine engine = new InternalEngine(config(defaultSettings, store, translogPath, newMergePolicy(), null, null))) { + engine.recoverFromTranslog(); + assertVisibleCount(engine, 1); + final long committedGen = Long.valueOf( + engine.getLastCommittedSegmentInfos().getUserData().get(Translog.TRANSLOG_GENERATION_KEY)); + for (int gen = 1; gen < committedGen; gen++) { + final Path genFile = translogPath.resolve(Translog.getFilename(gen)); + assertFalse(genFile + " wasn't cleaned up", Files.exists(genFile)); + } + } + } + } + public void testSkipTranslogReplay() throws IOException { final int numDocs = randomIntBetween(1, 10); for (int i = 0; i < numDocs; i++) { @@ -2479,13 +2529,11 @@ public class InternalEngineTests extends ESTestCase { } assertVisibleCount(engine, numDocs); engine.close(); - engine = new InternalEngine(engine.config()); - + engine = new InternalEngine(copy(engine.config(), EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG)); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10)); assertThat(topDocs.totalHits, equalTo(0)); } - } private Mapping dynamicUpdate() { @@ -2603,7 +2651,7 @@ public class InternalEngineTests extends ESTestCase { } assertVisibleCount(engine, numDocs); - TranslogHandler parser = (TranslogHandler) engine.config().getTranslogRecoveryPerformer(); + TranslogHandler parser = (TranslogHandler) engine.config().getTranslogRecoveryRunner(); parser.mappingUpdate = dynamicUpdate(); engine.close(); @@ -2611,8 +2659,8 @@ public class InternalEngineTests extends ESTestCase { engine.recoverFromTranslog(); assertVisibleCount(engine, numDocs, false); - parser = (TranslogHandler) engine.config().getTranslogRecoveryPerformer(); - assertEquals(numDocs, parser.recoveredOps.get()); + parser = (TranslogHandler) engine.config().getTranslogRecoveryRunner(); + assertEquals(numDocs, parser.appliedOperations.get()); if (parser.mappingUpdate != null) { assertEquals(1, parser.getRecoveredTypes().size()); assertTrue(parser.getRecoveredTypes().containsKey("test")); @@ -2623,8 +2671,8 @@ public class InternalEngineTests extends ESTestCase { engine.close(); engine = createEngine(store, primaryTranslogDir); assertVisibleCount(engine, numDocs, false); - parser = 
(TranslogHandler) engine.config().getTranslogRecoveryPerformer(); - assertEquals(0, parser.recoveredOps.get()); + parser = (TranslogHandler) engine.config().getTranslogRecoveryRunner(); + assertEquals(0, parser.appliedOperations.get()); final boolean flush = randomBoolean(); int randomId = randomIntBetween(numDocs + 1, numDocs + 10); @@ -2652,8 +2700,8 @@ public class InternalEngineTests extends ESTestCase { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), numDocs + 1); assertThat(topDocs.totalHits, equalTo(numDocs + 1)); } - parser = (TranslogHandler) engine.config().getTranslogRecoveryPerformer(); - assertEquals(flush ? 1 : 2, parser.recoveredOps.get()); + parser = (TranslogHandler) engine.config().getTranslogRecoveryRunner(); + assertEquals(flush ? 1 : 2, parser.appliedOperations.get()); engine.delete(new Engine.Delete("test", Integer.toString(randomId), newUid(doc))); if (randomBoolean()) { engine.refresh("test"); @@ -2667,23 +2715,22 @@ public class InternalEngineTests extends ESTestCase { } } - public static class TranslogHandler extends TranslogRecoveryPerformer { + public static class TranslogHandler extends TranslogOpToEngineOpConverter + implements EngineConfig.TranslogRecoveryRunner { private final MapperService mapperService; public Mapping mappingUpdate = null; + private final Map recoveredTypes = new HashMap<>(); + private final AtomicLong appliedOperations = new AtomicLong(); - public final AtomicInteger recoveredOps = new AtomicInteger(0); - - public TranslogHandler(NamedXContentRegistry xContentRegistry, String indexName, Settings settings, Logger logger) { - super(new ShardId("test", "_na_", 0), null, logger); - Index index = new Index(indexName, "_na_"); - IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); + public TranslogHandler(NamedXContentRegistry xContentRegistry, IndexSettings indexSettings) { + super(new ShardId("test", "_na_", 0), null); NamedAnalyzer defaultAnalyzer = new NamedAnalyzer("default", AnalyzerScope.INDEX, new StandardAnalyzer()); IndexAnalyzers indexAnalyzers = new IndexAnalyzers(indexSettings, defaultAnalyzer, defaultAnalyzer, defaultAnalyzer, Collections.emptyMap(), Collections.emptyMap()); SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap()); MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry(); mapperService = new MapperService(indexSettings, indexAnalyzers, xContentRegistry, similarityService, mapperRegistry, - () -> null); + () -> null); } @Override @@ -2693,9 +2740,44 @@ public class InternalEngineTests extends ESTestCase { return new DocumentMapperForType(b.build(mapperService), mappingUpdate); } + private void applyOperation(Engine engine, Engine.Operation operation) throws IOException { + switch (operation.operationType()) { + case INDEX: + Engine.Index engineIndex = (Engine.Index) operation; + Mapping update = engineIndex.parsedDoc().dynamicMappingsUpdate(); + if (engineIndex.parsedDoc().dynamicMappingsUpdate() != null) { + recoveredTypes.compute(engineIndex.type(), (k, mapping) -> mapping == null ? 
update : mapping.merge(update, false)); + } + engine.index(engineIndex); + break; + case DELETE: + engine.delete((Engine.Delete) operation); + break; + case NO_OP: + engine.noOp((Engine.NoOp) operation); + break; + default: + throw new IllegalStateException("No operation defined for [" + operation + "]"); + } + } + + /** + * Returns the recovered types modifying the mapping during the recovery + */ + public Map getRecoveredTypes() { + return recoveredTypes; + } + @Override - protected void operationProcessed() { - recoveredOps.incrementAndGet(); + public int run(Engine engine, Translog.Snapshot snapshot) throws IOException { + int opsRecovered = 0; + Translog.Operation operation; + while ((operation = snapshot.next()) != null) { + applyOperation(engine, convertToEngineOp(operation, Engine.Operation.Origin.LOCAL_TRANSLOG_RECOVERY)); + opsRecovered++; + appliedOperations.incrementAndGet(); + } + return opsRecovered; } } @@ -2713,21 +2795,22 @@ public class InternalEngineTests extends ESTestCase { Translog translog = new Translog( new TranslogConfig(shardId, createTempDir(), INDEX_SETTINGS, BigArrays.NON_RECYCLING_INSTANCE), - null, - () -> SequenceNumbersService.UNASSIGNED_SEQ_NO); + null, new TranslogDeletionPolicy(), () -> SequenceNumbersService.UNASSIGNED_SEQ_NO); translog.add(new Translog.Index("test", "SomeBogusId", 0, "{}".getBytes(Charset.forName("UTF-8")))); assertEquals(generation.translogFileGeneration, translog.currentFileGeneration()); translog.close(); EngineConfig config = engine.config(); /* create a TranslogConfig that has been created with a different UUID */ - TranslogConfig translogConfig = new TranslogConfig(shardId, translog.location(), config.getIndexSettings(), BigArrays.NON_RECYCLING_INSTANCE); + TranslogConfig translogConfig = new TranslogConfig(shardId, translog.location(), config.getIndexSettings(), + BigArrays.NON_RECYCLING_INSTANCE); EngineConfig brokenConfig = new EngineConfig(EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG, shardId, threadPool, config.getIndexSettings(), null, store, newMergePolicy(), config.getAnalyzer(), - config.getSimilarity(), new CodecService(null, logger), config.getEventListener(), config.getTranslogRecoveryPerformer(), + config.getSimilarity(), new CodecService(null, logger), config.getEventListener(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, - TimeValue.timeValueMinutes(5), config.getRefreshListeners(), null); + TimeValue.timeValueMinutes(5), config.getRefreshListeners(), null, + config.getTranslogRecoveryRunner()); try { InternalEngine internalEngine = new InternalEngine(brokenConfig); @@ -2822,6 +2905,8 @@ public class InternalEngineTests extends ESTestCase { assertEquals("1", userData.get(Translog.TRANSLOG_GENERATION_KEY)); assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY)); expectThrows(IllegalStateException.class, () -> engine.recoverFromTranslog()); + assertEquals(1, engine.getTranslog().currentFileGeneration()); + assertEquals(0L, engine.getTranslog().totalOperations()); } } @@ -3539,7 +3624,7 @@ public class InternalEngineTests extends ESTestCase { final AtomicLong expectedLocalCheckpoint = new AtomicLong(SequenceNumbersService.NO_OPS_PERFORMED); final List threads = new ArrayList<>(); final SequenceNumbersService seqNoService = getStallingSeqNoService(latchReference, barrier, stall, expectedLocalCheckpoint); - initialEngine = createEngine(defaultSettings, store, primaryTranslogDir, newMergePolicy(), null, () -> seqNoService); 
+ initialEngine = createEngine(defaultSettings, store, primaryTranslogDir, newMergePolicy(), null, (config) -> seqNoService); final InternalEngine finalInitialEngine = initialEngine; for (int i = 0; i < docs; i++) { final String id = Integer.toString(i); @@ -3635,6 +3720,7 @@ public class InternalEngineTests extends ESTestCase { document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); final ParsedDocument doc = testParsedDocument("1", null, document, B_1, null); final Term uid = newUid(doc); + final Function searcherFactory = engine::acquireSearcher; for (int i = 0; i < numberOfOperations; i++) { if (randomBoolean()) { final Engine.Index index = new Engine.Index( @@ -3696,7 +3782,7 @@ public class InternalEngineTests extends ESTestCase { } assertThat(engine.seqNoService().getLocalCheckpoint(), equalTo(expectedLocalCheckpoint)); - try (Engine.GetResult result = engine.get(new Engine.Get(true, "type", "2", uid))) { + try (Engine.GetResult result = engine.get(new Engine.Get(true, "type", "2", uid), searcherFactory)) { assertThat(result.exists(), equalTo(exists)); } } @@ -3767,7 +3853,7 @@ public class InternalEngineTests extends ESTestCase { final AtomicLong expectedLocalCheckpoint = new AtomicLong(SequenceNumbersService.NO_OPS_PERFORMED); final Map threads = new LinkedHashMap<>(); final SequenceNumbersService seqNoService = getStallingSeqNoService(latchReference, barrier, stall, expectedLocalCheckpoint); - actualEngine = createEngine(defaultSettings, store, primaryTranslogDir, newMergePolicy(), null, () -> seqNoService); + actualEngine = createEngine(defaultSettings, store, primaryTranslogDir, newMergePolicy(), null, (config) -> seqNoService); final InternalEngine finalActualEngine = actualEngine; final Translog translog = finalActualEngine.getTranslog(); final long generation = finalActualEngine.getTranslog().currentFileGeneration(); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java index dfb3a3c1b3e..64dcf0a0943 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java @@ -19,11 +19,9 @@ package org.elasticsearch.index.fielddata; -import com.carrotsearch.hppc.ObjectArrayList; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -31,8 +29,8 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; +import java.util.ArrayList; import java.util.List; -import static org.hamcrest.Matchers.equalTo; public class BinaryDVFieldDataTests extends AbstractFieldDataTestCase { @Override @@ -53,15 +51,21 @@ public class BinaryDVFieldDataTests extends AbstractFieldDataTestCase { final DocumentMapper mapper = mapperService.documentMapperParser().parse("test", new CompressedXContent(mapping)); - ObjectArrayList bytesList1 = new ObjectArrayList<>(2); + List bytesList1 = new ArrayList<>(2); bytesList1.add(randomBytes()); bytesList1.add(randomBytes()); - XContentBuilder doc = 
XContentFactory.jsonBuilder().startObject().startArray("field").value(bytesList1.get(0)).value(bytesList1.get(1)).endArray().endObject(); - ParsedDocument d = mapper.parse(SourceToParse.source("test", "test", "1", - doc.bytes(), XContentType.JSON)); + XContentBuilder doc = XContentFactory.jsonBuilder().startObject(); + { + doc.startArray("field"); + doc.value(bytesList1.get(0)); + doc.value(bytesList1.get(1)); + doc.endArray(); + } + doc.endObject(); + ParsedDocument d = mapper.parse(SourceToParse.source("test", "test", "1", doc.bytes(), XContentType.JSON)); writer.addDocument(d.rootDoc()); - byte[] bytes1 = randomBytes(); + BytesRef bytes1 = randomBytes(); doc = XContentFactory.jsonBuilder().startObject().field("field", bytes1).endObject(); d = mapper.parse(SourceToParse.source("test", "test", "2", doc.bytes(), XContentType.JSON)); writer.addDocument(d.rootDoc()); @@ -71,45 +75,75 @@ public class BinaryDVFieldDataTests extends AbstractFieldDataTestCase { writer.addDocument(d.rootDoc()); // test remove duplicate value - ObjectArrayList bytesList2 = new ObjectArrayList<>(2); + List bytesList2 = new ArrayList<>(2); bytesList2.add(randomBytes()); bytesList2.add(randomBytes()); - doc = XContentFactory.jsonBuilder().startObject().startArray("field").value(bytesList2.get(0)).value(bytesList2.get(1)).value(bytesList2.get(0)).endArray().endObject(); + doc = XContentFactory.jsonBuilder().startObject(); + { + doc.startArray("field"); + doc.value(bytesList2.get(0)); + doc.value(bytesList2.get(1)); + doc.value(bytesList2.get(0)); + doc.endArray(); + } + doc.endObject(); d = mapper.parse(SourceToParse.source("test", "test", "4", doc.bytes(), XContentType.JSON)); writer.addDocument(d.rootDoc()); - List readers = refreshReader(); IndexFieldData indexFieldData = getForField("field"); - for (LeafReaderContext reader : readers) { - AtomicFieldData fieldData = indexFieldData.load(reader); + List readers = refreshReader(); + assertEquals(1, readers.size()); + LeafReaderContext reader = readers.get(0); - SortedBinaryDocValues bytesValues = fieldData.getBytesValues(); + bytesList1.sort(null); + bytesList2.sort(null); - CollectionUtils.sortAndDedup(bytesList1); - assertTrue(bytesValues.advanceExact(0)); - assertThat(bytesValues.docValueCount(), equalTo(2)); - assertThat(bytesValues.nextValue(), equalTo(new BytesRef(bytesList1.get(0)))); - assertThat(bytesValues.nextValue(), equalTo(new BytesRef(bytesList1.get(1)))); + // Test SortedBinaryDocValues's decoding: + AtomicFieldData fieldData = indexFieldData.load(reader); + SortedBinaryDocValues bytesValues = fieldData.getBytesValues(); - assertTrue(bytesValues.advanceExact(1)); - assertThat(bytesValues.docValueCount(), equalTo(1)); - assertThat(bytesValues.nextValue(), equalTo(new BytesRef(bytes1))); + assertTrue(bytesValues.advanceExact(0)); + assertEquals(2, bytesValues.docValueCount()); + assertEquals(bytesList1.get(0), bytesValues.nextValue()); + assertEquals(bytesList1.get(1), bytesValues.nextValue()); - assertFalse(bytesValues.advanceExact(2)); + assertTrue(bytesValues.advanceExact(1)); + assertEquals(1, bytesValues.docValueCount()); + assertEquals(bytes1, bytesValues.nextValue()); - CollectionUtils.sortAndDedup(bytesList2); - assertTrue(bytesValues.advanceExact(3)); - assertThat(bytesValues.docValueCount(), equalTo(2)); - assertThat(bytesValues.nextValue(), equalTo(new BytesRef(bytesList2.get(0)))); - assertThat(bytesValues.nextValue(), equalTo(new BytesRef(bytesList2.get(1)))); - } + assertFalse(bytesValues.advanceExact(2)); + + 
assertTrue(bytesValues.advanceExact(3)); + assertEquals(2, bytesValues.docValueCount()); + assertEquals(bytesList2.get(0), bytesValues.nextValue()); + assertEquals(bytesList2.get(1), bytesValues.nextValue()); + + // Test whether ScriptDocValues.BytesRefs makes a deepcopy + fieldData = indexFieldData.load(reader); + ScriptDocValues scriptValues = fieldData.getScriptValues(); + scriptValues.setNextDocId(0); + assertEquals(2, scriptValues.size()); + assertEquals(bytesList1.get(0), scriptValues.get(0)); + assertEquals(bytesList1.get(1), scriptValues.get(1)); + + scriptValues.setNextDocId(1); + assertEquals(1, scriptValues.size()); + assertEquals(bytes1, scriptValues.get(0)); + + scriptValues.setNextDocId(2); + assertEquals(0, scriptValues.size()); + + scriptValues.setNextDocId(3); + assertEquals(2, scriptValues.size()); + assertEquals(bytesList2.get(0), scriptValues.get(0)); + assertEquals(bytesList2.get(1), scriptValues.get(1)); } - private byte[] randomBytes() { + private static BytesRef randomBytes() { int size = randomIntBetween(10, 1000); byte[] bytes = new byte[size]; random().nextBytes(bytes); - return bytes; + return new BytesRef(bytes); } @Override diff --git a/core/src/test/java/org/elasticsearch/index/mapper/AllFieldIT.java b/core/src/test/java/org/elasticsearch/index/mapper/AllFieldIT.java new file mode 100644 index 00000000000..2be58b3b68e --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/AllFieldIT.java @@ -0,0 +1,109 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.Version; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; + +import java.util.Arrays; +import java.util.Collection; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; + +public class AllFieldIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return Arrays.asList(InternalSettingsPlugin.class); // uses index.version.created + } + + public void test5xIndicesContinueToUseAll() throws Exception { + // Default 5.x settings + assertAcked(prepareCreate("test").setSettings("index.version.created", Version.V_5_1_1.id)); + client().prepareIndex("test", "type", "1").setSource("body", "foo").get(); + refresh(); + SearchResponse resp = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("_all", "foo")).get(); + assertHitCount(resp, 1); + assertSearchHits(resp, "1"); + + // _all explicitly enabled + assertAcked(prepareCreate("test2") + .setSource(jsonBuilder() + .startObject() + .startObject("mappings") + .startObject("type") + .startObject("_all") + .field("enabled", true) + .endObject() // _all + .endObject() // type + .endObject() // mappings + .endObject()) + .setSettings("index.version.created", Version.V_5_4_0_ID)); + client().prepareIndex("test2", "type", "1").setSource("foo", "bar").get(); + refresh(); + resp = client().prepareSearch("test2").setQuery(QueryBuilders.matchQuery("_all", "bar")).get(); + assertHitCount(resp, 1); + assertSearchHits(resp, "1"); + + // _all explicitly disabled + assertAcked(prepareCreate("test3") + .setSource(jsonBuilder() + .startObject() + .startObject("mappings") + .startObject("type") + .startObject("_all") + .field("enabled", false) + .endObject() // _all + .endObject() // type + .endObject() // mappings + .endObject()) + .setSettings("index.version.created", Version.V_5_4_0_ID)); + client().prepareIndex("test3", "type", "1").setSource("foo", "baz").get(); + refresh(); + resp = client().prepareSearch("test3").setQuery(QueryBuilders.matchQuery("_all", "baz")).get(); + assertHitCount(resp, 0); + + // _all present, but not enabled or disabled (default settings) + assertAcked(prepareCreate("test4") + .setSource(jsonBuilder() + .startObject() + .startObject("mappings") + .startObject("type") + .startObject("_all") + .endObject() // _all + .endObject() // type + .endObject() // mappings + .endObject()) + .setSettings("index.version.created", Version.V_5_4_0_ID)); + client().prepareIndex("test4", "type", "1").setSource("foo", "eggplant").get(); + refresh(); + resp = client().prepareSearch("test4").setQuery(QueryBuilders.matchQuery("_all", "eggplant")).get(); + assertHitCount(resp, 1); + assertSearchHits(resp, "1"); + } + +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java index e2fbbe7ebfe..198992b41a0 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java @@ -168,9 +168,9 @@ public class DocumentMapperMergeTests extends ESSingleNodeTestCase { barrier.await(); for (int i = 0; i < 200 && stopped.get() == false; i++) { final String fieldName = Integer.toString(i); - ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", - "test", - fieldName, + ParsedDocument doc = documentMapper.parse(SourceToParse.source("test", + "test", + fieldName, new BytesArray("{ \"" + fieldName + "\" : \"test\" }"), XContentType.JSON)); Mapping update = doc.dynamicMappingsUpdate(); @@ -191,10 +191,10 @@ public class DocumentMapperMergeTests extends ESSingleNodeTestCase { while(stopped.get() == false) { final String fieldName = lastIntroducedFieldName.get(); final BytesReference source = new BytesArray("{ \"" + fieldName + "\" : \"test\" }"); - ParsedDocument parsedDoc = documentMapper.parse(SourceToParse.source("test", - "test", - "random", - source, + ParsedDocument parsedDoc = documentMapper.parse(SourceToParse.source("test", + "test", + "random", + source, XContentType.JSON)); if (parsedDoc.dynamicMappingsUpdate() != null) { // not in the mapping yet, try again @@ -235,4 +235,65 @@ public class DocumentMapperMergeTests extends ESSingleNodeTestCase { assertNotNull(mapper.mappers().getMapper("foo")); assertFalse(mapper.sourceMapper().enabled()); } + + public void testMergeChildType() throws IOException { + DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); + + String initMapping = XContentFactory.jsonBuilder().startObject().startObject("child") + .startObject("_parent").field("type", "parent").endObject() + .endObject().endObject().string(); + DocumentMapper initMapper = parser.parse("child", new CompressedXContent(initMapping)); + + assertThat(initMapper.mappers().getMapper("_parent#parent"), notNullValue()); + + String updatedMapping1 = XContentFactory.jsonBuilder().startObject().startObject("child") + .startObject("properties") + .startObject("name").field("type", "text").endObject() + .endObject().endObject().endObject().string(); + DocumentMapper updatedMapper1 = parser.parse("child", new CompressedXContent(updatedMapping1)); + DocumentMapper mergedMapper1 = initMapper.merge(updatedMapper1.mapping(), false); + + assertThat(mergedMapper1.mappers().getMapper("_parent#parent"), notNullValue()); + assertThat(mergedMapper1.mappers().getMapper("name"), notNullValue()); + + String updatedMapping2 = XContentFactory.jsonBuilder().startObject().startObject("child") + .startObject("_parent").field("type", "parent").endObject() + .startObject("properties") + .startObject("age").field("type", "byte").endObject() + .endObject().endObject().endObject().string(); + DocumentMapper updatedMapper2 = parser.parse("child", new CompressedXContent(updatedMapping2)); + DocumentMapper mergedMapper2 = mergedMapper1.merge(updatedMapper2.mapping(), false); + + assertThat(mergedMapper2.mappers().getMapper("_parent#parent"), notNullValue()); + assertThat(mergedMapper2.mappers().getMapper("name"), notNullValue()); + assertThat(mergedMapper2.mappers().getMapper("age"), notNullValue()); + + String modParentMapping = XContentFactory.jsonBuilder().startObject().startObject("child") + .startObject("_parent").field("type", "new_parent").endObject() + .endObject().endObject().string(); + DocumentMapper modParentMapper = parser.parse("child", new CompressedXContent(modParentMapping)); + Exception e = expectThrows(IllegalArgumentException.class, () -> 
initMapper.merge(modParentMapper.mapping(), false)); + assertThat(e.getMessage(), containsString("The _parent field's type option can't be changed: [parent]->[new_parent]")); + } + + public void testMergeAddingParent() throws IOException { + DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); + + String initMapping = XContentFactory.jsonBuilder().startObject().startObject("cowboy") + .startObject("properties") + .startObject("name").field("type", "text").endObject() + .endObject().endObject().endObject().string(); + DocumentMapper initMapper = parser.parse("cowboy", new CompressedXContent(initMapping)); + + assertThat(initMapper.mappers().getMapper("name"), notNullValue()); + + String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("cowboy") + .startObject("_parent").field("type", "parent").endObject() + .startObject("properties") + .startObject("age").field("type", "byte").endObject() + .endObject().endObject().endObject().string(); + DocumentMapper updatedMapper = parser.parse("cowboy", new CompressedXContent(updatedMapping)); + Exception e = expectThrows(IllegalArgumentException.class, () -> initMapper.merge(updatedMapper.mapping(), false)); + assertThat(e.getMessage(), containsString("The _parent field's type option can't be changed: [null]->[parent]")); + } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java index 49864768edf..d3d099672ba 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; @@ -148,6 +149,92 @@ public class DocumentParserTests extends ESSingleNodeTestCase { e.getMessage()); } + public void testNestedHaveIdAndTypeFields() throws Exception { + DocumentMapperParser mapperParser1 = createIndex("index1", Settings.builder() + .put("index.mapping.single_type", false).build() + ).mapperService().documentMapperParser(); + DocumentMapperParser mapperParser2 = createIndex("index2", Settings.builder() + .put("index.mapping.single_type", true).build() + ).mapperService().documentMapperParser(); + + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties"); + { + mapping.startObject("foo"); + mapping.field("type", "nested"); + { + mapping.startObject("properties"); + { + + mapping.startObject("bar"); + mapping.field("type", "keyword"); + mapping.endObject(); + } + mapping.endObject(); + } + mapping.endObject(); + } + { + mapping.startObject("baz"); + mapping.field("type", "keyword"); + mapping.endObject(); + } + mapping.endObject().endObject().endObject(); + DocumentMapper mapper1 = mapperParser1.parse("type", new CompressedXContent(mapping.string())); + DocumentMapper mapper2 = mapperParser2.parse("type", new CompressedXContent(mapping.string())); + + XContentBuilder doc = XContentFactory.jsonBuilder().startObject(); + { + doc.startArray("foo"); + { + doc.startObject(); + doc.field("bar", 
"value1"); + doc.endObject(); + } + doc.endArray(); + doc.field("baz", "value2"); + } + doc.endObject(); + + // Verify in the case where multiple types are allowed that the _uid field is added to nested documents: + ParsedDocument result = mapper1.parse(SourceToParse.source("index1", "type", "1", doc.bytes(), XContentType.JSON)); + assertEquals(2, result.docs().size()); + // Nested document: + assertNull(result.docs().get(0).getField(IdFieldMapper.NAME)); + assertNotNull(result.docs().get(0).getField(UidFieldMapper.NAME)); + assertEquals("type#1", result.docs().get(0).getField(UidFieldMapper.NAME).stringValue()); + assertEquals(UidFieldMapper.Defaults.NESTED_FIELD_TYPE, result.docs().get(0).getField(UidFieldMapper.NAME).fieldType()); + assertNotNull(result.docs().get(0).getField(TypeFieldMapper.NAME)); + assertEquals("__foo", result.docs().get(0).getField(TypeFieldMapper.NAME).stringValue()); + assertEquals("value1", result.docs().get(0).getField("foo.bar").binaryValue().utf8ToString()); + // Root document: + assertNull(result.docs().get(1).getField(IdFieldMapper.NAME)); + assertNotNull(result.docs().get(1).getField(UidFieldMapper.NAME)); + assertEquals("type#1", result.docs().get(1).getField(UidFieldMapper.NAME).stringValue()); + assertEquals(UidFieldMapper.Defaults.FIELD_TYPE, result.docs().get(1).getField(UidFieldMapper.NAME).fieldType()); + assertNotNull(result.docs().get(1).getField(TypeFieldMapper.NAME)); + assertEquals("type", result.docs().get(1).getField(TypeFieldMapper.NAME).stringValue()); + assertEquals("value2", result.docs().get(1).getField("baz").binaryValue().utf8ToString()); + + // Verify in the case where only a single type is allowed that the _id field is added to nested documents: + result = mapper2.parse(SourceToParse.source("index2", "type", "1", doc.bytes(), XContentType.JSON)); + assertEquals(2, result.docs().size()); + // Nested document: + assertNull(result.docs().get(0).getField(UidFieldMapper.NAME)); + assertNotNull(result.docs().get(0).getField(IdFieldMapper.NAME)); + assertEquals("1", result.docs().get(0).getField(IdFieldMapper.NAME).stringValue()); + assertEquals(IdFieldMapper.Defaults.NESTED_FIELD_TYPE, result.docs().get(0).getField(IdFieldMapper.NAME).fieldType()); + assertNotNull(result.docs().get(0).getField(TypeFieldMapper.NAME)); + assertEquals("__foo", result.docs().get(0).getField(TypeFieldMapper.NAME).stringValue()); + assertEquals("value1", result.docs().get(0).getField("foo.bar").binaryValue().utf8ToString()); + // Root document: + assertNull(result.docs().get(1).getField(UidFieldMapper.NAME)); + assertNotNull(result.docs().get(1).getField(IdFieldMapper.NAME)); + assertEquals("1", result.docs().get(1).getField(IdFieldMapper.NAME).stringValue()); + assertEquals(IdFieldMapper.Defaults.FIELD_TYPE, result.docs().get(1).getField(IdFieldMapper.NAME).fieldType()); + assertNull(result.docs().get(1).getField(TypeFieldMapper.NAME)); + assertEquals("value2", result.docs().get(1).getField("baz").binaryValue().utf8ToString()); + } + public void testPropagateDynamicWithExistingMapper() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") @@ -639,7 +726,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase { .value(0) .value(1) .endArray().endObject().bytes(); - MapperParsingException exception = expectThrows(MapperParsingException.class, + MapperParsingException exception = 
expectThrows(MapperParsingException.class, () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); assertEquals("Could not dynamically add mapping for field [foo.bar.baz]. " + "Existing mapping for [foo] must be of type object but found [long].", exception.getMessage()); @@ -758,7 +845,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase { BytesReference bytes = XContentFactory.jsonBuilder() .startObject().field("foo.bar.baz", 0) .endObject().bytes(); - MapperParsingException exception = expectThrows(MapperParsingException.class, + MapperParsingException exception = expectThrows(MapperParsingException.class, () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); assertEquals("Could not dynamically add mapping for field [foo.bar.baz]. " + "Existing mapping for [foo] must be of type object but found [long].", exception.getMessage()); @@ -880,7 +967,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase { BytesReference bytes = XContentFactory.jsonBuilder().startObject().startObject("foo.bar.baz").field("a", 0).endObject().endObject() .bytes(); - MapperParsingException exception = expectThrows(MapperParsingException.class, + MapperParsingException exception = expectThrows(MapperParsingException.class, () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); assertEquals("Could not dynamically add mapping for field [foo.bar.baz]. " @@ -1017,7 +1104,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase { .field("test2", "value2") .startObject("inner").field("inner_field", "inner_value").endObject() .endObject() - .bytes(), + .bytes(), XContentType.JSON)); assertThat(doc.rootDoc().get("test1"), equalTo("value1")); @@ -1036,7 +1123,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase { .field("test2", "value2") .startObject("inner").field("inner_field", "inner_value").endObject() .endObject().endObject() - .bytes(), + .bytes(), XContentType.JSON)); assertThat(doc.rootDoc().get("type.test1"), equalTo("value1")); @@ -1056,7 +1143,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase { .field("test2", "value2") .startObject("inner").field("inner_field", "inner_value").endObject() .endObject() - .bytes(), + .bytes(), XContentType.JSON)); assertThat(doc.rootDoc().get("type"), equalTo("value_type")); @@ -1077,7 +1164,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase { .field("test2", "value2") .startObject("inner").field("inner_field", "inner_value").endObject() .endObject().endObject() - .bytes(), + .bytes(), XContentType.JSON)); assertThat(doc.rootDoc().get("type.type"), equalTo("value_type")); @@ -1098,7 +1185,7 @@ public class DocumentParserTests extends ESSingleNodeTestCase { .field("test2", "value2") .startObject("inner").field("inner_field", "inner_value").endObject() .endObject() - .bytes(), + .bytes(), XContentType.JSON)); // in this case, we analyze the type object as the actual document, and ignore the other same level fields diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java index f1a2e97f0bf..eb1148e9f45 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java @@ -132,7 +132,6 @@ public class MultiFieldTests extends ESSingleNodeTestCase { ), 
indexService.mapperService()).build(indexService.mapperService()); String builtMapping = builderDocMapper.mappingSource().string(); -// System.out.println(builtMapping); // reparse it DocumentMapper docMapper = indexService.mapperService().documentMapperParser().parse("person", new CompressedXContent(builtMapping)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java index 0f976e12f39..861586370ae 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java @@ -24,8 +24,10 @@ import org.apache.lucene.analysis.CannedTokenStream; import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.Token; import org.apache.lucene.analysis.TokenStream; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -144,4 +146,55 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase { ); assertThat(e.getMessage(), containsString("name cannot be empty string")); } + + public void testParseNullValue() throws Exception { + DocumentMapper mapper = createIndexWithTokenCountField(); + ParseContext.Document doc = parseDocument(mapper, createDocument(null)); + assertNull(doc.getField("test.tc")); + } + + public void testParseEmptyValue() throws Exception { + DocumentMapper mapper = createIndexWithTokenCountField(); + ParseContext.Document doc = parseDocument(mapper, createDocument("")); + assertEquals(0, doc.getField("test.tc").numericValue()); + } + + public void testParseNotNullValue() throws Exception { + DocumentMapper mapper = createIndexWithTokenCountField(); + ParseContext.Document doc = parseDocument(mapper, createDocument("three tokens string")); + assertEquals(3, doc.getField("test.tc").numericValue()); + } + + private DocumentMapper createIndexWithTokenCountField() throws IOException { + final String content = XContentFactory.jsonBuilder().startObject() + .startObject("person") + .startObject("properties") + .startObject("test") + .field("type", "text") + .startObject("fields") + .startObject("tc") + .field("type", "token_count") + .field("analyzer", "standard") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject().endObject().string(); + + return createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(content)); + } + + private SourceToParse createDocument(String fieldValue) throws Exception { + BytesReference request = XContentFactory.jsonBuilder() + .startObject() + .field("test", fieldValue) + .endObject().bytes(); + + return SourceToParse.source("test", "person", "1", request, XContentType.JSON); + } + + private ParseContext.Document parseDocument(DocumentMapper mapper, SourceToParse request) { + return mapper.parse(request) + .docs().stream().findFirst().orElseThrow(() -> new IllegalStateException("Test object not parsed")); + } } diff --git a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index 18da1d37b72..31687c5d9ff 100644 --- 
a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -430,7 +430,7 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase 0); Query query = multiMatchQuery("test").field(STRING_FIELD_NAME).field(STRING_FIELD_NAME_2).useDisMax(false).toQuery(createShardContext()); - assertThat(query, instanceOf(BooleanQuery.class)); - BooleanQuery bQuery = (BooleanQuery) query; - assertThat(bQuery.clauses().size(), equalTo(2)); - assertThat(assertBooleanSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test"))); - assertThat(assertBooleanSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test"))); + assertThat(query, instanceOf(DisjunctionMaxQuery.class)); + DisjunctionMaxQuery dQuery = (DisjunctionMaxQuery) query; + assertThat(dQuery.getTieBreakerMultiplier(), equalTo(1.0f)); + assertThat(dQuery.getDisjuncts().size(), equalTo(2)); + assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test"))); + assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test"))); } public void testToQueryMultipleFieldsDisMaxQuery() throws Exception { @@ -198,6 +200,7 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase disjuncts = disMaxQuery.getDisjuncts(); assertThat(disjuncts.get(0), instanceOf(TermQuery.class)); assertThat(((TermQuery) disjuncts.get(0)).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test"))); @@ -208,11 +211,12 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase 0); Query query = multiMatchQuery("test").field("mapped_str*").useDisMax(false).toQuery(createShardContext()); - assertThat(query, instanceOf(BooleanQuery.class)); - BooleanQuery bQuery = (BooleanQuery) query; - assertThat(bQuery.clauses().size(), equalTo(2)); - assertThat(assertBooleanSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test"))); - assertThat(assertBooleanSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test"))); + assertThat(query, instanceOf(DisjunctionMaxQuery.class)); + DisjunctionMaxQuery dQuery = (DisjunctionMaxQuery) query; + assertThat(dQuery.getTieBreakerMultiplier(), equalTo(1.0f)); + assertThat(dQuery.getDisjuncts().size(), equalTo(2)); + assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test"))); + assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test"))); } public void testToQueryFieldMissing() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java index a9e1b6ce23c..f25bcc879cb 100644 --- a/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java @@ -55,7 +55,7 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase 0); Query query = queryStringQuery("test").field("mapped_str*").useDisMax(false).toQuery(createShardContext()); - assertThat(query, instanceOf(BooleanQuery.class)); - BooleanQuery bQuery = (BooleanQuery) query; - assertThat(bQuery.clauses().size(), equalTo(2)); - assertThat(assertBooleanSubQuery(query, 
TermQuery.class, 0).getTerm(), + assertThat(query, instanceOf(DisjunctionMaxQuery.class)); + DisjunctionMaxQuery dQuery = (DisjunctionMaxQuery) query; + assertThat(dQuery.getDisjuncts().size(), equalTo(2)); + assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 0).getTerm(), equalTo(new Term(STRING_FIELD_NAME, "test"))); - assertThat(assertBooleanSubQuery(query, TermQuery.class, 1).getTerm(), + assertThat(assertDisjunctionSubQuery(query, TermQuery.class, 1).getTerm(), equalTo(new Term(STRING_FIELD_NAME_2, "test"))); } @@ -397,6 +400,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); + assumeTrue("5.x behaves differently, so skip on non-6.x indices", + indexVersionCreated.onOrAfter(Version.V_6_0_0_alpha1)); + QueryShardContext context = createShardContext(); QueryStringQueryBuilder queryBuilder = new QueryStringQueryBuilder("foo:*"); Query query = queryBuilder.toQuery(context); @@ -858,9 +865,9 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0) { Query luceneQuery = queryBuilder.toQuery(shardContext); - assertThat(luceneQuery, instanceOf(BooleanQuery.class)); + assertThat(luceneQuery, anyOf(instanceOf(BooleanQuery.class), instanceOf(DisjunctionMaxQuery.class))); } } @@ -229,30 +235,39 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase 1) { - assertThat(query, instanceOf(BooleanQuery.class)); - BooleanQuery boolQuery = (BooleanQuery) query; - for (BooleanClause clause : boolQuery.clauses()) { - if (clause.getQuery() instanceof TermQuery) { - TermQuery inner = (TermQuery) clause.getQuery(); - assertThat(inner.getTerm().bytes().toString(), is(inner.getTerm().bytes().toString().toLowerCase(Locale.ROOT))); + assertThat(query, anyOf(instanceOf(BooleanQuery.class), instanceOf(DisjunctionMaxQuery.class))); + if (query instanceof BooleanQuery) { + BooleanQuery boolQuery = (BooleanQuery) query; + for (BooleanClause clause : boolQuery.clauses()) { + if (clause.getQuery() instanceof TermQuery) { + TermQuery inner = (TermQuery) clause.getQuery(); + assertThat(inner.getTerm().bytes().toString(), is(inner.getTerm().bytes().toString().toLowerCase(Locale.ROOT))); + } + } + assertThat(boolQuery.clauses().size(), equalTo(queryBuilder.fields().size())); + Iterator> fieldsIterator = queryBuilder.fields().entrySet().iterator(); + for (BooleanClause booleanClause : boolQuery) { + Map.Entry field = fieldsIterator.next(); + assertTermOrBoostQuery(booleanClause.getQuery(), field.getKey(), queryBuilder.value(), field.getValue()); + } + if (queryBuilder.minimumShouldMatch() != null) { + assertThat(boolQuery.getMinimumNumberShouldMatch(), greaterThan(0)); + } + } else if (query instanceof DisjunctionMaxQuery) { + DisjunctionMaxQuery maxQuery = (DisjunctionMaxQuery) query; + for (Query disjunct : maxQuery.getDisjuncts()) { + if (disjunct instanceof TermQuery) { + TermQuery inner = (TermQuery) disjunct; + assertThat(inner.getTerm().bytes().toString(), is(inner.getTerm().bytes().toString().toLowerCase(Locale.ROOT))); + } + } + assertThat(maxQuery.getDisjuncts().size(), equalTo(queryBuilder.fields().size())); + Iterator> fieldsIterator = queryBuilder.fields().entrySet().iterator(); + for (Query disjunct : maxQuery) { + Map.Entry field = fieldsIterator.next(); + assertTermOrBoostQuery(disjunct, field.getKey(), queryBuilder.value(), field.getValue()); } } - assertThat(boolQuery.clauses().size(), equalTo(queryBuilder.fields().size())); - Iterator> fieldsIterator = queryBuilder.fields().entrySet().iterator(); - for (BooleanClause 
booleanClause : boolQuery) { - Map.Entry field = fieldsIterator.next(); - assertTermOrBoostQuery(booleanClause.getQuery(), field.getKey(), queryBuilder.value(), field.getValue()); - } - /** - * TODO: - * Test disabled because we cannot check min should match consistently: - * https://github.com/elastic/elasticsearch/issues/23966 - * - if (queryBuilder.minimumShouldMatch() != null && !boolQuery.isCoordDisabled()) { - assertThat(boolQuery.getMinimumNumberShouldMatch(), greaterThan(0)); - } - * - **/ } else if (queryBuilder.fields().size() == 1) { Map.Entry field = queryBuilder.fields().entrySet().iterator().next(); assertTermOrBoostQuery(query, field.getKey(), queryBuilder.value(), field.getValue()); @@ -261,7 +276,8 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase { @Override protected TypeQueryBuilder doCreateTestQueryBuilder() { - return new TypeQueryBuilder(getRandomType()); + return new TypeQueryBuilder("doc"); } @Override @@ -40,7 +45,11 @@ public class TypeQueryBuilderTests extends AbstractQueryTestCase typeMapping : mappings.entrySet()) { metaData.putMapping(typeMapping.getKey(), typeMapping.getValue()); } diff --git a/core/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java b/core/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java index c13177a6250..9b2200d8be3 100644 --- a/core/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java +++ b/core/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java @@ -238,7 +238,7 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase .source("{}", XContentType.JSON) ); assertTrue(response.isFailed()); - assertNoOpTranslogOperationForDocumentFailure(shards, 1, failureMessage); + assertNoOpTranslogOperationForDocumentFailure(shards, 1, shards.getPrimary().getPrimaryTerm(), failureMessage); shards.assertAllEqual(0); // add some replicas @@ -252,7 +252,7 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase .source("{}", XContentType.JSON) ); assertTrue(response.isFailed()); - assertNoOpTranslogOperationForDocumentFailure(shards, 2, failureMessage); + assertNoOpTranslogOperationForDocumentFailure(shards, 2, shards.getPrimary().getPrimaryTerm(), failureMessage); shards.assertAllEqual(0); } } @@ -323,6 +323,7 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase private static void assertNoOpTranslogOperationForDocumentFailure( Iterable replicationGroup, int expectedOperation, + long expectedPrimaryTerm, String failureMessage) throws IOException { for (IndexShard indexShard : replicationGroup) { try(Translog.View view = indexShard.acquireTranslogView()) { @@ -333,6 +334,7 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase do { assertThat(op.opType(), equalTo(Translog.Operation.Type.NO_OP)); assertThat(op.seqNo(), equalTo(expectedSeqNo)); + assertThat(op.primaryTerm(), equalTo(expectedPrimaryTerm)); assertThat(((Translog.NoOp) op).reason(), containsString(failureMessage)); op = snapshot.next(); expectedSeqNo++; diff --git a/core/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java b/core/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java index ddd69c08495..1c7705d534a 100644 --- a/core/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java @@ -333,7 +333,8 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC replica, (indexShard, node) -> new RecoveryTarget(indexShard, node, recoveryListener, l -> {}) { @Override - public long indexTranslogOperations(final List operations, final int totalTranslogOps) { + public long indexTranslogOperations(final List operations, final int totalTranslogOps) + throws IOException { // index a doc which is not part of the snapshot, but also does not complete on replica replicaEngineFactory.latchIndexers(); threadPool.generic().submit(() -> { @@ -445,7 +446,7 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC } @Override - public long indexTranslogOperations(List operations, int totalTranslogOps) { + public long indexTranslogOperations(List operations, int totalTranslogOps) throws IOException { if (hasBlocked() == false) { blockIfNeeded(RecoveryState.Stage.TRANSLOG); } diff --git a/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java b/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java index be6b0c3865a..a005d7009ea 100644 --- a/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java +++ b/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java @@ -93,13 +93,14 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase { Query parsedQuery = multiMatchQuery("banon").field("name.first", 2).field("name.last", 3).field("foobar").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).toQuery(queryShardContext); try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { Query rewrittenQuery = searcher.searcher().rewrite(parsedQuery); - - BooleanQuery.Builder expected = new BooleanQuery.Builder(); - expected.add(new TermQuery(new Term("foobar", "banon")), BooleanClause.Occur.SHOULD); Query tq1 = new BoostQuery(new TermQuery(new Term("name.first", "banon")), 2); Query tq2 = new BoostQuery(new TermQuery(new Term("name.last", "banon")), 3); - expected.add(new DisjunctionMaxQuery(Arrays.asList(tq1, tq2), 0f), BooleanClause.Occur.SHOULD); - assertEquals(expected.build(), rewrittenQuery); + Query expected = new DisjunctionMaxQuery( + Arrays.asList( + new TermQuery(new Term("foobar", "banon")), + new DisjunctionMaxQuery(Arrays.asList(tq1, tq2), 0f) + ), 0f); + assertEquals(expected, rewrittenQuery); } } @@ -110,7 +111,7 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase { ft2.setName("bar"); Term[] terms = new Term[] { new Term("foo", "baz"), new Term("bar", "baz") }; float[] boosts = new float[] {2, 3}; - Query expected = BlendedTermQuery.booleanBlendedQuery(terms, boosts); + Query expected = BlendedTermQuery.dismaxBlendedQuery(terms, boosts, 1.0f); Query actual = MultiMatchQuery.blendTerm( indexService.newQueryShardContext(randomInt(20), null, () -> { throw new UnsupportedOperationException(); }), new BytesRef("baz"), null, 1f, new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3)); @@ -126,7 +127,7 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase { ft2.setBoost(10); Term[] terms = new Term[] { new Term("foo", "baz"), new Term("bar", "baz") }; float[] boosts = new float[] {200, 30}; - Query expected = BlendedTermQuery.booleanBlendedQuery(terms, boosts); + Query expected = BlendedTermQuery.dismaxBlendedQuery(terms, boosts, 1.0f); Query actual = MultiMatchQuery.blendTerm( 
indexService.newQueryShardContext(randomInt(20), null, () -> { throw new UnsupportedOperationException(); }), new BytesRef("baz"), null, 1f, new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3)); @@ -145,7 +146,7 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase { ft2.setName("bar"); Term[] terms = new Term[] { new Term("foo", "baz") }; float[] boosts = new float[] {2}; - Query expected = BlendedTermQuery.booleanBlendedQuery(terms, boosts); + Query expected = BlendedTermQuery.dismaxBlendedQuery(terms, boosts, 1.0f); Query actual = MultiMatchQuery.blendTerm( indexService.newQueryShardContext(randomInt(20), null, () -> { throw new UnsupportedOperationException(); }), new BytesRef("baz"), null, 1f, new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3)); @@ -164,12 +165,13 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase { ft2.setName("bar"); Term[] terms = new Term[] { new Term("foo", "baz") }; float[] boosts = new float[] {2}; - Query expectedClause1 = BlendedTermQuery.booleanBlendedQuery(terms, boosts); - Query expectedClause2 = new BoostQuery(new MatchAllDocsQuery(), 3); - Query expected = new BooleanQuery.Builder() - .add(expectedClause1, Occur.SHOULD) - .add(expectedClause2, Occur.SHOULD) - .build(); + Query expectedDisjunct1 = BlendedTermQuery.dismaxBlendedQuery(terms, boosts, 1.0f); + Query expectedDisjunct2 = new BoostQuery(new MatchAllDocsQuery(), 3); + Query expected = new DisjunctionMaxQuery( + Arrays.asList( + expectedDisjunct2, + expectedDisjunct1 + ), 1.0f); Query actual = MultiMatchQuery.blendTerm( indexService.newQueryShardContext(randomInt(20), null, () -> { throw new UnsupportedOperationException(); }), new BytesRef("baz"), null, 1f, new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3)); diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardOperationPermitsTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardOperationPermitsTests.java index 18a250a4282..41dc8f520cc 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardOperationPermitsTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardOperationPermitsTests.java @@ -25,20 +25,30 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.threadpool.ThreadPoolStats; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import java.util.ArrayList; +import java.util.Collections; import java.util.List; +import java.util.Set; +import java.util.concurrent.BrokenBarrierException; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.instanceOf; public class IndexShardOperationPermitsTests extends ESTestCase { @@ -143,7 +153,7 @@ public class IndexShardOperationPermitsTests extends ESTestCase { public void testOperationsDelayedIfBlock() throws 
ExecutionException, InterruptedException, TimeoutException { PlainActionFuture future = new PlainActionFuture<>(); - try (Releasable releasable = blockAndWait()) { + try (Releasable ignored = blockAndWait()) { permits.acquire(future, ThreadPool.Names.GENERIC, true); assertFalse(future.isDone()); } @@ -184,7 +194,7 @@ public class IndexShardOperationPermitsTests extends ESTestCase { } }; - try (Releasable releasable = blockAndWait()) { + try (Releasable ignored = blockAndWait()) { // we preserve the thread context here so that we have a different context in the call to acquire than the context present // when the releasable is closed try (ThreadContext.StoredContext ignore = context.newStoredContext(false)) { @@ -238,6 +248,202 @@ public class IndexShardOperationPermitsTests extends ESTestCase { }; } + public void testAsyncBlockOperationsOperationWhileBlocked() throws InterruptedException { + final CountDownLatch blockAcquired = new CountDownLatch(1); + final CountDownLatch releaseBlock = new CountDownLatch(1); + final AtomicBoolean blocked = new AtomicBoolean(); + permits.asyncBlockOperations( + 30, + TimeUnit.MINUTES, + () -> { + blocked.set(true); + blockAcquired.countDown(); + releaseBlock.await(); + }, + e -> { + throw new RuntimeException(e); + }); + blockAcquired.await(); + assertTrue(blocked.get()); + + // an operation that is submitted while there is a delay in place should be delayed + final CountDownLatch delayedOperation = new CountDownLatch(1); + final AtomicBoolean delayed = new AtomicBoolean(); + final Thread thread = new Thread(() -> + permits.acquire( + new ActionListener() { + @Override + public void onResponse(Releasable releasable) { + delayed.set(true); + releasable.close(); + delayedOperation.countDown(); + } + + @Override + public void onFailure(Exception e) { + + } + }, + ThreadPool.Names.GENERIC, + false)); + thread.start(); + assertFalse(delayed.get()); + releaseBlock.countDown(); + delayedOperation.await(); + assertTrue(delayed.get()); + thread.join(); + } + + public void testAsyncBlockOperationsOperationBeforeBlocked() throws InterruptedException, BrokenBarrierException { + final CyclicBarrier barrier = new CyclicBarrier(2); + final CountDownLatch operationExecutingLatch = new CountDownLatch(1); + final CountDownLatch firstOperationLatch = new CountDownLatch(1); + final CountDownLatch firstOperationCompleteLatch = new CountDownLatch(1); + final Thread firstOperationThread = + new Thread(controlledAcquire(barrier, operationExecutingLatch, firstOperationLatch, firstOperationCompleteLatch)); + firstOperationThread.start(); + + barrier.await(); + + operationExecutingLatch.await(); + + // now we will delay operations while the first operation is still executing (because it is latched) + final CountDownLatch blockedLatch = new CountDownLatch(1); + final AtomicBoolean onBlocked = new AtomicBoolean(); + permits.asyncBlockOperations( + 30, + TimeUnit.MINUTES, + () -> { + onBlocked.set(true); + blockedLatch.countDown(); + }, e -> { + throw new RuntimeException(e); + }); + + assertFalse(onBlocked.get()); + + // if we submit another operation, it should be delayed + final CountDownLatch secondOperationExecuting = new CountDownLatch(1); + final CountDownLatch secondOperationComplete = new CountDownLatch(1); + final AtomicBoolean secondOperation = new AtomicBoolean(); + final Thread secondOperationThread = new Thread(() -> { + secondOperationExecuting.countDown(); + permits.acquire( + new ActionListener() { + @Override + public void onResponse(Releasable releasable) { + 
secondOperation.set(true); + releasable.close(); + secondOperationComplete.countDown(); + } + + @Override + public void onFailure(Exception e) { + throw new RuntimeException(e); + } + }, + ThreadPool.Names.GENERIC, + false); + }); + secondOperationThread.start(); + + secondOperationExecuting.await(); + assertFalse(secondOperation.get()); + + firstOperationLatch.countDown(); + firstOperationCompleteLatch.await(); + blockedLatch.await(); + assertTrue(onBlocked.get()); + + secondOperationComplete.await(); + assertTrue(secondOperation.get()); + + firstOperationThread.join(); + secondOperationThread.join(); + } + + public void testAsyncBlockOperationsRace() throws Exception { + // we racily submit operations and a delay, and then ensure that all operations were actually completed + final int operations = scaledRandomIntBetween(1, 64); + final CyclicBarrier barrier = new CyclicBarrier(1 + 1 + operations); + final CountDownLatch operationLatch = new CountDownLatch(1 + operations); + final Set values = Collections.newSetFromMap(new ConcurrentHashMap<>()); + final List threads = new ArrayList<>(); + for (int i = 0; i < operations; i++) { + final int value = i; + final Thread thread = new Thread(() -> { + try { + barrier.await(); + } catch (final BrokenBarrierException | InterruptedException e) { + throw new RuntimeException(e); + } + permits.acquire( + new ActionListener() { + @Override + public void onResponse(Releasable releasable) { + values.add(value); + releasable.close(); + operationLatch.countDown(); + } + + @Override + public void onFailure(Exception e) { + + } + }, + ThreadPool.Names.GENERIC, + false); + }); + thread.start(); + threads.add(thread); + } + + final Thread blockingThread = new Thread(() -> { + try { + barrier.await(); + } catch (final BrokenBarrierException | InterruptedException e) { + throw new RuntimeException(e); + } + permits.asyncBlockOperations( + 30, + TimeUnit.MINUTES, + () -> { + values.add(operations); + operationLatch.countDown(); + }, e -> { + throw new RuntimeException(e); + }); + }); + blockingThread.start(); + + barrier.await(); + + operationLatch.await(); + for (final Thread thread : threads) { + thread.join(); + } + blockingThread.join(); + + // check that all operations completed + for (int i = 0; i < operations; i++) { + assertTrue(values.contains(i)); + } + assertTrue(values.contains(operations)); + /* + * The block operation is executed on another thread and the operations can have completed before this thread has returned all the + * permits to the semaphore. We wait here until all generic threads are idle as an indication that all permits have been returned to + * the semaphore. 
+ */ + awaitBusy(() -> { + for (final ThreadPoolStats.Stats stats : threadPool.stats()) { + if (ThreadPool.Names.GENERIC.equals(stats.getName())) { + return stats.getActive() == 0; + } + } + return false; + }); + } + public void testActiveOperationsCount() throws ExecutionException, InterruptedException { PlainActionFuture future1 = new PlainActionFuture<>(); permits.acquire(future1, ThreadPool.Names.GENERIC, true); @@ -267,4 +473,136 @@ public class IndexShardOperationPermitsTests extends ESTestCase { future3.get().close(); assertThat(permits.getActiveOperationsCount(), equalTo(0)); } + + public void testAsyncBlockOperationsOnFailure() throws InterruptedException { + final AtomicReference reference = new AtomicReference<>(); + final CountDownLatch onFailureLatch = new CountDownLatch(1); + permits.asyncBlockOperations( + 10, + TimeUnit.MINUTES, + () -> { + throw new RuntimeException("simulated"); + }, + e -> { + reference.set(e); + onFailureLatch.countDown(); + }); + onFailureLatch.await(); + assertThat(reference.get(), instanceOf(RuntimeException.class)); + assertThat(reference.get(), hasToString(containsString("simulated"))); + } + + public void testTimeout() throws BrokenBarrierException, InterruptedException { + final CyclicBarrier barrier = new CyclicBarrier(2); + final CountDownLatch operationExecutingLatch = new CountDownLatch(1); + final CountDownLatch operationLatch = new CountDownLatch(1); + final CountDownLatch operationCompleteLatch = new CountDownLatch(1); + + final Thread thread = new Thread(controlledAcquire(barrier, operationExecutingLatch, operationLatch, operationCompleteLatch)); + thread.start(); + + barrier.await(); + + operationExecutingLatch.await(); + + { + final TimeoutException e = + expectThrows(TimeoutException.class, () -> permits.blockOperations(1, TimeUnit.MILLISECONDS, () -> {})); + assertThat(e, hasToString(containsString("timeout while blocking operations"))); + } + + { + final AtomicReference reference = new AtomicReference<>(); + final CountDownLatch onFailureLatch = new CountDownLatch(1); + permits.asyncBlockOperations( + 1, + TimeUnit.MILLISECONDS, + () -> {}, + e -> { + reference.set(e); + onFailureLatch.countDown(); + }); + onFailureLatch.await(); + assertThat(reference.get(), hasToString(containsString("timeout while blocking operations"))); + } + + operationLatch.countDown(); + + operationCompleteLatch.await(); + + thread.join(); + } + + public void testNoPermitsRemaining() throws InterruptedException { + permits.semaphore.tryAcquire(IndexShardOperationPermits.TOTAL_PERMITS, 1, TimeUnit.SECONDS); + final IllegalStateException e = expectThrows( + IllegalStateException.class, + () -> this.permits.acquire( + new ActionListener() { + @Override + public void onResponse(Releasable releasable) { + assert false; + } + + @Override + public void onFailure(Exception e) { + assert false; + } + }, + ThreadPool.Names.GENERIC, + false)); + assertThat(e, hasToString(containsString("failed to obtain permit but operations are not delayed"))); + permits.semaphore.release(IndexShardOperationPermits.TOTAL_PERMITS); + } + + /** + * Returns an operation that acquires a permit and synchronizes in the following manner: + *
+ * <ul>
+ * <li>waits on the {@code barrier} before acquiring a permit</li>
+ * <li>counts down the {@code operationExecutingLatch} when it acquires the permit</li>
+ * <li>waits on the {@code operationLatch} before releasing the permit</li>
+ * <li>counts down the {@code operationCompleteLatch} after releasing the permit</li>
+ * </ul>
+ * + * @param barrier the barrier to wait on + * @param operationExecutingLatch the latch to countdown after acquiring the permit + * @param operationLatch the latch to wait on before releasing the permit + * @param operationCompleteLatch the latch to countdown after releasing the permit + * @return a controllable runnable that acquires a permit + */ + private Runnable controlledAcquire( + final CyclicBarrier barrier, + final CountDownLatch operationExecutingLatch, + final CountDownLatch operationLatch, + final CountDownLatch operationCompleteLatch) { + return () -> { + try { + barrier.await(); + } catch (final BrokenBarrierException | InterruptedException e) { + throw new RuntimeException(e); + } + permits.acquire( + new ActionListener() { + @Override + public void onResponse(Releasable releasable) { + operationExecutingLatch.countDown(); + try { + operationLatch.await(); + } catch (final InterruptedException e) { + throw new RuntimeException(e); + } + releasable.close(); + operationCompleteLatch.countDown(); + } + + @Override + public void onFailure(Exception e) { + throw new RuntimeException(e); + } + }, + ThreadPool.Names.GENERIC, + false); + }; + } + } diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 6dce3dab3a9..5072e7a3b89 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -102,12 +102,14 @@ import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; +import java.nio.charset.Charset; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; @@ -131,16 +133,20 @@ import static java.util.Collections.emptySet; import static org.elasticsearch.common.lucene.Lucene.cleanLuceneIndex; import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.index.VersionType.EXTERNAL; import static org.elasticsearch.index.engine.Engine.Operation.Origin.PRIMARY; +import static org.elasticsearch.index.engine.Engine.Operation.Origin.REPLICA; import static org.elasticsearch.repositories.RepositoryData.EMPTY_REPO_GEN; import static org.elasticsearch.test.hamcrest.RegexMatcher.matches; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; /** @@ -280,6 +286,178 @@ public class IndexShardTests extends IndexShardTestCase { } } + public void testPrimaryPromotionDelaysOperations() throws IOException, BrokenBarrierException, InterruptedException { + final IndexShard indexShard = newStartedShard(false); + + final int operations = scaledRandomIntBetween(1, 64); + final CyclicBarrier barrier = new CyclicBarrier(1 + operations); + final CountDownLatch latch = new CountDownLatch(operations); + final 
CountDownLatch operationLatch = new CountDownLatch(1); + final List threads = new ArrayList<>(); + for (int i = 0; i < operations; i++) { + final Thread thread = new Thread(() -> { + try { + barrier.await(); + } catch (final BrokenBarrierException | InterruptedException e) { + throw new RuntimeException(e); + } + indexShard.acquireReplicaOperationPermit( + indexShard.getPrimaryTerm(), + new ActionListener() { + @Override + public void onResponse(Releasable releasable) { + latch.countDown(); + try { + operationLatch.await(); + } catch (final InterruptedException e) { + throw new RuntimeException(e); + } + releasable.close(); + } + + @Override + public void onFailure(Exception e) { + throw new RuntimeException(e); + } + }, + ThreadPool.Names.INDEX); + }); + thread.start(); + threads.add(thread); + } + + barrier.await(); + latch.await(); + + // promote the replica + final ShardRouting replicaRouting = indexShard.routingEntry(); + final ShardRouting primaryRouting = + TestShardRouting.newShardRouting( + replicaRouting.shardId(), + replicaRouting.currentNodeId(), + null, + true, + ShardRoutingState.STARTED, + replicaRouting.allocationId()); + indexShard.updateRoutingEntry(primaryRouting); + indexShard.updatePrimaryTerm(indexShard.getPrimaryTerm() + 1); + + final int delayedOperations = scaledRandomIntBetween(1, 64); + final CyclicBarrier delayedOperationsBarrier = new CyclicBarrier(1 + delayedOperations); + final CountDownLatch delayedOperationsLatch = new CountDownLatch(delayedOperations); + final AtomicLong counter = new AtomicLong(); + final List delayedThreads = new ArrayList<>(); + for (int i = 0; i < delayedOperations; i++) { + final Thread thread = new Thread(() -> { + try { + delayedOperationsBarrier.await(); + } catch (final BrokenBarrierException | InterruptedException e) { + throw new RuntimeException(e); + } + indexShard.acquirePrimaryOperationPermit( + new ActionListener() { + @Override + public void onResponse(Releasable releasable) { + counter.incrementAndGet(); + releasable.close(); + delayedOperationsLatch.countDown(); + } + + @Override + public void onFailure(Exception e) { + throw new RuntimeException(e); + } + }, + ThreadPool.Names.INDEX); + }); + thread.start(); + delayedThreads.add(thread); + } + + delayedOperationsBarrier.await(); + + assertThat(counter.get(), equalTo(0L)); + + operationLatch.countDown(); + for (final Thread thread : threads) { + thread.join(); + } + + delayedOperationsLatch.await(); + + assertThat(counter.get(), equalTo((long) delayedOperations)); + + for (final Thread thread : delayedThreads) { + thread.join(); + } + + closeShards(indexShard); + } + + public void testPrimaryFillsSeqNoGapsOnPromotion() throws Exception { + final IndexShard indexShard = newStartedShard(false); + + // most of the time this is large enough that most of the time there will be at least one gap + final int operations = 1024 - scaledRandomIntBetween(0, 1024); + int max = Math.toIntExact(SequenceNumbersService.NO_OPS_PERFORMED); + boolean gap = false; + for (int i = 0; i < operations; i++) { + final String id = Integer.toString(i); + final ParsedDocument doc = testParsedDocument(id, "test", null, new ParseContext.Document(), new BytesArray("{}"), null); + if (!rarely()) { + final Term uid = new Term("_id", doc.id()); + final Engine.Index index = + new Engine.Index(uid, doc, i, indexShard.getPrimaryTerm(), 1, EXTERNAL, REPLICA, System.nanoTime(), -1, false); + indexShard.index(index); + max = i; + } else { + gap = true; + } + } + + final int maxSeqNo = max; + if (gap) { + 
assertThat(indexShard.getLocalCheckpoint(), not(equalTo(maxSeqNo))); + } + + // promote the replica + final ShardRouting replicaRouting = indexShard.routingEntry(); + final ShardRouting primaryRouting = + TestShardRouting.newShardRouting( + replicaRouting.shardId(), + replicaRouting.currentNodeId(), + null, + true, + ShardRoutingState.STARTED, + replicaRouting.allocationId()); + indexShard.updateRoutingEntry(primaryRouting); + indexShard.updatePrimaryTerm(indexShard.getPrimaryTerm() + 1); + + /* + * This operation completing means that the delay operation executed as part of increasing the primary term has completed and the + * gaps are filled. + */ + final CountDownLatch latch = new CountDownLatch(1); + indexShard.acquirePrimaryOperationPermit( + new ActionListener() { + @Override + public void onResponse(Releasable releasable) { + releasable.close(); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + throw new RuntimeException(e); + } + }, + ThreadPool.Names.GENERIC); + + latch.await(); + assertThat(indexShard.getLocalCheckpoint(), equalTo((long) maxSeqNo)); + closeShards(indexShard); + } + public void testOperationPermitsOnPrimaryShards() throws InterruptedException, ExecutionException, IOException { final ShardId shardId = new ShardId("test", "_na_", 0); final IndexShard indexShard; @@ -705,6 +883,26 @@ public class IndexShardTests extends IndexShardTestCase { closeShards(shard); } + public void testRefreshMetric() throws IOException { + IndexShard shard = newStartedShard(); + assertThat(shard.refreshStats().getTotal(), equalTo(2L)); // one refresh on end of recovery, one on starting shard + long initialTotalTime = shard.refreshStats().getTotalTimeInMillis(); + // check time advances + for (int i = 1; shard.refreshStats().getTotalTimeInMillis() == initialTotalTime; i++) { + indexDoc(shard, "test", "test"); + assertThat(shard.refreshStats().getTotal(), equalTo(2L + i - 1)); + shard.refresh("test"); + assertThat(shard.refreshStats().getTotal(), equalTo(2L + i)); + assertThat(shard.refreshStats().getTotalTimeInMillis(), greaterThanOrEqualTo(initialTotalTime)); + } + long refreshCount = shard.refreshStats().getTotal(); + indexDoc(shard, "test", "test"); + try (Engine.GetResult ignored = shard.get(new Engine.Get(true, "test", "test", new Term("_id", "test")))) { + assertThat(shard.refreshStats().getTotal(), equalTo(refreshCount + 1)); + } + closeShards(shard); + } + private ParsedDocument testParsedDocument(String id, String type, String routing, ParseContext.Document document, BytesReference source, Mapping mappingUpdate) { Field idField = new Field("_id", id, IdFieldMapper.Defaults.FIELD_TYPE); @@ -1064,7 +1262,7 @@ public class IndexShardTests extends IndexShardTestCase { test = otherShard.prepareIndexOnReplica( SourceToParse.source(shard.shardId().getIndexName(), test.type(), test.id(), test.source(), XContentType.JSON), - 1, 1, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); + 1, 1, 1, EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); otherShard.index(test); final ShardRouting primaryShardRouting = shard.routingEntry(); @@ -1083,7 +1281,7 @@ public class IndexShardTests extends IndexShardTestCase { while((operation = snapshot.next()) != null) { if (operation.opType() == Translog.Operation.Type.NO_OP) { numNoops++; - assertEquals(1, operation.primaryTerm()); + assertEquals(newShard.getPrimaryTerm(), operation.primaryTerm()); assertEquals(0, operation.seqNo()); } } @@ -1431,7 +1629,7 @@ public class IndexShardTests 
extends IndexShardTestCase { new RecoveryTarget(shard, discoveryNode, recoveryListener, aLong -> { }) { @Override - public long indexTranslogOperations(List operations, int totalTranslogOps) { + public long indexTranslogOperations(List operations, int totalTranslogOps) throws IOException { final long localCheckpoint = super.indexTranslogOperations(operations, totalTranslogOps); assertFalse(replica.getTranslog().syncNeeded()); return localCheckpoint; @@ -1441,6 +1639,112 @@ public class IndexShardTests extends IndexShardTestCase { closeShards(primary, replica); } + public void testRecoverFromTranslog() throws IOException { + Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .build(); + IndexMetaData metaData = IndexMetaData.builder("test") + .putMapping("test", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .settings(settings) + .primaryTerm(0, 1).build(); + IndexShard primary = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null); + List operations = new ArrayList<>(); + int numTotalEntries = randomIntBetween(0, 10); + int numCorruptEntries = 0; + for (int i = 0; i < numTotalEntries; i++) { + if (randomBoolean()) { + operations.add(new Translog.Index("test", "1", 0, 1, VersionType.INTERNAL, + "{\"foo\" : \"bar\"}".getBytes(Charset.forName("UTF-8")), null, null, -1)); + } else { + // corrupt entry + operations.add(new Translog.Index("test", "2", 1, 1, VersionType.INTERNAL, + "{\"foo\" : \"bar}".getBytes(Charset.forName("UTF-8")), null, null, -1)); + numCorruptEntries++; + } + } + + Iterator iterator = operations.iterator(); + Translog.Snapshot snapshot = new Translog.Snapshot() { + + @Override + public int totalOperations() { + return numTotalEntries; + } + + @Override + public Translog.Operation next() throws IOException { + return iterator.hasNext() ? 
iterator.next() : null; + } + }; + primary.markAsRecovering("store", new RecoveryState(primary.routingEntry(), + getFakeDiscoNode(primary.routingEntry().currentNodeId()), + null)); + primary.recoverFromStore(); + + primary.runTranslogRecovery(primary.getEngine(), snapshot); + assertThat(primary.recoveryState().getTranslog().totalOperationsOnStart(), equalTo(numTotalEntries)); + assertThat(primary.recoveryState().getTranslog().totalOperations(), equalTo(numTotalEntries)); + assertThat(primary.recoveryState().getTranslog().recoveredOperations(), equalTo(numTotalEntries - numCorruptEntries)); + + closeShards(primary); + } + + public void testTranslogOpToEngineOpConverter() throws IOException { + Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .build(); + IndexMetaData metaData = IndexMetaData.builder("test") + .putMapping("test", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") + .settings(settings) + .primaryTerm(0, 1).build(); + IndexShard primary = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null); + TranslogOpToEngineOpConverter converter = new TranslogOpToEngineOpConverter(primary.shardId(), primary.mapperService()); + + Engine.Operation.Origin origin = randomFrom(Engine.Operation.Origin.values()); + // convert index op + Translog.Index translogIndexOp = new Translog.Index(randomAlphaOfLength(10), randomAlphaOfLength(10), randomNonNegativeLong(), + randomNonNegativeLong(), randomFrom(VersionType.values()), "{\"foo\" : \"bar\"}".getBytes(Charset.forName("UTF-8")), + randomAlphaOfLength(5), randomAlphaOfLength(5), randomLong()); + Engine.Index engineIndexOp = (Engine.Index) converter.convertToEngineOp(translogIndexOp, origin); + assertEquals(engineIndexOp.origin(), origin); + assertEquals(engineIndexOp.primaryTerm(), translogIndexOp.primaryTerm()); + assertEquals(engineIndexOp.seqNo(), translogIndexOp.seqNo()); + assertEquals(engineIndexOp.version(), translogIndexOp.version()); + assertEquals(engineIndexOp.versionType(), translogIndexOp.versionType().versionTypeForReplicationAndRecovery()); + assertEquals(engineIndexOp.id(), translogIndexOp.id()); + assertEquals(engineIndexOp.type(), translogIndexOp.type()); + assertEquals(engineIndexOp.getAutoGeneratedIdTimestamp(), translogIndexOp.getAutoGeneratedIdTimestamp()); + assertEquals(engineIndexOp.parent(), translogIndexOp.parent()); + assertEquals(engineIndexOp.routing(), translogIndexOp.routing()); + assertEquals(engineIndexOp.source(), translogIndexOp.source()); + + // convert delete op + Translog.Delete translogDeleteOp = new Translog.Delete(randomAlphaOfLength(5), randomAlphaOfLength(5), + new Term(randomAlphaOfLength(5), randomAlphaOfLength(5)), randomNonNegativeLong(), randomNonNegativeLong(), + randomNonNegativeLong(), randomFrom(VersionType.values())); + Engine.Delete engineDeleteOp = (Engine.Delete) converter.convertToEngineOp(translogDeleteOp, origin); + assertEquals(engineDeleteOp.origin(), origin); + assertEquals(engineDeleteOp.primaryTerm(), translogDeleteOp.primaryTerm()); + assertEquals(engineDeleteOp.seqNo(), translogDeleteOp.seqNo()); + assertEquals(engineDeleteOp.version(), translogDeleteOp.version()); + assertEquals(engineDeleteOp.versionType(), translogDeleteOp.versionType().versionTypeForReplicationAndRecovery()); + assertEquals(engineDeleteOp.id(), translogDeleteOp.id()); + assertEquals(engineDeleteOp.type(), translogDeleteOp.type()); + 
assertEquals(engineDeleteOp.uid(), translogDeleteOp.uid()); + + // convert noop + Translog.NoOp translogNoOp = new Translog.NoOp(randomNonNegativeLong(), randomNonNegativeLong(), randomAlphaOfLength(5)); + Engine.NoOp engineNoOp = (Engine.NoOp) converter.convertToEngineOp(translogNoOp, origin); + assertEquals(engineNoOp.origin(), origin); + assertEquals(engineNoOp.primaryTerm(), translogNoOp.primaryTerm()); + assertEquals(engineNoOp.seqNo(), translogNoOp.seqNo()); + assertEquals(engineNoOp.reason(), translogNoOp.reason()); + + closeShards(primary); + } + public void testShardActiveDuringInternalRecovery() throws IOException { IndexShard shard = newStartedShard(true); indexDoc(shard, "type", "0"); @@ -1488,7 +1792,7 @@ public class IndexShardTests extends IndexShardTestCase { } @Override - public long indexTranslogOperations(List operations, int totalTranslogOps) { + public long indexTranslogOperations(List operations, int totalTranslogOps) throws IOException { final long localCheckpoint = super.indexTranslogOperations(operations, totalTranslogOps); // Shard should now be active since we did recover: assertTrue(replica.isActive()); diff --git a/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index f99de847238..6b5bd57aed9 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -41,7 +41,6 @@ import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineConfig; import org.elasticsearch.index.engine.InternalEngine; -import org.elasticsearch.index.engine.InternalEngineTests.TranslogHandler; import org.elasticsearch.index.fieldvisitor.SingleFieldsVisitor; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.ParseContext.Document; @@ -64,6 +63,7 @@ import org.junit.Before; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.concurrent.atomic.AtomicBoolean; @@ -108,19 +108,18 @@ public class RefreshListenersTests extends ESTestCase { store = new Store(shardId, indexSettings, directoryService, new DummyShardLock(shardId)); IndexWriterConfig iwc = newIndexWriterConfig(); TranslogConfig translogConfig = new TranslogConfig(shardId, createTempDir("translog"), indexSettings, - BigArrays.NON_RECYCLING_INSTANCE); + BigArrays.NON_RECYCLING_INSTANCE); Engine.EventListener eventListener = new Engine.EventListener() { @Override public void onFailedEngine(String reason, @Nullable Exception e) { // we don't need to notify anybody in this test } }; - TranslogHandler translogHandler = new TranslogHandler(xContentRegistry(), shardId.getIndexName(), Settings.EMPTY, logger); EngineConfig config = new EngineConfig(EngineConfig.OpenMode.CREATE_INDEX_AND_TRANSLOG, shardId, threadPool, indexSettings, null, store, newMergePolicy(), iwc.getAnalyzer(), - iwc.getSimilarity(), new CodecService(null, logger), eventListener, translogHandler, + iwc.getSimilarity(), new CodecService(null, logger), eventListener, IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, - TimeValue.timeValueMinutes(5), listeners, null); + TimeValue.timeValueMinutes(5), Collections.singletonList(listeners), null, null); engine = new 
InternalEngine(config); listeners.setTranslog(engine.getTranslog()); } @@ -298,7 +297,7 @@ public class RefreshListenersTests extends ESTestCase { listener.assertNoError(); Engine.Get get = new Engine.Get(false, "test", threadId, new Term(IdFieldMapper.NAME, threadId)); - try (Engine.GetResult getResult = engine.get(get)) { + try (Engine.GetResult getResult = engine.get(get, engine::acquireSearcher)) { assertTrue("document not found", getResult.exists()); assertEquals(iteration, getResult.version()); SingleFieldsVisitor visitor = new SingleFieldsVisitor("test"); diff --git a/core/src/test/java/org/elasticsearch/index/shard/SearchOperationListenerTests.java b/core/src/test/java/org/elasticsearch/index/shard/SearchOperationListenerTests.java index fafdbe6755b..28bab8da0fd 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/SearchOperationListenerTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/SearchOperationListenerTests.java @@ -21,6 +21,8 @@ package org.elasticsearch.index.shard; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TestSearchContext; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequest.Empty; import java.lang.reflect.Proxy; import java.util.ArrayList; @@ -112,7 +114,7 @@ public class SearchOperationListenerTests extends ESTestCase { } @Override - public void validateSearchContext(SearchContext context) { + public void validateSearchContext(SearchContext context, TransportRequest request) { assertNotNull(context); validateSearchContext.incrementAndGet(); } @@ -267,9 +269,10 @@ public class SearchOperationListenerTests extends ESTestCase { assertEquals(0, validateSearchContext.get()); if (throwingListeners == 0) { - compositeListener.validateSearchContext(ctx); + compositeListener.validateSearchContext(ctx, Empty.INSTANCE); } else { - RuntimeException expected = expectThrows(RuntimeException.class, () -> compositeListener.validateSearchContext(ctx)); + RuntimeException expected = + expectThrows(RuntimeException.class, () -> compositeListener.validateSearchContext(ctx, Empty.INSTANCE)); assertNull(expected.getMessage()); assertEquals(throwingListeners - 1, expected.getSuppressed().length); if (throwingListeners > 1) { diff --git a/core/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java b/core/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java index c047235ada4..39fdd40162d 100644 --- a/core/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java @@ -73,7 +73,8 @@ public class TermVectorsServiceTests extends ESSingleNodeTestCase { TermVectorsResponse response = TermVectorsService.getTermVectors(shard, request, longs.iterator()::next); assertThat(response, notNullValue()); - assertThat(response.getTookInMillis(), equalTo(TimeUnit.NANOSECONDS.toMillis(longs.get(1) - longs.get(0)))); + assertThat(response.getTook().getMillis(), + equalTo(TimeUnit.NANOSECONDS.toMillis(longs.get(1) - longs.get(0)))); } public void testDocFreqs() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 5f75610e2e8..4fe97919c38 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -104,12 +104,15 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Collectors; import java.util.stream.LongStream; +import static com.carrotsearch.randomizedtesting.RandomizedTest.randomLongBetween; import static org.elasticsearch.common.util.BigArrays.NON_RECYCLING_INSTANCE; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasToString; +import static org.hamcrest.Matchers.lessThanOrEqualTo; @LuceneTestCase.SuppressFileSystems("ExtrasFS") public class TranslogTests extends ESTestCase { @@ -126,7 +129,8 @@ public class TranslogTests extends ESTestCase { if (translog.isOpen()) { if (translog.currentFileGeneration() > 1) { - translog.commit(translog.currentFileGeneration()); + markCurrentGenAsCommitted(translog); + translog.trimUnreferencedReaders(); assertFileDeleted(translog, translog.currentFileGeneration() - 1); } translog.close(); @@ -136,6 +140,30 @@ public class TranslogTests extends ESTestCase { } + protected Translog createTranslog(TranslogConfig config, String translogUUID) throws IOException { + return new Translog(config, translogUUID, new TranslogDeletionPolicy(), () -> SequenceNumbersService.UNASSIGNED_SEQ_NO); + } + + private void markCurrentGenAsCommitted(Translog translog) throws IOException { + commit(translog, translog.currentFileGeneration()); + } + + private void rollAndCommit(Translog translog) throws IOException { + translog.rollGeneration(); + commit(translog, translog.currentFileGeneration()); + } + + private void commit(Translog translog, long genToCommit) throws IOException { + final TranslogDeletionPolicy deletionPolicy = translog.getDeletionPolicy(); + deletionPolicy.setMinTranslogGenerationForRecovery(genToCommit); + translog.trimUnreferencedReaders(); + if (deletionPolicy.pendingViewsCount() == 0) { + assertThat(deletionPolicy.minTranslogGenRequired(), equalTo(genToCommit)); + } + // we may have some views closed concurrently causing the deletion policy to increase it's minTranslogGenRequired + assertThat(translog.getMinFileGeneration(), lessThanOrEqualTo(deletionPolicy.minTranslogGenRequired())); + } + @Override @Before public void setUp() throws Exception { @@ -149,7 +177,7 @@ public class TranslogTests extends ESTestCase { @After public void tearDown() throws Exception { try { - assertEquals("there are still open views", 0, translog.getNumOpenViews()); + assertEquals("there are still open views", 0, translog.getDeletionPolicy().pendingViewsCount()); translog.close(); } finally { super.tearDown(); @@ -158,7 +186,7 @@ public class TranslogTests extends ESTestCase { private Translog create(Path path) throws IOException { globalCheckpoint = new AtomicLong(SequenceNumbersService.UNASSIGNED_SEQ_NO); - return new Translog(getTranslogConfig(path), null, () -> globalCheckpoint.get()); + return new Translog(getTranslogConfig(path), null, new TranslogDeletionPolicy(), () -> globalCheckpoint.get()); } private TranslogConfig getTranslogConfig(final Path path) { @@ -182,7 +210,7 @@ public class TranslogTests extends ESTestCase { return new TranslogConfig(shardId, path, indexSettings, NON_RECYCLING_INSTANCE, bufferSize); } - protected void addToTranslogAndList(Translog translog, ArrayList list, Translog.Operation op) throws IOException { + 
private void addToTranslogAndList(Translog translog, ArrayList list, Translog.Operation op) throws IOException { list.add(op); translog.add(op); } @@ -282,14 +310,14 @@ public class TranslogTests extends ESTestCase { assertNull(snapshot.next()); long firstId = translog.currentFileGeneration(); - translog.prepareCommit(); + translog.rollGeneration(); assertThat(translog.currentFileGeneration(), Matchers.not(equalTo(firstId))); snapshot = translog.newSnapshot(); assertThat(snapshot, SnapshotMatchers.equalsTo(ops)); assertThat(snapshot.totalOperations(), equalTo(ops.size())); - translog.commit(translog.currentFileGeneration()); + markCurrentGenAsCommitted(translog); snapshot = translog.newSnapshot(); assertThat(snapshot, SnapshotMatchers.size(0)); assertThat(snapshot.totalOperations(), equalTo(0)); @@ -328,25 +356,25 @@ public class TranslogTests extends ESTestCase { { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(2L)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(139L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(146L)); } translog.add(new Translog.Delete("test", "3", 2, newUid("3"))); { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(3L)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(181L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(195L)); } translog.add(new Translog.NoOp(3, 1, randomAlphaOfLength(16))); { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(4L)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(223L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(237L)); } - final long expectedSizeInBytes = 266L; - translog.prepareCommit(); + final long expectedSizeInBytes = 280L; + translog.rollGeneration(); { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(4L)); @@ -373,7 +401,7 @@ public class TranslogTests extends ESTestCase { } } - translog.commit(translog.currentFileGeneration()); + markCurrentGenAsCommitted(translog); { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(0L)); @@ -441,12 +469,12 @@ public class TranslogTests extends ESTestCase { assertThat(snapshot1, SnapshotMatchers.equalsTo(ops.get(0))); - translog.prepareCommit(); + translog.rollGeneration(); addToTranslogAndList(translog, ops, new Translog.Index("test", "3", 2, new byte[]{3})); try (Translog.View view = translog.newView()) { Translog.Snapshot snapshot2 = translog.newSnapshot(); - translog.commit(translog.currentFileGeneration()); + markCurrentGenAsCommitted(translog); assertThat(snapshot2, SnapshotMatchers.equalsTo(ops)); assertThat(snapshot2.totalOperations(), equalTo(ops.size())); } @@ -465,7 +493,7 @@ public class TranslogTests extends ESTestCase { } public void assertFileIsPresent(Translog translog, long id) { - if (Files.exists(translogDir.resolve(Translog.getFilename(id)))) { + if (Files.exists(translog.location().resolve(Translog.getFilename(id)))) { return; } fail(Translog.getFilename(id) + " is not present in any location: " + translog.location()); @@ -475,6 +503,15 @@ public class TranslogTests extends ESTestCase { assertFalse("translog [" + id + "] still exists", Files.exists(translog.location().resolve(Translog.getFilename(id)))); } + private void assertFilePresences(Translog translog) { + for (long gen = translog.getMinFileGeneration(); gen < translog.currentFileGeneration(); gen++) { + assertFileIsPresent(translog, gen); + } + for 
(long gen = 1; gen < translog.getMinFileGeneration(); gen++) { + assertFileDeleted(translog, gen); + } + } + static class LocationOperation implements Comparable { final Translog.Operation operation; final Translog.Location location; @@ -517,7 +554,7 @@ public class TranslogTests extends ESTestCase { threads[i].join(60 * 1000); } - List collect = writtenOperations.stream().collect(Collectors.toList()); + List collect = new ArrayList<>(writtenOperations); Collections.sort(collect); Translog.Snapshot snapshot = translog.newSnapshot(); for (LocationOperation locationOperation : collect) { @@ -581,7 +618,7 @@ public class TranslogTests extends ESTestCase { corruptionsCaught.incrementAndGet(); } } - expectThrows(TranslogCorruptedException.class, () -> snapshot.next()); + expectThrows(TranslogCorruptedException.class, snapshot::next); assertThat("at least one corruption was caused and caught", corruptionsCaught.get(), greaterThanOrEqualTo(1)); } @@ -725,8 +762,8 @@ public class TranslogTests extends ESTestCase { if (id % flushEveryOps == 0) { synchronized (flushMutex) { // we need not do this concurrently as we need to make sure that the generation - // we're committing - translog.currentFileGeneration() - is still present when we're committing - translog.commit(translog.currentFileGeneration()); + // we're committing - is still present when we're committing + rollAndCommit(translog); } } if (id % 7 == 0) { @@ -872,7 +909,7 @@ public class TranslogTests extends ESTestCase { assertTrue("we only synced a previous operation yet", translog.syncNeeded()); } if (rarely()) { - translog.commit(translog.currentFileGeneration()); + rollAndCommit(translog); assertFalse("location is from a previous translog - already synced", translog.ensureSynced(location)); // not syncing now assertFalse("no sync needed since no operations in current translog", translog.syncNeeded()); } @@ -892,7 +929,7 @@ public class TranslogTests extends ESTestCase { ArrayList locations = new ArrayList<>(); for (int op = 0; op < translogOperations; op++) { if (rarely()) { - translog.commit(translog.currentFileGeneration()); // do this first so that there is at least one pending tlog entry + rollAndCommit(translog); // do this first so that there is at least one pending tlog entry } final Translog.Location location = translog.add(new Translog.Index("test", "" + op, op, Integer.toString(++count).getBytes(Charset.forName("UTF-8")))); @@ -904,7 +941,7 @@ public class TranslogTests extends ESTestCase { assertTrue("this operation has not been synced", translog.ensureSynced(locations.stream())); assertFalse("the last call to ensureSycned synced all previous ops", translog.syncNeeded()); // we are the last location so everything should be synced } else if (rarely()) { - translog.commit(translog.currentFileGeneration()); + rollAndCommit(translog); assertFalse("location is from a previous translog - already synced", translog.ensureSynced(locations.stream())); // not syncing now assertFalse("no sync needed since no operations in current translog", translog.syncNeeded()); } else { @@ -925,7 +962,7 @@ public class TranslogTests extends ESTestCase { locations.add( translog.add(new Translog.Index("test", "" + op, op, Integer.toString(++count).getBytes(Charset.forName("UTF-8"))))); if (rarely() && translogOperations > op + 1) { - translog.commit(translog.currentFileGeneration()); + rollAndCommit(translog); } } Collections.shuffle(locations, random()); @@ -996,7 +1033,7 @@ public class TranslogTests extends ESTestCase { } public void 
testTranslogWriter() throws IOException { - final TranslogWriter writer = translog.createWriter(0); + final TranslogWriter writer = translog.createWriter(translog.currentFileGeneration() + 1); final int numOps = randomIntBetween(8, 128); byte[] bytes = new byte[4]; ByteArrayDataOutput out = new ByteArrayDataOutput(bytes); @@ -1056,7 +1093,7 @@ public class TranslogTests extends ESTestCase { } public void testCloseIntoReader() throws IOException { - try (TranslogWriter writer = translog.createWriter(0)) { + try (TranslogWriter writer = translog.createWriter(translog.currentFileGeneration() + 1)) { final int numOps = randomIntBetween(8, 128); final byte[] bytes = new byte[4]; final ByteArrayDataOutput out = new ByteArrayDataOutput(bytes); @@ -1091,7 +1128,7 @@ public class TranslogTests extends ESTestCase { locations.add(translog.add(new Translog.Index("test", "" + op, op, Integer.toString(op).getBytes(Charset.forName("UTF-8"))))); final boolean commit = commitOften ? frequently() : rarely(); if (commit && op < translogOperations - 1) { - translog.commit(translog.currentFileGeneration()); + rollAndCommit(translog); minUncommittedOp = op + 1; translogGeneration = translog.getGeneration(); } @@ -1100,14 +1137,15 @@ public class TranslogTests extends ESTestCase { TranslogConfig config = translog.getConfig(); translog.close(); - translog = new Translog(config, translogGeneration,() -> SequenceNumbersService.UNASSIGNED_SEQ_NO); if (translogGeneration == null) { + translog = createTranslog(config, null); assertEquals(0, translog.stats().estimatedNumberOfOperations()); assertEquals(1, translog.currentFileGeneration()); assertFalse(translog.syncNeeded()); Translog.Snapshot snapshot = translog.newSnapshot(); assertNull(snapshot.next()); } else { + translog = new Translog(config, translogGeneration.translogUUID, translog.getDeletionPolicy(), () -> SequenceNumbersService.UNASSIGNED_SEQ_NO); assertEquals("lastCommitted must be 1 less than current", translogGeneration.translogFileGeneration + 1, translog.currentFileGeneration()); assertFalse(translog.syncNeeded()); Translog.Snapshot snapshot = translog.newSnapshot(); @@ -1130,7 +1168,7 @@ public class TranslogTests extends ESTestCase { locations.add(translog.add(new Translog.Index("test", "" + op, op, Integer.toString(op).getBytes(Charset.forName("UTF-8"))))); if (op == prepareOp) { translogGeneration = translog.getGeneration(); - translog.prepareCommit(); + translog.rollGeneration(); assertEquals("expected this to be the first commit", 1L, translogGeneration.translogFileGeneration); assertNotNull(translogGeneration.translogUUID); } @@ -1141,7 +1179,9 @@ public class TranslogTests extends ESTestCase { // we intentionally don't close the tlog that is in the prepareCommit stage since we try to recovery the uncommitted // translog here as well. 
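    /*
     * A minimal sketch (not part of the patch above) of the "commit" flow these tests now use in
     * place of the removed translog.prepareCommit()/commit(gen) calls; it mirrors the
     * rollAndCommit()/commit() helpers added near the top of TranslogTests. Every call shown here
     * (rollGeneration, getDeletionPolicy, setMinTranslogGenerationForRecovery,
     * trimUnreferencedReaders) appears elsewhere in this diff; only the helper name is invented.
     */
    private static void sketchRollAndCommit(Translog translog) throws IOException {
        translog.rollGeneration(); // start a fresh generation, as the old prepareCommit() did
        TranslogDeletionPolicy deletionPolicy = translog.getDeletionPolicy();
        // generations below the current one are no longer needed for recovery
        deletionPolicy.setMinTranslogGenerationForRecovery(translog.currentFileGeneration());
        // deletes the now-unreferenced generations, unless an open view still pins them
        translog.trimUnreferencedReaders();
    }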
TranslogConfig config = translog.getConfig(); - try (Translog translog = new Translog(config, translogGeneration, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { + final String translogUUID = translog.getTranslogUUID(); + final TranslogDeletionPolicy deletionPolicy = translog.getDeletionPolicy(); + try (Translog translog = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { assertNotNull(translogGeneration); assertEquals("lastCommitted must be 2 less than current - we never finished the commit", translogGeneration.translogFileGeneration + 2, translog.currentFileGeneration()); assertFalse(translog.syncNeeded()); @@ -1154,7 +1194,7 @@ public class TranslogTests extends ESTestCase { } } if (randomBoolean()) { // recover twice - try (Translog translog = new Translog(config, translogGeneration, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { + try (Translog translog = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { assertNotNull(translogGeneration); assertEquals("lastCommitted must be 3 less than current - we never finished the commit and run recovery twice", translogGeneration.translogFileGeneration + 3, translog.currentFileGeneration()); assertFalse(translog.syncNeeded()); @@ -1180,7 +1220,7 @@ public class TranslogTests extends ESTestCase { locations.add(translog.add(new Translog.Index("test", "" + op, op, Integer.toString(op).getBytes(Charset.forName("UTF-8"))))); if (op == prepareOp) { translogGeneration = translog.getGeneration(); - translog.prepareCommit(); + translog.rollGeneration(); assertEquals("expected this to be the first commit", 1L, translogGeneration.translogFileGeneration); assertNotNull(translogGeneration.translogUUID); } @@ -1195,7 +1235,9 @@ public class TranslogTests extends ESTestCase { Checkpoint read = Checkpoint.read(ckp); Files.copy(ckp, config.getTranslogPath().resolve(Translog.getCommitCheckpointFileName(read.generation))); - try (Translog translog = new Translog(config, translogGeneration, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { + final String translogUUID = translog.getTranslogUUID(); + final TranslogDeletionPolicy deletionPolicy = translog.getDeletionPolicy(); + try (Translog translog = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { assertNotNull(translogGeneration); assertEquals("lastCommitted must be 2 less than current - we never finished the commit", translogGeneration.translogFileGeneration + 2, translog.currentFileGeneration()); assertFalse(translog.syncNeeded()); @@ -1210,7 +1252,7 @@ public class TranslogTests extends ESTestCase { } if (randomBoolean()) { // recover twice - try (Translog translog = new Translog(config, translogGeneration, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { + try (Translog translog = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { assertNotNull(translogGeneration); assertEquals("lastCommitted must be 3 less than current - we never finished the commit and run recovery twice", translogGeneration.translogFileGeneration + 3, translog.currentFileGeneration()); assertFalse(translog.syncNeeded()); @@ -1235,7 +1277,7 @@ public class TranslogTests extends ESTestCase { locations.add(translog.add(new Translog.Index("test", "" + op, op, Integer.toString(op).getBytes(Charset.forName("UTF-8"))))); if (op == prepareOp) { translogGeneration = translog.getGeneration(); - translog.prepareCommit(); + 
translog.rollGeneration(); assertEquals("expected this to be the first commit", 1L, translogGeneration.translogFileGeneration); assertNotNull(translogGeneration.translogUUID); } @@ -1246,17 +1288,19 @@ public class TranslogTests extends ESTestCase { TranslogConfig config = translog.getConfig(); Path ckp = config.getTranslogPath().resolve(Translog.CHECKPOINT_FILE_NAME); Checkpoint read = Checkpoint.read(ckp); - Checkpoint corrupted = Checkpoint.emptyTranslogCheckpoint(0, 0, SequenceNumbersService.UNASSIGNED_SEQ_NO); + Checkpoint corrupted = Checkpoint.emptyTranslogCheckpoint(0, 0, SequenceNumbersService.UNASSIGNED_SEQ_NO, 0); Checkpoint.write(FileChannel::open, config.getTranslogPath().resolve(Translog.getCommitCheckpointFileName(read.generation)), corrupted, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW); - try (Translog ignored = new Translog(config, translogGeneration, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { + final String translogUUID = translog.getTranslogUUID(); + final TranslogDeletionPolicy deletionPolicy = translog.getDeletionPolicy(); + try (Translog ignored = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { fail("corrupted"); } catch (IllegalStateException ex) { assertEquals("Checkpoint file translog-2.ckp already exists but has corrupted content expected: Checkpoint{offset=3123, " + - "numOps=55, generation=2, minSeqNo=45, maxSeqNo=99, globalCheckpoint=-2} but got: Checkpoint{offset=0, numOps=0, " + - "generation=0, minSeqNo=-1, maxSeqNo=-1, globalCheckpoint=-2}", ex.getMessage()); + "numOps=55, generation=2, minSeqNo=45, maxSeqNo=99, globalCheckpoint=-2, minTranslogGeneration=1} but got: Checkpoint{offset=0, numOps=0, " + + "generation=0, minSeqNo=-1, maxSeqNo=-1, globalCheckpoint=-2, minTranslogGeneration=0}", ex.getMessage()); } Checkpoint.write(FileChannel::open, config.getTranslogPath().resolve(Translog.getCommitCheckpointFileName(read.generation)), read, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING); - try (Translog translog = new Translog(config, translogGeneration, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { + try (Translog translog = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { assertNotNull(translogGeneration); assertEquals("lastCommitted must be 2 less than current - we never finished the commit", translogGeneration.translogFileGeneration + 2, translog.currentFileGeneration()); assertFalse(translog.syncNeeded()); @@ -1319,23 +1363,25 @@ public class TranslogTests extends ESTestCase { for (int op = 0; op < translogOperations; op++) { locations.add(translog.add(new Translog.Index("test", "" + op, op, Integer.toString(op).getBytes(Charset.forName("UTF-8"))))); if (randomBoolean()) { - translog.commit(translog.currentFileGeneration()); + rollAndCommit(translog); firstUncommitted = op + 1; } } - TranslogConfig config = translog.getConfig(); + final TranslogConfig config = translog.getConfig(); + final String translogUUID = translog.getTranslogUUID(); + final TranslogDeletionPolicy deletionPolicy = translog.getDeletionPolicy(); Translog.TranslogGeneration translogGeneration = translog.getGeneration(); translog.close(); - Translog.TranslogGeneration generation = new Translog.TranslogGeneration(randomRealisticUnicodeOfCodepointLengthBetween(1, - translogGeneration.translogUUID.length()), translogGeneration.translogFileGeneration); + final String foreignTranslog = randomRealisticUnicodeOfCodepointLengthBetween(1, + 
translogGeneration.translogUUID.length()); try { - new Translog(config, generation, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO); + new Translog(config, foreignTranslog, new TranslogDeletionPolicy(), () -> SequenceNumbersService.UNASSIGNED_SEQ_NO); fail("translog doesn't belong to this UUID"); } catch (TranslogCorruptedException ex) { } - this.translog = new Translog(config, translogGeneration, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO); + this.translog = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO); Translog.Snapshot snapshot = this.translog.newSnapshot(); for (int i = firstUncommitted; i < translogOperations; i++) { Translog.Operation next = snapshot.next(); @@ -1509,7 +1555,7 @@ public class TranslogTests extends ESTestCase { } try { - translog.commit(translog.currentFileGeneration()); + rollAndCommit(translog); fail("already closed"); } catch (AlreadyClosedException ex) { assertNotNull(ex.getCause()); @@ -1518,7 +1564,9 @@ public class TranslogTests extends ESTestCase { assertFalse(translog.isOpen()); translog.close(); // we are closed - try (Translog tlog = new Translog(config, translogGeneration, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { + final String translogUUID = translog.getTranslogUUID(); + final TranslogDeletionPolicy deletionPolicy = translog.getDeletionPolicy(); + try (Translog tlog = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { assertEquals("lastCommitted must be 1 less than current", translogGeneration.translogFileGeneration + 1, tlog.currentFileGeneration()); assertFalse(tlog.syncNeeded()); @@ -1554,7 +1602,7 @@ public class TranslogTests extends ESTestCase { Path tempDir = createTempDir(); final FailSwitch fail = new FailSwitch(); TranslogConfig config = getTranslogConfig(tempDir); - Translog translog = getFailableTranslog(fail, config, false, true, null); + Translog translog = getFailableTranslog(fail, config, false, true, null, new TranslogDeletionPolicy()); LineFileDocs lineFileDocs = new LineFileDocs(random()); // writes pretty big docs so we cross buffer boarders regularly translog.add(new Translog.Index("test", "1", 0, lineFileDocs.nextDoc().toString().getBytes(Charset.forName("UTF-8")))); fail.failAlways(); @@ -1583,6 +1631,7 @@ public class TranslogTests extends ESTestCase { TranslogConfig config = getTranslogConfig(tempDir); Translog translog = getFailableTranslog(fail, config); + final String translogUUID = translog.getTranslogUUID(); final int threadCount = randomIntBetween(1, 5); Thread[] threads = new Thread[threadCount]; @@ -1648,7 +1697,7 @@ public class TranslogTests extends ESTestCase { iterator.remove(); } } - try (Translog tlog = new Translog(config, translog.getGeneration(), () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { + try (Translog tlog = new Translog(config, translogUUID, new TranslogDeletionPolicy(), () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { Translog.Snapshot snapshot = tlog.newSnapshot(); if (writtenOperations.size() != snapshot.totalOperations()) { for (int i = 0; i < threadCount; i++) { @@ -1668,8 +1717,93 @@ public class TranslogTests extends ESTestCase { } } + /** + * Tests the situation where the node crashes after a translog gen was committed to lucene, but before the translog had the chance + * to clean up its files. 
+ */ + public void testRecoveryFromAFutureGenerationCleansUp() throws IOException { + int translogOperations = randomIntBetween(10, 100); + for (int op = 0; op < translogOperations / 2; op++) { + translog.add(new Translog.Index("test", "" + op, op, Integer.toString(op).getBytes(Charset.forName("UTF-8")))); + if (rarely()) { + translog.rollGeneration(); + } + } + translog.rollGeneration(); + long comittedGeneration = randomLongBetween(2, translog.currentFileGeneration()); + for (int op = translogOperations / 2; op < translogOperations; op++) { + translog.add(new Translog.Index("test", "" + op, op, Integer.toString(op).getBytes(Charset.forName("UTF-8")))); + if (rarely()) { + translog.rollGeneration(); + } + } + // engine blows up, after committing the above generation + translog.close(); + TranslogConfig config = translog.getConfig(); + final TranslogDeletionPolicy deletionPolicy = new TranslogDeletionPolicy(); + deletionPolicy.setMinTranslogGenerationForRecovery(comittedGeneration); + translog = new Translog(config, translog.getTranslogUUID(), deletionPolicy, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO); + assertThat(translog.getMinFileGeneration(), equalTo(1L)); + // no trimming done yet, just recovered + for (long gen = 1; gen < translog.currentFileGeneration(); gen++) { + assertFileIsPresent(translog, gen); + } + translog.trimUnreferencedReaders(); + for (long gen = 1; gen < comittedGeneration; gen++) { + assertFileDeleted(translog, gen); + } + } + + /** + * Tests the situation where the node crashes after a translog gen was committed to lucene, but before the translog had the chance + * to clean up its files. + */ + public void testRecoveryFromFailureOnTrimming() throws IOException { + Path tempDir = createTempDir(); + final FailSwitch fail = new FailSwitch(); + fail.failNever(); + final TranslogConfig config = getTranslogConfig(tempDir); + final long comittedGeneration; + final String translogUUID; + try (Translog translog = getFailableTranslog(fail, config)) { + translogUUID = translog.getTranslogUUID(); + int translogOperations = randomIntBetween(10, 100); + for (int op = 0; op < translogOperations / 2; op++) { + translog.add(new Translog.Index("test", "" + op, op, Integer.toString(op).getBytes(Charset.forName("UTF-8")))); + if (rarely()) { + translog.rollGeneration(); + } + } + translog.rollGeneration(); + comittedGeneration = randomLongBetween(2, translog.currentFileGeneration()); + for (int op = translogOperations / 2; op < translogOperations; op++) { + translog.add(new Translog.Index("test", "" + op, op, Integer.toString(op).getBytes(Charset.forName("UTF-8")))); + if (rarely()) { + translog.rollGeneration(); + } + } + fail.failRandomly(); + try { + commit(translog, comittedGeneration); + } catch (Exception e) { + // expected... 
+ } + } + final TranslogDeletionPolicy deletionPolicy = new TranslogDeletionPolicy(); + deletionPolicy.setMinTranslogGenerationForRecovery(comittedGeneration); + try (Translog translog = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { + // we don't know when things broke exactly + assertThat(translog.getMinFileGeneration(), greaterThanOrEqualTo(1L)); + assertThat(translog.getMinFileGeneration(), lessThanOrEqualTo(comittedGeneration)); + assertFilePresences(translog); + translog.trimUnreferencedReaders(); + assertThat(translog.getMinFileGeneration(), equalTo(comittedGeneration)); + assertFilePresences(translog); + } + } + private Translog getFailableTranslog(FailSwitch fail, final TranslogConfig config) throws IOException { - return getFailableTranslog(fail, config, randomBoolean(), false, null); + return getFailableTranslog(fail, config, randomBoolean(), false, null, new TranslogDeletionPolicy()); } private static class FailSwitch { @@ -1702,8 +1836,10 @@ public class TranslogTests extends ESTestCase { } - private Translog getFailableTranslog(final FailSwitch fail, final TranslogConfig config, final boolean paritalWrites, final boolean throwUnknownException, Translog.TranslogGeneration generation) throws IOException { - return new Translog(config, generation, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO) { + private Translog getFailableTranslog(final FailSwitch fail, final TranslogConfig config, final boolean partialWrites, + final boolean throwUnknownException, String translogUUID, + final TranslogDeletionPolicy deletionPolicy) throws IOException { + return new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO) { @Override ChannelFactory getChannelFactory() { final ChannelFactory factory = super.getChannelFactory(); @@ -1713,7 +1849,7 @@ public class TranslogTests extends ESTestCase { boolean success = false; try { final boolean isCkpFile = file.getFileName().toString().endsWith(".ckp"); // don't do partial writes for checkpoints we rely on the fact that the bytes are written as an atomic operation - ThrowingFileChannel throwingFileChannel = new ThrowingFileChannel(fail, isCkpFile ? false : paritalWrites, throwUnknownException, channel); + ThrowingFileChannel throwingFileChannel = new ThrowingFileChannel(fail, isCkpFile ? false : partialWrites, throwUnknownException, channel); success = true; return throwingFileChannel; } finally { @@ -1723,6 +1859,16 @@ public class TranslogTests extends ESTestCase { } }; } + + @Override + void deleteReaderFiles(TranslogReader reader) { + if (fail.fail()) { + // simulate going OOM and dieing just at the wrong moment. 
+ throw new RuntimeException("simulated"); + } else { + super.deleteReaderFiles(reader); + } + } }; } @@ -1815,12 +1961,11 @@ public class TranslogTests extends ESTestCase { public void testFailWhileCreateWriteWithRecoveredTLogs() throws IOException { Path tempDir = createTempDir(); TranslogConfig config = getTranslogConfig(tempDir); - Translog translog = new Translog(config, null, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO); + Translog translog = createTranslog(config, null); translog.add(new Translog.Index("test", "boom", 0, "boom".getBytes(Charset.forName("UTF-8")))); - Translog.TranslogGeneration generation = translog.getGeneration(); translog.close(); try { - new Translog(config, generation, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO) { + new Translog(config, translog.getTranslogUUID(), new TranslogDeletionPolicy(), () -> SequenceNumbersService.UNASSIGNED_SEQ_NO) { @Override protected TranslogWriter createWriter(long fileGeneration) throws IOException { throw new MockDirectoryWrapper.FakeIOException(); @@ -1835,7 +1980,6 @@ public class TranslogTests extends ESTestCase { public void testRecoverWithUnbackedNextGen() throws IOException { translog.add(new Translog.Index("test", "" + 0, 0, Integer.toString(0).getBytes(Charset.forName("UTF-8")))); - Translog.TranslogGeneration translogGeneration = translog.getGeneration(); translog.close(); TranslogConfig config = translog.getConfig(); @@ -1843,8 +1987,7 @@ public class TranslogTests extends ESTestCase { Checkpoint read = Checkpoint.read(ckp); Files.copy(ckp, config.getTranslogPath().resolve(Translog.getCommitCheckpointFileName(read.generation))); Files.createFile(config.getTranslogPath().resolve("translog-" + (read.generation + 1) + ".tlog")); - try (Translog tlog = new Translog(config, translogGeneration, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { - assertNotNull(translogGeneration); + try (Translog tlog = createTranslog(config, translog.getTranslogUUID())) { assertFalse(tlog.syncNeeded()); Translog.Snapshot snapshot = tlog.newSnapshot(); for (int i = 0; i < 1; i++) { @@ -1854,8 +1997,7 @@ public class TranslogTests extends ESTestCase { } tlog.add(new Translog.Index("test", "" + 1, 1, Integer.toString(1).getBytes(Charset.forName("UTF-8")))); } - try (Translog tlog = new Translog(config, translogGeneration, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { - assertNotNull(translogGeneration); + try (Translog tlog = createTranslog(config, translog.getTranslogUUID())) { assertFalse(tlog.syncNeeded()); Translog.Snapshot snapshot = tlog.newSnapshot(); for (int i = 0; i < 2; i++) { @@ -1868,7 +2010,6 @@ public class TranslogTests extends ESTestCase { public void testRecoverWithUnbackedNextGenInIllegalState() throws IOException { translog.add(new Translog.Index("test", "" + 0, 0, Integer.toString(0).getBytes(Charset.forName("UTF-8")))); - Translog.TranslogGeneration translogGeneration = translog.getGeneration(); translog.close(); TranslogConfig config = translog.getConfig(); Path ckp = config.getTranslogPath().resolve(Translog.CHECKPOINT_FILE_NAME); @@ -1877,7 +2018,7 @@ public class TranslogTests extends ESTestCase { Files.createFile(config.getTranslogPath().resolve("translog-" + (read.generation + 1) + ".tlog")); try { - Translog tlog = new Translog(config, translogGeneration, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO); + Translog tlog = new Translog(config, translog.getTranslogUUID(), translog.getDeletionPolicy(), () -> SequenceNumbersService.UNASSIGNED_SEQ_NO); fail("file already exists?"); } catch (TranslogException 
ex) { // all is well @@ -1888,9 +2029,10 @@ public class TranslogTests extends ESTestCase { public void testRecoverWithUnbackedNextGenAndFutureFile() throws IOException { translog.add(new Translog.Index("test", "" + 0, 0, Integer.toString(0).getBytes(Charset.forName("UTF-8")))); - Translog.TranslogGeneration translogGeneration = translog.getGeneration(); translog.close(); TranslogConfig config = translog.getConfig(); + final String translogUUID = translog.getTranslogUUID(); + final TranslogDeletionPolicy deletionPolicy = translog.getDeletionPolicy(); Path ckp = config.getTranslogPath().resolve(Translog.CHECKPOINT_FILE_NAME); Checkpoint read = Checkpoint.read(ckp); @@ -1898,8 +2040,7 @@ public class TranslogTests extends ESTestCase { Files.createFile(config.getTranslogPath().resolve("translog-" + (read.generation + 1) + ".tlog")); // we add N+1 and N+2 to ensure we only delete the N+1 file and never jump ahead and wipe without the right condition Files.createFile(config.getTranslogPath().resolve("translog-" + (read.generation + 2) + ".tlog")); - try (Translog tlog = new Translog(config, translogGeneration, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { - assertNotNull(translogGeneration); + try (Translog tlog = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { assertFalse(tlog.syncNeeded()); Translog.Snapshot snapshot = tlog.newSnapshot(); for (int i = 0; i < 1; i++) { @@ -1911,7 +2052,7 @@ public class TranslogTests extends ESTestCase { } try { - Translog tlog = new Translog(config, translogGeneration, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO); + Translog tlog = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO); fail("file already exists?"); } catch (TranslogException ex) { // all is well @@ -1933,14 +2074,16 @@ public class TranslogTests extends ESTestCase { fail.failRandomly(); TranslogConfig config = getTranslogConfig(tempDir); final int numOps = randomIntBetween(100, 200); + long minGenForRecovery = 1; List syncedDocs = new ArrayList<>(); List unsynced = new ArrayList<>(); if (randomBoolean()) { fail.onceFailedFailAlways(); } - Translog.TranslogGeneration generation = null; + String generationUUID = null; try { - final Translog failableTLog = getFailableTranslog(fail, config, randomBoolean(), false, generation); + boolean committing = false; + final Translog failableTLog = getFailableTranslog(fail, config, randomBoolean(), false, generationUUID, new TranslogDeletionPolicy()); try { LineFileDocs lineFileDocs = new LineFileDocs(random()); //writes pretty big docs so we cross buffer boarders regularly for (int opsAdded = 0; opsAdded < numOps; opsAdded++) { @@ -1956,10 +2099,11 @@ public class TranslogTests extends ESTestCase { failableTLog.sync(); // we have to sync here first otherwise we don't know if the sync succeeded if the commit fails syncedDocs.addAll(unsynced); unsynced.clear(); - if (randomBoolean()) { - failableTLog.prepareCommit(); - } - failableTLog.commit(translog.currentFileGeneration()); + failableTLog.rollGeneration(); + committing = true; + failableTLog.getDeletionPolicy().setMinTranslogGenerationForRecovery(failableTLog.currentFileGeneration()); + failableTLog.trimUnreferencedReaders(); + committing = false; syncedDocs.clear(); } } @@ -1973,13 +2117,21 @@ public class TranslogTests extends ESTestCase { // fair enough } catch (IOException ex) { assertEquals(ex.getMessage(), "__FAKE__ no space left on device"); + } catch (RuntimeException ex) { + 
assertEquals(ex.getMessage(), "simulated"); } finally { Checkpoint checkpoint = Translog.readCheckpoint(config.getTranslogPath()); if (checkpoint.numOps == unsynced.size() + syncedDocs.size()) { syncedDocs.addAll(unsynced); // failed in fsync but got fully written unsynced.clear(); } - generation = failableTLog.getGeneration(); + if (committing && checkpoint.minTranslogGeneration == checkpoint.generation) { + // we were committing and blew up in one of the syncs, but they made it through + syncedDocs.clear(); + assertThat(unsynced, empty()); + } + generationUUID = failableTLog.getTranslogUUID(); + minGenForRecovery = failableTLog.getDeletionPolicy().getMinTranslogGenerationForRecovery(); IOUtils.closeWhileHandlingException(failableTLog); } } catch (TranslogException | MockDirectoryWrapper.FakeIOException ex) { @@ -1990,7 +2142,9 @@ public class TranslogTests extends ESTestCase { // now randomly open this failing tlog again just to make sure we can also recover from failing during recovery if (randomBoolean()) { try { - IOUtils.close(getFailableTranslog(fail, config, randomBoolean(), false, generation)); + TranslogDeletionPolicy deletionPolicy = new TranslogDeletionPolicy(); + deletionPolicy.setMinTranslogGenerationForRecovery(minGenForRecovery); + IOUtils.close(getFailableTranslog(fail, config, randomBoolean(), false, generationUUID, deletionPolicy)); } catch (TranslogException | MockDirectoryWrapper.FakeIOException ex) { // failed - that's ok, we didn't even create it } catch (IOException ex) { @@ -1999,7 +2153,9 @@ public class TranslogTests extends ESTestCase { } fail.failNever(); // we don't wanna fail here but we might since we write a new checkpoint and create a new tlog file - try (Translog translog = new Translog(config, generation, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { + TranslogDeletionPolicy deletionPolicy = new TranslogDeletionPolicy(); + deletionPolicy.setMinTranslogGenerationForRecovery(minGenForRecovery); + try (Translog translog = new Translog(config, generationUUID, deletionPolicy, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { Translog.Snapshot snapshot = translog.newSnapshot(); assertEquals(syncedDocs.size(), snapshot.totalOperations()); for (int i = 0; i < syncedDocs.size(); i++) { @@ -2023,7 +2179,9 @@ public class TranslogTests extends ESTestCase { minSeqNo = b; maxSeqNo = a; } - return new Checkpoint(randomLong(), randomInt(), randomLong(), minSeqNo, maxSeqNo, randomNonNegativeLong()); + final long generation = randomNonNegativeLong(); + return new Checkpoint(randomLong(), randomInt(), generation, minSeqNo, maxSeqNo, randomNonNegativeLong(), + randomLongBetween(1, generation)); } public void testCheckpointOnDiskFull() throws IOException { @@ -2057,18 +2215,19 @@ public class TranslogTests extends ESTestCase { */ public void testPendingDelete() throws IOException { translog.add(new Translog.Index("test", "1", 0, new byte[]{1})); - translog.prepareCommit(); - Translog.TranslogGeneration generation = translog.getGeneration(); + translog.rollGeneration(); TranslogConfig config = translog.getConfig(); + final String translogUUID = translog.getTranslogUUID(); + final TranslogDeletionPolicy deletionPolicy = new TranslogDeletionPolicy(); translog.close(); - translog = new Translog(config, generation, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO); + translog = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO); translog.add(new Translog.Index("test", "2", 1, new byte[]{2})); - translog.prepareCommit(); + 
translog.rollGeneration(); Translog.View view = translog.newView(); translog.add(new Translog.Index("test", "3", 2, new byte[]{3})); translog.close(); IOUtils.close(view); - translog = new Translog(config, generation, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO); + translog = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO); } public static Translog.Location randomTranslogLocation() { @@ -2118,6 +2277,20 @@ public class TranslogTests extends ESTestCase { in = out.bytes().streamInput(); Translog.Delete serializedDelete = new Translog.Delete(in); assertEquals(delete, serializedDelete); + + // simulate legacy delete serialization + out = new BytesStreamOutput(); + out.writeVInt(Translog.Delete.FORMAT_5_0); + out.writeString(UidFieldMapper.NAME); + out.writeString("my_type#my_id"); + out.writeLong(3); // version + out.writeByte(VersionType.INTERNAL.getValue()); + out.writeLong(2); // seq no + out.writeLong(0); // primary term + in = out.bytes().streamInput(); + serializedDelete = new Translog.Delete(in); + assertEquals("my_type", serializedDelete.type()); + assertEquals("my_id", serializedDelete.id()); } public void testRollGeneration() throws IOException { @@ -2140,14 +2313,13 @@ public class TranslogTests extends ESTestCase { for (int i = 0; i <= rolls; i++) { assertFileIsPresent(translog, generation + i); } - translog.commit(generation + rolls); - assertThat(translog.currentFileGeneration(), equalTo(generation + rolls + 1)); + commit(translog, generation + rolls); + assertThat(translog.currentFileGeneration(), equalTo(generation + rolls )); assertThat(translog.totalOperations(), equalTo(0)); for (int i = 0; i < rolls; i++) { assertFileDeleted(translog, generation + i); } assertFileIsPresent(translog, generation + rolls); - assertFileIsPresent(translog, generation + rolls + 1); } public void testRollGenerationBetweenPrepareCommitAndCommit() throws IOException { @@ -2172,7 +2344,7 @@ public class TranslogTests extends ESTestCase { } assertThat(translog.currentFileGeneration(), equalTo(generation + rollsBefore)); - translog.prepareCommit(); + translog.rollGeneration(); assertThat(translog.currentFileGeneration(), equalTo(generation + rollsBefore + 1)); for (int i = 0; i <= rollsBefore + 1; i++) { @@ -2198,7 +2370,7 @@ public class TranslogTests extends ESTestCase { } } - translog.commit(generation + rollsBefore + 1); + commit(translog, generation + rollsBefore + 1); for (int i = 0; i <= rollsBefore; i++) { assertFileDeleted(translog, generation + i); @@ -2210,7 +2382,6 @@ public class TranslogTests extends ESTestCase { } public void testMinGenerationForSeqNo() throws IOException { - final long initialGeneration = translog.getGeneration().translogFileGeneration; final int operations = randomIntBetween(1, 4096); final List shuffledSeqNos = LongStream.range(0, operations).boxed().collect(Collectors.toList()); Randomness.shuffle(shuffledSeqNos); @@ -2230,8 +2401,9 @@ public class TranslogTests extends ESTestCase { } Map>> generations = new HashMap<>(); - - translog.commit(initialGeneration); + // one extra roll to make sure that all ops so far are available via a reader and a translog-{gen}.ckp + // file in a consistent way, in order to simplify checking code. 
+ translog.rollGeneration(); for (long seqNo = 0; seqNo < operations; seqNo++) { final Set> seenSeqNos = new HashSet<>(); final long generation = translog.getMinGenerationForSeqNo(seqNo).translogFileGeneration; @@ -2271,7 +2443,7 @@ public class TranslogTests extends ESTestCase { final long generation = randomIntBetween(1, Math.toIntExact(translog.currentFileGeneration())); - translog.commit(generation); + commit(translog, generation); for (long g = 0; g < generation; g++) { assertFileDeleted(translog, g); } @@ -2288,13 +2460,13 @@ public class TranslogTests extends ESTestCase { translog.add(new Translog.NoOp(seqNo++, 0, "test")); if (rarely()) { final long generation = translog.currentFileGeneration(); - translog.prepareCommit(); + translog.rollGeneration(); if (rarely()) { // simulate generation filling up and rolling between preparing the commit and committing translog.rollGeneration(); } final int committedGeneration = randomIntBetween(Math.max(1, Math.toIntExact(last)), Math.toIntExact(generation)); - translog.commit(committedGeneration); + commit(translog, committedGeneration); last = committedGeneration; for (long g = 0; g < committedGeneration; g++) { assertFileDeleted(translog, g); @@ -2315,11 +2487,11 @@ public class TranslogTests extends ESTestCase { if (rarely()) { try (Translog.View ignored = translog.newView()) { final long viewGeneration = lastCommittedGeneration; - translog.prepareCommit(); + translog.rollGeneration(); final long committedGeneration = randomIntBetween( Math.max(1, Math.toIntExact(lastCommittedGeneration)), Math.toIntExact(translog.currentFileGeneration())); - translog.commit(committedGeneration); + commit(translog, committedGeneration); lastCommittedGeneration = committedGeneration; // with an open view, committing should preserve generations back to the last committed generation for (long g = 1; g < Math.min(lastCommittedGeneration, viewGeneration); g++) { @@ -2334,5 +2506,4 @@ public class TranslogTests extends ESTestCase { } } } - } diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogVersionTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogVersionTests.java index d0087495061..f6aafe765f5 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogVersionTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogVersionTests.java @@ -89,7 +89,7 @@ public class TranslogVersionTests extends ESTestCase { final long minSeqNo = SequenceNumbersService.NO_OPS_PERFORMED; final long maxSeqNo = SequenceNumbersService.NO_OPS_PERFORMED; final Checkpoint checkpoint = - new Checkpoint(Files.size(path), 1, id, minSeqNo, maxSeqNo, SequenceNumbersService.UNASSIGNED_SEQ_NO); + new Checkpoint(Files.size(path), 1, id, minSeqNo, maxSeqNo, SequenceNumbersService.UNASSIGNED_SEQ_NO, id); return TranslogReader.open(channel, path, checkpoint, null); } } diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java index f94c0c8fe74..b3394d4f4fa 100644 --- a/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java +++ b/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.indices.analysis; import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.CharFilter; import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import 
org.apache.lucene.analysis.Tokenizer; @@ -40,6 +41,7 @@ import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.CustomAnalyzer; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.analysis.PreConfiguredCharFilter; import org.elasticsearch.index.analysis.PreConfiguredTokenFilter; import org.elasticsearch.index.analysis.PreConfiguredTokenizer; import org.elasticsearch.index.analysis.StandardTokenizerFactory; @@ -56,6 +58,7 @@ import org.hamcrest.MatcherAssert; import java.io.BufferedWriter; import java.io.IOException; import java.io.InputStream; +import java.io.Reader; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; @@ -250,6 +253,50 @@ public class AnalysisModuleTests extends ESTestCase { } } + /** + * Tests that plugins can register pre-configured char filters that vary in behavior based on Elasticsearch version, Lucene version, + * and that do not vary based on version at all. + */ + public void testPluginPreConfiguredCharFilters() throws IOException { + boolean noVersionSupportsMultiTerm = randomBoolean(); + boolean luceneVersionSupportsMultiTerm = randomBoolean(); + boolean elasticsearchVersionSupportsMultiTerm = randomBoolean(); + AnalysisRegistry registry = new AnalysisModule(new Environment(emptyNodeSettings), singletonList(new AnalysisPlugin() { + @Override + public List getPreConfiguredCharFilters() { + return Arrays.asList( + PreConfiguredCharFilter.singleton("no_version", noVersionSupportsMultiTerm, + tokenStream -> new AppendCharFilter(tokenStream, "no_version")), + PreConfiguredCharFilter.luceneVersion("lucene_version", luceneVersionSupportsMultiTerm, + (tokenStream, luceneVersion) -> new AppendCharFilter(tokenStream, luceneVersion.toString())), + PreConfiguredCharFilter.elasticsearchVersion("elasticsearch_version", elasticsearchVersionSupportsMultiTerm, + (tokenStream, esVersion) -> new AppendCharFilter(tokenStream, esVersion.toString())) + ); + } + })).getAnalysisRegistry(); + + Version version = VersionUtils.randomVersion(random()); + IndexAnalyzers analyzers = getIndexAnalyzers(registry, Settings.builder() + .put("index.analysis.analyzer.no_version.tokenizer", "keyword") + .put("index.analysis.analyzer.no_version.char_filter", "no_version") + .put("index.analysis.analyzer.lucene_version.tokenizer", "keyword") + .put("index.analysis.analyzer.lucene_version.char_filter", "lucene_version") + .put("index.analysis.analyzer.elasticsearch_version.tokenizer", "keyword") + .put("index.analysis.analyzer.elasticsearch_version.char_filter", "elasticsearch_version") + .put(IndexMetaData.SETTING_VERSION_CREATED, version) + .build()); + assertTokenStreamContents(analyzers.get("no_version").tokenStream("", "test"), new String[] {"testno_version"}); + assertTokenStreamContents(analyzers.get("lucene_version").tokenStream("", "test"), new String[] {"test" + version.luceneVersion}); + assertTokenStreamContents(analyzers.get("elasticsearch_version").tokenStream("", "test"), new String[] {"test" + version}); + + assertEquals("test" + (noVersionSupportsMultiTerm ? "no_version" : ""), + analyzers.get("no_version").normalize("", "test").utf8ToString()); + assertEquals("test" + (luceneVersionSupportsMultiTerm ? version.luceneVersion.toString() : ""), + analyzers.get("lucene_version").normalize("", "test").utf8ToString()); + assertEquals("test" + (elasticsearchVersionSupportsMultiTerm ? 
version.toString() : ""), + analyzers.get("elasticsearch_version").normalize("", "test").utf8ToString()); + } + /** * Tests that plugins can register pre-configured token filters that vary in behavior based on Elasticsearch version, Lucene version, * and that do not vary based on version at all. @@ -391,6 +438,44 @@ public class AnalysisModuleTests extends ESTestCase { assertSame(dictionary, module.getHunspellService().getDictionary("foo")); } + // Simple char filter that appends text to the term + public static class AppendCharFilter extends CharFilter { + private final char[] appendMe; + private int offsetInAppendMe = -1; + + public AppendCharFilter(Reader input, String appendMe) { + super(input); + this.appendMe = appendMe.toCharArray(); + } + + @Override + protected int correct(int currentOff) { + return currentOff; + } + + @Override + public int read(char[] cbuf, int off, int len) throws IOException { + if (offsetInAppendMe < 0) { + int read = input.read(cbuf, off, len); + if (read == len) { + return read; + } + off += read; + len -= read; + int allowedLen = Math.min(len, appendMe.length); + System.arraycopy(appendMe, 0, cbuf, off, allowedLen); + offsetInAppendMe = allowedLen; + return read + allowedLen; + } + if (offsetInAppendMe >= appendMe.length) { + return -1; + } + int allowedLen = Math.max(len, appendMe.length - offsetInAppendMe); + System.arraycopy(appendMe, offsetInAppendMe, cbuf, off, allowedLen); + return allowedLen; + } + } + // Simple token filter that appends text to the term private static class AppendTokenFilter extends TokenFilter { public static TokenFilterFactory factoryForSuffix(String suffix) { diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalysisPlugin.java b/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalysisPlugin.java deleted file mode 100644 index bbfeacfc590..00000000000 --- a/core/src/test/java/org/elasticsearch/indices/analysis/DummyAnalysisPlugin.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
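To make the AppendCharFilter above concrete: it passes the wrapped Reader through unchanged and emits its suffix as soon as the wrapped Reader returns fewer characters than were requested. A small usage sketch, meant to run inside a test method of AnalysisModuleTests (a java.io.StringReader import and an arbitrary "-marker" suffix are assumed):

    // One large read returns the original text plus the appended suffix; the next read signals end of stream.
    Reader filtered = new AppendCharFilter(new StringReader("test"), "-marker");
    char[] buf = new char[64];
    int read = filtered.read(buf, 0, buf.length);
    assertEquals("test-marker", new String(buf, 0, read));
    assertEquals(-1, filtered.read(buf, 0, buf.length));
    filtered.close();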
- */ - -package org.elasticsearch.indices.analysis; - -import org.apache.lucene.analysis.Analyzer; -import org.elasticsearch.index.analysis.AnalyzerProvider; -import org.elasticsearch.index.analysis.CharFilterFactory; -import org.elasticsearch.index.analysis.TokenFilterFactory; -import org.elasticsearch.index.analysis.TokenizerFactory; -import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; -import org.elasticsearch.plugins.AnalysisPlugin; -import org.elasticsearch.plugins.Plugin; - -import java.util.Map; - -import static java.util.Collections.singletonMap; - -public class DummyAnalysisPlugin extends Plugin implements AnalysisPlugin { - @Override - public Map> getCharFilters() { - return singletonMap("dummy_char_filter", (a, b, c, d) -> new DummyCharFilterFactory()); - } - - @Override - public Map> getTokenFilters() { - return singletonMap("dummy_token_filter", (a, b, c, d) -> new DummyTokenFilterFactory()); - } - - @Override - public Map> getTokenizers() { - return singletonMap("dummy_tokenizer", (a, b, c, d) -> new DummyTokenizerFactory()); - } - - @Override - public Map>> getAnalyzers() { - return singletonMap("dummy", (a, b, c, d) -> new DummyAnalyzerProvider()); - } - -} diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/DummyTokenFilterFactory.java b/core/src/test/java/org/elasticsearch/indices/analysis/DummyTokenFilterFactory.java deleted file mode 100644 index 489e4dce7b8..00000000000 --- a/core/src/test/java/org/elasticsearch/indices/analysis/DummyTokenFilterFactory.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
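The dummy analysis plugin being deleted here registered its components through the map-based getCharFilters()/getTokenFilters()/getTokenizers()/getAnalyzers() hooks. The new test instead registers pre-configured char filters through AnalysisPlugin#getPreConfiguredCharFilters, and a standalone plugin would follow the same pattern. A sketch, reusing the test's AppendCharFilter purely for illustration (the plugin class name and the "append_marker" filter name are made up):

    import java.util.Collections;
    import java.util.List;

    import org.elasticsearch.index.analysis.PreConfiguredCharFilter;
    import org.elasticsearch.indices.analysis.AnalysisModuleTests;
    import org.elasticsearch.plugins.AnalysisPlugin;
    import org.elasticsearch.plugins.Plugin;

    // Hypothetical plugin; "append_marker" and the reuse of the test helper are for illustration only.
    public class AppendingAnalysisPlugin extends Plugin implements AnalysisPlugin {
        @Override
        public List<PreConfiguredCharFilter> getPreConfiguredCharFilters() {
            // PreConfiguredCharFilter.singleton(name, useFilterForMultitermQueries, create),
            // the same factory method exercised by the test above.
            return Collections.singletonList(
                    PreConfiguredCharFilter.singleton("append_marker", true,
                            reader -> new AnalysisModuleTests.AppendCharFilter(reader, "_marker")));
        }
    }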
- */ - -package org.elasticsearch.indices.analysis; - -import org.apache.lucene.analysis.TokenStream; -import org.elasticsearch.index.analysis.TokenFilterFactory; - -public class DummyTokenFilterFactory implements TokenFilterFactory { - @Override public String name() { - return "dummy_token_filter"; - } - - @Override public TokenStream create(TokenStream tokenStream) { - return null; - } -} diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java index d6e93ce559e..7722795525d 100644 --- a/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzerIntegrationIT.java @@ -46,7 +46,7 @@ import static org.hamcrest.Matchers.notNullValue; public class PreBuiltAnalyzerIntegrationIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Arrays.asList(DummyAnalysisPlugin.class, InternalSettingsPlugin.class); + return Arrays.asList(InternalSettingsPlugin.class); } public void testThatPreBuiltAnalyzersAreNotClosedOnIndexClose() throws Exception { @@ -114,41 +114,6 @@ public class PreBuiltAnalyzerIntegrationIT extends ESIntegTestCase { assertLuceneAnalyzersAreNotClosed(loadedAnalyzers); } - /** - * Test case for #5030: Upgrading analysis plugins fails - * See https://github.com/elastic/elasticsearch/issues/5030 - */ - public void testThatPluginAnalyzersCanBeUpdated() throws Exception { - final XContentBuilder mapping = jsonBuilder().startObject() - .startObject("type") - .startObject("properties") - .startObject("foo") - .field("type", "text") - .field("analyzer", "dummy") - .endObject() - .startObject("bar") - .field("type", "text") - .field("analyzer", "my_dummy") - .endObject() - .endObject() - .endObject() - .endObject(); - - Settings versionSettings = settings(randomVersion(random())) - .put("index.analysis.analyzer.my_dummy.type", "custom") - .put("index.analysis.analyzer.my_dummy.filter", "my_dummy_token_filter") - .put("index.analysis.analyzer.my_dummy.char_filter", "my_dummy_char_filter") - .put("index.analysis.analyzer.my_dummy.tokenizer", "my_dummy_tokenizer") - .put("index.analysis.tokenizer.my_dummy_tokenizer.type", "dummy_tokenizer") - .put("index.analysis.filter.my_dummy_token_filter.type", "dummy_token_filter") - .put("index.analysis.char_filter.my_dummy_char_filter.type", "dummy_char_filter") - .build(); - - client().admin().indices().prepareCreate("test-analysis-dummy").addMapping("type", mapping).setSettings(versionSettings).get(); - - ensureGreen(); - } - private void assertThatAnalyzersHaveBeenLoaded(Map> expectedLoadedAnalyzers) { for (Map.Entry> entry : expectedLoadedAnalyzers.entrySet()) { for (Version version : entry.getValue()) { diff --git a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java index ebfeb5f92d1..ad51a5d6942 100644 --- a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java +++ b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java @@ -257,56 +257,30 @@ public class AnalyzeActionIT extends ESIntegTestCase { assertThat(analyzeResponse.detail().analyzer().getTokens().length, equalTo(4)); //custom analyzer - analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST") - 
.setExplain(true).addCharFilter("html_strip").setTokenizer("keyword").addTokenFilter("lowercase").get(); + analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST") + .setExplain(true).setTokenizer("keyword").addTokenFilter("lowercase").get(); assertThat(analyzeResponse.detail().analyzer(), IsNull.nullValue()); - //charfilters - assertThat(analyzeResponse.detail().charfilters().length, equalTo(1)); - assertThat(analyzeResponse.detail().charfilters()[0].getName(), equalTo("html_strip")); - assertThat(analyzeResponse.detail().charfilters()[0].getTexts().length, equalTo(1)); - assertThat(analyzeResponse.detail().charfilters()[0].getTexts()[0], equalTo("\nTHIS IS A TEST\n")); //tokenizer assertThat(analyzeResponse.detail().tokenizer().getName(), equalTo("keyword")); assertThat(analyzeResponse.detail().tokenizer().getTokens().length, equalTo(1)); - assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getTerm(), equalTo("\nTHIS IS A TEST\n")); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getTerm(), equalTo("THIS IS A TEST")); //tokenfilters assertThat(analyzeResponse.detail().tokenfilters().length, equalTo(1)); assertThat(analyzeResponse.detail().tokenfilters()[0].getName(), equalTo("lowercase")); assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens().length, equalTo(1)); - assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[0].getTerm(), equalTo("\nthis is a test\n")); - + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[0].getTerm(), equalTo("this is a test")); //check other attributes analyzeResponse = client().admin().indices().prepareAnalyze("This is troubled") - .setExplain(true).setTokenizer("standard").addTokenFilter("snowball").get(); + .setExplain(true).setTokenizer("standard").addTokenFilter("lowercase").get(); assertThat(analyzeResponse.detail().tokenfilters().length, equalTo(1)); - assertThat(analyzeResponse.detail().tokenfilters()[0].getName(), equalTo("snowball")); + assertThat(analyzeResponse.detail().tokenfilters()[0].getName(), equalTo("lowercase")); assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens().length, equalTo(3)); - assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[2].getTerm(), equalTo("troubl")); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[2].getTerm(), equalTo("troubled")); String[] expectedAttributesKey = { "bytes", - "positionLength", - "keyword"}; - assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[2].getAttributes().size(), equalTo(expectedAttributesKey.length)); - Object extendedAttribute; - - for (String key : expectedAttributesKey) { - extendedAttribute = analyzeResponse.detail().tokenfilters()[0].getTokens()[2].getAttributes().get(key); - assertThat(extendedAttribute, notNullValue()); - } - } - - public void testDetailAnalyzeSpecifyAttributes() throws Exception { - AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("This is troubled") - .setExplain(true).setTokenizer("standard").addTokenFilter("snowball").setAttributes("keyword").get(); - - assertThat(analyzeResponse.detail().tokenfilters().length, equalTo(1)); - assertThat(analyzeResponse.detail().tokenfilters()[0].getName(), equalTo("snowball")); - assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens().length, equalTo(3)); - assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[2].getTerm(), equalTo("troubl")); - String[] expectedAttributesKey = { - "keyword"}; + "positionLength"}; 
assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[2].getAttributes().size(), equalTo(expectedAttributesKey.length)); Object extendedAttribute; diff --git a/core/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java b/core/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java index 0a106530f05..4a3b9416396 100644 --- a/core/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java +++ b/core/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java @@ -27,7 +27,6 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.Callback; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; @@ -52,6 +51,7 @@ import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentMap; +import java.util.function.Consumer; import java.util.stream.Collectors; import static java.util.Collections.emptyMap; @@ -226,7 +226,7 @@ public abstract class AbstractIndicesClusterStateServiceTestCase extends ESTestC PeerRecoveryTargetService recoveryTargetService, PeerRecoveryTargetService.RecoveryListener recoveryListener, RepositoriesService repositoriesService, - Callback onShardFailure) throws IOException { + Consumer onShardFailure) throws IOException { failRandomly(); MockIndexService indexService = indexService(recoveryState.getShardId().getIndex()); MockIndexShard indexShard = indexService.createShard(shardRouting); diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java index 4f0fec4c85e..a2e67858584 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java +++ b/core/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java @@ -116,7 +116,7 @@ public class PeerRecoveryTargetServiceTests extends IndexShardTestCase { generation, resolve, FileChannel::open, - TranslogConfig.DEFAULT_BUFFER_SIZE, () -> globalCheckpoint)) {} + TranslogConfig.DEFAULT_BUFFER_SIZE, () -> globalCheckpoint, generation, () -> generation)) {} return tempDir; } diff --git a/core/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java b/core/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java index 0ac353b4ae8..72238d3b596 100644 --- a/core/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java @@ -49,7 +49,7 @@ public class ValueSourceTests extends ESTestCase { myPreciousMap.put("field2", "value2"); IngestDocument ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>()); - ingestDocument.setFieldValue(TestTemplateService.instance().compile("field1"), + ingestDocument.setFieldValue(new TestTemplateService.MockTemplateScript.Factory("field1"), ValueSource.wrap(myPreciousMap, TestTemplateService.instance())); ingestDocument.removeField("field1.field2"); @@ -62,7 +62,7 @@ public class ValueSourceTests extends ESTestCase { myPreciousList.add("value"); IngestDocument ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>()); - 
ingestDocument.setFieldValue(TestTemplateService.instance().compile("field1"), + ingestDocument.setFieldValue(new TestTemplateService.MockTemplateScript.Factory("field1"), ValueSource.wrap(myPreciousList, TestTemplateService.instance())); ingestDocument.removeField("field1.0"); diff --git a/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index 89c65ad2c8d..e980081479b 100644 --- a/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/core/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -132,8 +132,8 @@ public class PluginsServiceTests extends ESTestCase { final Path fake = home.resolve("plugins").resolve("fake"); Files.createDirectories(fake); Files.createFile(fake.resolve("plugin.jar")); - final Path removing = fake.resolve(".removing-fake"); - Files.createFile(fake.resolve(".removing-fake")); + final Path removing = home.resolve("plugins").resolve(".removing-fake"); + Files.createFile(removing); PluginTestUtil.writeProperties( fake, "description", "fake", diff --git a/core/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java b/core/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java index 732614112bd..835dd7cd9fa 100644 --- a/core/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java +++ b/core/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java @@ -47,6 +47,11 @@ public class BaseRestHandlerTests extends ESTestCase { request.param("consumed"); return channel -> executed.set(true); } + + @Override + public String getName() { + return "test_one_unconsumed_response_action"; + } }; final HashMap params = new HashMap<>(); @@ -68,6 +73,11 @@ public class BaseRestHandlerTests extends ESTestCase { request.param("consumed"); return channel -> executed.set(true); } + + @Override + public String getName() { + return "test_multiple_unconsumed_response_action"; + } }; final HashMap params = new HashMap<>(); @@ -99,6 +109,11 @@ public class BaseRestHandlerTests extends ESTestCase { protected Set responseParams() { return Collections.singleton("response_param"); } + + @Override + public String getName() { + return "test_unconsumed_did_you_mean_response_action"; + } }; final HashMap params = new HashMap<>(); @@ -137,6 +152,11 @@ public class BaseRestHandlerTests extends ESTestCase { protected Set responseParams() { return Collections.singleton("response_param"); } + + @Override + public String getName() { + return "test_unconsumed_response_action"; + } }; final HashMap params = new HashMap<>(); @@ -155,6 +175,11 @@ public class BaseRestHandlerTests extends ESTestCase { protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { return channel -> executed.set(true); } + + @Override + public String getName() { + return "test_default_response_action"; + } }; final HashMap params = new HashMap<>(); @@ -185,6 +210,11 @@ public class BaseRestHandlerTests extends ESTestCase { protected Table getTableWithHeader(RestRequest request) { return null; } + + @Override + public String getName() { + return "test_cat_response_action"; + } }; final HashMap params = new HashMap<>(); diff --git a/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java index 102cfe5e3ac..589a3edbe1d 100644 --- a/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ 
b/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -41,6 +41,7 @@ import org.elasticsearch.http.HttpTransportSettings; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; +import org.elasticsearch.usage.UsageService; import org.junit.Before; import java.io.IOException; @@ -68,6 +69,7 @@ public class RestControllerTests extends ESTestCase { private CircuitBreaker inFlightRequestsBreaker; private RestController restController; private HierarchyCircuitBreakerService circuitBreakerService; + private UsageService usageService; @Before public void setup() { @@ -77,11 +79,12 @@ public class RestControllerTests extends ESTestCase { .put(HierarchyCircuitBreakerService.IN_FLIGHT_REQUESTS_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), BREAKER_LIMIT) .build(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); + usageService = new UsageService(settings); // we can do this here only because we know that we don't adjust breaker settings dynamically in the test inFlightRequestsBreaker = circuitBreakerService.getBreaker(CircuitBreaker.IN_FLIGHT_REQUESTS); HttpServerTransport httpServerTransport = new TestHttpServerTransport(); - restController = new RestController(settings, Collections.emptySet(), null, null, circuitBreakerService); + restController = new RestController(settings, Collections.emptySet(), null, null, circuitBreakerService, usageService); restController.registerHandler(RestRequest.Method.GET, "/", (request, channel, client) -> channel.sendResponse( new BytesRestResponse(RestStatus.OK, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY))); @@ -96,7 +99,7 @@ public class RestControllerTests extends ESTestCase { public void testApplyRelevantHeaders() throws Exception { final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); Set headers = new HashSet<>(Arrays.asList("header.1", "header.2")); - final RestController restController = new RestController(Settings.EMPTY, headers, null, null, circuitBreakerService); + final RestController restController = new RestController(Settings.EMPTY, headers, null, null, circuitBreakerService, usageService); Map> restHeaders = new HashMap<>(); restHeaders.put("header.1", Collections.singletonList("true")); restHeaders.put("header.2", Collections.singletonList("true")); @@ -115,7 +118,8 @@ public class RestControllerTests extends ESTestCase { } public void testCanTripCircuitBreaker() throws Exception { - RestController controller = new RestController(Settings.EMPTY, Collections.emptySet(), null, null, circuitBreakerService); + RestController controller = new RestController(Settings.EMPTY, Collections.emptySet(), null, null, circuitBreakerService, + usageService); // trip circuit breaker by default controller.registerHandler(RestRequest.Method.GET, "/trip", new FakeRestHandler(true)); controller.registerHandler(RestRequest.Method.GET, "/do-not-trip", new FakeRestHandler(false)); @@ -176,7 +180,7 @@ public class RestControllerTests extends ESTestCase { return (RestRequest request, RestChannel channel, NodeClient client) -> wrapperCalled.set(true); }; final RestController restController = new RestController(Settings.EMPTY, Collections.emptySet(), wrapper, null, - circuitBreakerService); + circuitBreakerService, usageService); final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); restController.dispatchRequest(new FakeRestRequest.Builder(xContentRegistry()).build(), null, null, 
threadContext, handler); assertTrue(wrapperCalled.get()); @@ -259,7 +263,7 @@ public class RestControllerTests extends ESTestCase { AssertingChannel channel = new AssertingChannel(request, true, RestStatus.NOT_ACCEPTABLE); restController = new RestController( Settings.builder().put(HttpTransportSettings.SETTING_HTTP_CONTENT_TYPE_REQUIRED.getKey(), true).build(), - Collections.emptySet(), null, null, circuitBreakerService); + Collections.emptySet(), null, null, circuitBreakerService, usageService); restController.registerHandler(RestRequest.Method.GET, "/", (r, c, client) -> c.sendResponse( new BytesRestResponse(RestStatus.OK, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY))); diff --git a/core/src/test/java/org/elasticsearch/rest/RestRequestTests.java b/core/src/test/java/org/elasticsearch/rest/RestRequestTests.java index 14eb413de08..d1c7d03e1b1 100644 --- a/core/src/test/java/org/elasticsearch/rest/RestRequestTests.java +++ b/core/src/test/java/org/elasticsearch/rest/RestRequestTests.java @@ -43,11 +43,14 @@ public class RestRequestTests extends ESTestCase { public void testContentParser() throws IOException { Exception e = expectThrows(ElasticsearchParseException.class, () -> new ContentRestRequest("", emptyMap()).contentParser()); - assertEquals("Body required", e.getMessage()); + assertEquals("request body is required", e.getMessage()); e = expectThrows(ElasticsearchParseException.class, () -> new ContentRestRequest("", singletonMap("source", "{}")).contentParser()); - assertEquals("Body required", e.getMessage()); + assertEquals("request body is required", e.getMessage()); assertEquals(emptyMap(), new ContentRestRequest("{}", emptyMap()).contentParser().map()); + e = expectThrows(ElasticsearchParseException.class, () -> + new ContentRestRequest("", emptyMap(), emptyMap()).contentParser()); + assertEquals("request body is required", e.getMessage()); } public void testApplyContentParser() throws IOException { @@ -59,7 +62,9 @@ public class RestRequestTests extends ESTestCase { } public void testContentOrSourceParam() throws IOException { - assertEquals(BytesArray.EMPTY, new ContentRestRequest("", emptyMap()).contentOrSourceParam().v2()); + Exception e = expectThrows(ElasticsearchParseException.class, () -> + new ContentRestRequest("", emptyMap()).contentOrSourceParam()); + assertEquals("request body or source parameter is required", e.getMessage()); assertEquals(new BytesArray("stuff"), new ContentRestRequest("stuff", emptyMap()).contentOrSourceParam().v2()); assertEquals(new BytesArray("stuff"), new ContentRestRequest("stuff", MapBuilder.newMapBuilder() @@ -68,6 +73,10 @@ public class RestRequestTests extends ESTestCase { new ContentRestRequest("", MapBuilder.newMapBuilder() .put("source", "{\"foo\": \"stuff\"}").put("source_content_type", "application/json").immutableMap()) .contentOrSourceParam().v2()); + e = expectThrows(IllegalStateException.class, () -> + new ContentRestRequest("", MapBuilder.newMapBuilder() + .put("source", "stuff2").immutableMap()).contentOrSourceParam()); + assertEquals("source and source_content_type parameters are required", e.getMessage()); } public void testHasContentOrSourceParam() throws IOException { @@ -80,7 +89,7 @@ public class RestRequestTests extends ESTestCase { public void testContentOrSourceParamParser() throws IOException { Exception e = expectThrows(ElasticsearchParseException.class, () -> new ContentRestRequest("", emptyMap()).contentOrSourceParamParser()); - assertEquals("Body required", e.getMessage()); + assertEquals("request body 
or source parameter is required", e.getMessage()); assertEquals(emptyMap(), new ContentRestRequest("{}", emptyMap()).contentOrSourceParamParser().map()); assertEquals(emptyMap(), new ContentRestRequest("{}", singletonMap("source", "stuff2")).contentOrSourceParamParser().map()); assertEquals(emptyMap(), new ContentRestRequest("", MapBuilder.newMapBuilder() @@ -138,6 +147,24 @@ public class RestRequestTests extends ESTestCase { assertEquals("only one Content-Type header should be provided", e.getMessage()); } + public void testRequiredContent() { + Exception e = expectThrows(ElasticsearchParseException.class, () -> + new ContentRestRequest("", emptyMap()).requiredContent()); + assertEquals("request body is required", e.getMessage()); + assertEquals(new BytesArray("stuff"), new ContentRestRequest("stuff", emptyMap()).requiredContent()); + assertEquals(new BytesArray("stuff"), + new ContentRestRequest("stuff", MapBuilder.newMapBuilder() + .put("source", "stuff2").put("source_content_type", "application/json").immutableMap()).requiredContent()); + e = expectThrows(ElasticsearchParseException.class, () -> + new ContentRestRequest("", MapBuilder.newMapBuilder() + .put("source", "{\"foo\": \"stuff\"}").put("source_content_type", "application/json").immutableMap()) + .requiredContent()); + assertEquals("request body is required", e.getMessage()); + e = expectThrows(IllegalStateException.class, () -> + new ContentRestRequest("test", null, Collections.emptyMap()).requiredContent()); + assertEquals("unknown content type", e.getMessage()); + } + private static final class ContentRestRequest extends RestRequest { private final BytesArray content; diff --git a/core/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsActionTests.java index 7ece6934a21..640b97605af 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/admin/cluster/RestNodesStatsActionTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; +import org.elasticsearch.usage.UsageService; import java.io.IOException; import java.util.Collections; @@ -43,7 +44,9 @@ public class RestNodesStatsActionTests extends ESTestCase { @Override public void setUp() throws Exception { super.setUp(); - action = new RestNodesStatsAction(Settings.EMPTY, new RestController(Settings.EMPTY, Collections.emptySet(), null, null, null)); + UsageService usageService = new UsageService(Settings.EMPTY); + action = new RestNodesStatsAction(Settings.EMPTY, + new RestController(Settings.EMPTY, Collections.emptySet(), null, null, null, usageService)); } public void testUnrecognizedMetric() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsActionTests.java index 12fcdaa2f55..26c1e1fa177 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/RestIndicesStatsActionTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.ESTestCase; 
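A change that recurs in the REST tests above and below: the RestController constructor now takes a UsageService as its final argument, so every test that builds a controller first constructs one. The shared pattern, with the roles of the null arguments inferred from how RestControllerTests passes them (treat the role comments as inference, not documentation):

    Settings settings = Settings.EMPTY;
    UsageService usageService = new UsageService(settings);
    RestController controller = new RestController(
            settings,
            Collections.emptySet(), // headers to copy into the thread context
            null,                   // handler wrapper (passed explicitly where a test needs one)
            null,                   // node client
            null,                   // circuit breaker service (RestControllerTests passes a real one)
            usageService);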
import org.elasticsearch.test.rest.FakeRestRequest; +import org.elasticsearch.usage.UsageService; import java.io.IOException; import java.util.Collections; @@ -41,7 +42,9 @@ public class RestIndicesStatsActionTests extends ESTestCase { @Override public void setUp() throws Exception { super.setUp(); - action = new RestIndicesStatsAction(Settings.EMPTY, new RestController(Settings.EMPTY, Collections.emptySet(), null, null, null)); + UsageService usageService = new UsageService(Settings.EMPTY); + action = new RestIndicesStatsAction(Settings.EMPTY, + new RestController(Settings.EMPTY, Collections.emptySet(), null, null, null, usageService)); } public void testUnrecognizedMetric() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java index 7ee15adae33..998020cbd26 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java @@ -58,6 +58,7 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.search.suggest.completion.CompletionStats; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; +import org.elasticsearch.usage.UsageService; import java.nio.file.Path; import java.util.ArrayList; @@ -74,7 +75,8 @@ public class RestIndicesActionTests extends ESTestCase { public void testBuildTable() { final Settings settings = Settings.EMPTY; - final RestController restController = new RestController(settings, Collections.emptySet(), null, null, null); + UsageService usageService = new UsageService(settings); + final RestController restController = new RestController(settings, Collections.emptySet(), null, null, null, usageService); final RestIndicesAction action = new RestIndicesAction(settings, restController, new IndexNameExpressionResolver(settings)); // build a (semi-)random table diff --git a/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java index 6fce7219d67..148af7f7d87 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.rest.RestController; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.usage.UsageService; import java.util.ArrayList; import java.util.Collections; @@ -50,7 +51,8 @@ public class RestRecoveryActionTests extends ESTestCase { public void testRestRecoveryAction() { final Settings settings = Settings.EMPTY; - final RestController restController = new RestController(settings, Collections.emptySet(), null, null, null); + UsageService usageService = new UsageService(settings); + final RestController restController = new RestController(settings, Collections.emptySet(), null, null, null, usageService); final RestRecoveryAction action = new RestRecoveryAction(settings, restController); final int totalShards = randomIntBetween(1, 32); final int successfulShards = Math.max(0, totalShards - randomIntBetween(1, 2)); diff --git a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java index 
be497149733..157b0969ae8 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptContextTests.java @@ -26,6 +26,15 @@ public class ScriptContextTests extends ESTestCase { public interface TwoNewInstance { String newInstance(int foo, int bar); String newInstance(int foo); + + interface StatefulFactory { + TwoNewInstance newFactory(); + } + } + + public interface TwoNewFactory { + String newFactory(int foo, int bar); + String newFactory(int foo); } public interface MissingNewInstance { @@ -40,6 +49,16 @@ public class ScriptContextTests extends ESTestCase { } } + public interface DummyStatefulScript { + int execute(int foo); + interface StatefulFactory { + DummyStatefulScript newInstance(); + } + interface Factory { + StatefulFactory newFactory(); + } + } + public void testTwoNewInstanceMethods() { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new ScriptContext<>("test", TwoNewInstance.class)); @@ -47,10 +66,24 @@ public class ScriptContextTests extends ESTestCase { + TwoNewInstance.class.getName() + "] for script context [test]", e.getMessage()); } + public void testTwoNewFactoryMethods() { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + new ScriptContext<>("test", TwoNewFactory.class)); + assertEquals("Cannot have multiple newFactory methods on FactoryType class [" + + TwoNewFactory.class.getName() + "] for script context [test]", e.getMessage()); + } + + public void testTwoNewInstanceStatefulFactoryMethods() { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + new ScriptContext<>("test", TwoNewInstance.StatefulFactory.class)); + assertEquals("Cannot have multiple newInstance methods on StatefulFactoryType class [" + + TwoNewInstance.class.getName() + "] for script context [test]", e.getMessage()); + } + public void testMissingNewInstanceMethod() { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new ScriptContext<>("test", MissingNewInstance.class)); - assertEquals("Could not find method newInstance on FactoryType class [" + assertEquals("Could not find method newInstance or method newFactory on FactoryType class [" + MissingNewInstance.class.getName() + "] for script context [test]", e.getMessage()); } @@ -58,6 +91,15 @@ public class ScriptContextTests extends ESTestCase { ScriptContext context = new ScriptContext<>("test", DummyScript.Factory.class); assertEquals("test", context.name); assertEquals(DummyScript.class, context.instanceClazz); + assertNull(context.statefulFactoryClazz); assertEquals(DummyScript.Factory.class, context.factoryClazz); } + + public void testStatefulFactoryReflection() { + ScriptContext context = new ScriptContext<>("test", DummyStatefulScript.Factory.class); + assertEquals("test", context.name); + assertEquals(DummyStatefulScript.class, context.instanceClazz); + assertEquals(DummyStatefulScript.StatefulFactory.class, context.statefulFactoryClazz); + assertEquals(DummyStatefulScript.Factory.class, context.factoryClazz); + } } diff --git a/core/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java b/core/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java index fb1e0885a5f..f1315b9cdc4 100644 --- a/core/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java +++ b/core/src/test/java/org/elasticsearch/script/ScriptMetaDataTests.java @@ -56,11 +56,11 @@ public class ScriptMetaDataTests extends AbstractSerializingTestCase 
StoredScriptSource.parse(null, builder.bytes(), XContentType.JSON)); assertThat(iae.getMessage(), equalTo("must specify lang for stored script")); } - // check for missing code parameter when parsing a script + // check for missing source parameter when parsing a script try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { builder.startObject().field("script").startObject().field("lang", "lang").endObject().endObject(); IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> StoredScriptSource.parse(null, builder.bytes(), XContentType.JSON)); - assertThat(iae.getMessage(), equalTo("must specify code for stored script")); + assertThat(iae.getMessage(), equalTo("must specify source for stored script")); } // check for illegal options parameter when parsing a script try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) { - builder.startObject().field("script").startObject().field("lang", "lang").field("code", "code") + builder.startObject().field("script").startObject().field("lang", "lang").field("source", "code") .startObject("options").field("option", "option").endObject().endObject().endObject(); IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> diff --git a/core/src/test/java/org/elasticsearch/script/StoredScriptsIT.java b/core/src/test/java/org/elasticsearch/script/StoredScriptsIT.java index 63945cd03a6..80a8f4deaa7 100644 --- a/core/src/test/java/org/elasticsearch/script/StoredScriptsIT.java +++ b/core/src/test/java/org/elasticsearch/script/StoredScriptsIT.java @@ -55,7 +55,7 @@ public class StoredScriptsIT extends ESIntegTestCase { .setId("foobar") .setContent(new BytesArray("{\"script\":\"1\"}"), XContentType.JSON)); String script = client().admin().cluster().prepareGetStoredScript(LANG, "foobar") - .get().getSource().getCode(); + .get().getSource().getSource(); assertNotNull(script); assertEquals("1", script); diff --git a/core/src/test/java/org/elasticsearch/search/SearchHitTests.java b/core/src/test/java/org/elasticsearch/search/SearchHitTests.java index 4f34d427c87..5c7f11875e3 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchHitTests.java +++ b/core/src/test/java/org/elasticsearch/search/SearchHitTests.java @@ -49,8 +49,10 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Predicate; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; @@ -140,7 +142,6 @@ public class SearchHitTests extends ESTestCase { boolean humanReadable = randomBoolean(); XContentType xContentType = randomFrom(XContentType.values()); BytesReference originalBytes = toShuffledXContent(searchHit, xContentType, ToXContent.EMPTY_PARAMS, humanReadable); - SearchHit parsed; try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { parser.nextToken(); // jump to first START_OBJECT @@ -151,6 +152,33 @@ public class SearchHitTests extends ESTestCase { assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType); } + /** + * This test adds randomized fields on all json objects and checks that we can parse it to + * ensure the parsing is lenient for forward 
compatibility. + * We need to exclude json objects with the "highlight" and "fields" field names since these + * objects allow arbitrary keys (the field names that are queried). Also we want to avoid + * adding anything under "_source" since it is not parsed, and avoid complexity by excluding + * everything under "inner_hits". They are also keyed by arbitrary names and contain SearchHits, + * which are already tested elsewhere. + */ + public void testFromXContentLenientParsing() throws IOException { + SearchHit searchHit = createTestItem(true); + XContentType xContentType = randomFrom(XContentType.values()); + BytesReference originalBytes = toXContent(searchHit, xContentType, true); + Predicate pathsToExclude = path -> (path.endsWith("highlight") || path.endsWith("fields") || path.contains("_source") + || path.contains("inner_hits")); + BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, pathsToExclude, random()); + + SearchHit parsed; + try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) { + parser.nextToken(); // jump to first START_OBJECT + parsed = SearchHit.fromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + assertNull(parser.nextToken()); + } + assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, true), xContentType); + } + /** * When e.g. with "stored_fields": "_none_", only "_index" and "_score" are returned. */ diff --git a/core/src/test/java/org/elasticsearch/search/SearchHitsTests.java b/core/src/test/java/org/elasticsearch/search/SearchHitsTests.java index 4c41e6fbcda..decfe804a42 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchHitsTests.java +++ b/core/src/test/java/org/elasticsearch/search/SearchHitsTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.search; +import org.apache.lucene.util.TestUtil; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ToXContent; @@ -30,8 +31,10 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.Collections; +import java.util.function.Predicate; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; public class SearchHitsTests extends ESTestCase { @@ -42,7 +45,7 @@ public class SearchHitsTests extends ESTestCase { for (int i = 0; i < searchHits; i++) { hits[i] = SearchHitTests.createTestItem(false); // creating random innerHits could create loops } - long totalHits = randomLong(); + long totalHits = frequently() ? TestUtil.nextLong(random(), 0, Long.MAX_VALUE) : -1; float maxScore = frequently() ?
randomFloat() : Float.NaN; return new SearchHits(hits, totalHits, maxScore); } @@ -54,6 +57,10 @@ public class SearchHitsTests extends ESTestCase { BytesReference originalBytes = toShuffledXContent(searchHits, xcontentType, ToXContent.EMPTY_PARAMS, humanReadable); SearchHits parsed; try (XContentParser parser = createParser(xcontentType.xContent(), originalBytes)) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals(SearchHits.Fields.HITS, parser.currentName()); + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); parsed = SearchHits.fromXContent(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); @@ -62,6 +69,35 @@ public class SearchHitsTests extends ESTestCase { assertToXContentEquivalent(originalBytes, toXContent(parsed, xcontentType, humanReadable), xcontentType); } + /** + * This test adds randomized fields on all json objects and checks that we + * can parse it to ensure the parsing is lenient for forward compatibility. + * We need to exclude json objects with the "highlight" and "fields" field + * names since these objects allow arbitrary keys (the field names that are + * queried). Also we want to avoid adding anything under "_source" since + * it is not parsed. + */ + public void testFromXContentLenientParsing() throws IOException { + SearchHits searchHits = createTestItem(); + XContentType xcontentType = randomFrom(XContentType.values()); + BytesReference originalBytes = toXContent(searchHits, xcontentType, ToXContent.EMPTY_PARAMS, true); + Predicate pathsToExclude = path -> (path.isEmpty() || path.endsWith("highlight") || path.endsWith("fields") + || path.contains("_source")); + BytesReference withRandomFields = insertRandomFields(xcontentType, originalBytes, pathsToExclude, random()); + SearchHits parsed = null; + try (XContentParser parser = createParser(xcontentType.xContent(), withRandomFields)) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals(SearchHits.Fields.HITS, parser.currentName()); + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + parsed = SearchHits.fromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } + assertToXContentEquivalent(originalBytes, toXContent(parsed, xcontentType, true), xcontentType); + } + public void testToXContent() throws IOException { SearchHit[] hits = new SearchHit[] { new SearchHit(1, "id1", new Text("type"), Collections.emptyMap()), diff --git a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java index 96767c99b9d..4b053b1968f 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java +++ b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java @@ -56,7 +56,7 @@ import org.elasticsearch.search.fetch.subphase.highlight.CustomHighlighter; import org.elasticsearch.search.fetch.subphase.highlight.FastVectorHighlighter; import org.elasticsearch.search.fetch.subphase.highlight.Highlighter; import org.elasticsearch.search.fetch.subphase.highlight.PlainHighlighter; -import org.elasticsearch.search.fetch.subphase.highlight.PostingsHighlighter;
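The SearchModuleTests changes here swap the built-in "postings" highlighter for the new "unified" highlighter (see the assertions that follow). From client code the replacement is selected by name through the usual highlight builder; a sketch using the standard query-builder API, which this diff does not itself show (the field name and query text are placeholders):

    // Ask for the "unified" highlighter by name.
    SearchSourceBuilder source = new SearchSourceBuilder()
            .query(QueryBuilders.matchQuery("body", "troubled"))
            .highlighter(new HighlightBuilder().field("body").highlighterType("unified"));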
+import org.elasticsearch.search.fetch.subphase.highlight.UnifiedHighlighter; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.suggest.CustomSuggesterSearchIT.CustomSuggestionBuilder; import org.elasticsearch.search.suggest.SuggestionBuilder; @@ -204,7 +204,7 @@ public class SearchModuleTests extends ModuleTestCase { Map highlighters = module.getHighlighters(); assertEquals(FastVectorHighlighter.class, highlighters.get("fvh").getClass()); assertEquals(PlainHighlighter.class, highlighters.get("plain").getClass()); - assertEquals(PostingsHighlighter.class, highlighters.get("postings").getClass()); + assertEquals(UnifiedHighlighter.class, highlighters.get("unified").getClass()); assertSame(highlighters.get("custom"), customHighlighter); } @@ -279,7 +279,6 @@ public class SearchModuleTests extends ModuleTestCase { "more_like_this", "multi_match", "nested", - "parent_id", "prefix", "query_string", "range", diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java index 2a1df90a956..c6e1156bb71 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java @@ -147,6 +147,11 @@ public class AggregationsTests extends ESTestCase { @Before public void init() throws Exception { for (InternalAggregationTestCase aggsTest : aggsTests) { + if (aggsTest instanceof InternalMultiBucketAggregationTestCase) { + // Lower the number of buckets generated by multi bucket aggregation tests in + // order to avoid creating too many aggregations. + ((InternalMultiBucketAggregationTestCase) aggsTest).maxNumberOfBuckets = 3; + } aggsTest.setUp(); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregationTestCase.java index 64f7c3cca20..bc1ac5976ff 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregationTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregationTestCase.java @@ -36,7 +36,22 @@ import static java.util.Collections.emptyMap; public abstract class InternalMultiBucketAggregationTestCase extends InternalAggregationTestCase { + private static final int DEFAULT_MAX_NUMBER_OF_BUCKETS = 10; + Supplier subAggregationsSupplier; + int maxNumberOfBuckets = DEFAULT_MAX_NUMBER_OF_BUCKETS; + + protected int randomNumberOfBuckets() { + return randomIntBetween(minNumberOfBuckets(), maxNumberOfBuckets()); + } + + protected int minNumberOfBuckets() { + return 0; + } + + protected int maxNumberOfBuckets() { + return maxNumberOfBuckets; + } @Override public void setUp() throws Exception { @@ -57,7 +72,10 @@ public abstract class InternalMultiBucketAggregationTestCase pipelineAggregators, Map metaData) { - return createTestInstance(name, pipelineAggregators, metaData, subAggregationsSupplier.get()); + T instance = createTestInstance(name, pipelineAggregators, metaData, subAggregationsSupplier.get()); + assert instance.getBuckets().size() <= maxNumberOfBuckets() : + "Maximum number of buckets exceeded for " + instance.getClass().getSimpleName() + " aggregation"; + return instance; } protected abstract T createTestInstance(String name, List pipelineAggregators, diff --git 
a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java index 2363c21c7d1..e55b2c9e648 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java @@ -30,9 +30,8 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; -import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.filter.Filter; -import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; @@ -41,9 +40,7 @@ import org.elasticsearch.search.aggregations.metrics.max.Max; import org.elasticsearch.search.aggregations.metrics.stats.Stats; import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats; import org.elasticsearch.search.aggregations.metrics.sum.Sum; -import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.test.ESIntegTestCase; -import org.hamcrest.Matchers; import java.io.IOException; import java.util.ArrayList; @@ -61,12 +58,10 @@ import java.util.function.Function; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction; import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static org.elasticsearch.search.aggregations.AggregationBuilders.extendedStats; import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; -import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.max; import static org.elasticsearch.search.aggregations.AggregationBuilders.stats; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; @@ -75,7 +70,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; import static org.hamcrest.core.IsNull.notNullValue; @ESIntegTestCase.SuiteScopeTestCase @@ -287,85 +281,6 @@ public class DoubleTermsIT extends AbstractTermsTestCase { assertThat(exception.getMessage(), containsString("[size] must be greater than 0. 
Found [0] in [terms]")); } - public void testSingleValueField() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .execute().actionGet(); - - assertSearchResponse(response); - - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - - for (int i = 0; i < 5; i++) { - Terms.Bucket bucket = terms.getBucketByKey("" + (double) i); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + (double)i)); - assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - } - } - - public void testSingleValueFieldWithMaxSize() throws Exception { - SearchResponse response = client().prepareSearch("high_card_idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .size(20) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.key(true))) // we need to sort by terms cause we're checking the first 20 values - .execute().actionGet(); - - assertSearchResponse(response); - - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(20)); - - for (int i = 0; i < 20; i++) { - Terms.Bucket bucket = terms.getBucketByKey("" + (double) i); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + (double) i)); - assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - } - } - - public void testSingleValueFieldWithFiltering() throws Exception { - double includes[] = { 1, 2, 3, 98.2 }; - double excludes[] = { 2, 4, 99 }; - double empty[] = {}; - testIncludeExcludeResults(includes, empty, new double[] { 1, 2, 3 }); - testIncludeExcludeResults(includes, excludes, new double[] { 1, 3 }); - testIncludeExcludeResults(empty, excludes, new double[] { 0, 1, 3 }); - } - - private void testIncludeExcludeResults(double[] includes, double[] excludes, double[] expecteds) { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .includeExclude(new IncludeExclude(includes, excludes)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .execute().actionGet(); - assertSearchResponse(response); - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(expecteds.length)); - - for (int i = 0; i < expecteds.length; i++) { - Terms.Bucket bucket = terms.getBucketByKey("" + expecteds[i]); - assertThat(bucket, notNullValue()); - assertThat(bucket.getDocCount(), equalTo(1L)); - } - } - public void testSingleValueFieldWithPartitionedFiltering() throws Exception { runTestFieldWithPartitionedFiltering(SINGLE_VALUED_FIELD_NAME); } @@ -403,120 +318,6 @@ public class DoubleTermsIT extends AbstractTermsTestCase { assertEquals(expectedCardinality, foundTerms.size()); } - public void testSingleValueFieldOrderedByTermAsc() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - 
.order(BucketOrder.key(true))) - .execute().actionGet(); - - assertSearchResponse(response); - - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - - int i = 0; - for (Terms.Bucket bucket : terms.getBuckets()) { - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + (double)i)); - assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - i++; - } - } - - public void testSingleValueFieldOrderedByTermDesc() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.key(false))) - .execute().actionGet(); - - assertSearchResponse(response); - - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - - int i = 4; - for (Terms.Bucket bucket : terms.getBuckets()) { - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + (double) i)); - assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - i--; - } - } - - public void testSingleValueFieldOrderedByTieBreaker() throws Exception { - SearchResponse response = client().prepareSearch("idx").setTypes("type") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("max_constant", randomBoolean())) - .subAggregation(max("max_constant").field("constant"))) - .execute().actionGet(); - - assertSearchResponse(response); - - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - - int i = 0; - for (Terms.Bucket bucket : terms.getBuckets()) { - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + (double)i)); - assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - i++; - } - } - - public void testSingleValuedFieldWithSubAggregation() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sum").field(MULTI_VALUED_FIELD_NAME))) - .execute().actionGet(); - - assertSearchResponse(response); - - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - assertThat(((InternalAggregation)terms).getProperty("_bucket_count"), equalTo(5)); - Object[] propertiesKeys = (Object[]) ((InternalAggregation)terms).getProperty("_key"); - Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)terms).getProperty("_count"); - Object[] propertiesCounts = (Object[]) ((InternalAggregation)terms).getProperty("sum.value"); - - for (int i = 0; i < 5; i++) { - Terms.Bucket bucket = terms.getBucketByKey("" + (double) i); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + (double) i)); - assertThat(bucket.getKeyAsNumber().intValue(), 
equalTo(i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - Sum sum = bucket.getAggregations().get("sum"); - assertThat(sum, notNullValue()); - assertThat((long) sum.getValue(), equalTo(i+i+1L)); - assertThat((double) propertiesKeys[i], equalTo((double) i)); - assertThat((long) propertiesDocCounts[i], equalTo(1L)); - assertThat((double) propertiesCounts[i], equalTo((double) i + i + 1L)); - } - } - public void testSingleValuedFieldWithValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(terms("terms") @@ -542,34 +343,6 @@ public class DoubleTermsIT extends AbstractTermsTestCase { } } - public void testMultiValuedField() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .execute().actionGet(); - - assertSearchResponse(response); - - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(6)); - - for (int i = 0; i < 6; i++) { - Terms.Bucket bucket = terms.getBucketByKey("" + (double) i); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + (double) i)); - assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i)); - if (i == 0 || i == 5) { - assertThat(bucket.getDocCount(), equalTo(1L)); - } else { - assertThat(bucket.getDocCount(), equalTo(2L)); - } - } - } - public void testMultiValuedFieldWithValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(terms("terms") @@ -696,23 +469,6 @@ public class DoubleTermsIT extends AbstractTermsTestCase { } } - public void testUnmapped() throws Exception { - SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .size(randomIntBetween(1, 5)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .execute().actionGet(); - - assertSearchResponse(response); - - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(0)); - } - public void testPartiallyUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx_unmapped", "idx") .addAggregation(terms("terms") @@ -763,53 +519,6 @@ public class DoubleTermsIT extends AbstractTermsTestCase { } } - public void testEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1L).minDocCount(0) - .subAggregation(terms("terms").field(SINGLE_VALUED_FIELD_NAME))) - .execute().actionGet(); - - assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L)); - Histogram histo = searchResponse.getAggregations().get("histo"); - assertThat(histo, Matchers.notNullValue()); - Histogram.Bucket bucket = histo.getBuckets().get(1); - assertThat(bucket, Matchers.notNullValue()); - - Terms terms = bucket.getAggregations().get("terms"); - assertThat(terms, Matchers.notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().isEmpty(), is(true)); - } - - public void testSingleValuedFieldOrderedBySingleValueSubAggregationAsc() throws Exception { - boolean asc = true; - SearchResponse 
response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("avg_i", asc)).subAggregation(avg("avg_i").field(SINGLE_VALUED_FIELD_NAME))) - .execute().actionGet(); - - - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - - for (int i = 0; i < 5; i++) { - Terms.Bucket bucket = terms.getBucketByKey("" + (double) i); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + (double)i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - Avg avg = bucket.getAggregations().get("avg_i"); - assertThat(avg, notNullValue()); - assertThat(avg.getValue(), equalTo((double) i)); - } - } - public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscWithSubTermsAgg() throws Exception { boolean asc = true; SearchResponse response = client() @@ -1019,36 +728,6 @@ public class DoubleTermsIT extends AbstractTermsTestCase { } } - public void testSingleValuedFieldOrderedBySingleValueSubAggregationDesc() throws Exception { - boolean asc = false; - SearchResponse response = client() - .prepareSearch("idx") - .addAggregation( - terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("avg_i", asc)).subAggregation(avg("avg_i").field(SINGLE_VALUED_FIELD_NAME))) - .execute().actionGet(); - - - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - - for (int i = 4; i >= 0; i--) { - - Terms.Bucket bucket = terms.getBucketByKey("" + (double) i); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + (double)i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - - Avg avg = bucket.getAggregations().get("avg_i"); - assertThat(avg, notNullValue()); - assertThat(avg.getValue(), equalTo((double) i)); - } - } - public void testSingleValuedFieldOrderedByMultiValueSubAggregationAsc() throws Exception { boolean asc = true; SearchResponse response = client() diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpTermsIT.java index 3a69812df1f..402d7d2648b 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpTermsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/IpTermsIT.java @@ -18,9 +18,6 @@ */ package org.elasticsearch.search.aggregations.bucket; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; - import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.plugins.Plugin; @@ -35,6 +32,9 @@ import java.util.Collections; import java.util.Map; import java.util.function.Function; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; + public class IpTermsIT extends AbstractTermsTestCase { @Override @@ -62,30 +62,6 @@ public class 
IpTermsIT extends AbstractTermsTestCase { } } - public void testBasics() throws Exception { - assertAcked(prepareCreate("index").addMapping("type", "ip", "type=ip")); - indexRandom(true, - client().prepareIndex("index", "type", "1").setSource("ip", "192.168.1.7"), - client().prepareIndex("index", "type", "2").setSource("ip", "192.168.1.7"), - client().prepareIndex("index", "type", "3").setSource("ip", "2001:db8::2:1")); - - SearchResponse response = client().prepareSearch("index").addAggregation( - AggregationBuilders.terms("my_terms").field("ip").executionHint(randomExecutionHint())).get(); - assertSearchResponse(response); - Terms terms = response.getAggregations().get("my_terms"); - assertEquals(2, terms.getBuckets().size()); - - Terms.Bucket bucket1 = terms.getBuckets().get(0); - assertEquals(2, bucket1.getDocCount()); - assertEquals("192.168.1.7", bucket1.getKey()); - assertEquals("192.168.1.7", bucket1.getKeyAsString()); - - Terms.Bucket bucket2 = terms.getBuckets().get(1); - assertEquals(1, bucket2.getDocCount()); - assertEquals("2001:db8::2:1", bucket2.getKey()); - assertEquals("2001:db8::2:1", bucket2.getKeyAsString()); - } - public void testScriptValue() throws Exception { assertAcked(prepareCreate("index").addMapping("type", "ip", "type=ip")); indexRandom(true, diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java index 565cdaaa87e..db66a46f312 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java @@ -29,9 +29,8 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; -import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.filter.Filter; -import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; @@ -40,9 +39,7 @@ import org.elasticsearch.search.aggregations.metrics.max.Max; import org.elasticsearch.search.aggregations.metrics.stats.Stats; import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats; import org.elasticsearch.search.aggregations.metrics.sum.Sum; -import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.test.ESIntegTestCase; -import org.hamcrest.Matchers; import java.io.IOException; import java.util.ArrayList; @@ -59,11 +56,9 @@ import java.util.Set; import java.util.function.Function; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static org.elasticsearch.search.aggregations.AggregationBuilders.extendedStats; import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; -import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.max; import static 
org.elasticsearch.search.aggregations.AggregationBuilders.stats; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; @@ -72,7 +67,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; import static org.hamcrest.core.IsNull.notNullValue; @ESIntegTestCase.SuiteScopeTestCase @@ -280,74 +274,6 @@ public class LongTermsIT extends AbstractTermsTestCase { assertThat(exception.getMessage(), containsString("[size] must be greater than 0. Found [0] in [terms]")); } - public void testSingleValueField() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .execute().actionGet(); - - assertSearchResponse(response); - - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - - for (int i = 0; i < 5; i++) { - Terms.Bucket bucket = terms.getBucketByKey("" + i); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + i)); - assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - } - } - - public void testSingleValueFieldWithFiltering() throws Exception { - long includes[] = { 1, 2, 3, 98 }; - long excludes[] = { -1, 2, 4 }; - long empty[] = {}; - testIncludeExcludeResults(1, includes, empty, new long[] { 1, 2, 3 }, new long[0]); - testIncludeExcludeResults(1, includes, excludes, new long[] { 1, 3 }, new long[0]); - testIncludeExcludeResults(1, empty, excludes, new long[] { 0, 1, 3 }, new long[0]); - - testIncludeExcludeResults(0, includes, empty, new long[] { 1, 2, 3}, new long[] { 98 }); - testIncludeExcludeResults(0, includes, excludes, new long[] { 1, 3 }, new long[] { 98 }); - testIncludeExcludeResults(0, empty, excludes, new long[] { 0, 1, 3 }, new long[] {5, 6, 7, 8, 9, 10, 11}); - } - - private void testIncludeExcludeResults(int minDocCount, long[] includes, long[] excludes, - long[] expectedWithCounts, long[] expectedZeroCounts) { - SearchResponse response = client().prepareSearch("idx", "high_card_idx") - .setQuery(QueryBuilders.boolQuery().mustNot(QueryBuilders.termQuery("_index", "high_card_idx"))) - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .includeExclude(new IncludeExclude(includes, excludes)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .minDocCount(minDocCount)) - .execute().actionGet(); - assertSearchResponse(response); - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(expectedWithCounts.length + expectedZeroCounts.length)); - - for (int i = 0; i < expectedWithCounts.length; i++) { - Terms.Bucket bucket = terms.getBucketByKey("" + expectedWithCounts[i]); - assertThat(bucket, notNullValue()); - assertThat(bucket.getDocCount(), equalTo(1L)); - } - - for (int i = 0; i < expectedZeroCounts.length; i++) { - Terms.Bucket bucket = terms.getBucketByKey("" + expectedZeroCounts[i]); - assertThat(bucket, notNullValue()); - assertThat(bucket.getDocCount(), equalTo(0L)); - } - } 
- - - public void testSingleValueFieldWithPartitionedFiltering() throws Exception { runTestFieldWithPartitionedFiltering(SINGLE_VALUED_FIELD_NAME); } @@ -387,142 +313,6 @@ public class LongTermsIT extends AbstractTermsTestCase { assertEquals(expectedCardinality, foundTerms.size()); } - - public void testSingleValueFieldWithMaxSize() throws Exception { - SearchResponse response = client().prepareSearch("high_card_idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .size(20) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.key(true))) // we need to sort by terms cause we're checking the first 20 values - .execute().actionGet(); - - assertSearchResponse(response); - - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(20)); - - for (int i = 0; i < 20; i++) { - Terms.Bucket bucket = terms.getBucketByKey("" + i); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + i)); - assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - } - } - - public void testSingleValueFieldOrderedByTermAsc() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.key(true))) - .execute().actionGet(); - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - - int i = 0; - for (Terms.Bucket bucket : terms.getBuckets()) { - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + i)); - assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - i++; - } - } - - public void testSingleValueFieldOrderedByTermDesc() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.key(false))) - .execute().actionGet(); - - assertSearchResponse(response); - - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - - int i = 4; - for (Terms.Bucket bucket : terms.getBuckets()) { - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + i)); - assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - i--; - } - } - - public void testSingleValueFieldOrderedByTieBreaker() throws Exception { - SearchResponse response = client().prepareSearch("idx").setTypes("type") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("max_constant", randomBoolean())) - .subAggregation(max("max_constant").field("constant"))) - .execute().actionGet(); - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - - int i = 0; - for 
(Terms.Bucket bucket : terms.getBuckets()) { - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + i)); - assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - i++; - } - } - - public void testSingleValuedFieldWithSubAggregation() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sum").field(MULTI_VALUED_FIELD_NAME))) - .execute().actionGet(); - - assertSearchResponse(response); - - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - Object[] propertiesKeys = (Object[]) ((InternalAggregation)terms).getProperty("_key"); - Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)terms).getProperty("_count"); - Object[] propertiesCounts = (Object[]) ((InternalAggregation)terms).getProperty("sum.value"); - - for (int i = 0; i < 5; i++) { - Terms.Bucket bucket = terms.getBucketByKey("" + i); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + i)); - assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - Sum sum = bucket.getAggregations().get("sum"); - assertThat(sum, notNullValue()); - assertThat((long) sum.getValue(), equalTo(i+i+1L)); - assertThat((long) propertiesKeys[i], equalTo((long) i)); - assertThat((long) propertiesDocCounts[i], equalTo(1L)); - assertThat((double) propertiesCounts[i], equalTo((double) i + i + 1L)); - } - } - public void testSingleValuedFieldWithValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(terms("terms") @@ -548,34 +338,6 @@ public class LongTermsIT extends AbstractTermsTestCase { } } - public void testMultiValuedField() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .execute().actionGet(); - - assertSearchResponse(response); - - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(6)); - - for (int i = 0; i < 6; i++) { - Terms.Bucket bucket = terms.getBucketByKey("" + i); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + i)); - assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i)); - if (i == 0 || i == 5) { - assertThat(bucket.getDocCount(), equalTo(1L)); - } else { - assertThat(bucket.getDocCount(), equalTo(2L)); - } - } - } - public void testMultiValuedFieldWithValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") .addAggregation(terms("terms") @@ -704,23 +466,6 @@ public class LongTermsIT extends AbstractTermsTestCase { } } - public void testUnmapped() throws Exception { - SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .size(randomIntBetween(1, 5)) - .collectMode(randomFrom(SubAggCollectionMode.values()))) - .execute().actionGet(); - - assertSearchResponse(response); - - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - 
assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(0)); - } - public void testPartiallyUnmapped() throws Exception { SearchResponse response = client().prepareSearch("idx_unmapped", "idx") .addAggregation(terms("terms") @@ -771,54 +516,6 @@ public class LongTermsIT extends AbstractTermsTestCase { } } - public void testEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1L).minDocCount(0) - .subAggregation(terms("terms").field(SINGLE_VALUED_FIELD_NAME))) - .execute().actionGet(); - - assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L)); - Histogram histo = searchResponse.getAggregations().get("histo"); - assertThat(histo, Matchers.notNullValue()); - Histogram.Bucket bucket = histo.getBuckets().get(1); - assertThat(bucket, Matchers.notNullValue()); - - Terms terms = bucket.getAggregations().get("terms"); - assertThat(terms, Matchers.notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().isEmpty(), is(true)); - } - - public void testSingleValuedFieldOrderedBySingleValueSubAggregationAsc() throws Exception { - boolean asc = true; - SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("avg_i", asc)) - .subAggregation(avg("avg_i").field(SINGLE_VALUED_FIELD_NAME)) - ).execute().actionGet(); - - - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - - for (int i = 0; i < 5; i++) { - Terms.Bucket bucket = terms.getBucketByKey("" + i); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - Avg avg = bucket.getAggregations().get("avg_i"); - assertThat(avg, notNullValue()); - assertThat(avg.getValue(), equalTo((double) i)); - } - } - public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscWithTermsSubAgg() throws Exception { boolean asc = true; SearchResponse response = client().prepareSearch("idx") @@ -1032,38 +729,6 @@ public class LongTermsIT extends AbstractTermsTestCase { } } - public void testSingleValuedFieldOrderedBySingleValueSubAggregationDesc() throws Exception { - boolean asc = false; - SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms("terms") - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("avg_i", asc)) - .subAggregation(avg("avg_i").field(SINGLE_VALUED_FIELD_NAME)) - ).execute().actionGet(); - - - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - - for (int i = 4; i >= 0; i--) { - - Terms.Bucket bucket = terms.getBucketByKey("" + i); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("" + i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - - Avg avg = bucket.getAggregations().get("avg_i"); - assertThat(avg, notNullValue()); - assertThat(avg.getValue(), equalTo((double) i)); - } - - } - 
public void testSingleValuedFieldOrderedByMultiValueSubAggregationAsc() throws Exception { boolean asc = true; SearchResponse response = client().prepareSearch("idx") diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index 394870a2a9d..b72c3befa4a 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -310,6 +310,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { + "\"doc_count\":4," + "\"sig_terms\":{" + "\"doc_count\":4," + + "\"bg_count\":7," + "\"buckets\":[" + "{" + "\"key\":" + (type.equals("long") ? "0," : "\"0\",") @@ -325,6 +326,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { + "\"doc_count\":3," + "\"sig_terms\":{" + "\"doc_count\":3," + + "\"bg_count\":7," + "\"buckets\":[" + "{" + "\"key\":" + (type.equals("long") ? "1," : "\"1\",") diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java index 0c93ff2f6bb..b2575c3dc57 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.search.aggregations.bucket; -import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; @@ -34,25 +33,19 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; -import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.filter.Filter; -import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; -import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode; import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.search.aggregations.metrics.avg.Avg; -import org.elasticsearch.search.aggregations.metrics.max.Max; import org.elasticsearch.search.aggregations.metrics.stats.Stats; import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats; import org.elasticsearch.search.aggregations.metrics.sum.Sum; -import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCount; -import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; import java.io.IOException; -import java.text.NumberFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -61,20 +54,15 @@ import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import 
java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.function.Function; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; -import static org.elasticsearch.search.aggregations.AggregationBuilders.count; import static org.elasticsearch.search.aggregations.AggregationBuilders.extendedStats; import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; -import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; -import static org.elasticsearch.search.aggregations.AggregationBuilders.max; import static org.elasticsearch.search.aggregations.AggregationBuilders.stats; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; @@ -82,9 +70,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; import static org.hamcrest.core.IsNull.notNullValue; -import static org.hamcrest.core.IsNull.nullValue; @ESIntegTestCase.SuiteScopeTestCase public class StringTermsIT extends AbstractTermsTestCase { @@ -255,217 +241,6 @@ public class StringTermsIT extends AbstractTermsTestCase { assertThat(exception.getMessage(), containsString("[size] must be greater than 0. Found [0] in [terms]")); } - public void testSingleValueField() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .setTypes("type") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values()))).execute().actionGet(); - - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - Object[] propertiesKeys = (Object[]) ((InternalAggregation)terms).getProperty("_key"); - Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)terms).getProperty("_count"); - - for (int i = 0; i < 5; i++) { - Terms.Bucket bucket = terms.getBucketByKey("val" + i); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("val" + i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - assertThat((String) propertiesKeys[i], equalTo("val" + i)); - assertThat((long) propertiesDocCounts[i], equalTo(1L)); - } - } - - public void testSingleValueFieldWithGlobalOrdinals() throws Exception { - ExecutionMode[] executionModes = new ExecutionMode[] { null, ExecutionMode.GLOBAL_ORDINALS, ExecutionMode.GLOBAL_ORDINALS_HASH, - ExecutionMode.GLOBAL_ORDINALS_LOW_CARDINALITY }; - for (ExecutionMode executionMode : executionModes) { - logger.info("Execution mode: {}", executionMode); - SearchResponse response = client() - .prepareSearch("idx") - .setTypes("type") - .addAggregation( - terms("terms").executionHint(executionMode == null ? 
null : executionMode.toString()) - .field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))).execute() - .actionGet(); - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - for (int i = 0; i < 5; i++) { - Terms.Bucket bucket = terms.getBucketByKey("val" + i); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("val" + i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - } - } - } - - public void testSingleValueFieldWithRegexFiltering() throws Exception { - // include without exclude - // we should be left with: val000, val001, val002, val003, val004, val005, val006, val007, val008, val009 - - SearchResponse response = client() - .prepareSearch("high_card_idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).includeExclude(new IncludeExclude("val00.+", null))) - .execute().actionGet(); - - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(10)); - - for (int i = 0; i < 10; i++) { - Terms.Bucket bucket = terms.getBucketByKey("val00" + i); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("val00" + i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - } - - // include and exclude - // we should be left with: val002, val003, val004, val005, val006, val007, val008, val009 - - response = client() - .prepareSearch("high_card_idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .includeExclude(new IncludeExclude("val00.+", "(val000|val001)"))) - .execute().actionGet(); - - assertSearchResponse(response); - - terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(8)); - - for (int i = 2; i < 10; i++) { - Terms.Bucket bucket = terms.getBucketByKey("val00" + i); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("val00" + i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - } - - // exclude without include - // we should be left with: val000, val001, val002, val003, val004, val005, val006, val007, val008, val009 - - response = client() - .prepareSearch("high_card_idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .includeExclude(new IncludeExclude(null, new RegExp("val0[1-9]+.+")))) - .execute().actionGet(); - - assertSearchResponse(response); - - terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(10)); - - for (int i = 0; i < 10; i++) { - Terms.Bucket bucket = terms.getBucketByKey("val00" + i); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("val00" + i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - } - } - - public void testSingleValueFieldWithExactTermFiltering() throws Exception { - // include without exclude - String 
incVals[] = { "val000", "val001", "val002", "val003", "val004", "val005", "val006", "val007", "val008", "val009" }; - SearchResponse response = client() - .prepareSearch("high_card_idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).includeExclude(new IncludeExclude(incVals, null))) - .execute().actionGet(); - - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(incVals.length)); - - for (String incVal : incVals) { - Terms.Bucket bucket = terms.getBucketByKey(incVal); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo(incVal)); - assertThat(bucket.getDocCount(), equalTo(1L)); - } - - // include and exclude - // Slightly illogical example with exact terms below as include and exclude sets - // are made to overlap but the exclude set should have priority over matches. - // we should be left with: val002, val003, val004, val005, val006, val007, val008, val009 - String excVals[] = { "val000", "val001" }; - - response = client() - .prepareSearch("high_card_idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).includeExclude(new IncludeExclude(incVals, excVals))) - .execute() - .actionGet(); - - assertSearchResponse(response); - - terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(8)); - - for (int i = 2; i < 10; i++) { - Terms.Bucket bucket = terms.getBucketByKey("val00" + i); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("val00" + i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - } - - // Check case with only exact term exclude clauses - response = client() - .prepareSearch("high_card_idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).includeExclude(new IncludeExclude(null, excVals))) - .execute().actionGet(); - - assertSearchResponse(response); - - terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(10)); - for (String key : excVals) { - Terms.Bucket bucket = terms.getBucketByKey(key); - assertThat(bucket, nullValue()); - } - NumberFormat nf = NumberFormat.getIntegerInstance(Locale.ENGLISH); - nf.setMinimumIntegerDigits(3); - for (int i = 2; i < 12; i++) { - Terms.Bucket bucket = terms.getBucketByKey("val" + nf.format(i)); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("val" + nf.format(i))); - assertThat(bucket.getDocCount(), equalTo(1L)); - } - - } - public void testSingleValueFieldWithPartitionedFiltering() throws Exception { runTestFieldWithPartitionedFiltering(SINGLE_VALUED_FIELD_NAME); } @@ -503,116 +278,6 @@ public class StringTermsIT extends AbstractTermsTestCase { assertEquals(expectedCardinality, foundTerms.size()); } - - public void testSingleValueFieldWithMaxSize() throws Exception { - SearchResponse response = client() - .prepareSearch("high_card_idx") - .addAggregation( - terms("terms") - .executionHint(randomExecutionHint()) - 
.field(SINGLE_VALUED_FIELD_NAME).size(20) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.key(true))) // we need to sort by terms cause we're checking the first 20 values - .execute().actionGet(); - - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(20)); - - for (int i = 0; i < 20; i++) { - Terms.Bucket bucket = terms.getBucketByKey("val" + Strings.padStart(i + "", 3, '0')); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("val" + Strings.padStart(i + "", 3, '0'))); - assertThat(bucket.getDocCount(), equalTo(1L)); - } - } - - public void testSingleValueFieldOrderedByTermAsc() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .setTypes("type") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true))).execute() - .actionGet(); - - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - - int i = 0; - for (Terms.Bucket bucket : terms.getBuckets()) { - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("val" + i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - i++; - } - } - - public void testSingleValueFieldOrderedByTermDesc() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .setTypes("type") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(false))).execute() - .actionGet(); - - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - - int i = 4; - for (Terms.Bucket bucket : terms.getBuckets()) { - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("val" + i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - i--; - } - } - - public void testSingleValuedFieldWithSubAggregation() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .setTypes("type") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(count("count").field(MULTI_VALUED_FIELD_NAME))).execute().actionGet(); - - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - Object[] propertiesKeys = (Object[]) ((InternalAggregation)terms).getProperty("_key"); - Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)terms).getProperty("_count"); - Object[] propertiesCounts = (Object[]) ((InternalAggregation)terms).getProperty("count.value"); - - for (int i = 0; i < 5; i++) { - Terms.Bucket bucket = terms.getBucketByKey("val" + i); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("val" + i)); - assertThat(bucket.getDocCount(), 
equalTo(1L)); - ValueCount valueCount = bucket.getAggregations().get("count"); - assertThat(valueCount, notNullValue()); - assertThat(valueCount.getValue(), equalTo(2L)); - assertThat((String) propertiesKeys[i], equalTo("val" + i)); - assertThat((long) propertiesDocCounts[i], equalTo(1L)); - assertThat((double) propertiesCounts[i], equalTo(2.0)); - } - } - public void testSingleValuedFieldWithValueScript() throws Exception { SearchResponse response = client() .prepareSearch("idx") @@ -666,33 +331,6 @@ public class StringTermsIT extends AbstractTermsTestCase { assertThat(bucket.getDocCount(), equalTo(5L)); } - public void testMultiValuedField() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .setTypes("type") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values()))).execute().actionGet(); - - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(6)); - - for (int i = 0; i < 6; i++) { - Terms.Bucket bucket = terms.getBucketByKey("val" + i); - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("val" + i)); - if (i == 0 || i == 5) { - assertThat(bucket.getDocCount(), equalTo(1L)); - } else { - assertThat(bucket.getDocCount(), equalTo(2L)); - } - } - } - public void testMultiValuedScript() throws Exception { SearchResponse response = client() .prepareSearch("idx") @@ -856,22 +494,6 @@ public class StringTermsIT extends AbstractTermsTestCase { } } - public void testUnmapped() throws Exception { - SearchResponse response = client() - .prepareSearch("idx_unmapped") - .setTypes("type") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).size(randomIntBetween(1, 5)).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values()))).execute().actionGet(); - - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(0)); - } - public void testPartiallyUnmapped() throws Exception { SearchResponse response = client() .prepareSearch("idx", "idx_unmapped") @@ -922,87 +544,6 @@ public class StringTermsIT extends AbstractTermsTestCase { } } - public void testEmptyAggregation() throws Exception { - SearchResponse searchResponse = client() - .prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation( - histogram("histo") - .field(SINGLE_VALUED_FIELD_NAME) - .interval(1L) - .minDocCount(0) - .subAggregation(terms("terms").field("value"))) - .execute().actionGet(); - - assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L)); - Histogram histo = searchResponse.getAggregations().get("histo"); - assertThat(histo, Matchers.notNullValue()); - Histogram.Bucket bucket = histo.getBuckets().get(1); - assertThat(bucket, Matchers.notNullValue()); - - Terms terms = bucket.getAggregations().get("terms"); - assertThat(terms, Matchers.notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().isEmpty(), is(true)); - } - - public void testSingleValuedFieldOrderedBySingleValueSubAggregationAsc() throws Exception { - boolean asc = true; - SearchResponse response = client() - .prepareSearch("idx") - .setTypes("type") - 
.addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.aggregation("avg_i", asc)) - .subAggregation(avg("avg_i").field("i"))).execute().actionGet(); - - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - - int i = 0; - for (Terms.Bucket bucket : terms.getBuckets()) { - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("val" + i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - Avg avg = bucket.getAggregations().get("avg_i"); - assertThat(avg, notNullValue()); - assertThat(avg.getValue(), equalTo((double) i)); - i++; - } - } - - public void testSingleValuedFieldOrderedByTieBreaker() throws Exception { - SearchResponse response = client() - .prepareSearch("idx") - .setTypes("type") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.aggregation("max_constant", randomBoolean())) - .subAggregation(max("max_constant").field("constant"))).execute().actionGet(); - - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - - int i = 0; - for (Terms.Bucket bucket : terms.getBuckets()) { - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("val" + i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - Max max = bucket.getAggregations().get("max_constant"); - assertThat(max, notNullValue()); - assertThat(max.getValue(), equalTo((double) 1)); - i++; - } - } - public void testSingleValuedFieldOrderedByIllegalAgg() throws Exception { boolean asc = true; try { @@ -1330,37 +871,6 @@ public class StringTermsIT extends AbstractTermsTestCase { } } - public void testSingleValuedFieldOrderedBySingleValueSubAggregationDesc() throws Exception { - boolean asc = false; - SearchResponse response = client() - .prepareSearch("idx") - .setTypes("type") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.aggregation("avg_i", asc)) - .subAggregation(avg("avg_i").field("i"))).execute().actionGet(); - - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - assertThat(terms.getBuckets().size(), equalTo(5)); - - int i = 4; - for (Terms.Bucket bucket : terms.getBuckets()) { - assertThat(bucket, notNullValue()); - assertThat(key(bucket), equalTo("val" + i)); - assertThat(bucket.getDocCount(), equalTo(1L)); - - Avg avg = bucket.getAggregations().get("avg_i"); - assertThat(avg, notNullValue()); - assertThat(avg.getValue(), equalTo((double) i)); - i--; - } - - } - public void testSingleValuedFieldOrderedByMultiValueSubAggregationAsc() throws Exception { boolean asc = true; SearchResponse response = client() diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrixTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrixTests.java index 
a278f644735..593376e48ac 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrixTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrixTests.java @@ -34,10 +34,19 @@ public class InternalAdjacencyMatrixTests extends InternalMultiBucketAggregation private List keys; + @Override + protected int maxNumberOfBuckets() { + return 10; + } + @Override public void setUp() throws Exception { super.setUp(); keys = new ArrayList<>(); + // InternalAdjacencyMatrix represents the upper triangular matrix: + // 2 filters (matrix of 2x2) generates 3 buckets + // 3 filters generates 6 buckets + // 4 filters generates 10 buckets int numFilters = randomIntBetween(2, 4); String[] filters = new String[numFilters]; for (int i = 0; i < numFilters; i++) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFiltersTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFiltersTests.java index ec1b71419ed..b81bec34397 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFiltersTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/filters/InternalFiltersTests.java @@ -40,7 +40,7 @@ public class InternalFiltersTests extends InternalMultiBucketAggregationTestCase super.setUp(); keyed = randomBoolean(); keys = new ArrayList<>(); - int numBuckets = randomIntBetween(1, 5); + int numBuckets = randomNumberOfBuckets(); for (int i = 0; i < numBuckets; i++) { if (keyed) { keys.add(randomAlphaOfLength(5)); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGridTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGridTests.java index afa9defc78a..c32b46ec3fb 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGridTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGridTests.java @@ -33,12 +33,22 @@ import java.util.Map; public class InternalGeoHashGridTests extends InternalMultiBucketAggregationTestCase { + @Override + protected int minNumberOfBuckets() { + return 1; + } + + @Override + protected int maxNumberOfBuckets() { + return 3; + } + @Override protected InternalGeoHashGrid createTestInstance(String name, List pipelineAggregators, Map metaData, InternalAggregations aggregations) { - int size = randomIntBetween(1, 3); + int size = randomNumberOfBuckets(); List buckets = new ArrayList<>(size); for (int i = 0; i < size; i++) { double latitude = randomDoubleBetween(-90.0, 90.0, false); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java index c34b6093e2d..01d347f0754 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java @@ -55,7 +55,7 @@ public class InternalDateHistogramTests extends InternalMultiBucketAggregationTe List pipelineAggregators, Map metaData, InternalAggregations aggregations) { - int nbBuckets = randomInt(10); + int nbBuckets = randomNumberOfBuckets(); List buckets = new ArrayList<>(nbBuckets); long startingDate = 
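The comment added to InternalAdjacencyMatrixTests encodes a small formula: an adjacency matrix over n filters yields one bucket per filter plus one per unordered filter pair, i.e. n(n+1)/2 buckets, which is why the test raises its budget to 10 for up to 4 filters. A quick standalone check of that arithmetic (not Elasticsearch code):

// Buckets produced by an adjacency_matrix aggregation over n filters:
// n single-filter buckets plus C(n, 2) pair buckets = n * (n + 1) / 2.
final class AdjacencyMatrixBucketCount {
    static int buckets(int numFilters) {
        return numFilters * (numFilters + 1) / 2;
    }

    public static void main(String[] args) {
        // Matches the values in the test comment: 2 -> 3, 3 -> 6, 4 -> 10.
        for (int n = 2; n <= 4; n++) {
            System.out.println(n + " filters -> " + buckets(n) + " buckets");
        }
    }
}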
System.currentTimeMillis(); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java index cb37dd9a373..1997070814b 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java @@ -51,7 +51,7 @@ public class InternalHistogramTests extends InternalMultiBucketAggregationTestCa Map metaData, InternalAggregations aggregations) { final int base = randomInt(50) - 30; - final int numBuckets = randomInt(10); + final int numBuckets = randomNumberOfBuckets(); final int interval = randomIntBetween(1, 3); List buckets = new ArrayList<>(); for (int i = 0; i < numBuckets; ++i) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java index f88300a4502..53128063460 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; @@ -36,30 +37,36 @@ public class InternalBinaryRangeTests extends InternalRangeTestCase> ranges; + @Override + protected int minNumberOfBuckets() { + return 1; + } + @Override public void setUp() throws Exception { super.setUp(); - final int numRanges = randomIntBetween(1, 10); - ranges = new ArrayList<>(numRanges); + List> listOfRanges = new ArrayList<>(); + if (randomBoolean()) { + listOfRanges.add(Tuple.tuple(null, new BytesRef(randomAlphaOfLength(15)))); + } + if (randomBoolean()) { + listOfRanges.add(Tuple.tuple(new BytesRef(randomAlphaOfLength(15)), null)); + } + if (randomBoolean()) { + listOfRanges.add(Tuple.tuple(null, null)); + } + final int numRanges = Math.max(0, randomNumberOfBuckets() - listOfRanges.size()); for (int i = 0; i < numRanges; i++) { BytesRef[] values = new BytesRef[2]; values[0] = new BytesRef(randomAlphaOfLength(15)); values[1] = new BytesRef(randomAlphaOfLength(15)); Arrays.sort(values); - ranges.add(Tuple.tuple(values[0], values[1])); - } - - if (randomBoolean()) { - ranges.add(Tuple.tuple(null, new BytesRef(randomAlphaOfLength(15)))); - } - if (randomBoolean()) { - ranges.add(Tuple.tuple(new BytesRef(randomAlphaOfLength(15)), null)); - } - if (randomBoolean()) { - ranges.add(Tuple.tuple(null, null)); + listOfRanges.add(Tuple.tuple(values[0], values[1])); } + Collections.shuffle(listOfRanges, random()); + ranges = Collections.unmodifiableList(listOfRanges); } @Override diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java index 7e217572294..50c2ed8e7b9 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java @@ -42,22 +42,7 @@ public class InternalRangeTests extends InternalRangeTestCase { super.setUp(); format = 
randomNumericDocValueFormat(); - final int interval = randomFrom(1, 5, 10, 25, 50, 100); - final int numRanges = randomIntBetween(1, 10); - - List> listOfRanges = new ArrayList<>(numRanges); - for (int i = 0; i < numRanges; i++) { - double from = i * interval; - double to = from + interval; - listOfRanges.add(Tuple.tuple(from, to)); - } - if (randomBoolean()) { - // Add some overlapping ranges - double max = (double) numRanges * interval; - listOfRanges.add(Tuple.tuple(0.0, max)); - listOfRanges.add(Tuple.tuple(0.0, max / 2)); - listOfRanges.add(Tuple.tuple(max / 3, max / 3 * 2)); - } + List> listOfRanges = new ArrayList<>(); if (rarely()) { listOfRanges.add(Tuple.tuple(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY)); } @@ -67,6 +52,25 @@ public class InternalRangeTests extends InternalRangeTestCase { if (rarely()) { listOfRanges.add(Tuple.tuple(randomDouble(), Double.POSITIVE_INFINITY)); } + + final int interval = randomFrom(1, 5, 10, 25, 50, 100); + final int numRanges = Math.max(0, randomNumberOfBuckets() - listOfRanges.size()); + final double max = (double) numRanges * interval; + + for (int i = 0; numRanges - listOfRanges.size() > 0; i++) { + double from = i * interval; + double to = from + interval; + + Tuple range; + if (randomBoolean()) { + range = Tuple.tuple(from, to); + } else { + // Add some overlapping range + range = Tuple.tuple(randomFrom(0.0, max / 3), randomFrom(max, max / 2, max / 3 * 2)); + } + listOfRanges.add(range); + } + Collections.shuffle(listOfRanges, random()); ranges = Collections.unmodifiableList(listOfRanges); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/date/InternalDateRangeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/date/InternalDateRangeTests.java index 3ba8fad4176..bb5709fc37c 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/date/InternalDateRangeTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/date/InternalDateRangeTests.java @@ -50,7 +50,7 @@ public class InternalDateRangeTests extends InternalRangeTestCase dateTime.plusHours(1), dateTime -> dateTime.plusDays(1), dateTime -> dateTime.plusMonths(1), dateTime -> dateTime.plusYears(1)); - final int numRanges = randomIntBetween(1, 10); + final int numRanges = randomNumberOfBuckets(); final List> listOfRanges = new ArrayList<>(numRanges); DateTime date = new DateTime(DateTimeZone.UTC); @@ -60,17 +60,18 @@ public class InternalDateRangeTests extends InternalRangeTestCase end) { end = to; } - } - if (randomBoolean()) { - final int randomOverlaps = randomIntBetween(1, 5); - for (int i = 0; i < randomOverlaps; i++) { + + if (randomBoolean()) { + listOfRanges.add(Tuple.tuple(from, to)); + } else { + // Add some overlapping range listOfRanges.add(Tuple.tuple(start, randomDoubleBetween(start, end, false))); } } + Collections.shuffle(listOfRanges, random()); dateRanges = Collections.unmodifiableList(listOfRanges); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/InternalGeoDistanceTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/InternalGeoDistanceTests.java index 0adc5d028e5..2d2cca70e33 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/InternalGeoDistanceTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/geodistance/InternalGeoDistanceTests.java @@ -41,21 +41,24 @@ public class 
InternalGeoDistanceTests extends InternalRangeTestCase> listOfRanges = new ArrayList<>(numRanges); for (int i = 0; i < numRanges; i++) { double from = i * interval; double to = from + interval; - listOfRanges.add(Tuple.tuple(from, to)); - } - if (randomBoolean()) { - // Add some overlapping ranges - double max = (double) numRanges * interval; - listOfRanges.add(Tuple.tuple(0.0, max)); - listOfRanges.add(Tuple.tuple(0.0, max / 2)); - listOfRanges.add(Tuple.tuple(max / 3, max / 3 * 2)); + + Tuple range; + if (randomBoolean()) { + range = Tuple.tuple(from, to); + } else { + // Add some overlapping range + range = Tuple.tuple(randomFrom(0.0, max / 3), randomFrom(max, max / 2, max / 3 * 2)); + } + listOfRanges.add(range); } + Collections.shuffle(listOfRanges, random()); geoDistanceRanges = Collections.unmodifiableList(listOfRanges); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/InternalSignificantTermsTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/InternalSignificantTermsTestCase.java index 9a86e44b2ac..10145aa594a 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/InternalSignificantTermsTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/InternalSignificantTermsTestCase.java @@ -19,8 +19,14 @@ package org.elasticsearch.search.aggregations.bucket.significant; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregationTestCase; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.util.Arrays; @@ -33,6 +39,51 @@ import java.util.stream.Stream; public abstract class InternalSignificantTermsTestCase extends InternalMultiBucketAggregationTestCase> { + private SignificanceHeuristic significanceHeuristic; + + @Override + public void setUp() throws Exception { + super.setUp(); + significanceHeuristic = randomSignificanceHeuristic(); + } + + @Override + protected final InternalSignificantTerms createTestInstance(String name, + List pipelineAggregators, + Map metaData, + InternalAggregations aggregations) { + final int requiredSize = randomIntBetween(1, 5); + final int numBuckets = randomNumberOfBuckets(); + + long subsetSize = 0; + long supersetSize = 0; + + int[] subsetDfs = new int[numBuckets]; + int[] supersetDfs = new int[numBuckets]; + + for (int i = 0; i < numBuckets; ++i) { + int subsetDf = randomIntBetween(1, 10); + subsetDfs[i] = subsetDf; + + int supersetDf = randomIntBetween(subsetDf, 20); + supersetDfs[i] = supersetDf; + + subsetSize += subsetDf; + supersetSize += supersetDf; + } + return createTestInstance(name, pipelineAggregators, metaData, aggregations, requiredSize, numBuckets, subsetSize, subsetDfs, + supersetSize, supersetDfs, significanceHeuristic); + } + + protected abstract InternalSignificantTerms createTestInstance(String name, + List pipelineAggregators, + Map metaData, + 
InternalAggregations aggregations, + int requiredSize, int numBuckets, + long subsetSize, int[] subsetDfs, + long supersetSize, int[] supersetDfs, + SignificanceHeuristic significanceHeuristic); + @Override protected InternalSignificantTerms createUnmappedInstance(String name, List pipelineAggregators, @@ -72,6 +123,7 @@ public abstract class InternalSignificantTermsTestCase extends InternalMultiBuck InternalSignificantTerms expectedSigTerms = (InternalSignificantTerms) expected; ParsedSignificantTerms actualSigTerms = (ParsedSignificantTerms) actual; assertEquals(expectedSigTerms.getSubsetSize(), actualSigTerms.getSubsetSize()); + assertEquals(expectedSigTerms.getSupersetSize(), actualSigTerms.getSupersetSize()); for (SignificantTerms.Bucket bucket : (SignificantTerms) expected) { String key = bucket.getKeyAsString(); @@ -91,14 +143,22 @@ public abstract class InternalSignificantTermsTestCase extends InternalMultiBuck assertEquals(expectedSigTerm.getSignificanceScore(), actualSigTerm.getSignificanceScore(), 0.0); assertEquals(expectedSigTerm.getSubsetDf(), actualSigTerm.getSubsetDf()); + assertEquals(expectedSigTerm.getDocCount(), actualSigTerm.getSubsetDf()); assertEquals(expectedSigTerm.getSupersetDf(), actualSigTerm.getSupersetDf()); - - expectThrows(UnsupportedOperationException.class, actualSigTerm::getSubsetSize); - expectThrows(UnsupportedOperationException.class, actualSigTerm::getSupersetSize); + assertEquals(expectedSigTerm.getSubsetSize(), actualSigTerm.getSubsetSize()); + assertEquals(expectedSigTerm.getSupersetSize(), actualSigTerm.getSupersetSize()); } private static Map toCounts(Stream buckets, Function fn) { return buckets.collect(Collectors.toMap(SignificantTerms.Bucket::getKey, fn, Long::sum)); } + + private static SignificanceHeuristic randomSignificanceHeuristic() { + return randomFrom( + new JLHScore(), + new MutualInformation(randomBoolean(), randomBoolean()), + new GND(randomBoolean()), + new ChiSquare(randomBoolean(), randomBoolean())); + } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantLongTermsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantLongTermsTests.java index 793c6aec5c3..f41dc80c3ed 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantLongTermsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantLongTermsTests.java @@ -23,10 +23,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; @@ -38,13 +34,11 @@ import java.util.Set; public class SignificantLongTermsTests extends InternalSignificantTermsTestCase { - private SignificanceHeuristic significanceHeuristic; private DocValueFormat format; @Override public void setUp() throws Exception { super.setUp(); - 
significanceHeuristic = randomSignificanceHeuristic(); format = randomNumericDocValueFormat(); } @@ -52,30 +46,20 @@ public class SignificantLongTermsTests extends InternalSignificantTermsTestCase protected InternalSignificantTerms createTestInstance(String name, List pipelineAggregators, Map metaData, - InternalAggregations aggregations) { - int requiredSize = randomIntBetween(1, 5); - int shardSize = requiredSize + 2; - final int numBuckets = randomInt(shardSize); - - long globalSubsetSize = 0; - long globalSupersetSize = 0; + InternalAggregations aggs, + int requiredSize, int numBuckets, + long subsetSize, int[] subsetDfs, + long supersetSize, int[] supersetDfs, + SignificanceHeuristic significanceHeuristic) { List buckets = new ArrayList<>(numBuckets); Set terms = new HashSet<>(); for (int i = 0; i < numBuckets; ++i) { long term = randomValueOtherThanMany(l -> terms.add(l) == false, random()::nextLong); - - int subsetDf = randomIntBetween(1, 10); - int supersetDf = randomIntBetween(subsetDf, 20); - int supersetSize = randomIntBetween(supersetDf, 30); - - globalSubsetSize += subsetDf; - globalSupersetSize += supersetSize; - - buckets.add(new SignificantLongTerms.Bucket(subsetDf, subsetDf, supersetDf, supersetSize, term, aggregations, format)); + buckets.add(new SignificantLongTerms.Bucket(subsetDfs[i], subsetSize, supersetDfs[i], supersetSize, term, aggs, format)); } - return new SignificantLongTerms(name, requiredSize, 1L, pipelineAggregators, metaData, format, globalSubsetSize, - globalSupersetSize, significanceHeuristic, buckets); + return new SignificantLongTerms(name, requiredSize, 1L, pipelineAggregators, metaData, format, subsetSize, + supersetSize, significanceHeuristic, buckets); } @Override @@ -87,12 +71,4 @@ public class SignificantLongTermsTests extends InternalSignificantTermsTestCase protected Class implementationClass() { return ParsedSignificantLongTerms.class; } - - private static SignificanceHeuristic randomSignificanceHeuristic() { - return randomFrom( - new JLHScore(), - new MutualInformation(randomBoolean(), randomBoolean()), - new GND(randomBoolean()), - new ChiSquare(randomBoolean(), randomBoolean())); - } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTermsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTermsTests.java index 762472e4be5..e9c716751f7 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTermsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTermsTests.java @@ -24,10 +24,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore; -import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; @@ -39,43 +35,24 @@ import java.util.Set; public class SignificantStringTermsTests extends 
InternalSignificantTermsTestCase { - private SignificanceHeuristic significanceHeuristic; - - @Override - public void setUp() throws Exception { - super.setUp(); - significanceHeuristic = randomSignificanceHeuristic(); - } - @Override protected InternalSignificantTerms createTestInstance(String name, List pipelineAggregators, Map metaData, - InternalAggregations aggregations) { + InternalAggregations aggs, + int requiredSize, int numBuckets, + long subsetSize, int[] subsetDfs, + long supersetSize, int[] supersetDfs, + SignificanceHeuristic significanceHeuristic) { DocValueFormat format = DocValueFormat.RAW; - int requiredSize = randomIntBetween(1, 5); - int shardSize = requiredSize + 2; - final int numBuckets = randomInt(shardSize); - - long globalSubsetSize = 0; - long globalSupersetSize = 0; - List buckets = new ArrayList<>(numBuckets); Set terms = new HashSet<>(); for (int i = 0; i < numBuckets; ++i) { BytesRef term = randomValueOtherThanMany(b -> terms.add(b) == false, () -> new BytesRef(randomAlphaOfLength(10))); - - int subsetDf = randomIntBetween(1, 10); - int supersetDf = randomIntBetween(subsetDf, 20); - int supersetSize = randomIntBetween(supersetDf, 30); - - globalSubsetSize += subsetDf; - globalSupersetSize += supersetSize; - - buckets.add(new SignificantStringTerms.Bucket(term, subsetDf, subsetDf, supersetDf, supersetSize, aggregations, format)); + buckets.add(new SignificantStringTerms.Bucket(term, subsetDfs[i], subsetSize, supersetDfs[i], supersetSize, aggs, format)); } - return new SignificantStringTerms(name, requiredSize, 1L, pipelineAggregators, metaData, format, globalSubsetSize, - globalSupersetSize, significanceHeuristic, buckets); + return new SignificantStringTerms(name, requiredSize, 1L, pipelineAggregators, metaData, format, subsetSize, + supersetSize, significanceHeuristic, buckets); } @Override @@ -87,12 +64,4 @@ public class SignificantStringTermsTests extends InternalSignificantTermsTestCas protected Class implementationClass() { return ParsedSignificantStringTerms.class; } - - private static SignificanceHeuristic randomSignificanceHeuristic() { - return randomFrom( - new JLHScore(), - new MutualInformation(randomBoolean(), randomBoolean()), - new GND(randomBoolean()), - new ChiSquare(randomBoolean(), randomBoolean())); - } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorTests.java index c8d8b6d5979..8376d8c57a1 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregatorTests.java @@ -123,4 +123,36 @@ public class SignificantTextAggregatorTests extends AggregatorTestCase { } } } + + /** + * Test documents with arrays of text + */ + public void testSignificanceOnTextArrays() throws IOException { + TextFieldType textFieldType = new TextFieldType(); + textFieldType.setName("text"); + textFieldType.setIndexAnalyzer(new NamedAnalyzer("my_analyzer", AnalyzerScope.GLOBAL, new StandardAnalyzer())); + + IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); + try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, indexWriterConfig)) { + for (int i = 0; i < 10; i++) { + Document doc = new Document(); + doc.add(new Field("text", "foo", textFieldType)); + String json ="{ \"text\" : 
[\"foo\",\"foo\"], \"title\" : [\"foo\", \"foo\"]}"; + doc.add(new StoredField("_source", new BytesRef(json))); + w.addDocument(doc); + } + + SignificantTextAggregationBuilder sigAgg = new SignificantTextAggregationBuilder("sig_text", "text"); + sigAgg.sourceFieldNames(Arrays.asList(new String [] {"title", "text"})); + try (IndexReader reader = DirectoryReader.open(w)) { + assertEquals("test expects a single segment", 1, reader.leaves().size()); + IndexSearcher searcher = new IndexSearcher(reader); + searchAndReduce(searcher, new TermQuery(new Term("text", "foo")), sigAgg, textFieldType); + // No significant results to be found in this test - only checking we don't end up + // with the internal exception discovered in issue https://github.com/elastic/elasticsearch/issues/25029 + } + } + } + + } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTermsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTermsTests.java index c2a0b726b86..461de583c47 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTermsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTermsTests.java @@ -48,7 +48,7 @@ public class DoubleTermsTests extends InternalTermsTestCase { DocValueFormat format = randomNumericDocValueFormat(); long otherDocCount = 0; List buckets = new ArrayList<>(); - final int numBuckets = randomInt(shardSize); + final int numBuckets = randomNumberOfBuckets(); Set terms = new HashSet<>(); for (int i = 0; i < numBuckets; ++i) { double term = randomValueOtherThanMany(d -> terms.add(d) == false, random()::nextDouble); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsTests.java index 941997d3372..6253d56d845 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsTests.java @@ -48,7 +48,7 @@ public class LongTermsTests extends InternalTermsTestCase { DocValueFormat format = randomNumericDocValueFormat(); long otherDocCount = 0; List buckets = new ArrayList<>(); - final int numBuckets = randomInt(shardSize); + final int numBuckets = randomNumberOfBuckets(); Set terms = new HashSet<>(); for (int i = 0; i < numBuckets; ++i) { long term = randomValueOtherThanMany(l -> terms.add(l) == false, random()::nextLong); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsTests.java index bdafb139d78..8aa6f10b832 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsTests.java @@ -49,7 +49,7 @@ public class StringTermsTests extends InternalTermsTestCase { DocValueFormat format = DocValueFormat.RAW; long otherDocCount = 0; List buckets = new ArrayList<>(); - final int numBuckets = randomInt(shardSize); + final int numBuckets = randomNumberOfBuckets(); Set terms = new HashSet<>(); for (int i = 0; i < numBuckets; ++i) { BytesRef term = randomValueOtherThanMany(b -> terms.add(b) == false, () -> new BytesRef(randomAlphaOfLength(10))); diff --git 
a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactoryTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactoryTests.java index 7d7ffac4fb3..fe32bff86b8 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactoryTests.java @@ -21,10 +21,11 @@ package org.elasticsearch.search.aggregations.bucket.terms; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.equalTo; -public class TermsAggregatorFactoryTests extends ESSingleNodeTestCase { +public class TermsAggregatorFactoryTests extends ESTestCase { public void testSubAggCollectMode() throws Exception { assertThat(TermsAggregatorFactory.subAggCollectionMode(Integer.MAX_VALUE, -1), equalTo(Aggregator.SubAggCollectionMode.DEPTH_FIRST)); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index f54cb902d96..cf8aa6ba657 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -19,83 +19,851 @@ package org.elasticsearch.search.aggregations.bucket.terms; import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.InetAddressPoint; +import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; +import org.apache.lucene.document.StringField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; +import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.index.mapper.IpFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorTestCase; -import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.BucketOrder; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.bucket.filter.Filter; +import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder; +import 
org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; import org.elasticsearch.search.aggregations.support.ValueType; import java.io.IOException; +import java.net.InetAddress; import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; +import java.util.Iterator; import java.util.List; +import java.util.Map; +import java.util.function.BiFunction; +import java.util.function.Function; + +import static org.hamcrest.Matchers.instanceOf; public class TermsAggregatorTests extends AggregatorTestCase { - - public void testTermsAggregator() throws Exception { + public void testGlobalOrdinalsExecutionHint() throws Exception { Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); - Document document = new Document(); - document.add(new SortedSetDocValuesField("string", new BytesRef("a"))); - document.add(new SortedSetDocValuesField("string", new BytesRef("b"))); - indexWriter.addDocument(document); - document = new Document(); - document.add(new SortedSetDocValuesField("string", new BytesRef("c"))); - document.add(new SortedSetDocValuesField("string", new BytesRef("a"))); - indexWriter.addDocument(document); - document = new Document(); - document.add(new SortedSetDocValuesField("string", new BytesRef("b"))); - document.add(new SortedSetDocValuesField("string", new BytesRef("d"))); - indexWriter.addDocument(document); indexWriter.close(); - IndexReader indexReader = DirectoryReader.open(directory); // We do not use LuceneTestCase.newSearcher because we need a DirectoryReader IndexSearcher indexSearcher = new IndexSearcher(indexReader); - for (TermsAggregatorFactory.ExecutionMode executionMode : TermsAggregatorFactory.ExecutionMode.values()) { - TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.STRING) - .executionHint(executionMode.toString()) - .field("string") - .order(BucketOrder.key(true)); - MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType(); - fieldType.setName("string"); - fieldType.setHasDocValues(true ); + TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.STRING) + .field("string") + .collectMode(Aggregator.SubAggCollectionMode.BREADTH_FIRST); + MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType(); + fieldType.setName("string"); + fieldType.setHasDocValues(true); - TermsAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); - aggregator.preCollection(); - indexSearcher.search(new MatchAllDocsQuery(), aggregator); - aggregator.postCollection(); - Terms result = (Terms) aggregator.buildAggregation(0L); - assertEquals(4, result.getBuckets().size()); - assertEquals("a", result.getBuckets().get(0).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(0).getDocCount()); - assertEquals("b", result.getBuckets().get(1).getKeyAsString()); - assertEquals(2L, result.getBuckets().get(1).getDocCount()); - assertEquals("c", result.getBuckets().get(2).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(2).getDocCount()); - assertEquals("d", result.getBuckets().get(3).getKeyAsString()); - assertEquals(1L, result.getBuckets().get(3).getDocCount()); + TermsAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + assertThat(aggregator, instanceOf(GlobalOrdinalsStringTermsAggregator.class)); + GlobalOrdinalsStringTermsAggregator globalAgg = (GlobalOrdinalsStringTermsAggregator) aggregator; + 
assertFalse(globalAgg.remapGlobalOrds()); - } + aggregationBuilder + .subAggregation(AggregationBuilders.cardinality("card").field("string")); + aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + assertThat(aggregator, instanceOf(GlobalOrdinalsStringTermsAggregator.class)); + globalAgg = (GlobalOrdinalsStringTermsAggregator) aggregator; + assertFalse(globalAgg.remapGlobalOrds()); + + aggregationBuilder + .order(BucketOrder.aggregation("card", true)); + aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + assertThat(aggregator, instanceOf(GlobalOrdinalsStringTermsAggregator.class)); + globalAgg = (GlobalOrdinalsStringTermsAggregator) aggregator; + assertTrue(globalAgg.remapGlobalOrds()); + + aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.STRING) + .field("string") + .executionHint("global_ordinals_hash"); + aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + assertThat(aggregator, instanceOf(GlobalOrdinalsStringTermsAggregator.class)); + globalAgg = (GlobalOrdinalsStringTermsAggregator) aggregator; + assertTrue(globalAgg.remapGlobalOrds()); indexReader.close(); directory.close(); } + public void testSimple() throws Exception { + try (Directory directory = newDirectory()) { + try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { + Document document = new Document(); + document.add(new SortedSetDocValuesField("string", new BytesRef("a"))); + document.add(new SortedSetDocValuesField("string", new BytesRef("b"))); + indexWriter.addDocument(document); + document = new Document(); + document.add(new SortedSetDocValuesField("string", new BytesRef("c"))); + document.add(new SortedSetDocValuesField("string", new BytesRef("a"))); + indexWriter.addDocument(document); + document = new Document(); + document.add(new SortedSetDocValuesField("string", new BytesRef("b"))); + document.add(new SortedSetDocValuesField("string", new BytesRef("d"))); + indexWriter.addDocument(document); + try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { + IndexSearcher indexSearcher = newIndexSearcher(indexReader); + for (TermsAggregatorFactory.ExecutionMode executionMode : TermsAggregatorFactory.ExecutionMode.values()) { + TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.STRING) + .executionHint(executionMode.toString()) + .field("string") + .order(BucketOrder.key(true)); + MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType(); + fieldType.setName("string"); + fieldType.setHasDocValues(true ); + + TermsAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + Terms result = (Terms) aggregator.buildAggregation(0L); + assertEquals(4, result.getBuckets().size()); + assertEquals("a", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(0).getDocCount()); + assertEquals("b", result.getBuckets().get(1).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(1).getDocCount()); + assertEquals("c", result.getBuckets().get(2).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(2).getDocCount()); + assertEquals("d", result.getBuckets().get(3).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(3).getDocCount()); + } + } + } + } + } + + public void testStringIncludeExclude() throws Exception { + try (Directory 
directory = newDirectory()) { + try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { + Document document = new Document(); + document.add(new SortedSetDocValuesField("mv_field", new BytesRef("val000"))); + document.add(new SortedSetDocValuesField("mv_field", new BytesRef("val001"))); + document.add(new SortedDocValuesField("sv_field", new BytesRef("val001"))); + indexWriter.addDocument(document); + document = new Document(); + document.add(new SortedSetDocValuesField("mv_field", new BytesRef("val002"))); + document.add(new SortedSetDocValuesField("mv_field", new BytesRef("val003"))); + document.add(new SortedDocValuesField("sv_field", new BytesRef("val003"))); + indexWriter.addDocument(document); + document = new Document(); + document.add(new SortedSetDocValuesField("mv_field", new BytesRef("val004"))); + document.add(new SortedSetDocValuesField("mv_field", new BytesRef("val005"))); + document.add(new SortedDocValuesField("sv_field", new BytesRef("val005"))); + indexWriter.addDocument(document); + document = new Document(); + document.add(new SortedSetDocValuesField("mv_field", new BytesRef("val006"))); + document.add(new SortedSetDocValuesField("mv_field", new BytesRef("val007"))); + document.add(new SortedDocValuesField("sv_field", new BytesRef("val007"))); + indexWriter.addDocument(document); + document = new Document(); + document.add(new SortedSetDocValuesField("mv_field", new BytesRef("val008"))); + document.add(new SortedSetDocValuesField("mv_field", new BytesRef("val009"))); + document.add(new SortedDocValuesField("sv_field", new BytesRef("val009"))); + indexWriter.addDocument(document); + document = new Document(); + document.add(new SortedSetDocValuesField("mv_field", new BytesRef("val010"))); + document.add(new SortedSetDocValuesField("mv_field", new BytesRef("val011"))); + document.add(new SortedDocValuesField("sv_field", new BytesRef("val011"))); + indexWriter.addDocument(document); + try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { + IndexSearcher indexSearcher = newIndexSearcher(indexReader); + MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType(); + fieldType.setName("mv_field"); + fieldType.setHasDocValues(true); + + String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString(); + TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.STRING) + .executionHint(executionHint) + .includeExclude(new IncludeExclude("val00.+", null)) + .field("mv_field") + .size(12) + .order(BucketOrder.key(true)); + + Aggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + Terms result = (Terms) aggregator.buildAggregation(0L); + assertEquals(10, result.getBuckets().size()); + assertEquals("val000", result.getBuckets().get(0).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(0).getDocCount()); + assertEquals("val001", result.getBuckets().get(1).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(1).getDocCount()); + assertEquals("val002", result.getBuckets().get(2).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(2).getDocCount()); + assertEquals("val003", result.getBuckets().get(3).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(3).getDocCount()); + assertEquals("val004", result.getBuckets().get(4).getKeyAsString()); + assertEquals(1L, 
result.getBuckets().get(4).getDocCount()); + assertEquals("val005", result.getBuckets().get(5).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(5).getDocCount()); + assertEquals("val006", result.getBuckets().get(6).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(6).getDocCount()); + assertEquals("val007", result.getBuckets().get(7).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(7).getDocCount()); + assertEquals("val008", result.getBuckets().get(8).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(8).getDocCount()); + assertEquals("val009", result.getBuckets().get(9).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(9).getDocCount()); + + MappedFieldType fieldType2 = new KeywordFieldMapper.KeywordFieldType(); + fieldType2.setName("sv_field"); + fieldType2.setHasDocValues(true); + aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.STRING) + .executionHint(executionHint) + .includeExclude(new IncludeExclude("val00.+", null)) + .field("sv_field") + .order(BucketOrder.key(true)); + + aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType2); + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + result = (Terms) aggregator.buildAggregation(0L); + assertEquals(5, result.getBuckets().size()); + assertEquals("val001", result.getBuckets().get(0).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(0).getDocCount()); + assertEquals("val003", result.getBuckets().get(1).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(1).getDocCount()); + assertEquals("val005", result.getBuckets().get(2).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(2).getDocCount()); + assertEquals("val007", result.getBuckets().get(3).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(3).getDocCount()); + assertEquals("val009", result.getBuckets().get(4).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(4).getDocCount()); + + aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.STRING) + .executionHint(executionHint) + .includeExclude(new IncludeExclude("val00.+", "(val000|val001)")) + .field("mv_field") + .order(BucketOrder.key(true)); + + aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + result = (Terms) aggregator.buildAggregation(0L); + assertEquals(8, result.getBuckets().size()); + assertEquals("val002", result.getBuckets().get(0).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(0).getDocCount()); + assertEquals("val003", result.getBuckets().get(1).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(1).getDocCount()); + assertEquals("val004", result.getBuckets().get(2).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(2).getDocCount()); + assertEquals("val005", result.getBuckets().get(3).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(3).getDocCount()); + assertEquals("val006", result.getBuckets().get(4).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(4).getDocCount()); + assertEquals("val007", result.getBuckets().get(5).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(5).getDocCount()); + assertEquals("val008", result.getBuckets().get(6).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(6).getDocCount()); + assertEquals("val009", 
result.getBuckets().get(7).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(7).getDocCount()); + + aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.STRING) + .executionHint(executionHint) + .includeExclude(new IncludeExclude(null, "val00.+")) + .field("mv_field") + .order(BucketOrder.key(true)); + aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + result = (Terms) aggregator.buildAggregation(0L); + assertEquals(2, result.getBuckets().size()); + assertEquals("val010", result.getBuckets().get(0).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(0).getDocCount()); + assertEquals("val011", result.getBuckets().get(1).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(1).getDocCount()); + + aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.STRING) + .executionHint(executionHint) + .includeExclude(new IncludeExclude(new String[]{"val000", "val010"}, null)) + .field("mv_field") + .order(BucketOrder.key(true)); + aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + result = (Terms) aggregator.buildAggregation(0L); + assertEquals(2, result.getBuckets().size()); + assertEquals("val000", result.getBuckets().get(0).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(0).getDocCount()); + assertEquals("val010", result.getBuckets().get(1).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(1).getDocCount()); + + aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.STRING) + .executionHint(executionHint) + .includeExclude(new IncludeExclude(null, new String[]{"val001", "val002", "val003", "val004", + "val005", "val006", "val007", "val008", "val009", "val011"})) + .field("mv_field") + .order(BucketOrder.key(true)); + aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + result = (Terms) aggregator.buildAggregation(0L); + assertEquals(2, result.getBuckets().size()); + assertEquals("val000", result.getBuckets().get(0).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(0).getDocCount()); + assertEquals("val010", result.getBuckets().get(1).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(1).getDocCount()); + } + } + } + } + + public void testNumericIncludeExclude() throws Exception { + try (Directory directory = newDirectory()) { + try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { + Document document = new Document(); + document.add(new NumericDocValuesField("long_field", 0)); + document.add(new NumericDocValuesField("double_field", Double.doubleToRawLongBits(0.0))); + indexWriter.addDocument(document); + document = new Document(); + document.add(new NumericDocValuesField("long_field", 1)); + document.add(new NumericDocValuesField("double_field", Double.doubleToRawLongBits(1.0))); + indexWriter.addDocument(document); + document = new Document(); + document.add(new NumericDocValuesField("long_field", 2)); + document.add(new NumericDocValuesField("double_field", Double.doubleToRawLongBits(2.0))); + indexWriter.addDocument(document); + document = new Document(); + document.add(new NumericDocValuesField("long_field", 
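
The bucket counts asserted in testStringIncludeExclude above follow directly from plain regular-expression matching over the indexed terms val000..val011. A small self-contained sketch (using java.util.regex only, not the IncludeExclude class) of the include="val00.+", exclude="(val000|val001)" case, which leaves exactly the eight terms val002..val009 that the test expects:

    import java.util.regex.Pattern;

    // Illustrative sketch: which of the terms val000..val011 survive
    // include="val00.+" combined with exclude="(val000|val001)".
    public class IncludeExcludeRegexSketch {
        public static void main(String[] args) {
            Pattern include = Pattern.compile("val00.+");
            Pattern exclude = Pattern.compile("(val000|val001)");
            for (int i = 0; i <= 11; i++) {
                String term = String.format("val%03d", i);
                boolean kept = include.matcher(term).matches()
                        && exclude.matcher(term).matches() == false;
                // val002..val009 print kept=true; val000/val001 are excluded,
                // val010/val011 never match the include pattern.
                System.out.println(term + " kept=" + kept);
            }
        }
    }
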
3)); + document.add(new NumericDocValuesField("double_field", Double.doubleToRawLongBits(3.0))); + indexWriter.addDocument(document); + document = new Document(); + document.add(new NumericDocValuesField("long_field", 4)); + document.add(new NumericDocValuesField("double_field", Double.doubleToRawLongBits(4.0))); + indexWriter.addDocument(document); + document = new Document(); + document.add(new NumericDocValuesField("long_field", 5)); + document.add(new NumericDocValuesField("double_field", Double.doubleToRawLongBits(5.0))); + indexWriter.addDocument(document); + try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { + IndexSearcher indexSearcher = newIndexSearcher(indexReader); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); + fieldType.setName("long_field"); + fieldType.setHasDocValues(true ); + + String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString(); + TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.LONG) + .executionHint(executionHint) + .includeExclude(new IncludeExclude(new long[]{0, 5}, null)) + .field("long_field") + .order(BucketOrder.key(true)); + Aggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + Terms result = (Terms) aggregator.buildAggregation(0L); + assertEquals(2, result.getBuckets().size()); + assertEquals(0L, result.getBuckets().get(0).getKey()); + assertEquals(1L, result.getBuckets().get(0).getDocCount()); + assertEquals(5L, result.getBuckets().get(1).getKey()); + assertEquals(1L, result.getBuckets().get(1).getDocCount()); + + aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.LONG) + .executionHint(executionHint) + .includeExclude(new IncludeExclude(null, new long[]{0, 5})) + .field("long_field") + .order(BucketOrder.key(true)); + aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + result = (Terms) aggregator.buildAggregation(0L); + assertEquals(4, result.getBuckets().size()); + assertEquals(1L, result.getBuckets().get(0).getKey()); + assertEquals(1L, result.getBuckets().get(0).getDocCount()); + assertEquals(2L, result.getBuckets().get(1).getKey()); + assertEquals(1L, result.getBuckets().get(1).getDocCount()); + assertEquals(3L, result.getBuckets().get(2).getKey()); + assertEquals(1L, result.getBuckets().get(2).getDocCount()); + assertEquals(4L, result.getBuckets().get(3).getKey()); + assertEquals(1L, result.getBuckets().get(3).getDocCount()); + + fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE); + fieldType.setName("double_field"); + fieldType.setHasDocValues(true ); + aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.DOUBLE) + .executionHint(executionHint) + .includeExclude(new IncludeExclude(new double[]{0.0, 5.0}, null)) + .field("double_field") + .order(BucketOrder.key(true)); + aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + result = (Terms) aggregator.buildAggregation(0L); + assertEquals(2, result.getBuckets().size()); + assertEquals(0.0, result.getBuckets().get(0).getKey()); + 
assertEquals(1L, result.getBuckets().get(0).getDocCount()); + assertEquals(5.0, result.getBuckets().get(1).getKey()); + assertEquals(1L, result.getBuckets().get(1).getDocCount()); + + aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.DOUBLE) + .executionHint(executionHint) + .includeExclude(new IncludeExclude(null, new double[]{0.0, 5.0})) + .field("double_field") + .order(BucketOrder.key(true)); + aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + result = (Terms) aggregator.buildAggregation(0L); + assertEquals(4, result.getBuckets().size()); + assertEquals(1.0, result.getBuckets().get(0).getKey()); + assertEquals(1L, result.getBuckets().get(0).getDocCount()); + assertEquals(2.0, result.getBuckets().get(1).getKey()); + assertEquals(1L, result.getBuckets().get(1).getDocCount()); + assertEquals(3.0, result.getBuckets().get(2).getKey()); + assertEquals(1L, result.getBuckets().get(2).getDocCount()); + assertEquals(4.0, result.getBuckets().get(3).getKey()); + assertEquals(1L, result.getBuckets().get(3).getDocCount()); + } + } + } + } + + public void testStringTermsAggregator() throws Exception { + BiFunction luceneFieldFactory = (val, mv) -> { + if (mv) { + return new SortedSetDocValuesField("field", new BytesRef(val)); + } else { + return new SortedDocValuesField("field", new BytesRef(val)); + } + }; + MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType(); + termsAggregator(ValueType.STRING, fieldType, i -> Integer.toString(i), + String::compareTo, luceneFieldFactory); + termsAggregatorWithNestedMaxAgg(ValueType.STRING, fieldType, i -> Integer.toString(i), + val -> new SortedDocValuesField("field", new BytesRef(val))); + } + + public void testLongTermsAggregator() throws Exception { + BiFunction luceneFieldFactory = (val, mv) -> { + if (mv) { + return new SortedNumericDocValuesField("field", val); + } else { + return new NumericDocValuesField("field", val); + } + }; + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); + termsAggregator(ValueType.LONG, fieldType, Integer::longValue, Long::compareTo, luceneFieldFactory); + termsAggregatorWithNestedMaxAgg(ValueType.LONG, fieldType, Integer::longValue, val -> new NumericDocValuesField("field", val)); + } + + public void testDoubleTermsAggregator() throws Exception { + BiFunction luceneFieldFactory = (val, mv) -> { + if (mv) { + return new SortedNumericDocValuesField("field", Double.doubleToRawLongBits(val)); + } else { + return new NumericDocValuesField("field", Double.doubleToRawLongBits(val)); + } + }; + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE); + termsAggregator(ValueType.DOUBLE, fieldType, Integer::doubleValue, Double::compareTo, luceneFieldFactory); + termsAggregatorWithNestedMaxAgg(ValueType.DOUBLE, fieldType, Integer::doubleValue, + val -> new NumericDocValuesField("field", Double.doubleToRawLongBits(val))); + } + + public void testIpTermsAggregator() throws Exception { + BiFunction luceneFieldFactory = (val, mv) -> { + if (mv) { + return new SortedSetDocValuesField("field", new BytesRef(InetAddressPoint.encode(val))); + } else { + return new SortedDocValuesField("field", new BytesRef(InetAddressPoint.encode(val))); + } + }; + InetAddress[] base = new InetAddress[] {InetAddresses.forString("192.168.0.0")}; + Comparator comparator = (o1, o2) -> { + 
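
Both the numeric include/exclude test and testDoubleTermsAggregator above store double values through Double.doubleToRawLongBits, since the numeric doc values fields used in these tests only hold longs. A minimal round-trip sketch of that encoding (plain Java, no Lucene classes involved):

    // Illustrative sketch: in these tests double values are written to
    // (Sorted)NumericDocValuesField as their raw IEEE-754 long bits and can be
    // decoded again with Double.longBitsToDouble.
    public class DoubleDocValuesEncodingSketch {
        public static void main(String[] args) {
            for (double value : new double[] {0.0, 1.0, 2.0, 5.0}) {
                long encoded = Double.doubleToRawLongBits(value);
                double decoded = Double.longBitsToDouble(encoded);
                // decoded is exactly equal to the original value
                System.out.println(value + " -> " + encoded + " -> " + decoded);
            }
        }
    }
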
BytesRef b1 = new BytesRef(InetAddressPoint.encode(o1)); + BytesRef b2 = new BytesRef(InetAddressPoint.encode(o2)); + return b1.compareTo(b2); + }; + termsAggregator(ValueType.IP, new IpFieldMapper.IpFieldType(), i -> {return base[0] = InetAddressPoint.nextUp(base[0]);}, + comparator, luceneFieldFactory); + } + + private void termsAggregator(ValueType valueType, MappedFieldType fieldType, + Function valueFactory, Comparator keyComparator, + BiFunction luceneFieldFactory) throws Exception { + final Map counts = new HashMap<>(); + final Map filteredCounts = new HashMap<>(); + int numTerms = scaledRandomIntBetween(8, 128); + for (int i = 0; i < numTerms; i++) { + int numDocs = scaledRandomIntBetween(2, 32); + T key = valueFactory.apply(i); + counts.put(key, numDocs); + filteredCounts.put(key, 0); + } + + try (Directory directory = newDirectory()) { + boolean multiValued = randomBoolean(); + try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { + if (multiValued == false) { + for (Map.Entry entry : counts.entrySet()) { + for (int i = 0; i < entry.getValue(); i++) { + Document document = new Document(); + document.add(luceneFieldFactory.apply(entry.getKey(), false)); + if (randomBoolean()) { + document.add(new StringField("include", "yes", Field.Store.NO)); + filteredCounts.computeIfPresent(entry.getKey(), (key, integer) -> integer + 1); + } + indexWriter.addDocument(document); + } + } + } else { + Iterator> iterator = counts.entrySet().iterator(); + while (iterator.hasNext()) { + Map.Entry entry1 = iterator.next(); + Map.Entry entry2 = null; + if (randomBoolean() && iterator.hasNext()) { + entry2 = iterator.next(); + if (entry1.getValue().compareTo(entry2.getValue()) < 0) { + Map.Entry temp = entry1; + entry1 = entry2; + entry2 = temp; + } + } + + for (int i = 0; i < entry1.getValue(); i++) { + Document document = new Document(); + document.add(luceneFieldFactory.apply(entry1.getKey(), true)); + if (entry2 != null && i < entry2.getValue()) { + document.add(luceneFieldFactory.apply(entry2.getKey(), true)); + } + indexWriter.addDocument(document); + } + } + } + try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { + boolean order = randomBoolean(); + List> expectedBuckets = new ArrayList<>(); + expectedBuckets.addAll(counts.entrySet()); + BucketOrder bucketOrder; + Comparator> comparator; + if (randomBoolean()) { + bucketOrder = BucketOrder.key(order); + comparator = Comparator.comparing(Map.Entry::getKey, keyComparator); + } else { + // if order by count then we need to use compound so that we can also sort by key as tie breaker: + bucketOrder = BucketOrder.compound(BucketOrder.count(order), BucketOrder.key(order)); + comparator = Comparator.comparing(Map.Entry::getValue); + comparator = comparator.thenComparing(Comparator.comparing(Map.Entry::getKey, keyComparator)); + } + if (order == false) { + comparator = comparator.reversed(); + } + expectedBuckets.sort(comparator); + int size = randomIntBetween(1, counts.size()); + + String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString(); + logger.info("bucket_order={} size={} execution_hint={}", bucketOrder, size, executionHint); + IndexSearcher indexSearcher = newIndexSearcher(indexReader); + AggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name", valueType) + .executionHint(executionHint) + .size(size) + .shardSize(size) + .field("field") + .order(bucketOrder); + fieldType.setName("field"); + fieldType.setHasDocValues(true); + + Aggregator 
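
The expected-bucket ordering built in the termsAggregator helper above mirrors the compound bucket order: when sorting by doc count, the key is added as a tie breaker so the expected order is deterministic. A small standalone sketch of the same thenComparing pattern over plain map entries (entry values stand in for doc counts; the names are illustrative):

    import java.util.AbstractMap.SimpleEntry;
    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Comparator;
    import java.util.List;
    import java.util.Map;

    // Illustrative sketch: order buckets by count and break ties by key, the same idea as
    // BucketOrder.compound(BucketOrder.count(asc), BucketOrder.key(asc)) in the test above.
    public class CompoundOrderSketch {
        public static void main(String[] args) {
            List<Map.Entry<String, Integer>> buckets = new ArrayList<>(Arrays.asList(
                    new SimpleEntry<String, Integer>("b", 2),
                    new SimpleEntry<String, Integer>("a", 2),
                    new SimpleEntry<String, Integer>("c", 1)));
            Comparator<Map.Entry<String, Integer>> byCount = Comparator.comparing(Map.Entry::getValue);
            Comparator<Map.Entry<String, Integer>> byKey = Comparator.comparing(Map.Entry::getKey);
            buckets.sort(byCount.thenComparing(byKey));
            System.out.println(buckets); // [c=1, a=2, b=2]
        }
    }
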
aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + Terms result = (Terms) aggregator.buildAggregation(0L); + assertEquals(size, result.getBuckets().size()); + for (int i = 0; i < size; i++) { + Map.Entry expected = expectedBuckets.get(i); + Terms.Bucket actual = result.getBuckets().get(i); + if (valueType == ValueType.IP) { + assertEquals(String.valueOf(expected.getKey()).substring(1), actual.getKey()); + } else { + assertEquals(expected.getKey(), actual.getKey()); + } + assertEquals(expected.getValue().longValue(), actual.getDocCount()); + } + + if (multiValued == false) { + aggregationBuilder = new FilterAggregationBuilder("_name1", QueryBuilders.termQuery("include", "yes")); + aggregationBuilder.subAggregation(new TermsAggregationBuilder("_name2", valueType) + .executionHint(executionHint) + .size(numTerms) + .collectMode(randomFrom(Aggregator.SubAggCollectionMode.values())) + .field("field")); + aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + result = ((Filter) aggregator.buildAggregation(0L)).getAggregations().get("_name2"); + int expectedFilteredCounts = 0; + for (Integer count : filteredCounts.values()) { + if (count > 0) { + expectedFilteredCounts++; + } + } + assertEquals(expectedFilteredCounts, result.getBuckets().size()); + for (Terms.Bucket actual : result.getBuckets()) { + Integer expectedCount; + if (valueType == ValueType.IP) { + expectedCount = filteredCounts.get(InetAddresses.forString((String)actual.getKey())); + } else { + expectedCount = filteredCounts.get(actual.getKey()); + } + assertEquals(expectedCount.longValue(), actual.getDocCount()); + } + } + } + } + } + } + + private void termsAggregatorWithNestedMaxAgg(ValueType valueType, MappedFieldType fieldType, + Function valueFactory, + Function luceneFieldFactory) throws Exception { + final Map counts = new HashMap<>(); + int numTerms = scaledRandomIntBetween(8, 128); + for (int i = 0; i < numTerms; i++) { + counts.put(valueFactory.apply(i), randomLong()); + } + + try (Directory directory = newDirectory()) { + try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { + for (Map.Entry entry : counts.entrySet()) { + Document document = new Document(); + document.add(luceneFieldFactory.apply(entry.getKey())); + document.add(new NumericDocValuesField("value", entry.getValue())); + indexWriter.addDocument(document); + } + try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { + boolean order = randomBoolean(); + List> expectedBuckets = new ArrayList<>(); + expectedBuckets.addAll(counts.entrySet()); + BucketOrder bucketOrder = BucketOrder.aggregation("_max", order); + Comparator> comparator = Comparator.comparing(Map.Entry::getValue, Long::compareTo); + if (order == false) { + comparator = comparator.reversed(); + } + expectedBuckets.sort(comparator); + int size = randomIntBetween(1, counts.size()); + + String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString(); + Aggregator.SubAggCollectionMode collectionMode = randomFrom(Aggregator.SubAggCollectionMode.values()); + logger.info("bucket_order={} size={} execution_hint={}, collect_mode={}", + bucketOrder, size, executionHint, collectionMode); + IndexSearcher indexSearcher = 
        newIndexSearcher(indexReader);
+                    AggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name", valueType)
+                        .executionHint(executionHint)
+                        .collectMode(collectionMode)
+                        .size(size)
+                        .shardSize(size)
+                        .field("field")
+                        .order(bucketOrder)
+                        .subAggregation(AggregationBuilders.max("_max").field("value"));
+                    fieldType.setName("field");
+                    fieldType.setHasDocValues(true);
+
+                    MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
+                    fieldType2.setName("value");
+                    fieldType2.setHasDocValues(true);
+                    Aggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType, fieldType2);
+                    aggregator.preCollection();
+                    indexSearcher.search(new MatchAllDocsQuery(), aggregator);
+                    aggregator.postCollection();
+                    Terms result = (Terms) aggregator.buildAggregation(0L);
+                    assertEquals(size, result.getBuckets().size());
+                    for (int i = 0; i < size; i++) {
+                        Map.Entry<T, Long> expected = expectedBuckets.get(i);
+                        Terms.Bucket actual = result.getBuckets().get(i);
+                        assertEquals(expected.getKey(), actual.getKey());
+                    }
+                }
+            }
+        }
+    }
+
+    public void testEmpty() throws Exception {
+        try (Directory directory = newDirectory()) {
+            try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
+                MappedFieldType fieldType1 = new KeywordFieldMapper.KeywordFieldType();
+                fieldType1.setName("string");
+                fieldType1.setHasDocValues(true);
+
+                MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
+                fieldType2.setName("long");
+                fieldType2.setHasDocValues(true);
+
+                MappedFieldType fieldType3 = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE);
+                fieldType3.setName("double");
+                fieldType3.setHasDocValues(true);
+                try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) {
+                    IndexSearcher indexSearcher = newIndexSearcher(indexReader);
+                    TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.STRING)
+                        .field("string");
+                    Aggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType1);
+                    aggregator.preCollection();
+                    indexSearcher.search(new MatchAllDocsQuery(), aggregator);
+                    aggregator.postCollection();
+                    Terms result = (Terms) aggregator.buildAggregation(0L);
+                    assertEquals("_name", result.getName());
+                    assertEquals(0, result.getBuckets().size());
+
+                    aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.LONG)
+                        .field("long");
+                    aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType2);
+                    aggregator.preCollection();
+                    indexSearcher.search(new MatchAllDocsQuery(), aggregator);
+                    aggregator.postCollection();
+                    result = (Terms) aggregator.buildAggregation(0L);
+                    assertEquals("_name", result.getName());
+                    assertEquals(0, result.getBuckets().size());
+
+                    aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.DOUBLE)
+                        .field("double");
+                    aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType3);
+                    aggregator.preCollection();
+                    indexSearcher.search(new MatchAllDocsQuery(), aggregator);
+                    aggregator.postCollection();
+                    result = (Terms) aggregator.buildAggregation(0L);
+                    assertEquals("_name", result.getName());
+                    assertEquals(0, result.getBuckets().size());
+                }
+            }
+        }
+    }
+
+    public void testUnmapped() throws Exception {
+        try (Directory directory = newDirectory()) {
+            try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
+                Document document = new Document();
+                document.add(new
SortedDocValuesField("string", new BytesRef("a"))); + document.add(new NumericDocValuesField("long", 0L)); + document.add(new NumericDocValuesField("double", Double.doubleToRawLongBits(0L))); + indexWriter.addDocument(document); + MappedFieldType fieldType1 = new KeywordFieldMapper.KeywordFieldType(); + fieldType1.setName("another_string"); + fieldType1.setHasDocValues(true); + + MappedFieldType fieldType2 = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); + fieldType1.setName("another_long"); + fieldType1.setHasDocValues(true); + + MappedFieldType fieldType3 = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE); + fieldType1.setName("another_double"); + fieldType1.setHasDocValues(true); + try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { + IndexSearcher indexSearcher = newIndexSearcher(indexReader); + ValueType[] valueTypes = new ValueType[]{ValueType.STRING, ValueType.LONG, ValueType.DOUBLE}; + String[] fieldNames = new String[]{"string", "long", "double"}; + for (int i = 0; i < fieldNames.length; i++) { + TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name", valueTypes[i]) + .field(fieldNames[i]); + Aggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType1, fieldType2, fieldType3); + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + Terms result = (Terms) aggregator.buildAggregation(0L); + assertEquals("_name", result.getName()); + assertEquals(0, result.getBuckets().size()); + } + } + } + } + } + + public void testNestedTermsAgg() throws Exception { + try (Directory directory = newDirectory()) { + try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { + Document document = new Document(); + document.add(new SortedDocValuesField("field1", new BytesRef("a"))); + document.add(new SortedDocValuesField("field2", new BytesRef("b"))); + indexWriter.addDocument(document); + document = new Document(); + document.add(new SortedDocValuesField("field1", new BytesRef("c"))); + document.add(new SortedDocValuesField("field2", new BytesRef("d"))); + indexWriter.addDocument(document); + document = new Document(); + document.add(new SortedDocValuesField("field1", new BytesRef("e"))); + document.add(new SortedDocValuesField("field2", new BytesRef("f"))); + indexWriter.addDocument(document); + try (IndexReader indexReader = maybeWrapReaderEs(indexWriter.getReader())) { + IndexSearcher indexSearcher = newIndexSearcher(indexReader); + String executionHint = randomFrom(TermsAggregatorFactory.ExecutionMode.values()).toString(); + Aggregator.SubAggCollectionMode collectionMode = randomFrom(Aggregator.SubAggCollectionMode.values()); + TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name1", ValueType.STRING) + .executionHint(executionHint) + .collectMode(collectionMode) + .field("field1") + .order(BucketOrder.key(true)) + .subAggregation(new TermsAggregationBuilder("_name2", ValueType.STRING) + .executionHint(executionHint) + .collectMode(collectionMode) + .field("field2") + .order(BucketOrder.key(true)) + ); + MappedFieldType fieldType1 = new KeywordFieldMapper.KeywordFieldType(); + fieldType1.setName("field1"); + fieldType1.setHasDocValues(true); + MappedFieldType fieldType2 = new KeywordFieldMapper.KeywordFieldType(); + fieldType2.setName("field2"); + fieldType2.setHasDocValues(true); + + Aggregator aggregator = createAggregator(aggregationBuilder, 
indexSearcher, fieldType1, fieldType2); + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + Terms result = (Terms) aggregator.buildAggregation(0L); + assertEquals(3, result.getBuckets().size()); + assertEquals("a", result.getBuckets().get(0).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(0).getDocCount()); + Terms.Bucket nestedBucket = ((Terms) result.getBuckets().get(0).getAggregations().get("_name2")).getBuckets().get(0); + assertEquals("b", nestedBucket.getKeyAsString()); + assertEquals("c", result.getBuckets().get(1).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(1).getDocCount()); + nestedBucket = ((Terms) result.getBuckets().get(1).getAggregations().get("_name2")).getBuckets().get(0); + assertEquals("d", nestedBucket.getKeyAsString()); + assertEquals("e", result.getBuckets().get(2).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(2).getDocCount()); + nestedBucket = ((Terms) result.getBuckets().get(2).getAggregations().get("_name2")).getBuckets().get(0); + assertEquals("f", nestedBucket.getKeyAsString()); + } + } + } + } + public void testMixLongAndDouble() throws Exception { for (TermsAggregatorFactory.ExecutionMode executionMode : TermsAggregatorFactory.ExecutionMode.values()) { TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.LONG) diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index 563fac1ba7d..a90960c2ec9 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -852,7 +852,7 @@ public class TopHitsIT extends ESIntegTestCase { } public void testNestedFetchFeatures() { - String hlType = randomFrom("plain", "fvh", "postings"); + String hlType = randomFrom("plain", "fvh", "unified"); HighlightBuilder.Field hlField = new HighlightBuilder.Field("comments.message") .highlightQuery(matchQuery("comments.message", "comment")) .forceSource(randomBoolean()) // randomly from stored field or _source diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java index f24dfe42270..bbe6ecc3a4e 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java @@ -19,9 +19,12 @@ package org.elasticsearch.search.aggregations.pipeline.moving.avg; +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.collect.EvictingQueue; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; @@ -41,6 +44,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuil import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; +import java.io.IOException; import java.util.ArrayList; import 
java.util.Arrays; import java.util.Collection; @@ -67,6 +71,7 @@ import static org.hamcrest.core.IsNull.nullValue; public class MovAvgIT extends ESIntegTestCase { private static final String INTERVAL_FIELD = "l_value"; private static final String VALUE_FIELD = "v_value"; + private static final String VALUE_FIELD2 = "v_value2"; static int interval; static int numBuckets; @@ -1204,6 +1209,68 @@ public class MovAvgIT extends ESIntegTestCase { } } + public void testPredictWithNonEmptyBuckets() throws Exception { + + createIndex("predict_non_empty"); + BulkRequestBuilder bulkBuilder = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + for (int i = 0; i < 10; i++) { + bulkBuilder.add(client().prepareIndex("predict_non_empty", "type").setSource( + jsonBuilder().startObject().field(INTERVAL_FIELD, i) + .field(VALUE_FIELD, 10) + .field(VALUE_FIELD2, 10) + .endObject())); + } + for (int i = 10; i < 20; i++) { + // Extra so there is a bucket that only has second field + bulkBuilder.add(client().prepareIndex("predict_non_empty", "type").setSource( + jsonBuilder().startObject().field(INTERVAL_FIELD, i).field(VALUE_FIELD2, 10).endObject())); + } + + bulkBuilder.execute().actionGet(); + ensureSearchable(); + + SearchResponse response = client() + .prepareSearch("predict_non_empty") + .setTypes("type") + .addAggregation( + histogram("histo") + .field(INTERVAL_FIELD) + .interval(1) + .subAggregation(max("max").field(VALUE_FIELD)) + .subAggregation(max("max2").field(VALUE_FIELD2)) + .subAggregation( + movingAvg("movavg_values", "max") + .window(windowSize) + .modelBuilder(new SimpleModel.SimpleModelBuilder()) + .gapPolicy(BucketHelpers.GapPolicy.SKIP).predict(5))).execute().actionGet(); + + assertSearchResponse(response); + + Histogram histo = response.getAggregations().get("histo"); + assertThat(histo, notNullValue()); + assertThat(histo.getName(), equalTo("histo")); + List buckets = histo.getBuckets(); + assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(20)); + + SimpleValue current = buckets.get(0).getAggregations().get("movavg_values"); + assertThat(current, nullValue()); + + for (int i = 1; i < 20; i++) { + Bucket bucket = buckets.get(i); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKey(), equalTo((double)i)); + assertThat(bucket.getDocCount(), equalTo(1L)); + SimpleValue movAvgAgg = bucket.getAggregations().get("movavg_values"); + if (i < 15) { + assertThat(movAvgAgg, notNullValue()); + assertThat(movAvgAgg.value(), equalTo(10d)); + } else { + assertThat(movAvgAgg, nullValue()); + } + } + } + private void assertValidIterators(Iterator expectedBucketIter, Iterator expectedCountsIter, Iterator expectedValuesIter) { if (!expectedBucketIter.hasNext()) { fail("`expectedBucketIter` iterator ended before `actual` iterator, size mismatch"); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java index 38111654bbd..1915857302c 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java @@ -22,7 +22,7 @@ package org.elasticsearch.search.aggregations.support; import com.carrotsearch.randomizedtesting.generators.RandomStrings; import org.apache.lucene.search.Scorer; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.script.LeafSearchScript; +import 
org.elasticsearch.script.SearchScript; import org.elasticsearch.search.aggregations.support.values.ScriptBytesValues; import org.elasticsearch.search.aggregations.support.values.ScriptDoubleValues; import org.elasticsearch.search.aggregations.support.values.ScriptLongValues; @@ -30,16 +30,16 @@ import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.Arrays; -import java.util.Map; public class ScriptValuesTests extends ESTestCase { - private static class FakeSearchScript implements LeafSearchScript { + private static class FakeSearchScript extends SearchScript { private final Object[][] values; int index; FakeSearchScript(Object[][] values) { + super(null, null, null); this.values = values; index = -1; } @@ -67,10 +67,6 @@ public class ScriptValuesTests extends ESTestCase { index = doc; } - @Override - public void setSource(Map source) { - } - @Override public long runAsLong() { throw new UnsupportedOperationException(); diff --git a/core/src/test/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java index 617aa28a0ba..8f092383a5b 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java @@ -38,7 +38,6 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; -import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; @@ -46,7 +45,6 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.function.Function; @@ -54,7 +52,6 @@ import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDI import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; -import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.nestedQuery; @@ -62,9 +59,9 @@ import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHit; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -616,4 +613,25 @@ public class InnerHitsIT extends ESIntegTestCase { assertSearchHits(response, "1", "3"); } + public void 
testDontExplode() throws Exception { + assertAcked(prepareCreate("index2").addMapping("type", "nested", "type=nested")); + client().prepareIndex("index2", "type", "1").setSource(jsonBuilder().startObject() + .startArray("nested") + .startObject() + .field("field", "value1") + .endObject() + .endArray() + .endObject()) + .setRefreshPolicy(IMMEDIATE) + .get(); + + QueryBuilder query = nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg) + .innerHit(new InnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1)); + SearchResponse response = client().prepareSearch("index2") + .setQuery(query) + .get(); + assertNoFailures(response); + assertHitCount(response, 1); + } + } diff --git a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 01cc605393b..9cbd9fc5d75 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -102,8 +102,7 @@ import static org.hamcrest.Matchers.startsWith; public class HighlighterSearchIT extends ESIntegTestCase { // TODO as we move analyzers out of the core we need to move some of these into HighlighterWithAnalyzersTests - private static final String[] ALL_TYPES = new String[] {"plain", "postings", "fvh", "unified"}; - private static final String[] UNIFIED_AND_NULL = new String[] {null, "unified"}; + private static final String[] ALL_TYPES = new String[] {"plain", "fvh", "unified"}; @Override protected Collection> nodePlugins() { @@ -127,11 +126,9 @@ public class HighlighterSearchIT extends ESIntegTestCase { .setSource(jsonBuilder().startObject().field("text", "foo").endObject()) .get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - SearchResponse search = client().prepareSearch().setQuery(matchQuery("text", "foo")) - .highlighter(new HighlightBuilder().field(new Field("text")).highlighterType(type)).get(); - assertHighlight(search, 0, "text", 0, equalTo("foo")); - } + SearchResponse search = client().prepareSearch().setQuery(matchQuery("text", "foo")) + .highlighter(new HighlightBuilder().field(new Field("text"))).get(); + assertHighlight(search, 0, "text", 0, equalTo("foo")); } public void testHighlightingWithWildcardName() throws IOException { @@ -279,19 +276,19 @@ public class HighlighterSearchIT extends ESIntegTestCase { refresh(); SearchResponse search = client().prepareSearch() .setQuery(matchQuery("long_term", "thisisaverylongwordandmakessurethisfails foo highlighed")) - .highlighter(new HighlightBuilder().field("long_term", 18, 1)) + .highlighter(new HighlightBuilder().field("long_term", 18, 1).highlighterType("fvh")) .get(); assertHighlight(search, 0, "long_term", 0, 1, equalTo("thisisaverylongwordandmakessurethisfails")); search = client().prepareSearch() .setQuery(matchPhraseQuery("no_long_term", "test foo highlighed").slop(3)) - .highlighter(new HighlightBuilder().field("no_long_term", 18, 1).postTags("").preTags("")) + .highlighter(new HighlightBuilder().field("no_long_term", 18, 1).highlighterType("fvh").postTags("").preTags("")) .get(); assertNotHighlighted(search, 0, "no_long_term"); search = client().prepareSearch() .setQuery(matchPhraseQuery("no_long_term", "test foo highlighed").slop(3)) - .highlighter(new HighlightBuilder().field("no_long_term", 30, 1).postTags("").preTags("")) + .highlighter(new 
HighlightBuilder().field("no_long_term", 30, 1).highlighterType("fvh").postTags("").preTags("")) .get(); assertHighlight(search, 0, "no_long_term", 0, 1, equalTo("a test where foo is highlighed and")); @@ -326,26 +323,25 @@ public class HighlighterSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); - for (String type : UNIFIED_AND_NULL) { - SearchResponse search = client().prepareSearch() - .setQuery(matchQuery("title", "bug")) - .highlighter(new HighlightBuilder().field("title", -1, 0).highlighterType(type)) - .get(); + SearchResponse search = client().prepareSearch() + .setQuery(matchQuery("title", "bug")) + .highlighter(new HighlightBuilder().field("title", -1, 0)) + .get(); - for (int i = 0; i < indexRequestBuilders.length; i++) { - assertHighlight(search, i, "title", 0, equalTo("This is a test on the highlighting bug present in elasticsearch")); - } - - search = client().prepareSearch() - .setQuery(matchQuery("attachments.body", "attachment")) - .highlighter(new HighlightBuilder().field("attachments.body", -1, 0).highlighterType(type)) - .get(); - - for (int i = 0; i < indexRequestBuilders.length; i++) { - assertHighlight(search, i, "attachments.body", 0, equalTo("attachment 1")); - assertHighlight(search, i, "attachments.body", 1, equalTo("attachment 2")); - } + for (int i = 0; i < indexRequestBuilders.length; i++) { + assertHighlight(search, i, "title", 0, equalTo("This is a test on the highlighting bug present in elasticsearch")); } + + search = client().prepareSearch() + .setQuery(matchQuery("attachments.body", "attachment")) + .highlighter(new HighlightBuilder().field("attachments.body", -1, 0)) + .get(); + + for (int i = 0; i < indexRequestBuilders.length; i++) { + assertHighlight(search, i, "attachments.body", 0, equalTo("attachment 1")); + assertHighlight(search, i, "attachments.body", 1, equalTo("attachment 2")); + } + } public void testSourceLookupHighlightingUsingFastVectorHighlighter() throws Exception { @@ -380,25 +376,23 @@ public class HighlighterSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); - for (String type : UNIFIED_AND_NULL) { - SearchResponse search = client().prepareSearch() - .setQuery(matchQuery("title", "bug")) - .highlighter(new HighlightBuilder().field("title", -1, 0).highlighterType(type)) - .get(); + SearchResponse search = client().prepareSearch() + .setQuery(matchQuery("title", "bug")) + .highlighter(new HighlightBuilder().field("title", -1, 0)) + .get(); - for (int i = 0; i < indexRequestBuilders.length; i++) { - assertHighlight(search, i, "title", 0, equalTo("This is a test on the highlighting bug present in elasticsearch")); - } + for (int i = 0; i < indexRequestBuilders.length; i++) { + assertHighlight(search, i, "title", 0, equalTo("This is a test on the highlighting bug present in elasticsearch")); + } - search = client().prepareSearch() - .setQuery(matchQuery("attachments.body", "attachment")) - .highlighter(new HighlightBuilder().field("attachments.body", -1, 2).highlighterType(type)) - .execute().get(); + search = client().prepareSearch() + .setQuery(matchQuery("attachments.body", "attachment")) + .highlighter(new HighlightBuilder().field("attachments.body", -1, 2)) + .execute().get(); - for (int i = 0; i < 5; i++) { - assertHighlight(search, i, "attachments.body", 0, equalTo("attachment 1")); - assertHighlight(search, i, "attachments.body", 1, equalTo("attachment 2")); - } + for (int i = 0; i < 5; i++) { + assertHighlight(search, i, "attachments.body", 0, equalTo("attachment 1")); + 
assertHighlight(search, i, "attachments.body", 1, equalTo("attachment 2")); } } @@ -446,28 +440,26 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertHighlight(search, i, "title", 1, 2, equalTo("This is the second bug to perform highlighting on.")); } - for (String type : UNIFIED_AND_NULL) { - search = client().prepareSearch() - .setQuery(matchQuery("title", "bug")) - //sentences will be generated out of each value - .highlighter(new HighlightBuilder().field("title").highlighterType(type)).get(); + search = client().prepareSearch() + .setQuery(matchQuery("title", "bug")) + //sentences will be generated out of each value + .highlighter(new HighlightBuilder().field("title")).get(); - for (int i = 0; i < indexRequestBuilders.length; i++) { - assertHighlight(search, i, "title", 0, - equalTo("This is a test on the highlighting bug present in elasticsearch.")); - assertHighlight(search, i, "title", 1, 2, - equalTo("This is the second bug to perform highlighting on.")); - } + for (int i = 0; i < indexRequestBuilders.length; i++) { + assertHighlight(search, i, "title", 0, + equalTo("This is a test on the highlighting bug present in elasticsearch.")); + assertHighlight(search, i, "title", 1, 2, + equalTo("This is the second bug to perform highlighting on.")); + } - search = client().prepareSearch() - .setQuery(matchQuery("attachments.body", "attachment")) - .highlighter(new HighlightBuilder().field("attachments.body", -1, 2).highlighterType(type)) - .get(); + search = client().prepareSearch() + .setQuery(matchQuery("attachments.body", "attachment")) + .highlighter(new HighlightBuilder().field("attachments.body", -1, 2)) + .get(); - for (int i = 0; i < indexRequestBuilders.length; i++) { - assertHighlight(search, i, "attachments.body", 0, equalTo("attachment for this test")); - assertHighlight(search, i, "attachments.body", 1, 2, equalTo("attachment 2")); - } + for (int i = 0; i < indexRequestBuilders.length; i++) { + assertHighlight(search, i, "attachments.body", 0, equalTo("attachment for this test")); + assertHighlight(search, i, "attachments.body", 1, 2, equalTo("attachment 2")); } } @@ -521,9 +513,9 @@ public class HighlighterSearchIT extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight(searchResponse, 0, "field1", 0, 2, equalTo(" test")); - assertHighlight(searchResponse, 0, "field1", 1, 2, equalTo(" test")); - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("this is another test")); + assertHighlight(searchResponse, 0, "field1", 0, 2, equalTo("test")); + assertHighlight(searchResponse, 0, "field1", 1, 2, equalTo("test")); + assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("yet another test")); } // Issue #5175 @@ -570,34 +562,31 @@ public class HighlighterSearchIT extends ESIntegTestCase { .setSource("field1", "The quick brown fox jumps over the lazy dog", "field2", "second field content").get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - //works using stored field - SearchResponse searchResponse = client().prepareSearch("test") + //works using stored field + SearchResponse searchResponse = client().prepareSearch("test") + .setQuery(termQuery("field1", "quick")) + .highlighter(new HighlightBuilder().field(new Field("field1").preTags("").postTags(""))) + .get(); + assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The quick brown fox jumps over the lazy dog")); + + assertFailures(client().prepareSearch("test") .setQuery(termQuery("field1", "quick")) - 
.highlighter(new HighlightBuilder().field(new Field("field1").preTags("").postTags("").highlighterType(type))) - .get(); - assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The quick brown fox jumps over the lazy dog")); + .highlighter( + new HighlightBuilder().field(new Field("field1").preTags("").postTags("").forceSource(true))), + RestStatus.BAD_REQUEST, + containsString("source is forced for fields [field1] but type [type1] has disabled _source")); - assertFailures(client().prepareSearch("test") - .setQuery(termQuery("field1", "quick")) - .highlighter( - new HighlightBuilder().field(new Field("field1").preTags("").postTags("") - .highlighterType(type).forceSource(true))), - RestStatus.BAD_REQUEST, - containsString("source is forced for fields [field1] but type [type1] has disabled _source")); + SearchSourceBuilder searchSource = SearchSourceBuilder.searchSource().query(termQuery("field1", "quick")) + .highlighter(highlight().forceSource(true).field("field1")); + assertFailures(client().prepareSearch("test").setSource(searchSource), + RestStatus.BAD_REQUEST, + containsString("source is forced for fields [field1] but type [type1] has disabled _source")); - SearchSourceBuilder searchSource = SearchSourceBuilder.searchSource().query(termQuery("field1", "quick")) - .highlighter(highlight().forceSource(true).field("field1").highlighterType(type)); - assertFailures(client().prepareSearch("test").setSource(searchSource), - RestStatus.BAD_REQUEST, - containsString("source is forced for fields [field1] but type [type1] has disabled _source")); - - searchSource = SearchSourceBuilder.searchSource().query(termQuery("field1", "quick")) - .highlighter(highlight().forceSource(true).field("field*").highlighterType(type)); - assertFailures(client().prepareSearch("test").setSource(searchSource), - RestStatus.BAD_REQUEST, - matches("source is forced for fields \\[field\\d, field\\d\\] but type \\[type1\\] has disabled _source")); - } + searchSource = SearchSourceBuilder.searchSource().query(termQuery("field1", "quick")) + .highlighter(highlight().forceSource(true).field("field*")); + assertFailures(client().prepareSearch("test").setSource(searchSource), + RestStatus.BAD_REQUEST, + matches("source is forced for fields \\[field\\d, field\\d\\] but type \\[type1\\] has disabled _source")); } public void testPlainHighlighter() throws Exception { @@ -802,7 +791,8 @@ public class HighlighterSearchIT extends ESIntegTestCase { * in the neighborhood of 300ms and the large phrase limit is in the * neighborhood of 8 seconds. 
*/ - assertThat(defaultPhraseLimit.getTookInMillis(), lessThan(largePhraseLimit.getTookInMillis())); + assertThat(defaultPhraseLimit.getTook().getMillis(), + lessThan(largePhraseLimit.getTook().getMillis())); } @@ -1014,16 +1004,14 @@ public class HighlighterSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); - for (String type : UNIFIED_AND_NULL) { - SearchResponse search = client().prepareSearch() - .setQuery(matchQuery("title", "bug")) - .highlighter(new HighlightBuilder().field("title", -1, 0).highlighterType(type)) - .get(); + SearchResponse search = client().prepareSearch() + .setQuery(matchQuery("title", "bug")) + .highlighter(new HighlightBuilder().field("title", -1, 0)) + .get(); - for (int i = 0; i < 5; i++) { - assertHighlight(search, i, "title", 0, 1, equalTo("This is a test on the highlighting bug " + - "present in elasticsearch")); - } + for (int i = 0; i < 5; i++) { + assertHighlight(search, i, "title", 0, 1, equalTo("This is a test on the highlighting bug " + + "present in elasticsearch")); } } @@ -1040,7 +1028,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { SearchResponse search = client().prepareSearch() .setQuery(matchQuery("title", "bug")) - .highlighter(new HighlightBuilder().field("title", 30, 1, 10)) + .highlighter(new HighlightBuilder().field("title", 30, 1, 10).highlighterType("fvh")) .get(); for (int i = 0; i < 5; i++) { @@ -1060,16 +1048,14 @@ public class HighlighterSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); - for (String type : UNIFIED_AND_NULL) { - SearchResponse search = client().prepareSearch() - .setQuery(matchQuery("title", "test")) - .highlighter(new HighlightBuilder().encoder("html").field("title", 50, 1, 10).highlighterType(type)) - .get(); + SearchResponse search = client().prepareSearch() + .setQuery(matchQuery("title", "test")) + .highlighter(new HighlightBuilder().encoder("html").field("title", 50, 1, 10)) + .get(); - for (int i = 0; i < indexRequestBuilders.length; i++) { - assertHighlight(search, i, "title", 0, 1, - startsWith("This is a html escaping highlighting test for *&?")); - } + for (int i = 0; i < indexRequestBuilders.length; i++) { + assertHighlight(search, i, "title", 0, 1, + startsWith("This is a html escaping highlighting test for *&?")); } } @@ -1086,11 +1072,11 @@ public class HighlighterSearchIT extends ESIntegTestCase { SearchResponse search = client().prepareSearch() .setQuery(matchQuery("title", "test")) - .highlighter(new HighlightBuilder().encoder("html").field("title", 30, 1, 10)) + .highlighter(new HighlightBuilder().encoder("html").field("title", 30, 1, 10).highlighterType("plain")) .get(); for (int i = 0; i < 5; i++) { - assertHighlight(search, i, "title", 0, 1, equalTo("highlighting test for *&? elasticsearch")); + assertHighlight(search, i, "title", 0, 1, equalTo(" highlighting test for *&? 
elasticsearch")); } } @@ -1115,23 +1101,21 @@ public class HighlighterSearchIT extends ESIntegTestCase { client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - // simple search on body with standard analyzer with a simple field query - SearchResponse search = client().prepareSearch() - .setQuery(matchQuery("title", "this is a test")) - .highlighter(new HighlightBuilder().encoder("html").field("title", 50, 1).highlighterType(type)) - .get(); + // simple search on body with standard analyzer with a simple field query + SearchResponse search = client().prepareSearch() + .setQuery(matchQuery("title", "this is a test")) + .highlighter(new HighlightBuilder().encoder("html").field("title", 50, 1)) + .get(); - assertHighlight(search, 0, "title", 0, 1, equalTo("this is a test")); + assertHighlight(search, 0, "title", 0, 1, equalTo("this is a test")); - // search on title.key and highlight on title - search = client().prepareSearch() - .setQuery(matchQuery("title.key", "this is a test")) - .highlighter(new HighlightBuilder().encoder("html").field("title.key", 50, 1).highlighterType(type)) - .get(); + // search on title.key and highlight on title + search = client().prepareSearch() + .setQuery(matchQuery("title.key", "this is a test")) + .highlighter(new HighlightBuilder().encoder("html").field("title.key", 50, 1)) + .get(); - assertHighlight(search, 0, "title.key", 0, 1, equalTo("this is a test")); - } + assertHighlight(search, 0, "title.key", 0, 1, equalTo("this is a test")); } public void testMultiMapperVectorFromSource() throws Exception { @@ -1156,23 +1140,21 @@ public class HighlighterSearchIT extends ESIntegTestCase { client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - // simple search on body with standard analyzer with a simple field query - SearchResponse search = client().prepareSearch() - .setQuery(matchQuery("title", "this is a test")) - .highlighter(new HighlightBuilder().encoder("html").field("title", 50, 1).highlighterType(type)) - .get(); + // simple search on body with standard analyzer with a simple field query + SearchResponse search = client().prepareSearch() + .setQuery(matchQuery("title", "this is a test")) + .highlighter(new HighlightBuilder().encoder("html").field("title", 50, 1)) + .get(); - assertHighlight(search, 0, "title", 0, 1, equalTo("this is a test")); + assertHighlight(search, 0, "title", 0, 1, equalTo("this is a test")); - // search on title.key and highlight on title.key - search = client().prepareSearch() - .setQuery(matchQuery("title.key", "this is a test")) - .highlighter(new HighlightBuilder().encoder("html").field("title.key", 50, 1).highlighterType(type)) - .get(); + // search on title.key and highlight on title.key + search = client().prepareSearch() + .setQuery(matchQuery("title.key", "this is a test")) + .highlighter(new HighlightBuilder().encoder("html").field("title.key", 50, 1)) + .get(); - assertHighlight(search, 0, "title.key", 0, 1, equalTo("this is a test")); - } + assertHighlight(search, 0, "title.key", 0, 1, equalTo("this is a test")); } public void testMultiMapperNoVectorWithStore() throws Exception { @@ -1199,23 +1181,21 @@ public class HighlighterSearchIT extends ESIntegTestCase { refresh(); - for (String type : UNIFIED_AND_NULL) { - // simple search on body with standard analyzer with a simple field query - SearchResponse search = client().prepareSearch() - 
.setQuery(matchQuery("title", "this is a test")) - .highlighter(new HighlightBuilder().encoder("html").field("title", 50, 1).highlighterType(type)) - .get(); + // simple search on body with standard analyzer with a simple field query + SearchResponse search = client().prepareSearch() + .setQuery(matchQuery("title", "this is a test")) + .highlighter(new HighlightBuilder().encoder("html").field("title", 50, 1)) + .get(); - assertHighlight(search, 0, "title", 0, 1, equalTo("this is a test")); + assertHighlight(search, 0, "title", 0, 1, equalTo("this is a test")); - // search on title.key and highlight on title - search = client().prepareSearch() - .setQuery(matchQuery("title.key", "this is a test")) - .highlighter(new HighlightBuilder().encoder("html").field("title.key", 50, 1).highlighterType(type)) - .get(); + // search on title.key and highlight on title + search = client().prepareSearch() + .setQuery(matchQuery("title.key", "this is a test")) + .highlighter(new HighlightBuilder().encoder("html").field("title.key", 50, 1)) + .get(); - assertHighlight(search, 0, "title.key", 0, 1, equalTo("this is a test")); - } + assertHighlight(search, 0, "title.key", 0, 1, equalTo("this is a test")); } public void testMultiMapperNoVectorFromSource() throws Exception { @@ -1240,23 +1220,21 @@ public class HighlighterSearchIT extends ESIntegTestCase { client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - // simple search on body with standard analyzer with a simple field query - SearchResponse search = client().prepareSearch() - .setQuery(matchQuery("title", "this is a test")) - .highlighter(new HighlightBuilder().encoder("html").field("title", 50, 1).highlighterType(type)) - .get(); + // simple search on body with standard analyzer with a simple field query + SearchResponse search = client().prepareSearch() + .setQuery(matchQuery("title", "this is a test")) + .highlighter(new HighlightBuilder().encoder("html").field("title", 50, 1)) + .get(); - assertHighlight(search, 0, "title", 0, 1, equalTo("this is a test")); + assertHighlight(search, 0, "title", 0, 1, equalTo("this is a test")); - // search on title.key and highlight on title.key - search = client().prepareSearch() - .setQuery(matchQuery("title.key", "this is a test")) - .highlighter(new HighlightBuilder().encoder("html").field("title.key", 50, 1).highlighterType(type)) - .get(); + // search on title.key and highlight on title.key + search = client().prepareSearch() + .setQuery(matchQuery("title.key", "this is a test")) + .highlighter(new HighlightBuilder().encoder("html").field("title.key", 50, 1)) + .get(); - assertHighlight(search, 0, "title.key", 0, 1, equalTo("this is a test")); - } + assertHighlight(search, 0, "title.key", 0, 1, equalTo("this is a test")); } public void testFastVectorHighlighterShouldFailIfNoTermVectors() throws Exception { @@ -1304,7 +1282,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { SearchResponse search = client().prepareSearch() .setQuery(matchPhraseQuery("title", "test for the workaround")) - .highlighter(new HighlightBuilder().field("title", 50, 1, 10)) + .highlighter(new HighlightBuilder().field("title", 50, 1, 10).highlighterType("fvh")) .get(); for (int i = 0; i < indexRequestBuilders.length; i++) { @@ -1349,7 +1327,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("test") .setQuery(QueryBuilders.matchQuery("tags", "tag")) - .highlighter(new 
HighlightBuilder().field("tags", -1, 0)).get(); + .highlighter(new HighlightBuilder().field("tags", -1, 0).highlighterType("fvh")).get(); assertHighlight(response, 0, "tags", 0, equalTo("this is a really long tag i would like to highlight")); assertHighlight(response, 0, "tags", 1, 2, @@ -1363,16 +1341,14 @@ public class HighlighterSearchIT extends ESIntegTestCase { .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog").get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - logger.info("--> highlighting and searching on field1"); - SearchSourceBuilder source = searchSource() - .query(boostingQuery(termQuery("field2", "brown"), termQuery("field2", "foobar")).negativeBoost(0.5f)) - .highlighter(highlight().field("field2").order("score").preTags("").postTags("").highlighterType(type)); + logger.info("--> highlighting and searching on field1"); + SearchSourceBuilder source = searchSource() + .query(boostingQuery(termQuery("field2", "brown"), termQuery("field2", "foobar")).negativeBoost(0.5f)) + .highlighter(highlight().field("field2").order("score").preTags("").postTags("")); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); + SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog")); - } + assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog")); } public void testBoostingQueryTermVector() throws IOException { @@ -1403,14 +1379,12 @@ public class HighlighterSearchIT extends ESIntegTestCase { refresh(); logger.info("--> highlighting and searching on field1"); - for (String type : UNIFIED_AND_NULL) { - SearchSourceBuilder source = searchSource() - .query(commonTermsQuery("field2", "quick brown").cutoffFrequency(100)) - .highlighter(highlight().field("field2").order("score").preTags("").postTags("").highlighterType(type)); + SearchSourceBuilder source = searchSource() + .query(commonTermsQuery("field2", "quick brown").cutoffFrequency(100)) + .highlighter(highlight().field("field2").order("score").preTags("").postTags("")); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog")); - } + SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); + assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog")); } public void testCommonTermsTermVector() throws IOException { @@ -1452,75 +1426,86 @@ public class HighlighterSearchIT extends ESIntegTestCase { refresh(); logger.info("--> highlighting and searching on field0"); - for (String type : UNIFIED_AND_NULL) { - SearchSourceBuilder source = searchSource() - .query(matchPhrasePrefixQuery("field0", "quick bro")) - .highlighter(highlight().field("field0").order("score").preTags("").postTags("").highlighterType(type)); + SearchSourceBuilder source = searchSource() + .query(matchPhrasePrefixQuery("field0", "bro")) + .highlighter(highlight().field("field0").order("score").preTags("").postTags("")); + SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); + assertHighlight(searchResponse, 0, "field0", 0, 1, equalTo("The quick 
brown fox jumps over the lazy dog")); - assertHighlight(searchResponse, 0, "field0", 0, 1, equalTo("The quick brown fox jumps over the lazy dog")); + source = searchSource() + .query(matchPhrasePrefixQuery("field0", "quick bro")) + .highlighter(highlight().field("field0").order("score").preTags("").postTags("")); - logger.info("--> highlighting and searching on field1"); - source = searchSource() - .query(matchPhrasePrefixQuery("field1", "quick bro")) - .highlighter(highlight().field("field1").order("score").preTags("").postTags("").highlighterType(type)); + searchResponse = client().search(searchRequest("test").source(source)).actionGet(); + assertHighlight(searchResponse, 0, "field0", 0, 1, equalTo("The quick brown fox jumps over the lazy dog")); - searchResponse = client().search(searchRequest("test").source(source)).actionGet(); + logger.info("--> highlighting and searching on field1"); + source = searchSource() + .query(boolQuery() + .should(matchPhrasePrefixQuery("field1", "test")) + .should(matchPhrasePrefixQuery("field1", "bro")) + ) + .highlighter(highlight().field("field1").order("score").preTags("").postTags("")); - assertHighlight(searchResponse, 0, "field1", 0, 1, anyOf( - equalTo("The quick browse button is a fancy thing, right bro?"), - equalTo("The quick brown fox jumps over the lazy dog"), - equalTo("The quick browse button is a fancy thing, right bro?"), - equalTo("The quick brown fox jumps over the lazy dog"))); - assertHighlight(searchResponse, 1, "field1", 0, 1, anyOf( - equalTo("The quick browse button is a fancy thing, right bro?"), - equalTo("The quick brown fox jumps over the lazy dog"), - equalTo("The quick browse button is a fancy thing, right bro?"), - equalTo("The quick brown fox jumps over the lazy dog"))); - - // with synonyms - client().prepareIndex("test", "type2", "0").setSource( - "field4", "The quick brown fox jumps over the lazy dog", - "field3", "The quick brown fox jumps over the lazy dog").get(); - client().prepareIndex("test", "type2", "1").setSource( - "field4", "The quick browse button is a fancy thing, right bro?").get(); - client().prepareIndex("test", "type2", "2").setSource( - "field4", "a quick fast blue car").get(); - refresh(); - - source = searchSource().postFilter(typeQuery("type2")).query(matchPhrasePrefixQuery("field3", "fast bro")) - .highlighter(highlight().field("field3").order("score").preTags("").postTags("").highlighterType(type)); - - searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - - assertHighlight(searchResponse, 0, "field3", 0, 1, equalTo("The quick brown fox jumps over the lazy dog")); - - logger.info("--> highlighting and searching on field4"); - source = searchSource().postFilter(typeQuery("type2")).query(matchPhrasePrefixQuery("field4", "the fast bro")) - .highlighter(highlight().field("field4").order("score").preTags("").postTags("").highlighterType(type)); - searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - - assertHighlight(searchResponse, 0, "field4", 0, 1, anyOf( - equalTo("The quick browse button is a fancy thing, right bro?"), - equalTo("The quick brown fox jumps over the lazy dog"), - equalTo("The quick browse button is a fancy thing, right bro?"), - equalTo("The quick brown fox jumps over the lazy dog"))); - assertHighlight(searchResponse, 1, "field4", 0, 1, anyOf( - equalTo("The quick browse button is a fancy thing, right bro?"), - equalTo("The quick brown fox jumps over the lazy dog"), - equalTo("The quick browse button is a fancy thing, 
right bro?"), - equalTo("The quick brown fox jumps over the lazy dog"))); - - logger.info("--> highlighting and searching on field4"); - source = searchSource().postFilter(typeQuery("type2")).query(matchPhrasePrefixQuery("field4", "a fast quick blue ca")) - .highlighter(highlight().field("field4").order("score").preTags("").postTags("").highlighterType(type)); - searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - - assertHighlight(searchResponse, 0, "field4", 0, 1, - anyOf(equalTo("a quick fast blue car"), - equalTo("a quick fast blue car"))); + searchResponse = client().search(searchRequest("test").source(source)).actionGet(); + assertThat(searchResponse.getHits().totalHits, equalTo(2L)); + for (int i = 0; i < 2; i++) { + assertHighlight(searchResponse, i, "field1", 0, 1, anyOf( + equalTo("The quick browse button is a fancy thing, right bro?"), + equalTo("The quick brown fox jumps over the lazy dog"))); } + + source = searchSource() + .query(matchPhrasePrefixQuery("field1", "quick bro")) + .highlighter(highlight().field("field1").order("score").preTags("").postTags("")); + + searchResponse = client().search(searchRequest("test").source(source)).actionGet(); + + assertHighlight(searchResponse, 0, "field1", 0, 1, anyOf( + equalTo("The quick browse button is a fancy thing, right bro?"), + equalTo("The quick brown fox jumps over the lazy dog"))); + assertHighlight(searchResponse, 1, "field1", 0, 1, anyOf( + equalTo("The quick browse button is a fancy thing, right bro?"), + equalTo("The quick brown fox jumps over the lazy dog"))); + + // with synonyms + client().prepareIndex("test", "type2", "0").setSource( + "field4", "The quick brown fox jumps over the lazy dog", + "field3", "The quick brown fox jumps over the lazy dog").get(); + client().prepareIndex("test", "type2", "1").setSource( + "field4", "The quick browse button is a fancy thing, right bro?").get(); + client().prepareIndex("test", "type2", "2").setSource( + "field4", "a quick fast blue car").get(); + refresh(); + + source = searchSource().postFilter(typeQuery("type2")).query(matchPhrasePrefixQuery("field3", "fast bro")) + .highlighter(highlight().field("field3").order("score").preTags("").postTags("")); + + searchResponse = client().search(searchRequest("test").source(source)).actionGet(); + + assertHighlight(searchResponse, 0, "field3", 0, 1, equalTo("The quick brown fox jumps over the lazy dog")); + + logger.info("--> highlighting and searching on field4"); + source = searchSource().postFilter(typeQuery("type2")).query(matchPhrasePrefixQuery("field4", "the fast bro")) + .highlighter(highlight().field("field4").order("score").preTags("").postTags("")); + searchResponse = client().search(searchRequest("test").source(source)).actionGet(); + + assertHighlight(searchResponse, 0, "field4", 0, 1, anyOf( + equalTo("The quick browse button is a fancy thing, right bro?"), + equalTo("The quick brown fox jumps over the lazy dog"))); + assertHighlight(searchResponse, 1, "field4", 0, 1, anyOf( + equalTo("The quick browse button is a fancy thing, right bro?"), + equalTo("The quick brown fox jumps over the lazy dog"))); + + logger.info("--> highlighting and searching on field4"); + source = searchSource().postFilter(typeQuery("type2")).query(matchPhrasePrefixQuery("field4", "a fast quick blue ca")) + .highlighter(highlight().field("field4").order("score").preTags("").postTags("")); + searchResponse = client().search(searchRequest("test").source(source)).actionGet(); + + assertHighlight(searchResponse, 0, "field4", 
0, 1, + anyOf(equalTo("a quick fast blue car"), + equalTo("a quick fast blue car"))); } public void testPlainHighlightDifferentFragmenter() throws Exception { @@ -1536,8 +1521,9 @@ public class HighlighterSearchIT extends ESIntegTestCase { SearchResponse response = client().prepareSearch("test") .setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQuery.Type.PHRASE)) .highlighter( - new HighlightBuilder().field(new HighlightBuilder.Field("tags").fragmentSize(-1).numOfFragments(2) - .fragmenter("simple"))).get(); + new HighlightBuilder().field(new HighlightBuilder.Field("tags") + .highlighterType("plain").fragmentSize(-1).numOfFragments(2).fragmenter("simple"))) + .get(); assertHighlight(response, 0, "tags", 0, equalTo("this is a really long tag i would like to highlight")); assertHighlight(response, 0, "tags", 1, 2, @@ -1546,7 +1532,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { response = client().prepareSearch("test") .setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQuery.Type.PHRASE)) .highlighter( - new HighlightBuilder().field(new HighlightBuilder.Field("tags").fragmentSize(-1).numOfFragments(2) + new HighlightBuilder().field(new Field("tags").highlighterType("plain").fragmentSize(-1).numOfFragments(2) .fragmenter("span"))).get(); assertHighlight(response, 0, "tags", 0, @@ -1557,7 +1543,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertFailures(client().prepareSearch("test") .setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQuery.Type.PHRASE)) .highlighter( - new HighlightBuilder().field(new HighlightBuilder.Field("tags").fragmentSize(-1).numOfFragments(2) + new HighlightBuilder().field(new Field("tags").highlighterType("plain").fragmentSize(-1).numOfFragments(2) .fragmenter("invalid"))), RestStatus.BAD_REQUEST, containsString("unknown fragmenter option [invalid] for the field [tags]")); @@ -1611,15 +1597,13 @@ public class HighlighterSearchIT extends ESIntegTestCase { .endObject()).get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - // This query used to fail when the field to highlight was absent - SearchResponse response = client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("field", "highlight").type(MatchQuery.Type.BOOLEAN)) - .highlighter( - new HighlightBuilder().field(new HighlightBuilder.Field("highlight_field").fragmentSize(-1).numOfFragments(1) - .fragmenter("simple")).highlighterType(type)).get(); - assertThat(response.getHits().getHits()[0].getHighlightFields().isEmpty(), equalTo(true)); - } + // This query used to fail when the field to highlight was absent + SearchResponse response = client().prepareSearch("test") + .setQuery(QueryBuilders.matchQuery("field", "highlight").type(MatchQuery.Type.BOOLEAN)) + .highlighter( + new HighlightBuilder().field(new HighlightBuilder.Field("highlight_field").fragmentSize(-1).numOfFragments(1) + .fragmenter("simple"))).get(); + assertThat(response.getHits().getHits()[0].getHighlightFields().isEmpty(), equalTo(true)); } // Issue #3211 @@ -1664,13 +1648,11 @@ public class HighlighterSearchIT extends ESIntegTestCase { .setSource("text", "elasticsearch test").get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - SearchResponse response = client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("text", "test").type(MatchQuery.Type.BOOLEAN)) - .highlighter(new HighlightBuilder().field("text").highlighterType(type)).execute().actionGet(); - // PatternAnalyzer will throw an exception if it is resetted twice - 
assertHitCount(response, 1L); - } + SearchResponse response = client().prepareSearch("test") + .setQuery(QueryBuilders.matchQuery("text", "test").type(MatchQuery.Type.BOOLEAN)) + .highlighter(new HighlightBuilder().field("text")).execute().actionGet(); + // PatternAnalyzer will throw an exception if it is resetted twice + assertHitCount(response, 1L); } public void testHighlightUsesHighlightQuery() throws IOException { @@ -1735,10 +1717,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); - field.highlighterType("postings"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); - field.highlighterType("unified"); response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); @@ -1752,10 +1730,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); - field.highlighterType("postings"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); - field.highlighterType("unified"); response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); @@ -1775,11 +1749,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some")); - // Postings hl also works but the fragment is the whole first sentence (size ignored) - field.highlighterType("postings"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some of me should get cut off.")); - // We can also ask for a fragment longer than the input string and get the whole string field.highlighterType("plain").noMatchSize(text.length() * 2); response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); @@ -1793,11 +1762,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertHighlight(response, 0, "text", 0, 1, equalTo(text)); - //no difference using postings hl as the noMatchSize is ignored (just needs to be greater than 0) - field.highlighterType("postings"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some of me should get cut off.")); - // We can also ask for a fragment exactly the size of the input field and get the whole field field.highlighterType("plain").noMatchSize(text.length()); response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); @@ -1812,11 +1776,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertHighlight(response, 0, "text", 0, 1, equalTo(text)); - //no difference using postings hl as the 
noMatchSize is ignored (just needs to be greater than 0) - field.highlighterType("postings"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some of me should get cut off.")); - // You can set noMatchSize globally in the highlighter as well field.highlighterType("plain").noMatchSize(null); response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field).noMatchSize(21)).get(); @@ -1830,10 +1789,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field).noMatchSize(21)).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some")); - field.highlighterType("postings"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field).noMatchSize(21)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some of me should get cut off.")); - // We don't break if noMatchSize is less than zero though field.highlighterType("plain").noMatchSize(randomIntBetween(Integer.MIN_VALUE, -1)); response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); @@ -1843,10 +1798,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); - field.highlighterType("postings"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); - field.highlighterType("unified"); response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); @@ -1879,11 +1830,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some")); - // Postings hl also works but the fragment is the whole first sentence (size ignored) - field.highlighterType("postings"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some of me should get cut off.")); - // And noMatchSize returns nothing when the first entry is empty string! 
index("test", "type1", "2", "text", new String[] {"", text2}); refresh(); @@ -1901,12 +1847,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { .highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); - field.highlighterType("postings"); - response = client().prepareSearch("test") - .setQuery(idsQueryBuilder) - .highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); - // except for the unified highlighter which starts from the first string with actual content field.highlighterType("unified"); response = client().prepareSearch("test") @@ -1930,12 +1870,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { .highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); - field.highlighterType("postings"); - response = client().prepareSearch("test") - .setQuery(idsQueryBuilder) - .highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); - field.highlighterType("unified"); response = client().prepareSearch("test") .setQuery(idsQueryBuilder) @@ -1976,10 +1910,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); - field.highlighterType("postings"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); - field.highlighterType("unified"); response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); assertNotHighlighted(response, 0, "text"); @@ -2014,11 +1944,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertHighlight(response, 0, "text", 0, 1, equalTo("This is the first sentence")); - // Postings hl also works but the fragment is the whole first sentence (size ignored) - field.highlighterType("postings"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("This is the first sentence.")); - //if there's a match we only return the values with matches (whole value as number_of_fragments == 0) MatchQueryBuilder queryBuilder = QueryBuilders.matchQuery("text", "third fifth"); field.highlighterType("plain"); @@ -2031,11 +1956,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertHighlight(response, 0, "text", 0, 2, equalTo("This is the third sentence. This is the fourth sentence.")); assertHighlight(response, 0, "text", 1, 2, equalTo("This is the fifth sentence")); - field.highlighterType("postings"); - response = client().prepareSearch("test").setQuery(queryBuilder).highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 2, equalTo("This is the third sentence. This is the fourth sentence.")); - assertHighlight(response, 0, "text", 1, 2, equalTo("This is the fifth sentence")); - field.highlighterType("unified"); response = client().prepareSearch("test").setQuery(queryBuilder).highlighter(new HighlightBuilder().field(field)).get(); assertHighlight(response, 0, "text", 0, 2, equalTo("This is the third sentence. 
This is the fourth sentence.")); @@ -2050,49 +1970,42 @@ public class HighlighterSearchIT extends ESIntegTestCase { .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy quick dog").get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - logger.info("--> highlighting and searching on field1"); - SearchSourceBuilder source = searchSource() - .query(termQuery("field1", "test")) - .highlighter(highlight().field("field1").preTags("").postTags("").highlighterType(type)); - SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); + logger.info("--> highlighting and searching on field1"); + SearchSourceBuilder source = searchSource() + .query(termQuery("field1", "test")) + .highlighter(highlight().field("field1").preTags("").postTags("")); + SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("this is a test")); + assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("this is a test")); - logger.info("--> searching on field1, highlighting on field1"); - source = searchSource() - .query(termQuery("field1", "test")) - .highlighter(highlight().field("field1").preTags("").postTags("").highlighterType(type)); + logger.info("--> searching on field1, highlighting on field1"); + source = searchSource() + .query(termQuery("field1", "test")) + .highlighter(highlight().field("field1").preTags("").postTags("")); - searchResponse = client().search(searchRequest("test").source(source)).actionGet(); + searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("this is a test")); + assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("this is a test")); - logger.info("--> searching on field2, highlighting on field2"); - source = searchSource() - .query(termQuery("field2", "quick")) - .highlighter(highlight().field("field2").order("score").preTags("").postTags("").highlighterType(type)); + logger.info("--> searching on field2, highlighting on field2"); + source = searchSource() + .query(termQuery("field2", "quick")) + .highlighter(highlight().field("field2").order("score").preTags("").postTags("")); - searchResponse = client().search(searchRequest("test").source(source)).actionGet(); + searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - assertHighlight(searchResponse, 0, "field2", 0, 1, - equalTo("The quick brown fox jumps over the lazy quick dog")); + assertHighlight(searchResponse, 0, "field2", 0, 1, + equalTo("The quick brown fox jumps over the lazy quick dog")); - logger.info("--> searching on field2, highlighting on field2"); - source = searchSource() - .query(matchPhraseQuery("field2", "quick brown")) - .highlighter(highlight().field("field2").preTags("").postTags("").highlighterType(type)); + logger.info("--> searching on field2, highlighting on field2"); + source = searchSource() + .query(matchPhraseQuery("field2", "quick brown")) + .highlighter(highlight().field("field2").preTags("").postTags("")); - searchResponse = client().search(searchRequest("test").source(source)).actionGet(); + searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - if (type == null) { - //phrase query results in highlighting all different terms regardless of their positions - assertHighlight(searchResponse, 0, "field2", 0, 1, - equalTo("The quick brown fox jumps over the lazy quick 
dog")); - } else { - assertHighlight(searchResponse, 0, "field2", 0, 1, - equalTo("The quick brown fox jumps over the lazy quick dog")); - } + assertHighlight(searchResponse, 0, "field2", 0, 1, + equalTo("The quick brown fox jumps over the lazy quick dog")); //lets fall back to the standard highlighter then, what people would do to highlight query matches logger.info("--> searching on field2, highlighting on field2, falling back to the plain highlighter"); @@ -2101,11 +2014,10 @@ public class HighlighterSearchIT extends ESIntegTestCase { .highlighter(highlight() .field("field2").preTags("").postTags("").highlighterType("plain").requireFieldMatch(false)); - searchResponse = client().search(searchRequest("test").source(source)).actionGet(); + searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - assertHighlight(searchResponse, 0, "field2", 0, 1, - equalTo("The quick brown fox jumps over the lazy quick dog")); - } + assertHighlight(searchResponse, 0, "field2", 0, 1, + equalTo("The quick brown fox jumps over the lazy quick dog")); } public void testPostingsHighlighterMultipleFields() throws Exception { @@ -2117,15 +2029,13 @@ public class HighlighterSearchIT extends ESIntegTestCase { "field2", "The slow brown fox. Second sentence."); refresh(); - for (String type : UNIFIED_AND_NULL) { - SearchResponse response = client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("field1", "fox")) - .highlighter( - new HighlightBuilder().field(new Field("field1").preTags("<1>").postTags("") - .requireFieldMatch(true).highlighterType(type))) - .get(); - assertHighlight(response, 0, "field1", 0, 1, equalTo("The quick brown <1>fox.")); - } + SearchResponse response = client().prepareSearch("test") + .setQuery(QueryBuilders.matchQuery("field1", "fox")) + .highlighter( + new HighlightBuilder().field(new Field("field1").preTags("<1>").postTags("") + .requireFieldMatch(true))) + .get(); + assertHighlight(response, 0, "field1", 0, 1, equalTo("The quick brown <1>fox.")); } public void testPostingsHighlighterNumberOfFragments() throws Exception { @@ -2140,53 +2050,50 @@ public class HighlighterSearchIT extends ESIntegTestCase { refresh(); - for (String type : UNIFIED_AND_NULL) { - logger.info("--> highlighting and searching on field1"); - SearchSourceBuilder source = searchSource() - .query(termQuery("field1", "fox")) - .highlighter(highlight() - .field(new Field("field1").numOfFragments(5).preTags("").postTags("").highlighterType(type))); + logger.info("--> highlighting and searching on field1"); + SearchSourceBuilder source = searchSource() + .query(termQuery("field1", "fox")) + .highlighter(highlight() + .field(new Field("field1").numOfFragments(5).preTags("").postTags(""))); - SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); + SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - assertHighlight(searchResponse, 0, "field1", 0, equalTo("The quick brown fox jumps over the lazy dog.")); - assertHighlight(searchResponse, 0, "field1", 1, equalTo("The lazy red fox jumps over the quick dog.")); - assertHighlight(searchResponse, 0, "field1", 2, 3, equalTo("The quick brown dog jumps over the lazy fox.")); + assertHighlight(searchResponse, 0, "field1", 0, equalTo("The quick brown fox jumps over the lazy dog.")); + assertHighlight(searchResponse, 0, "field1", 1, equalTo("The lazy red fox jumps over the quick dog.")); + assertHighlight(searchResponse, 0, "field1", 2, 3, equalTo("The 
quick brown dog jumps over the lazy fox.")); - client().prepareIndex("test", "type1", "2") - .setSource("field1", new String[]{ - "The quick brown fox jumps over the lazy dog. Second sentence not finished", - "The lazy red fox jumps over the quick dog.", - "The quick brown dog jumps over the lazy fox."}).get(); - refresh(); + client().prepareIndex("test", "type1", "2") + .setSource("field1", new String[]{ + "The quick brown fox jumps over the lazy dog. Second sentence not finished", + "The lazy red fox jumps over the quick dog.", + "The quick brown dog jumps over the lazy fox."}).get(); + refresh(); - source = searchSource() - .query(termQuery("field1", "fox")) - .highlighter(highlight() - .field(new Field("field1").numOfFragments(0).preTags("").postTags("").highlighterType(type))); + source = searchSource() + .query(termQuery("field1", "fox")) + .highlighter(highlight() + .field(new Field("field1").numOfFragments(0).preTags("").postTags(""))); - searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - assertHitCount(searchResponse, 2L); + searchResponse = client().search(searchRequest("test").source(source)).actionGet(); + assertHitCount(searchResponse, 2L); - for (SearchHit searchHit : searchResponse.getHits()) { - if ("1".equals(searchHit.getId())) { - assertHighlight(searchHit, "field1", 0, 1, equalTo("The quick brown fox jumps over the lazy dog. " - + "The lazy red fox jumps over the quick dog. " - + "The quick brown dog jumps over the lazy fox.")); - } else if ("2".equals(searchHit.getId())) { - assertHighlight(searchHit, "field1", 0, 3, - equalTo("The quick brown fox jumps over the lazy dog. Second sentence not finished")); - assertHighlight(searchHit, "field1", 1, 3, equalTo("The lazy red fox jumps over the quick dog.")); - assertHighlight(searchHit, "field1", 2, 3, equalTo("The quick brown dog jumps over the lazy fox.")); - } else { - fail("Only hits with id 1 and 2 are returned"); - } + for (SearchHit searchHit : searchResponse.getHits()) { + if ("1".equals(searchHit.getId())) { + assertHighlight(searchHit, "field1", 0, 1, equalTo("The quick brown fox jumps over the lazy dog. " + + "The lazy red fox jumps over the quick dog. " + + "The quick brown dog jumps over the lazy fox.")); + } else if ("2".equals(searchHit.getId())) { + assertHighlight(searchHit, "field1", 0, 3, + equalTo("The quick brown fox jumps over the lazy dog. Second sentence not finished")); + assertHighlight(searchHit, "field1", 1, 3, equalTo("The lazy red fox jumps over the quick dog.")); + assertHighlight(searchHit, "field1", 2, 3, equalTo("The quick brown dog jumps over the lazy fox.")); + } else { + fail("Only hits with id 1 and 2 are returned"); } } } public void testMultiMatchQueryHighlight() throws IOException { - String[] highlighterTypes = new String[] {"fvh", "plain", "postings", "unified"}; XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("field1") @@ -2209,23 +2116,10 @@ public class HighlighterSearchIT extends ESIntegTestCase { refresh(); final int iters = scaledRandomIntBetween(20, 30); for (int i = 0; i < iters; i++) { - String highlighterType = rarely() ? 
null : RandomPicks.randomFrom(random(), highlighterTypes); - MultiMatchQueryBuilder.Type[] supportedQueryTypes; - if ("postings".equals(highlighterType)) { - /* - * phrase_prefix is not supported by postings highlighter, as it rewrites against an empty reader, the prefix will never - * match any term - */ - supportedQueryTypes = new MultiMatchQueryBuilder.Type[]{ - MultiMatchQueryBuilder.Type.BEST_FIELDS, - MultiMatchQueryBuilder.Type.CROSS_FIELDS, - MultiMatchQueryBuilder.Type.MOST_FIELDS, - MultiMatchQueryBuilder.Type.PHRASE}; - } else { - supportedQueryTypes = MultiMatchQueryBuilder.Type.values(); - } - MultiMatchQueryBuilder.Type matchQueryType = RandomPicks.randomFrom(random(), supportedQueryTypes); - MultiMatchQueryBuilder multiMatchQueryBuilder = multiMatchQuery("the quick brown fox", "field1", "field2").type(matchQueryType); + String highlighterType = rarely() ? null : RandomPicks.randomFrom(random(), ALL_TYPES); + MultiMatchQueryBuilder.Type matchQueryType = RandomPicks.randomFrom(random(), MultiMatchQueryBuilder.Type.values()); + MultiMatchQueryBuilder multiMatchQueryBuilder = multiMatchQuery("the quick brown fox", "field1", "field2") + .type(matchQueryType); SearchSourceBuilder source = searchSource() .query(multiMatchQueryBuilder) @@ -2254,26 +2148,24 @@ public class HighlighterSearchIT extends ESIntegTestCase { + "This one contains no matches."}).get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - logger.info("--> highlighting and searching on field1"); - SearchSourceBuilder source = searchSource() - .query(termQuery("field1", "sentence")) - .highlighter(highlight().field("field1").order("score").highlighterType(type)); + logger.info("--> highlighting and searching on field1"); + SearchSourceBuilder source = searchSource() + .query(termQuery("field1", "sentence")) + .highlighter(highlight().field("field1").order("score")); - SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); + SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - Map highlightFieldMap = searchResponse.getHits().getAt(0).getHighlightFields(); - assertThat(highlightFieldMap.size(), equalTo(1)); - HighlightField field1 = highlightFieldMap.get("field1"); - assertThat(field1.fragments().length, equalTo(5)); - assertThat(field1.fragments()[0].string(), - equalTo("This sentence contains three sentence occurrences (sentence).")); - assertThat(field1.fragments()[1].string(), equalTo("This sentence contains two sentence matches.")); - assertThat(field1.fragments()[2].string(), equalTo("This is the second value's first sentence.")); - assertThat(field1.fragments()[3].string(), equalTo("This sentence contains one match, not that short.")); - assertThat(field1.fragments()[4].string(), - equalTo("One sentence match here and scored lower since the text is quite long, not that appealing.")); - } + Map highlightFieldMap = searchResponse.getHits().getAt(0).getHighlightFields(); + assertThat(highlightFieldMap.size(), equalTo(1)); + HighlightField field1 = highlightFieldMap.get("field1"); + assertThat(field1.fragments().length, equalTo(5)); + assertThat(field1.fragments()[0].string(), + equalTo("This sentence contains three sentence occurrences (sentence).")); + assertThat(field1.fragments()[1].string(), equalTo("This sentence contains two sentence matches.")); + assertThat(field1.fragments()[2].string(), equalTo("This is the second value's first sentence.")); + assertThat(field1.fragments()[3].string(), equalTo("This sentence 
contains one match, not that short.")); + assertThat(field1.fragments()[4].string(), + equalTo("One sentence match here and scored lower since the text is quite long, not that appealing.")); } public void testPostingsHighlighterEscapeHtml() throws Exception { @@ -2287,15 +2179,13 @@ public class HighlighterSearchIT extends ESIntegTestCase { } indexRandom(true, indexRequestBuilders); - for (String type : UNIFIED_AND_NULL) { - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchQuery("title", "test")) - .highlighter(new HighlightBuilder().field("title").encoder("html").highlighterType(type)).get(); + SearchResponse searchResponse = client().prepareSearch() + .setQuery(matchQuery("title", "test")) + .highlighter(new HighlightBuilder().field("title").encoder("html")).get(); - for (int i = 0; i < indexRequestBuilders.length; i++) { - assertHighlight(searchResponse, i, "title", 0, 1, - equalTo("This is a html escaping highlighting test for *&?")); - } + for (int i = 0; i < indexRequestBuilders.length; i++) { + assertHighlight(searchResponse, i, "title", 0, 1, + equalTo("This is a html escaping highlighting test for *&?")); } } @@ -2320,28 +2210,26 @@ public class HighlighterSearchIT extends ESIntegTestCase { client().prepareIndex("test", "type1", "1").setSource("title", "this is a test . Second sentence.").get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - // simple search on body with standard analyzer with a simple field query - SearchResponse searchResponse = client().prepareSearch() - //lets make sure we analyze the query and we highlight the resulting terms - .setQuery(matchQuery("title", "This is a Test")) - .highlighter(new HighlightBuilder().field("title").highlighterType(type)).get(); + // simple search on body with standard analyzer with a simple field query + SearchResponse searchResponse = client().prepareSearch() + //lets make sure we analyze the query and we highlight the resulting terms + .setQuery(matchQuery("title", "This is a Test")) + .highlighter(new HighlightBuilder().field("title")).get(); - assertHitCount(searchResponse, 1L); - SearchHit hit = searchResponse.getHits().getAt(0); - //stopwords are not highlighted since not indexed - assertHighlight(hit, "title", 0, 1, equalTo("this is a test .")); + assertHitCount(searchResponse, 1L); + SearchHit hit = searchResponse.getHits().getAt(0); + //stopwords are not highlighted since not indexed + assertHighlight(hit, "title", 0, 1, equalTo("this is a test .")); - // search on title.key and highlight on title - searchResponse = client().prepareSearch() - .setQuery(matchQuery("title.key", "this is a test")) - .highlighter(new HighlightBuilder().field("title.key")).get(); - assertHitCount(searchResponse, 1L); + // search on title.key and highlight on title + searchResponse = client().prepareSearch() + .setQuery(matchQuery("title.key", "this is a test")) + .highlighter(new HighlightBuilder().field("title.key")).get(); + assertHitCount(searchResponse, 1L); - //stopwords are now highlighted since we used only whitespace analyzer here - assertHighlight(searchResponse, 0, "title.key", 0, 1, - equalTo("this is a test .")); - } + //stopwords are now highlighted since we used only whitespace analyzer here + assertHighlight(searchResponse, 0, "title.key", 0, 1, + equalTo("this is a test .")); } public void testPostingsHighlighterMultiMapperFromSource() throws Exception { @@ -2366,22 +2254,20 @@ public class HighlighterSearchIT extends ESIntegTestCase { client().prepareIndex("test", "type1", 
"1").setSource("title", "this is a test").get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - // simple search on body with standard analyzer with a simple field query - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchQuery("title", "this is a test")) - .highlighter(new HighlightBuilder().field("title")) - .get(); + // simple search on body with standard analyzer with a simple field query + SearchResponse searchResponse = client().prepareSearch() + .setQuery(matchQuery("title", "this is a test")) + .highlighter(new HighlightBuilder().field("title")) + .get(); - assertHighlight(searchResponse, 0, "title", 0, 1, equalTo("this is a test")); + assertHighlight(searchResponse, 0, "title", 0, 1, equalTo("this is a test")); - // search on title.key and highlight on title.key - searchResponse = client().prepareSearch() - .setQuery(matchQuery("title.key", "this is a test")) - .highlighter(new HighlightBuilder().field("title.key").highlighterType(type)).get(); + // search on title.key and highlight on title.key + searchResponse = client().prepareSearch() + .setQuery(matchQuery("title.key", "this is a test")) + .highlighter(new HighlightBuilder().field("title.key")).get(); - assertHighlight(searchResponse, 0, "title.key", 0, 1, equalTo("this is a test")); - } + assertHighlight(searchResponse, 0, "title.key", 0, 1, equalTo("this is a test")); } public void testPostingsHighlighterShouldFailIfNoOffsets() throws Exception { @@ -2403,26 +2289,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { .highlighter(new HighlightBuilder().field("title")) .get(); assertNoFailures(search); - - assertFailures(client().prepareSearch() - .setQuery(matchQuery("title", "this is a test")) - .highlighter(new HighlightBuilder().field("title").highlighterType("postings")), - RestStatus.BAD_REQUEST, - containsString("the field [title] should be indexed with positions and offsets in the " - + "postings list to be used with postings highlighter")); - - - assertFailures(client().prepareSearch() - .setQuery(matchQuery("title", "this is a test")) - .highlighter(new HighlightBuilder().field("title").highlighterType("postings")), - RestStatus.BAD_REQUEST, - containsString("the field [title] should be indexed with positions and offsets in the " - + "postings list to be used with postings highlighter")); - - //should not fail if there is a wildcard - assertNoFailures(client().prepareSearch() - .setQuery(matchQuery("title", "this is a test")) - .highlighter(new HighlightBuilder().field("tit*").highlighterType("postings")).get()); } public void testPostingsHighlighterBoostingQuery() throws IOException { @@ -2432,15 +2298,13 @@ public class HighlighterSearchIT extends ESIntegTestCase { .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! 
Second sentence.").get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - logger.info("--> highlighting and searching on field1"); - SearchSourceBuilder source = searchSource() - .query(boostingQuery(termQuery("field2", "brown"), termQuery("field2", "foobar")).negativeBoost(0.5f)) - .highlighter(highlight().field("field2").preTags("").postTags("").highlighterType(type)); - SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); + logger.info("--> highlighting and searching on field1"); + SearchSourceBuilder source = searchSource() + .query(boostingQuery(termQuery("field2", "brown"), termQuery("field2", "foobar")).negativeBoost(0.5f)) + .highlighter(highlight().field("field2").preTags("").postTags("")); + SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog!")); - } + assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog!")); } public void testPostingsHighlighterCommonTermsQuery() throws IOException { @@ -2451,15 +2315,13 @@ public class HighlighterSearchIT extends ESIntegTestCase { .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.").get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - logger.info("--> highlighting and searching on field1"); - SearchSourceBuilder source = searchSource().query(commonTermsQuery("field2", "quick brown").cutoffFrequency(100)) - .highlighter(highlight().field("field2").preTags("").postTags("").highlighterType(type)); - SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); - assertHitCount(searchResponse, 1L); + logger.info("--> highlighting and searching on field1"); + SearchSourceBuilder source = searchSource().query(commonTermsQuery("field2", "quick brown").cutoffFrequency(100)) + .highlighter(highlight().field("field2").preTags("").postTags("")); + SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); + assertHitCount(searchResponse, 1L); - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog!")); - } + assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog!")); } private static XContentBuilder type1PostingsffsetsMapping() throws IOException { @@ -2480,12 +2342,10 @@ public class HighlighterSearchIT extends ESIntegTestCase { refresh(); logger.info("--> highlighting and searching on field2"); - for (String type : UNIFIED_AND_NULL) { - SearchSourceBuilder source = searchSource().query(prefixQuery("field2", "qui")) - .highlighter(highlight().field("field2").highlighterType(type)); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog!")); - } + SearchSourceBuilder source = searchSource().query(prefixQuery("field2", "qui")) + .highlighter(highlight().field("field2")); + SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); + assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog!")); } public void testPostingsHighlighterFuzzyQuery() throws Exception { @@ -2496,14 +2356,12 @@ public class HighlighterSearchIT extends ESIntegTestCase { 
.setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.").get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - logger.info("--> highlighting and searching on field2"); - SearchSourceBuilder source = searchSource().query(fuzzyQuery("field2", "quck")) - .highlighter(highlight().field("field2").highlighterType(type)); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); + logger.info("--> highlighting and searching on field2"); + SearchSourceBuilder source = searchSource().query(fuzzyQuery("field2", "quck")) + .highlighter(highlight().field("field2")); + SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog!")); - } + assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog!")); } public void testPostingsHighlighterRegexpQuery() throws Exception { @@ -2514,14 +2372,12 @@ public class HighlighterSearchIT extends ESIntegTestCase { .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.").get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - logger.info("--> highlighting and searching on field2"); - SearchSourceBuilder source = searchSource().query(regexpQuery("field2", "qu[a-l]+k")) - .highlighter(highlight().field("field2").highlighterType(type)); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); + logger.info("--> highlighting and searching on field2"); + SearchSourceBuilder source = searchSource().query(regexpQuery("field2", "qu[a-l]+k")) + .highlighter(highlight().field("field2")); + SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog!")); - } + assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog!")); } public void testPostingsHighlighterWildcardQuery() throws Exception { @@ -2532,21 +2388,19 @@ public class HighlighterSearchIT extends ESIntegTestCase { .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! 
Second sentence.").get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - logger.info("--> highlighting and searching on field2"); - SearchSourceBuilder source = searchSource().query(wildcardQuery("field2", "qui*")) - .highlighter(highlight().field("field2").highlighterType(type)); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); + logger.info("--> highlighting and searching on field2"); + SearchSourceBuilder source = searchSource().query(wildcardQuery("field2", "qui*")) + .highlighter(highlight().field("field2")); + SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog!")); + assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog!")); - source = searchSource().query(wildcardQuery("field2", "qu*k")) - .highlighter(highlight().field("field2").highlighterType(type)); - searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHitCount(searchResponse, 1L); + source = searchSource().query(wildcardQuery("field2", "qu*k")) + .highlighter(highlight().field("field2")); + searchResponse = client().prepareSearch("test").setSource(source).get(); + assertHitCount(searchResponse, 1L); - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog!")); - } + assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog!")); } public void testPostingsHighlighterTermRangeQuery() throws Exception { @@ -2556,14 +2410,12 @@ public class HighlighterSearchIT extends ESIntegTestCase { client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "aaab").get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - logger.info("--> highlighting and searching on field2"); - SearchSourceBuilder source = searchSource().query(rangeQuery("field2").gte("aaaa").lt("zzzz")) - .highlighter(highlight().field("field2").highlighterType(type)); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); + logger.info("--> highlighting and searching on field2"); + SearchSourceBuilder source = searchSource().query(rangeQuery("field2").gte("aaaa").lt("zzzz")) + .highlighter(highlight().field("field2")); + SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("aaab")); - } + assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("aaab")); } public void testPostingsHighlighterQueryString() throws Exception { @@ -2574,13 +2426,11 @@ public class HighlighterSearchIT extends ESIntegTestCase { .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! 
Second sentence.").get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - logger.info("--> highlighting and searching on field2"); - SearchSourceBuilder source = searchSource().query(queryStringQuery("qui*").defaultField("field2")) - .highlighter(highlight().field("field2").highlighterType(type)); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog!")); - } + logger.info("--> highlighting and searching on field2"); + SearchSourceBuilder source = searchSource().query(queryStringQuery("qui*").defaultField("field2")) + .highlighter(highlight().field("field2")); + SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); + assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog!")); } public void testPostingsHighlighterRegexpQueryWithinConstantScoreQuery() throws Exception { @@ -2590,13 +2440,11 @@ public class HighlighterSearchIT extends ESIntegTestCase { client().prepareIndex("test", "type1").setSource("field1", "The photography word will get highlighted").get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - logger.info("--> highlighting and searching on field1"); - SearchSourceBuilder source = searchSource().query(constantScoreQuery(regexpQuery("field1", "pho[a-z]+"))) - .highlighter(highlight().field("field1").highlighterType(type)); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The photography word will get highlighted")); - } + logger.info("--> highlighting and searching on field1"); + SearchSourceBuilder source = searchSource().query(constantScoreQuery(regexpQuery("field1", "pho[a-z]+"))) + .highlighter(highlight().field("field1")); + SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); + assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The photography word will get highlighted")); } public void testPostingsHighlighterMultiTermQueryMultipleLevels() throws Exception { @@ -2606,16 +2454,14 @@ public class HighlighterSearchIT extends ESIntegTestCase { client().prepareIndex("test", "type1").setSource("field1", "The photography word will get highlighted").get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - logger.info("--> highlighting and searching on field1"); - SearchSourceBuilder source = searchSource().query(boolQuery() - .should(boolQuery().mustNot(QueryBuilders.existsQuery("field1"))) - .should(matchQuery("field1", "test")) - .should(constantScoreQuery(queryStringQuery("field1:photo*")))) - .highlighter(highlight().field("field1").highlighterType(type)); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The photography word will get highlighted")); - } + logger.info("--> highlighting and searching on field1"); + SearchSourceBuilder source = searchSource().query(boolQuery() + .should(boolQuery().mustNot(QueryBuilders.existsQuery("field1"))) + .should(matchQuery("field1", "test")) + .should(constantScoreQuery(queryStringQuery("field1:photo*")))) + .highlighter(highlight().field("field1")); + SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); + assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The photography word will get highlighted")); } public void 
testPostingsHighlighterPrefixQueryWithinBooleanQuery() throws Exception { @@ -2625,14 +2471,12 @@ public class HighlighterSearchIT extends ESIntegTestCase { client().prepareIndex("test", "type1").setSource("field1", "The photography word will get highlighted").get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - logger.info("--> highlighting and searching on field1"); - SearchSourceBuilder source = searchSource() - .query(boolQuery().must(prefixQuery("field1", "photo")).should(matchQuery("field1", "test").minimumShouldMatch("0"))) - .highlighter(highlight().field("field1").highlighterType(type)); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The photography word will get highlighted")); - } + logger.info("--> highlighting and searching on field1"); + SearchSourceBuilder source = searchSource() + .query(boolQuery().must(prefixQuery("field1", "photo")).should(matchQuery("field1", "test").minimumShouldMatch("0"))) + .highlighter(highlight().field("field1")); + SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); + assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The photography word will get highlighted")); } public void testPostingsHighlighterQueryStringWithinFilteredQuery() throws Exception { @@ -2642,15 +2486,13 @@ public class HighlighterSearchIT extends ESIntegTestCase { client().prepareIndex("test", "type1").setSource("field1", "The photography word will get highlighted").get(); refresh(); - for (String type : UNIFIED_AND_NULL) { - logger.info("--> highlighting and searching on field1"); - SearchSourceBuilder source = searchSource().query(boolQuery() - .must(queryStringQuery("field1:photo*")) - .mustNot(existsQuery("field_null"))) - .highlighter(highlight().field("field1").highlighterType(type)); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The photography word will get highlighted")); - } + logger.info("--> highlighting and searching on field1"); + SearchSourceBuilder source = searchSource().query(boolQuery() + .must(queryStringQuery("field1:photo*")) + .mustNot(existsQuery("field_null"))) + .highlighter(highlight().field("field1")); + SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); + assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The photography word will get highlighted")); } public void testPostingsHighlighterManyDocs() throws Exception { @@ -2667,25 +2509,23 @@ public class HighlighterSearchIT extends ESIntegTestCase { String prefix = randomAlphaOfLengthBetween(5, 30); prefixes.put(String.valueOf(i), prefix); indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field1", "Sentence " + prefix - + " test. Sentence two."); + + " test. 
Sentence two."); } logger.info("--> indexing docs"); indexRandom(true, indexRequestBuilders); - for (String type : UNIFIED_AND_NULL) { - logger.info("--> searching explicitly on field1 and highlighting on it"); - SearchRequestBuilder searchRequestBuilder = client().prepareSearch() - .setSize(COUNT) - .setQuery(termQuery("field1", "test")) - .highlighter(new HighlightBuilder().field("field1").highlighterType(type)); - SearchResponse searchResponse = - searchRequestBuilder.get(); - assertHitCount(searchResponse, COUNT); - assertThat(searchResponse.getHits().getHits().length, equalTo(COUNT)); - for (SearchHit hit : searchResponse.getHits()) { - String prefix = prefixes.get(hit.getId()); - assertHighlight(hit, "field1", 0, 1, equalTo("Sentence " + prefix + " test.")); - } + logger.info("--> searching explicitly on field1 and highlighting on it"); + SearchRequestBuilder searchRequestBuilder = client().prepareSearch() + .setSize(COUNT) + .setQuery(termQuery("field1", "test")) + .highlighter(new HighlightBuilder().field("field1")); + SearchResponse searchResponse = + searchRequestBuilder.get(); + assertHitCount(searchResponse, COUNT); + assertThat(searchResponse.getHits().getHits().length, equalTo(COUNT)); + for (SearchHit hit : searchResponse.getHits()) { + String prefix = prefixes.get(hit.getId()); + assertHighlight(hit, "field1", 0, 1, equalTo("Sentence " + prefix + " test.")); } } @@ -2731,11 +2571,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { phraseBoostTestCase("fvh"); } - public void testPostingsHighlighterPhraseBoost() throws Exception { - assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); - phraseBoostTestCase("postings"); - } - /** * Test phrase boosting over normal term matches. Note that this will never pass with the plain highlighter * because it doesn't support the concept of terms having a different weight based on position. 
@@ -2827,7 +2662,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { .setSource(jsonBuilder().startObject().field("text", "Arbitrary text field which will should not cause a failure").endObject()) .get(); refresh(); - String highlighterType = randomFrom("plain", "fvh", "postings", "unified"); + String highlighterType = randomFrom(ALL_TYPES); QueryBuilder query = QueryBuilders.boolQuery().should(QueryBuilders.geoBoundingBoxQuery("geo_point") .setCorners(61.10078883158897, -170.15625, -64.92354174306496, 118.47656249999999)) .should(QueryBuilders.termQuery("text", "failure")); @@ -2948,17 +2783,15 @@ public class HighlighterSearchIT extends ESIntegTestCase { .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); - for (String type : UNIFIED_AND_NULL) { - SearchResponse searchResponse = client().prepareSearch() - .setQuery(new FunctionScoreQueryBuilder(QueryBuilders.prefixQuery("text", "bro"))) - .highlighter(new HighlightBuilder() - .field(new Field("text")).highlighterType(type)) - .get(); - assertHitCount(searchResponse, 1); - HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("text"); - assertThat(field.getFragments().length, equalTo(1)); - assertThat(field.getFragments()[0].string(), equalTo("brown")); - } + SearchResponse searchResponse = client().prepareSearch() + .setQuery(new FunctionScoreQueryBuilder(QueryBuilders.prefixQuery("text", "bro"))) + .highlighter(new HighlightBuilder() + .field(new Field("text"))) + .get(); + assertHitCount(searchResponse, 1); + HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("text"); + assertThat(field.getFragments().length, equalTo(1)); + assertThat(field.getFragments()[0].string(), equalTo("brown")); } public void testFiltersFunctionScoreQueryHighlight() throws Exception { @@ -2970,18 +2803,16 @@ public class HighlighterSearchIT extends ESIntegTestCase { new FunctionScoreQueryBuilder.FilterFunctionBuilder(QueryBuilders.termQuery("enable", "yes"), new RandomScoreFunctionBuilder()); - for (String type : UNIFIED_AND_NULL) { - SearchResponse searchResponse = client().prepareSearch() - .setQuery(new FunctionScoreQueryBuilder(QueryBuilders.prefixQuery("text", "bro"), - new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{filterBuilder})) - .highlighter(new HighlightBuilder() - .field(new Field("text")).highlighterType(type)) - .get(); - assertHitCount(searchResponse, 1); - HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("text"); - assertThat(field.getFragments().length, equalTo(1)); - assertThat(field.getFragments()[0].string(), equalTo("brown")); - } + SearchResponse searchResponse = client().prepareSearch() + .setQuery(new FunctionScoreQueryBuilder(QueryBuilders.prefixQuery("text", "bro"), + new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{filterBuilder})) + .highlighter(new HighlightBuilder() + .field(new Field("text"))) + .get(); + assertHitCount(searchResponse, 1); + HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("text"); + assertThat(field.getFragments().length, equalTo(1)); + assertThat(field.getFragments()[0].string(), equalTo("brown")); } public void testSynonyms() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java index d290bd6c3e0..cfed4c014b3 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java +++ 
b/core/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java @@ -30,7 +30,6 @@ import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.ScriptPlugin; import org.elasticsearch.script.ExplainableSearchScript; -import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptEngine; @@ -78,9 +77,9 @@ public class ExplainableScriptIT extends ESIntegTestCase { public T compile(String scriptName, String scriptSource, ScriptContext context, Map params) { assert scriptSource.equals("explainable_script"); assert context == SearchScript.CONTEXT; - SearchScript.Factory factory = (p, lookup) -> new SearchScript() { + SearchScript.Factory factory = (p, lookup) -> new SearchScript.LeafFactory() { @Override - public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException { + public SearchScript newInstance(LeafReaderContext context) throws IOException { return new MyScript(lookup.doc().getLeafDocLookup(context)); } @Override @@ -94,10 +93,11 @@ public class ExplainableScriptIT extends ESIntegTestCase { } } - static class MyScript implements ExplainableSearchScript { + static class MyScript extends SearchScript implements ExplainableSearchScript { LeafDocLookup docLookup; MyScript(LeafDocLookup docLookup) { + super(null, null, null); this.docLookup = docLookup; } diff --git a/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java index 316e83ad1bb..3e4792690ad 100644 --- a/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java +++ b/core/src/test/java/org/elasticsearch/search/nested/SimpleNestedIT.java @@ -57,9 +57,7 @@ import static org.hamcrest.Matchers.startsWith; public class SimpleNestedIT extends ESIntegTestCase { public void testSimpleNested() throws Exception { assertAcked(prepareCreate("test") - .setSettings("index.mapping.single_type", false) - .addMapping("type1", "nested1", "type=nested") - .addMapping("type2", "nested1", "type=nested")); + .addMapping("type1", "nested1", "type=nested")); ensureGreen(); // check on no data, see it works @@ -158,10 +156,6 @@ public class SimpleNestedIT extends ESIntegTestCase { searchResponse = client().prepareSearch("test").setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)).execute().actionGet(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); - - searchResponse = client().prepareSearch("test").setTypes("type1", "type2").setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)).execute().actionGet(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); } public void testMultiNested() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java b/core/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java index 77b41b062d3..5174267815b 100644 --- a/core/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java +++ b/core/src/test/java/org/elasticsearch/search/profile/ProfileResultTests.java @@ -33,9 +33,11 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.Predicate; import static 
org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; public class ProfileResultTests extends ESTestCase { @@ -62,12 +64,32 @@ public class ProfileResultTests extends ESTestCase { } public void testFromXContent() throws IOException { + doFromXContentTestWithRandomFields(false); + } + + /** + * This test adds random fields and objects to the xContent rendered out to ensure we can parse it + * back to be forward compatible with additions to the xContent + */ + public void testFromXContentWithRandomFields() throws IOException { + doFromXContentTestWithRandomFields(true); + } + + private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws IOException { ProfileResult profileResult = createTestItem(2); XContentType xContentType = randomFrom(XContentType.values()); boolean humanReadable = randomBoolean(); BytesReference originalBytes = toShuffledXContent(profileResult, xContentType, ToXContent.EMPTY_PARAMS, humanReadable); + BytesReference mutated; + if (addRandomFields) { + // "breakdown" just consists of key/value pairs, we shouldn't add anything random there + Predicate excludeFilter = (s) -> s.endsWith(ProfileResult.BREAKDOWN.getPreferredName()); + mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random()); + } else { + mutated = originalBytes; + } ProfileResult parsed; - try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { + try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); parsed = ProfileResult.fromXContent(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); diff --git a/core/src/test/java/org/elasticsearch/search/profile/SearchProfileShardResultsTests.java b/core/src/test/java/org/elasticsearch/search/profile/SearchProfileShardResultsTests.java index 853e7cd13a3..7bc9b188606 100644 --- a/core/src/test/java/org/elasticsearch/search/profile/SearchProfileShardResultsTests.java +++ b/core/src/test/java/org/elasticsearch/search/profile/SearchProfileShardResultsTests.java @@ -34,10 +34,12 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.Predicate; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName; +import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;; public class SearchProfileShardResultsTests extends ESTestCase { @@ -58,20 +60,43 @@ public class SearchProfileShardResultsTests extends ESTestCase { } public void testFromXContent() throws IOException { + doFromXContentTestWithRandomFields(false); + } + + /** + * This test adds random fields and objects to the xContent rendered out to ensure we can parse it + * back to be forward compatible with additions to the xContent + */ + public void testFromXContentWithRandomFields() throws IOException { + doFromXContentTestWithRandomFields(true); + } + + private void 
doFromXContentTestWithRandomFields(boolean addRandomFields) throws IOException { SearchProfileShardResults shardResult = createTestItem(); XContentType xContentType = randomFrom(XContentType.values()); boolean humanReadable = randomBoolean(); BytesReference originalBytes = toShuffledXContent(shardResult, xContentType, ToXContent.EMPTY_PARAMS, humanReadable); + BytesReference mutated; + if (addRandomFields) { + // The ProfileResults "breakdown" section just consists of key/value pairs, we shouldn't add anything random there + // also we don't want to insert into the root object here, its just the PROFILE_FIELD itself + Predicate excludeFilter = (s) -> (s.isEmpty() || s.endsWith(ProfileResult.BREAKDOWN.getPreferredName())); + mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random()); + } else { + mutated = originalBytes; + } SearchProfileShardResults parsed; - try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { + try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { ensureExpectedToken(parser.nextToken(), XContentParser.Token.START_OBJECT, parser::getTokenLocation); ensureFieldName(parser, parser.nextToken(), SearchProfileShardResults.PROFILE_FIELD); ensureExpectedToken(parser.nextToken(), XContentParser.Token.START_OBJECT, parser::getTokenLocation); parsed = SearchProfileShardResults.fromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); assertNull(parser.nextToken()); } assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType); + } } diff --git a/core/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java b/core/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java index b09c177bf0b..9914938854d 100644 --- a/core/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java +++ b/core/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java @@ -154,7 +154,7 @@ public class AggregationProfilerIT extends ESIntegTestCase { ProfileResult termsAggResult = histoAggResult.getProfiledChildren().get(0); assertThat(termsAggResult, notNullValue()); - assertThat(termsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.WithHash.class.getName())); + assertThat(termsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.class.getName())); assertThat(termsAggResult.getLuceneDescription(), equalTo("terms")); assertThat(termsAggResult.getTime(), greaterThan(0L)); Map termsBreakdown = termsAggResult.getTimeBreakdown(); @@ -224,7 +224,7 @@ public class AggregationProfilerIT extends ESIntegTestCase { ProfileResult termsAggResult = histoAggResult.getProfiledChildren().get(0); assertThat(termsAggResult, notNullValue()); - assertThat(termsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.WithHash.class.getName())); + assertThat(termsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.class.getName())); assertThat(termsAggResult.getLuceneDescription(), equalTo("terms")); assertThat(termsAggResult.getTime(), greaterThan(0L)); Map termsBreakdown = termsAggResult.getTimeBreakdown(); @@ -355,7 +355,7 @@ public class AggregationProfilerIT extends ESIntegTestCase { ProfileResult tagsAggResult = histoAggResult.getProfiledChildren().get(0); assertThat(tagsAggResult, notNullValue()); - 
assertThat(tagsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.WithHash.class.getName())); + assertThat(tagsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.class.getName())); assertThat(tagsAggResult.getLuceneDescription(), equalTo("tags")); assertThat(tagsAggResult.getTime(), greaterThan(0L)); Map tagsBreakdown = tagsAggResult.getTimeBreakdown(); @@ -406,7 +406,7 @@ public class AggregationProfilerIT extends ESIntegTestCase { ProfileResult stringsAggResult = histoAggResult.getProfiledChildren().get(1); assertThat(stringsAggResult, notNullValue()); - assertThat(stringsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.WithHash.class.getName())); + assertThat(stringsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.class.getName())); assertThat(stringsAggResult.getLuceneDescription(), equalTo("strings")); assertThat(stringsAggResult.getTime(), greaterThan(0L)); Map stringsBreakdown = stringsAggResult.getTimeBreakdown(); @@ -457,7 +457,7 @@ public class AggregationProfilerIT extends ESIntegTestCase { tagsAggResult = stringsAggResult.getProfiledChildren().get(2); assertThat(tagsAggResult, notNullValue()); - assertThat(tagsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.WithHash.class.getName())); + assertThat(tagsAggResult.getQueryName(), equalTo(GlobalOrdinalsStringTermsAggregator.class.getName())); assertThat(tagsAggResult.getLuceneDescription(), equalTo("tags")); assertThat(tagsAggResult.getTime(), greaterThan(0L)); tagsBreakdown = tagsAggResult.getTimeBreakdown(); diff --git a/core/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java b/core/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java index 8d87f193607..10bf8e2a300 100644 --- a/core/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java +++ b/core/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java @@ -34,6 +34,7 @@ import java.util.List; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; public class CollectorResultTests extends ESTestCase { @@ -57,18 +58,30 @@ public class CollectorResultTests extends ESTestCase { } public void testFromXContent() throws IOException { + doFromXContentTestWithRandomFields(false); + } + + public void testFromXContentWithRandomFields() throws IOException { + doFromXContentTestWithRandomFields(true); + } + + private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws IOException { CollectorResult collectorResult = createTestItem(1); XContentType xContentType = randomFrom(XContentType.values()); boolean humanReadable = randomBoolean(); BytesReference originalBytes = toShuffledXContent(collectorResult, xContentType, ToXContent.EMPTY_PARAMS, humanReadable); - - CollectorResult parsed; - try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); - parsed = CollectorResult.fromXContent(parser); - assertNull(parser.nextToken()); + BytesReference mutated; + if (addRandomFields) { + mutated = insertRandomFields(xContentType, originalBytes, null, random()); + } else { + 
mutated = originalBytes; + } + try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); + CollectorResult parsed = CollectorResult.fromXContent(parser); + assertNull(parser.nextToken()); + assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType); } - assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType); } public void testToXContent() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java b/core/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java index 65aa5f992e6..b05c6dff04b 100644 --- a/core/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java +++ b/core/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java @@ -21,7 +21,9 @@ package org.elasticsearch.search.query; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field.Store; +import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.StringField; +import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReaderContext; @@ -29,19 +31,28 @@ import org.apache.lucene.index.MultiReader; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Term; +import org.apache.lucene.queries.MinDocQuery; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Collector; import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.FieldComparator; +import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.SortField; import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TotalHitCountCollector; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; import org.elasticsearch.action.search.SearchTask; import org.elasticsearch.index.query.ParsedQuery; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.internal.ScrollContext; +import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TestSearchContext; @@ -49,6 +60,12 @@ import java.io.IOException; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.lessThan; + public class QueryPhaseTests extends ESTestCase { private void countTestCase(Query query, IndexReader reader, boolean shouldCollect) throws Exception { @@ -66,7 +83,7 @@ public class QueryPhaseTests extends ESTestCase { } }; - final boolean rescore = QueryPhase.execute(context, contextSearcher); + final boolean rescore = QueryPhase.execute(context, contextSearcher, null); assertFalse(rescore); assertEquals(searcher.count(query), 
context.queryResult().topDocs().totalHits); assertEquals(shouldCollect, collected.get()); @@ -135,12 +152,12 @@ public class QueryPhaseTests extends ESTestCase { } }; - QueryPhase.execute(context, contextSearcher); + QueryPhase.execute(context, contextSearcher, null); assertEquals(0, context.queryResult().topDocs().totalHits); assertFalse(collected.get()); context.parsedPostFilter(new ParsedQuery(new MatchNoDocsQuery())); - QueryPhase.execute(context, contextSearcher); + QueryPhase.execute(context, contextSearcher, null); assertEquals(0, context.queryResult().topDocs().totalHits); assertTrue(collected.get()); } @@ -159,14 +176,331 @@ public class QueryPhaseTests extends ESTestCase { } }; - QueryPhase.execute(context, contextSearcher); + QueryPhase.execute(context, contextSearcher, null); assertEquals(0, context.queryResult().topDocs().totalHits); assertFalse(collected.get()); context.minimumScore(1); - QueryPhase.execute(context, contextSearcher); + QueryPhase.execute(context, contextSearcher, null); assertEquals(0, context.queryResult().topDocs().totalHits); assertTrue(collected.get()); } + public void testInOrderScrollOptimization() throws Exception { + Directory dir = newDirectory(); + final Sort sort = new Sort(new SortField("rank", SortField.Type.INT)); + IndexWriterConfig iwc = newIndexWriterConfig() + .setIndexSort(sort); + RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc); + final int numDocs = scaledRandomIntBetween(100, 200); + for (int i = 0; i < numDocs; ++i) { + w.addDocument(new Document()); + } + w.close(); + final AtomicBoolean collected = new AtomicBoolean(); + IndexReader reader = DirectoryReader.open(dir); + IndexSearcher contextSearcher = new IndexSearcher(reader) { + protected void search(List leaves, Weight weight, Collector collector) throws IOException { + collected.set(true); + super.search(leaves, weight, collector); + } + }; + + TestSearchContext context = new TestSearchContext(null); + context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery())); + ScrollContext scrollContext = new ScrollContext(); + scrollContext.lastEmittedDoc = null; + scrollContext.maxScore = Float.NaN; + scrollContext.totalHits = -1; + context.scrollContext(scrollContext); + context.setTask(new SearchTask(123L, "", "", "", null)); + context.setSize(10); + + QueryPhase.execute(context, contextSearcher, null); + assertThat(context.queryResult().topDocs().totalHits, equalTo(numDocs)); + assertTrue(collected.get()); + assertNull(context.queryResult().terminatedEarly()); + assertThat(context.terminateAfter(), equalTo(0)); + assertThat(context.queryResult().getTotalHits(), equalTo(numDocs)); + + QueryPhase.execute(context, contextSearcher, null); + assertThat(context.queryResult().topDocs().totalHits, equalTo(numDocs)); + assertTrue(collected.get()); + assertTrue(context.queryResult().terminatedEarly()); + assertThat(context.terminateAfter(), equalTo(10)); + assertThat(context.queryResult().getTotalHits(), equalTo(numDocs)); + assertThat(context.queryResult().topDocs().scoreDocs[0].doc, greaterThanOrEqualTo(10)); + reader.close(); + dir.close(); + } + + public void testTerminateAfterEarlyTermination() throws Exception { + Directory dir = newDirectory(); + IndexWriterConfig iwc = newIndexWriterConfig(); + RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc); + final int numDocs = scaledRandomIntBetween(100, 200); + for (int i = 0; i < numDocs; ++i) { + Document doc = new Document(); + if (randomBoolean()) { + doc.add(new StringField("foo", "bar", Store.NO)); + } + if 
(randomBoolean()) { + doc.add(new StringField("foo", "baz", Store.NO)); + } + doc.add(new NumericDocValuesField("rank", numDocs - i)); + w.addDocument(doc); + } + w.close(); + TestSearchContext context = new TestSearchContext(null); + context.setTask(new SearchTask(123L, "", "", "", null)); + context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery())); + context.terminateAfter(1); + + final AtomicBoolean collected = new AtomicBoolean(); + final IndexReader reader = DirectoryReader.open(dir); + IndexSearcher contextSearcher = new IndexSearcher(reader) { + protected void search(List leaves, Weight weight, Collector collector) throws IOException { + collected.set(true); + super.search(leaves, weight, collector); + } + }; + + { + context.setSize(1); + QueryPhase.execute(context, contextSearcher, null); + assertTrue(collected.get()); + assertTrue(context.queryResult().terminatedEarly()); + assertThat(context.queryResult().topDocs().totalHits, equalTo(1)); + assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); + + context.setSize(0); + QueryPhase.execute(context, contextSearcher, null); + assertTrue(collected.get()); + assertTrue(context.queryResult().terminatedEarly()); + assertThat(context.queryResult().topDocs().totalHits, equalTo(1)); + assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(0)); + } + + { + context.setSize(1); + QueryPhase.execute(context, contextSearcher, null); + assertTrue(collected.get()); + assertTrue(context.queryResult().terminatedEarly()); + assertThat(context.queryResult().topDocs().totalHits, equalTo(1)); + assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); + } + { + context.setSize(1); + BooleanQuery bq = new BooleanQuery.Builder() + .add(new TermQuery(new Term("foo", "bar")), Occur.SHOULD) + .add(new TermQuery(new Term("foo", "baz")), Occur.SHOULD) + .build(); + context.parsedQuery(new ParsedQuery(bq)); + collected.set(false); + QueryPhase.execute(context, contextSearcher, null); + assertTrue(collected.get()); + assertTrue(context.queryResult().terminatedEarly()); + assertThat(context.queryResult().topDocs().totalHits, equalTo(1)); + assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); + + context.setSize(0); + context.parsedQuery(new ParsedQuery(bq)); + collected.set(false); + QueryPhase.execute(context, contextSearcher, null); + assertTrue(collected.get()); + assertTrue(context.queryResult().terminatedEarly()); + assertThat(context.queryResult().topDocs().totalHits, equalTo(1)); + assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(0)); + } + { + context.setSize(1); + collected.set(false); + TotalHitCountCollector collector = new TotalHitCountCollector(); + context.queryCollectors().put(TotalHitCountCollector.class, collector); + QueryPhase.execute(context, contextSearcher, null); + assertTrue(collected.get()); + assertTrue(context.queryResult().terminatedEarly()); + assertThat(context.queryResult().topDocs().totalHits, equalTo(1)); + assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); + } + { + context.setSize(0); + collected.set(false); + TotalHitCountCollector collector = new TotalHitCountCollector(); + context.queryCollectors().put(TotalHitCountCollector.class, collector); + QueryPhase.execute(context, contextSearcher, null); + assertTrue(collected.get()); + assertTrue(context.queryResult().terminatedEarly()); + assertThat(context.queryResult().topDocs().totalHits, equalTo(1)); + assertThat(context.queryResult().topDocs().scoreDocs.length, 
equalTo(0)); + assertThat(collector.getTotalHits(), equalTo(1)); + } + + reader.close(); + dir.close(); + } + + public void testIndexSortingEarlyTermination() throws Exception { + Directory dir = newDirectory(); + final Sort sort = new Sort(new SortField("rank", SortField.Type.INT)); + IndexWriterConfig iwc = newIndexWriterConfig() + .setIndexSort(sort); + RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc); + final int numDocs = scaledRandomIntBetween(100, 200); + for (int i = 0; i < numDocs; ++i) { + Document doc = new Document(); + if (randomBoolean()) { + doc.add(new StringField("foo", "bar", Store.NO)); + } + if (randomBoolean()) { + doc.add(new StringField("foo", "baz", Store.NO)); + } + doc.add(new NumericDocValuesField("rank", numDocs - i)); + w.addDocument(doc); + } + w.close(); + + TestSearchContext context = new TestSearchContext(null); + context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery())); + context.setSize(1); + context.setTask(new SearchTask(123L, "", "", "", null)); + context.sort(new SortAndFormats(sort, new DocValueFormat[] {DocValueFormat.RAW})); + + final AtomicBoolean collected = new AtomicBoolean(); + final IndexReader reader = DirectoryReader.open(dir); + IndexSearcher contextSearcher = new IndexSearcher(reader) { + protected void search(List leaves, Weight weight, Collector collector) throws IOException { + collected.set(true); + super.search(leaves, weight, collector); + } + }; + QueryPhase.execute(context, contextSearcher, sort); + assertTrue(collected.get()); + assertTrue(context.queryResult().terminatedEarly()); + assertThat(context.queryResult().topDocs().totalHits, equalTo(numDocs)); + assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); + assertThat(context.queryResult().topDocs().scoreDocs[0], instanceOf(FieldDoc.class)); + FieldDoc fieldDoc = (FieldDoc) context.queryResult().topDocs().scoreDocs[0]; + assertThat(fieldDoc.fields[0], equalTo(1)); + + + { + collected.set(false); + context.parsedPostFilter(new ParsedQuery(new MinDocQuery(1))); + QueryPhase.execute(context, contextSearcher, sort); + assertTrue(collected.get()); + assertTrue(context.queryResult().terminatedEarly()); + assertThat(context.queryResult().topDocs().totalHits, equalTo(numDocs - 1)); + assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); + assertThat(context.queryResult().topDocs().scoreDocs[0], instanceOf(FieldDoc.class)); + assertThat(fieldDoc.fields[0], anyOf(equalTo(1), equalTo(2))); + context.parsedPostFilter(null); + + final TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector(); + context.queryCollectors().put(TotalHitCountCollector.class, totalHitCountCollector); + collected.set(false); + QueryPhase.execute(context, contextSearcher, sort); + assertTrue(collected.get()); + assertTrue(context.queryResult().terminatedEarly()); + assertThat(context.queryResult().topDocs().totalHits, equalTo(numDocs)); + assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); + assertThat(context.queryResult().topDocs().scoreDocs[0], instanceOf(FieldDoc.class)); + assertThat(fieldDoc.fields[0], anyOf(equalTo(1), equalTo(2))); + assertThat(totalHitCountCollector.getTotalHits(), equalTo(numDocs)); + context.queryCollectors().clear(); + } + + { + collected.set(false); + context.trackTotalHits(false); + QueryPhase.execute(context, contextSearcher, sort); + assertTrue(collected.get()); + assertTrue(context.queryResult().terminatedEarly()); + assertThat(context.queryResult().topDocs().totalHits, 
lessThan(numDocs)); + assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); + assertThat(context.queryResult().topDocs().scoreDocs[0], instanceOf(FieldDoc.class)); + assertThat(fieldDoc.fields[0], anyOf(equalTo(1), equalTo(2))); + + final TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector(); + context.queryCollectors().put(TotalHitCountCollector.class, totalHitCountCollector); + collected.set(false); + QueryPhase.execute(context, contextSearcher, sort); + assertTrue(collected.get()); + assertTrue(context.queryResult().terminatedEarly()); + assertThat(context.queryResult().topDocs().totalHits, lessThan(numDocs)); + assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); + assertThat(context.queryResult().topDocs().scoreDocs[0], instanceOf(FieldDoc.class)); + assertThat(fieldDoc.fields[0], anyOf(equalTo(1), equalTo(2))); + assertThat(totalHitCountCollector.getTotalHits(), equalTo(numDocs)); + } + reader.close(); + dir.close(); + } + + public void testIndexSortScrollOptimization() throws Exception { + Directory dir = newDirectory(); + final Sort sort = new Sort( + new SortField("rank", SortField.Type.INT), + new SortField("tiebreaker", SortField.Type.INT) + ); + IndexWriterConfig iwc = newIndexWriterConfig().setIndexSort(sort); + RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc); + final int numDocs = scaledRandomIntBetween(100, 200); + for (int i = 0; i < numDocs; ++i) { + Document doc = new Document(); + doc.add(new NumericDocValuesField("rank", random().nextInt())); + doc.add(new NumericDocValuesField("tiebreaker", i)); + w.addDocument(doc); + } + w.close(); + + TestSearchContext context = new TestSearchContext(null); + context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery())); + ScrollContext scrollContext = new ScrollContext(); + scrollContext.lastEmittedDoc = null; + scrollContext.maxScore = Float.NaN; + scrollContext.totalHits = -1; + context.scrollContext(scrollContext); + context.setTask(new SearchTask(123L, "", "", "", null)); + context.setSize(10); + context.sort(new SortAndFormats(sort, new DocValueFormat[] {DocValueFormat.RAW, DocValueFormat.RAW})); + + final AtomicBoolean collected = new AtomicBoolean(); + final IndexReader reader = DirectoryReader.open(dir); + IndexSearcher contextSearcher = new IndexSearcher(reader) { + protected void search(List leaves, Weight weight, Collector collector) throws IOException { + collected.set(true); + super.search(leaves, weight, collector); + } + }; + + QueryPhase.execute(context, contextSearcher, sort); + assertThat(context.queryResult().topDocs().totalHits, equalTo(numDocs)); + assertTrue(collected.get()); + assertNull(context.queryResult().terminatedEarly()); + assertThat(context.terminateAfter(), equalTo(0)); + assertThat(context.queryResult().getTotalHits(), equalTo(numDocs)); + int sizeMinus1 = context.queryResult().topDocs().scoreDocs.length - 1; + FieldDoc lastDoc = (FieldDoc) context.queryResult().topDocs().scoreDocs[sizeMinus1]; + + QueryPhase.execute(context, contextSearcher, sort); + assertThat(context.queryResult().topDocs().totalHits, equalTo(numDocs)); + assertTrue(collected.get()); + assertTrue(context.queryResult().terminatedEarly()); + assertThat(context.terminateAfter(), equalTo(0)); + assertThat(context.queryResult().getTotalHits(), equalTo(numDocs)); + FieldDoc firstDoc = (FieldDoc) context.queryResult().topDocs().scoreDocs[0]; + for (int i = 0; i < sort.getSort().length; i++) { + @SuppressWarnings("unchecked") + FieldComparator comparator = 
(FieldComparator) sort.getSort()[i].getComparator(1, i); + int cmp = comparator.compareValues(firstDoc.fields[i], lastDoc.fields[i]); + if (cmp == 0) { + continue; + } + assertThat(cmp, equalTo(1)); + break; + } + reader.close(); + dir.close(); + } } diff --git a/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java index 58c0bf82e98..f22ec392b99 100644 --- a/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java @@ -120,7 +120,6 @@ public class SimpleQueryStringIT extends ESIntegTestCase { assertSearchHits(searchResponse, "5", "6"); } - @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/23966") public void testSimpleQueryStringMinimumShouldMatch() throws Exception { createIndex("test"); ensureGreen("test"); diff --git a/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java index f63f13b6dd2..c4bb4a811a5 100644 --- a/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/simple/SimpleSearchIT.java @@ -30,7 +30,9 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchContextException; import org.elasticsearch.search.rescore.QueryRescorerBuilder; +import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import java.util.ArrayList; @@ -51,6 +53,8 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFail import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; public class SimpleSearchIT extends ESIntegTestCase { @@ -285,7 +289,50 @@ public class SimpleSearchIT extends ESIntegTestCase { .setTerminateAfter(2 * max).execute().actionGet(); assertHitCount(searchResponse, max); - assertFalse(searchResponse.isTerminatedEarly()); + assertNull(searchResponse.isTerminatedEarly()); + } + + public void testSimpleIndexSortEarlyTerminate() throws Exception { + prepareCreate("test") + .setSettings(Settings.builder() + .put(SETTING_NUMBER_OF_SHARDS, 1) + .put(SETTING_NUMBER_OF_REPLICAS, 0) + .put("index.sort.field", "rank") + ) + .addMapping("type1", "rank", "type=integer") + .get(); + ensureGreen(); + int max = randomIntBetween(3, 29); + List docbuilders = new ArrayList<>(max); + + for (int i = max-1; i >= 0; i--) { + String id = String.valueOf(i); + docbuilders.add(client().prepareIndex("test", "type1", id).setSource("rank", i)); + } + + indexRandom(true, docbuilders); + ensureGreen(); + refresh(); + + SearchResponse searchResponse; + boolean hasEarlyTerminated = false; + for (int i = 1; i < max; i++) { + searchResponse = client().prepareSearch("test") + .addDocValueField("rank") + .setTrackTotalHits(false) + .addSort("rank", SortOrder.ASC) + .setSize(i).execute().actionGet(); + assertThat(searchResponse.getHits().getTotalHits(), equalTo(-1L)); + if (searchResponse.isTerminatedEarly() != null) { + 
assertTrue(searchResponse.isTerminatedEarly()); + hasEarlyTerminated = true; + } + for (int j = 0; j < i; j++) { + assertThat(searchResponse.getHits().getAt(j).field("rank").getValue(), + equalTo((long) j)); + } + } + assertTrue(hasEarlyTerminated); } public void testInsaneFromAndSize() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java index 2ed8857f122..121085f34d7 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java @@ -158,7 +158,7 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase excludeFilter = (path) -> (path.endsWith(CompletionSuggestion.Entry.Option.CONTEXTS.getPreferredName()) + || path.endsWith("highlight") || path.endsWith("fields") || path.contains("_source") || path.contains("inner_hits")); + mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random()); + } else { + mutated = originalBytes; } Option parsed; - try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { + try (XContentParser parser = createParser(xContentType.xContent(), mutated)) { parsed = Option.fromXContent(parser); assertNull(parser.nextToken()); } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java index a3ea5e99324..eb30c32cf62 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.suggest; import com.carrotsearch.randomizedtesting.generators.RandomStrings; - import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexResponse; @@ -660,37 +659,42 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase { .field("preserve_separators", completionMappingBuilder.preserveSeparators) .field("preserve_position_increments", completionMappingBuilder.preservePositionIncrements); + List categoryContextFields = new ArrayList<>(); if (completionMappingBuilder.contextMappings != null) { - mapping = mapping.startArray("contexts"); + mapping.startArray("contexts"); for (Map.Entry contextMapping : completionMappingBuilder.contextMappings.entrySet()) { - mapping = mapping.startObject() + mapping.startObject() .field("name", contextMapping.getValue().name()) .field("type", contextMapping.getValue().type().name()); switch (contextMapping.getValue().type()) { case CATEGORY: final String fieldName = ((CategoryContextMapping) contextMapping.getValue()).getFieldName(); if (fieldName != null) { - mapping = mapping.field("path", fieldName); + mapping.field("path", fieldName); + categoryContextFields.add(fieldName); } break; case GEO: final String name = ((GeoContextMapping) contextMapping.getValue()).getFieldName(); - mapping = mapping - .field("precision", ((GeoContextMapping) contextMapping.getValue()).getPrecision()); + mapping.field("precision", ((GeoContextMapping) contextMapping.getValue()).getPrecision()); if (name != null) { mapping.field("path", name); } break; } - mapping = mapping.endObject(); + mapping.endObject(); } - mapping = mapping.endArray(); + 
mapping.endArray(); } - mapping = mapping.endObject() - .endObject().endObject() + mapping.endObject(); + for (String fieldName : categoryContextFields) { + mapping.startObject(fieldName) + .field("type", randomBoolean() ? "keyword" : "text") .endObject(); + } + mapping.endObject().endObject().endObject(); assertAcked(client().admin().indices().prepareCreate(INDEX) .setSettings(Settings.builder().put(indexSettings()).put(settings)) diff --git a/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java index 50611a1cb95..035fd847ad2 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.plugins.ScriptPlugin; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptEngine; +import org.elasticsearch.script.TemplateScript; import org.elasticsearch.search.suggest.phrase.DirectCandidateGeneratorBuilder; import org.elasticsearch.search.suggest.phrase.Laplace; import org.elasticsearch.search.suggest.phrase.LinearInterpolation; @@ -1025,23 +1026,18 @@ public class SuggestSearchIT extends ESIntegTestCase { @Override public T compile(String scriptName, String scriptSource, ScriptContext context, Map params) { - if (context.instanceClazz != ExecutableScript.class) { + if (context.instanceClazz != TemplateScript.class) { throw new UnsupportedOperationException(); } - ExecutableScript.Factory factory = p -> { + TemplateScript.Factory factory = p -> { String script = scriptSource; for (Entry entry : p.entrySet()) { script = script.replace("{{" + entry.getKey() + "}}", String.valueOf(entry.getValue())); } String result = script; - return new ExecutableScript() { + return new TemplateScript(null) { @Override - public void setNextVar(String name, Object value) { - throw new UnsupportedOperationException("setNextVar not supported"); - } - - @Override - public Object run() { + public String execute() { return result; } }; diff --git a/core/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java b/core/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java index c565836adb6..8c938caa479 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/SuggestTests.java @@ -90,7 +90,7 @@ public class SuggestTests extends ESTestCase { Suggest suggest = createTestItem(); XContentType xContentType = randomFrom(XContentType.values()); boolean humanReadable = randomBoolean(); - BytesReference originalBytes = toXContent(suggest, xContentType, params, humanReadable); + BytesReference originalBytes = toShuffledXContent(suggest, xContentType, params, humanReadable); Suggest parsed; try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/SuggestionEntryTests.java b/core/src/test/java/org/elasticsearch/search/suggest/SuggestionEntryTests.java index 770fd2f6e6c..46971a5537b 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/SuggestionEntryTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/SuggestionEntryTests.java @@ -36,10 +36,12 @@ import java.util.Collections; import java.util.HashMap; 
 import java.util.Map;
 import java.util.function.Function;
+import java.util.function.Predicate;
 import java.util.function.Supplier;
 
 import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
 import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
+import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
 
 public class SuggestionEntryTests extends ESTestCase {
@@ -80,15 +82,35 @@ public class SuggestionEntryTests extends ESTestCase {
         return entry;
     }
 
-    @SuppressWarnings("unchecked")
     public void testFromXContent() throws IOException {
+        doTestFromXContent(false);
+    }
+
+    public void testFromXContentWithRandomFields() throws IOException {
+        doTestFromXContent(true);
+    }
+
+    @SuppressWarnings("unchecked")
+    private void doTestFromXContent(boolean addRandomFields) throws IOException {
         for (Class entryType : ENTRY_PARSERS.keySet()) {
             Entry