Merge branch 'master' into enhancement/remove_node_client_setting

Authored by javanna on 2016-03-21 17:18:23 +01:00; committed by Luca Cavanna
commit bf390a935e
1348 changed files with 37002 additions and 31473 deletions

Vagrantfile

@@ -23,15 +23,15 @@
Vagrant.configure(2) do |config|
config.vm.define "ubuntu-1204" do |config|
- config.vm.box = "ubuntu/precise64"
+ config.vm.box = "elastic/ubuntu-12.04-x86_64"
ubuntu_common config
end
config.vm.define "ubuntu-1404" do |config|
- config.vm.box = "ubuntu/trusty64"
+ config.vm.box = "elastic/ubuntu-14.04-x86_64"
ubuntu_common config
end
config.vm.define "ubuntu-1504" do |config|
- config.vm.box = "ubuntu/vivid64"
+ config.vm.box = "elastic/ubuntu-15.04-x86_64"
ubuntu_common config, extra: <<-SHELL
# Install Jayatana so we can work around it being present.
[ -f /usr/share/java/jayatanaag.jar ] || install jayatana
@@ -41,44 +41,35 @@ Vagrant.configure(2) do |config|
# get the sun jdk on there just aren't worth it. We have jessie for testing
# debian and it works fine.
config.vm.define "debian-8" do |config|
- config.vm.box = "debian/jessie64"
+ config.vm.box = "elastic/debian-8-x86_64"
- deb_common config,
-   'echo deb http://http.debian.net/debian jessie-backports main > /etc/apt/sources.list.d/backports.list', 'backports'
+ deb_common config, 'echo deb http://http.debian.net/debian jessie-backports main > /etc/apt/sources.list.d/backports.list', 'backports'
end
config.vm.define "centos-6" do |config|
- config.vm.box = "boxcutter/centos67"
+ config.vm.box = "elastic/centos-6-x86_64"
rpm_common config
end
config.vm.define "centos-7" do |config|
- # There is a centos/7 box but it doesn't have rsync or virtualbox guest
- # stuff on there so its slow to use. So chef it is....
- config.vm.box = "boxcutter/centos71"
+ config.vm.box = "elastic/centos-7-x86_64"
+ rpm_common config
+ end
+ config.vm.define "oel-6" do |config|
+ config.vm.box = "elastic/oraclelinux-6-x86_64"
rpm_common config
end
- # This box hangs _forever_ on ```yum check-update```. I have no idea why.
- # config.vm.define "oel-6", autostart: false do |config|
- # config.vm.box = "boxcutter/oel66"
- # rpm_common(config)
- # end
config.vm.define "oel-7" do |config|
- config.vm.box = "boxcutter/oel70"
+ config.vm.box = "elastic/oraclelinux-7-x86_64"
rpm_common config
end
config.vm.define "fedora-22" do |config|
- # Fedora hosts their own 'cloud' images that aren't in Vagrant's Atlas but
- # and are missing required stuff like rsync. It'd be nice if we could use
- # them but they much slower to get up and running then the boxcutter image.
- config.vm.box = "boxcutter/fedora22"
+ config.vm.box = "elastic/fedora-22-x86_64"
dnf_common config
end
config.vm.define "opensuse-13" do |config|
- config.vm.box = "chef/opensuse-13"
- config.vm.box_url = "http://opscode-vm-bento.s3.amazonaws.com/vagrant/virtualbox/opscode_opensuse-13.2-x86_64_chef-provisionerless.box"
+ config.vm.box = "elastic/opensuse-13-x86_64"
opensuse_common config
end
- # The SLES boxes are not considered to be highest quality, but seem to be sufficient for a test run
config.vm.define "sles-12" do |config|
- config.vm.box = "idar/sles12"
+ config.vm.box = "elastic/sles-12-x86_64"
sles_common config
end
# Switch the default share for the project root from /vagrant to

build.gradle

@@ -116,6 +116,7 @@ subprojects {
"org.elasticsearch.distribution.tar:elasticsearch:${version}": ':distribution:tar',
"org.elasticsearch.distribution.rpm:elasticsearch:${version}": ':distribution:rpm',
"org.elasticsearch.distribution.deb:elasticsearch:${version}": ':distribution:deb',
+ "org.elasticsearch.test:logger-usage:${version}": ':test:logger-usage',
]
configurations.all {
resolutionStrategy.dependencySubstitution { DependencySubstitutions subs ->
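The new entry maps the external coordinates org.elasticsearch.test:logger-usage onto the in-tree :test:logger-usage project, so builds inside the source tree resolve the checker locally. As a rough sketch of the mechanism (illustrative only, not copied from this commit), the map is applied through Gradle's dependency-substitution API roughly like this:

// Illustrative sketch: how projectSubstitutions entries (including the new logger-usage one)
// can be turned into project substitutions at resolution time.
configurations.all {
    resolutionStrategy.dependencySubstitution { DependencySubstitutions subs ->
        projectSubstitutions.each { String module, String projectPath ->
            subs.substitute(subs.module(module)).with(subs.project(projectPath))
        }
    }
}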

BuildPlugin.groovy

@@ -307,6 +307,12 @@ class BuildPlugin implements Plugin<Project> {
/** Adds repositores used by ES dependencies */
static void configureRepositories(Project project) {
RepositoryHandler repos = project.repositories
+ if (System.getProperty("repos.mavenlocal") != null) {
+ // with -Drepos.mavenlocal=true we can force checking the local .m2 repo which is
+ // useful for development ie. bwc tests where we install stuff in the local repository
+ // such that we don't have to pass hardcoded files to gradle
+ repos.mavenLocal()
+ }
repos.mavenCentral()
repos.maven {
name 'sonatype-snapshots'
@@ -407,6 +413,7 @@ class BuildPlugin implements Plugin<Project> {
systemProperty 'jna.nosys', 'true'
// default test sysprop values
systemProperty 'tests.ifNoTests', 'fail'
+ // TODO: remove setting logging level via system property
systemProperty 'es.logger.level', 'WARN'
for (Map.Entry<String, String> property : System.properties.entrySet()) {
if (property.getKey().startsWith('tests.') ||
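The repos.mavenlocal flag is read as a plain JVM system property, so it can be switched on from the command line of any build, for example during backwards-compatibility testing after installing an artifact into ~/.m2. A minimal sketch (assumed usage, not part of the commit) of the resolution order the new block produces when the flag is set:

// Sketch only: repository lookup order when -Drepos.mavenlocal=true is passed to gradle.
def repoOrder = []
if (System.getProperty('repos.mavenlocal') != null) {
    repoOrder << '~/.m2/repository (mavenLocal)'   // consulted first when the flag is set
}
repoOrder << 'mavenCentral'
repoOrder << 'sonatype-snapshots'
println "dependency resolution order: ${repoOrder.join(' -> ')}"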

PluginBuildPlugin.groovy

@@ -68,7 +68,7 @@ public class PluginBuildPlugin extends BuildPlugin {
testCompile "org.elasticsearch.test:framework:${project.versions.elasticsearch}"
// we "upgrade" these optional deps to provided for plugins, since they will run
// with a full elasticsearch server that includes optional deps
- provided "com.spatial4j:spatial4j:${project.versions.spatial4j}"
+ provided "org.locationtech.spatial4j:spatial4j:${project.versions.spatial4j}"
provided "com.vividsolutions:jts:${project.versions.jts}"
provided "log4j:log4j:${project.versions.log4j}"
provided "log4j:apache-log4j-extras:${project.versions.log4j}"

PluginPropertiesTask.groovy

@@ -68,11 +68,17 @@ class PluginPropertiesTask extends Copy {
}
Map generateSubstitutions() {
+ def stringSnap = { version ->
+ if (version.endsWith("-SNAPSHOT")) {
+ return version.substring(0, version.length() - 9)
+ }
+ return version
+ }
return [
'name': extension.name,
'description': extension.description,
- 'version': extension.version,
+ 'version': stringSnap(extension.version),
- 'elasticsearchVersion': VersionProperties.elasticsearch,
+ 'elasticsearchVersion': stringSnap(VersionProperties.elasticsearch),
'javaVersion': project.targetCompatibility as String,
'isolated': extension.isolated as String,
'classname': extension.classname
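The new stringSnap closure strips the nine-character "-SNAPSHOT" suffix before the version is written into the plugin properties, and leaves release versions untouched. A quick worked illustration (the version values are examples only):

// Behaviour of the stringSnap closure added above, with example inputs.
def stringSnap = { version ->
    if (version.endsWith("-SNAPSHOT")) {
        return version.substring(0, version.length() - 9)   // "-SNAPSHOT" is 9 characters
    }
    return version
}
assert stringSnap("5.0.0-SNAPSHOT") == "5.0.0"
assert stringSnap("2.3.0") == "2.3.0"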

LoggerUsageTask.groovy (new file)

@@ -0,0 +1,98 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.precommit

import org.elasticsearch.gradle.LoggedExec
import org.gradle.api.file.FileCollection
import org.gradle.api.tasks.InputFiles
import org.gradle.api.tasks.OutputFile

/**
 * Runs LoggerUsageCheck on a set of directories.
 */
public class LoggerUsageTask extends LoggedExec {

    /**
     * We use a simple "marker" file that we touch when the task succeeds
     * as the task output. This is compared against the modified time of the
     * inputs (ie the jars/class files).
     */
    private File successMarker = new File(project.buildDir, 'markers/loggerUsage')

    private FileCollection classpath;

    private List<File> classDirectories;

    public LoggerUsageTask() {
        project.afterEvaluate {
            dependsOn(classpath)
            description = "Runs LoggerUsageCheck on ${classDirectories}"
            executable = new File(project.javaHome, 'bin/java')
            if (classDirectories == null) {
                classDirectories = []
                if (project.sourceSets.findByName("main") && project.sourceSets.main.output.classesDir.exists()) {
                    classDirectories += [project.sourceSets.main.output.classesDir]
                    dependsOn project.tasks.classes
                }
                if (project.sourceSets.findByName("test") && project.sourceSets.test.output.classesDir.exists()) {
                    classDirectories += [project.sourceSets.test.output.classesDir]
                    dependsOn project.tasks.testClasses
                }
            }
            doFirst({
                args('-cp', getClasspath().asPath, 'org.elasticsearch.test.loggerusage.ESLoggerUsageChecker')
                getClassDirectories().each {
                    args it.getAbsolutePath()
                }
            })
            doLast({
                successMarker.parentFile.mkdirs()
                successMarker.setText("", 'UTF-8')
            })
        }
    }

    @InputFiles
    FileCollection getClasspath() {
        return classpath
    }

    void setClasspath(FileCollection classpath) {
        this.classpath = classpath
    }

    @InputFiles
    List<File> getClassDirectories() {
        return classDirectories
    }

    void setClassDirectories(List<File> classDirectories) {
        this.classDirectories = classDirectories
    }

    @OutputFile
    File getSuccessMarker() {
        return successMarker
    }

    void setSuccessMarker(File successMarker) {
        this.successMarker = successMarker
    }
}
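The constructor above boils down to a plain java invocation of the checker main class against the compiled class directories, with a marker file as the task output for up-to-date checks. A hedged sketch of wiring the task by hand (the task name is hypothetical; PrecommitTasks in the next file does essentially the same thing):

// Hypothetical manual wiring in a build.gradle, equivalent to what PrecommitTasks sets up below.
task manualLoggerUsageCheck(type: org.elasticsearch.gradle.precommit.LoggerUsageTask) {
    classpath = configurations.loggerUsagePlugin
    // at execution time this runs, roughly:
    //   java -cp <loggerUsagePlugin classpath> org.elasticsearch.test.loggerusage.ESLoggerUsageChecker <class dirs>
}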

PrecommitTasks.groovy

@@ -34,6 +34,7 @@ class PrecommitTasks {
configureForbiddenApis(project),
configureCheckstyle(project),
configureNamingConventions(project),
+ configureLoggerUsage(project),
project.tasks.create('forbiddenPatterns', ForbiddenPatternsTask.class),
project.tasks.create('licenseHeaders', LicenseHeadersTask.class),
project.tasks.create('jarHell', JarHellTask.class),
@@ -63,21 +64,21 @@ class PrecommitTasks {
project.forbiddenApis {
internalRuntimeForbidden = true
failOnUnsupportedJava = false
- bundledSignatures = ['jdk-unsafe', 'jdk-deprecated']
+ bundledSignatures = ['jdk-unsafe', 'jdk-deprecated', 'jdk-system-out']
- signaturesURLs = [getClass().getResource('/forbidden/all-signatures.txt')]
+ signaturesURLs = [getClass().getResource('/forbidden/jdk-signatures.txt'),
+ getClass().getResource('/forbidden/es-all-signatures.txt')]
suppressAnnotations = ['**.SuppressForbidden']
}
Task mainForbidden = project.tasks.findByName('forbiddenApisMain')
if (mainForbidden != null) {
mainForbidden.configure {
- bundledSignatures += 'jdk-system-out'
- signaturesURLs += getClass().getResource('/forbidden/core-signatures.txt')
+ signaturesURLs += getClass().getResource('/forbidden/es-core-signatures.txt')
}
}
Task testForbidden = project.tasks.findByName('forbiddenApisTest')
if (testForbidden != null) {
testForbidden.configure {
- signaturesURLs += getClass().getResource('/forbidden/test-signatures.txt')
+ signaturesURLs += getClass().getResource('/forbidden/es-test-signatures.txt')
}
}
Task forbiddenApis = project.tasks.findByName('forbiddenApis')
@@ -117,4 +118,18 @@
}
return null
}
+ private static Task configureLoggerUsage(Project project) {
+ Task loggerUsageTask = project.tasks.create('loggerUsageCheck', LoggerUsageTask.class)
+ project.configurations.create('loggerUsagePlugin')
+ project.dependencies.add('loggerUsagePlugin',
+ "org.elasticsearch.test:logger-usage:${org.elasticsearch.gradle.VersionProperties.elasticsearch}")
+ loggerUsageTask.configure {
+ classpath = project.configurations.loggerUsagePlugin
+ }
+ return loggerUsageTask
+ }
}

ClusterConfiguration.groovy

@@ -23,8 +23,6 @@ import org.gradle.api.Project
import org.gradle.api.file.FileCollection
import org.gradle.api.tasks.Input
- import java.time.LocalDateTime
/** Configuration for an elasticsearch cluster, used for integration tests. */
class ClusterConfiguration {
@@ -34,6 +32,12 @@ class ClusterConfiguration {
@Input
int numNodes = 1
+ @Input
+ int numBwcNodes = 0
+ @Input
+ String bwcVersion = null
@Input
int httpPort = 0
@@ -49,6 +53,15 @@
@Input
String jvmArgs = System.getProperty('tests.jvm.argline', '')
+ /**
+ * The seed nodes port file. In the case the cluster has more than one node we use a seed node
+ * to form the cluster. The file is null if there is no seed node yet available.
+ *
+ * Note: this can only be null if the cluster has only one node or if the first node is not yet
+ * configured. All nodes but the first node should see a non null value.
+ */
+ File seedNodePortsFile
/**
* A closure to call before the cluster is considered ready. The closure is passed the node info,
* as well as a groovy AntBuilder, to enable running ant condition checks. The default wait
@@ -119,4 +132,12 @@
}
extraConfigFiles.put(path, sourceFile)
}
+ /** Returns an address and port suitable for a uri to connect to this clusters seed node over transport protocol*/
+ String seedNodeTransportUri() {
+ if (seedNodePortsFile != null) {
+ return seedNodePortsFile.readLines("UTF-8").get(0)
+ }
+ return null;
+ }
}
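To make the new seed-node plumbing concrete, here is a small illustration with made-up values (not code from the commit): the first node writes its bound transport address into its ports file, and seedNodeTransportUri() hands the first line of that file to every later node as its unicast host.

// Illustration only, with an invented address: what seedNodeTransportUri() feeds into the
// configuration of nodes 1..N-1 (see the discovery.zen.ping.unicast.hosts handling in ClusterFormationTasks).
def seedUri = '127.0.0.1:9300'          // first line of the seed node's transport ports file
def esConfig = [:]
esConfig['discovery.zen.ping.unicast.hosts'] = "\"${seedUri}\"".toString()
assert esConfig['discovery.zen.ping.unicast.hosts'] == '"127.0.0.1:9300"'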

ClusterFormationTasks.groovy

@@ -53,13 +53,59 @@ class ClusterFormationTasks {
// no need to add cluster formation tasks if the task won't run!
return
}
- configureDistributionDependency(project, config.distribution)
- List<Task> startTasks = []
+ File sharedDir = new File(project.buildDir, "cluster/shared")
+ // first we remove everything in the shared cluster directory to ensure there are no leftovers in repos or anything
+ // in theory this should not be necessary but repositories are only deleted in the cluster-state and not on-disk
+ // such that snapshots survive failures / test runs and there is no simple way today to fix that.
+ Task cleanup = project.tasks.create(name: "${task.name}#prepareCluster.cleanShared", type: Delete, dependsOn: task.dependsOn.collect()) {
+ delete sharedDir
+ doLast {
+ sharedDir.mkdirs()
+ }
+ }
+ List<Task> startTasks = [cleanup]
List<NodeInfo> nodes = []
+ if (config.numNodes < config.numBwcNodes) {
+ throw new GradleException("numNodes must be >= numBwcNodes [${config.numNodes} < ${config.numBwcNodes}]")
+ }
+ if (config.numBwcNodes > 0 && config.bwcVersion == null) {
+ throw new GradleException("bwcVersion must not be null if numBwcNodes is > 0")
+ }
+ // this is our current version distribution configuration we use for all kinds of REST tests etc.
+ project.configurations {
+ elasticsearchDistro
+ }
+ configureDistributionDependency(project, config.distribution, project.configurations.elasticsearchDistro, VersionProperties.elasticsearch)
+ if (config.bwcVersion != null && config.numBwcNodes > 0) {
+ // if we have a cluster that has a BWC cluster we also need to configure a dependency on the BWC version
+ // this version uses the same distribution etc. and only differs in the version we depend on.
+ // from here on everything else works the same as if it's the current version, we fetch the BWC version
+ // from mirrors using gradles built-in mechanism etc.
+ project.configurations {
+ elasticsearchBwcDistro
+ }
+ configureDistributionDependency(project, config.distribution, project.configurations.elasticsearchBwcDistro, config.bwcVersion)
+ }
for (int i = 0; i < config.numNodes; ++i) {
- NodeInfo node = new NodeInfo(config, i, project, task)
+ // we start N nodes and out of these N nodes there might be M bwc nodes.
+ // for each of those nodes we might have a different configuratioon
+ String elasticsearchVersion = VersionProperties.elasticsearch
+ Configuration configuration = project.configurations.elasticsearchDistro
+ if (i < config.numBwcNodes) {
+ elasticsearchVersion = config.bwcVersion
+ configuration = project.configurations.elasticsearchBwcDistro
+ }
+ NodeInfo node = new NodeInfo(config, i, project, task, elasticsearchVersion, sharedDir)
+ if (i == 0) {
+ if (config.seedNodePortsFile != null) {
+ // we might allow this in the future to be set but for now we are the only authority to set this!
+ throw new GradleException("seedNodePortsFile has a non-null value but first node has not been intialized")
+ }
+ config.seedNodePortsFile = node.transportPortsFile;
+ }
nodes.add(node)
- startTasks.add(configureNode(project, task, node))
+ startTasks.add(configureNode(project, task, cleanup, node, configuration))
}
Task wait = configureWaitTask("${task.name}#wait", project, nodes, startTasks)
@@ -70,20 +116,14 @@
}
/** Adds a dependency on the given distribution */
- static void configureDistributionDependency(Project project, String distro) {
+ static void configureDistributionDependency(Project project, String distro, Configuration configuration, String elasticsearchVersion) {
- String elasticsearchVersion = VersionProperties.elasticsearch
String packaging = distro
if (distro == 'tar') {
packaging = 'tar.gz'
} else if (distro == 'integ-test-zip') {
packaging = 'zip'
}
- project.configurations {
- elasticsearchDistro
- }
- project.dependencies {
- elasticsearchDistro "org.elasticsearch.distribution.${distro}:elasticsearch:${elasticsearchVersion}@${packaging}"
- }
+ project.dependencies.add(configuration.name, "org.elasticsearch.distribution.${distro}:elasticsearch:${elasticsearchVersion}@${packaging}")
}
/**
@@ -103,10 +143,10 @@
*
* @return a task which starts the node.
*/
- static Task configureNode(Project project, Task task, NodeInfo node) {
+ static Task configureNode(Project project, Task task, Object dependsOn, NodeInfo node, Configuration configuration) {
// tasks are chained so their execution order is maintained
- Task setup = project.tasks.create(name: taskName(task, node, 'clean'), type: Delete, dependsOn: task.dependsOn.collect()) {
+ Task setup = project.tasks.create(name: taskName(task, node, 'clean'), type: Delete, dependsOn: dependsOn) {
delete node.homeDir
delete node.cwd
doLast {
@@ -115,7 +155,7 @@
}
setup = configureCheckPreviousTask(taskName(task, node, 'checkPrevious'), project, setup, node)
setup = configureStopTask(taskName(task, node, 'stopPrevious'), project, setup, node)
- setup = configureExtractTask(taskName(task, node, 'extract'), project, setup, node)
+ setup = configureExtractTask(taskName(task, node, 'extract'), project, setup, node, configuration)
setup = configureWriteConfigTask(taskName(task, node, 'configure'), project, setup, node)
setup = configureExtraConfigFilesTask(taskName(task, node, 'extraConfig'), project, setup, node)
setup = configureCopyPluginsTask(taskName(task, node, 'copyPlugins'), project, setup, node)
@@ -151,27 +191,28 @@
}
/** Adds a task to extract the elasticsearch distribution */
- static Task configureExtractTask(String name, Project project, Task setup, NodeInfo node) {
+ static Task configureExtractTask(String name, Project project, Task setup, NodeInfo node, Configuration configuration) {
- List extractDependsOn = [project.configurations.elasticsearchDistro, setup]
+ List extractDependsOn = [configuration, setup]
- /* project.configurations.elasticsearchDistro.singleFile will be an
- external artifact if this is being run by a plugin not living in the
- elasticsearch source tree. If this is a plugin built in the
- elasticsearch source tree or this is a distro in the elasticsearch
- source tree then this should be the version of elasticsearch built
- by the source tree. If it isn't then Bad Things(TM) will happen. */
+ /* configuration.singleFile will be an external artifact if this is being run by a plugin not living in the
+ elasticsearch source tree. If this is a plugin built in the elasticsearch source tree or this is a distro in
+ the elasticsearch source tree then this should be the version of elasticsearch built by the source tree.
+ If it isn't then Bad Things(TM) will happen. */
Task extract
switch (node.config.distribution) {
case 'integ-test-zip':
case 'zip':
extract = project.tasks.create(name: name, type: Copy, dependsOn: extractDependsOn) {
- from { project.zipTree(project.configurations.elasticsearchDistro.singleFile) }
+ from {
+ project.zipTree(configuration.singleFile)
+ }
into node.baseDir
}
break;
case 'tar':
extract = project.tasks.create(name: name, type: Copy, dependsOn: extractDependsOn) {
from {
- project.tarTree(project.resources.gzip(project.configurations.elasticsearchDistro.singleFile))
+ project.tarTree(project.resources.gzip(configuration.singleFile))
}
into node.baseDir
}
@@ -180,7 +221,7 @@
File rpmDatabase = new File(node.baseDir, 'rpm-database')
File rpmExtracted = new File(node.baseDir, 'rpm-extracted')
/* Delay reading the location of the rpm file until task execution */
- Object rpm = "${ -> project.configurations.elasticsearchDistro.singleFile}"
+ Object rpm = "${ -> configuration.singleFile}"
extract = project.tasks.create(name: name, type: LoggedExec, dependsOn: extractDependsOn) {
commandLine 'rpm', '--badreloc', '--nodeps', '--noscripts', '--notriggers',
'--dbpath', rpmDatabase,
@@ -195,7 +236,7 @@
case 'deb':
/* Delay reading the location of the deb file until task execution */
File debExtracted = new File(node.baseDir, 'deb-extracted')
- Object deb = "${ -> project.configurations.elasticsearchDistro.singleFile}"
+ Object deb = "${ -> configuration.singleFile}"
extract = project.tasks.create(name: name, type: LoggedExec, dependsOn: extractDependsOn) {
commandLine 'dpkg-deb', '-x', deb, debExtracted
doFirst {
@@ -214,26 +255,28 @@
Map esConfig = [
'cluster.name' : node.clusterName,
'pidfile' : node.pidFile,
- 'path.repo' : "${node.homeDir}/repo",
+ 'path.repo' : "${node.sharedDir}/repo",
- 'path.shared_data' : "${node.homeDir}/../",
+ 'path.shared_data' : "${node.sharedDir}/",
// Define a node attribute so we can test that it exists
'node.testattr' : 'test',
'repositories.url.allowed_urls': 'http://snapshot.test*'
]
- if (node.config.numNodes == 1) {
- esConfig['http.port'] = node.config.httpPort
- esConfig['transport.tcp.port'] = node.config.transportPort
- } else {
- // TODO: fix multi node so it doesn't use hardcoded prots
- esConfig['http.port'] = 9400 + node.nodeNum
- esConfig['transport.tcp.port'] = 9500 + node.nodeNum
- esConfig['discovery.zen.ping.unicast.hosts'] = (0..<node.config.numNodes).collect{"localhost:${9500 + it}"}.join(',')
- }
+ esConfig['http.port'] = node.config.httpPort
+ esConfig['transport.tcp.port'] = node.config.transportPort
esConfig.putAll(node.config.settings)
Task writeConfig = project.tasks.create(name: name, type: DefaultTask, dependsOn: setup)
writeConfig.doFirst {
+ if (node.nodeNum > 0) { // multi-node cluster case, we have to wait for the seed node to startup
+ ant.waitfor(maxwait: '20', maxwaitunit: 'second', checkevery: '500', checkeveryunit: 'millisecond') {
+ resourceexists {
+ file(file: node.config.seedNodePortsFile.toString())
+ }
+ }
+ // the seed node is enough to form the cluster - all subsequent nodes will get the seed node as a unicast
+ // host and join the cluster via that.
+ esConfig['discovery.zen.ping.unicast.hosts'] = "\"${node.config.seedNodeTransportUri()}\""
+ }
File configFile = new File(node.confDir, 'elasticsearch.yml')
logger.info("Configuring ${configFile}")
configFile.setText(esConfig.collect { key, value -> "${key}: ${value}" }.join('\n'), 'UTF-8')
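Putting the new knobs together, a hedged usage sketch (the task name and version string are placeholders, not taken from this commit): asking for a three-node cluster in which the first node runs the older, backwards-compatibility distribution.

// Hypothetical cluster configuration using the new bwc support; with numNodes = 3 and
// numBwcNodes = 1, node 0 resolves from the elasticsearchBwcDistro configuration (bwcVersion)
// while nodes 1 and 2 resolve from the current elasticsearchDistro configuration.
integTest {
    cluster {
        numNodes = 3
        numBwcNodes = 1
        bwcVersion = '2.3.0'   // placeholder version string
    }
}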

NodeInfo.groovy

@@ -40,6 +40,9 @@ class NodeInfo {
/** root directory all node files and operations happen under */
File baseDir
+ /** shared data directory all nodes share */
+ File sharedDir
/** the pid file the node will use */
File pidFile
@@ -89,14 +92,15 @@
ByteArrayOutputStream buffer = new ByteArrayOutputStream()
/** Creates a node to run as part of a cluster for the given task */
- NodeInfo(ClusterConfiguration config, int nodeNum, Project project, Task task) {
+ NodeInfo(ClusterConfiguration config, int nodeNum, Project project, Task task, String nodeVersion, File sharedDir) {
this.config = config
this.nodeNum = nodeNum
+ this.sharedDir = sharedDir
clusterName = "${task.path.replace(':', '_').substring(1)}"
baseDir = new File(project.buildDir, "cluster/${task.name} node${nodeNum}")
pidFile = new File(baseDir, 'es.pid')
- homeDir = homeDir(baseDir, config.distribution)
+ homeDir = homeDir(baseDir, config.distribution, nodeVersion)
- confDir = confDir(baseDir, config.distribution)
+ confDir = confDir(baseDir, config.distribution, nodeVersion)
configFile = new File(confDir, 'elasticsearch.yml')
// even for rpm/deb, the logs are under home because we dont start with real services
File logsDir = new File(homeDir, 'logs')
@@ -129,14 +133,15 @@
'JAVA_HOME' : project.javaHome,
'ES_GC_OPTS': config.jvmArgs // we pass these with the undocumented gc opts so the argline can set gc, etc
]
- args.add("-Des.node.portsfile=true")
+ args.addAll("-E", "es.node.portsfile=true")
- args.addAll(config.systemProperties.collect { key, value -> "-D${key}=${value}" })
+ env.put('ES_JAVA_OPTS', config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" "))
for (Map.Entry<String, String> property : System.properties.entrySet()) {
if (property.getKey().startsWith('es.')) {
- args.add("-D${property.getKey()}=${property.getValue()}")
+ args.add("-E")
+ args.add("${property.getKey()}=${property.getValue()}")
}
}
- args.add("-Des.path.conf=${confDir}")
+ args.addAll("-E", "es.path.conf=${confDir}")
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
args.add('"') // end the entire command, quoted
}
@@ -181,13 +186,13 @@
}
/** Returns the directory elasticsearch home is contained in for the given distribution */
- static File homeDir(File baseDir, String distro) {
+ static File homeDir(File baseDir, String distro, String nodeVersion) {
String path
switch (distro) {
case 'integ-test-zip':
case 'zip':
case 'tar':
- path = "elasticsearch-${VersionProperties.elasticsearch}"
+ path = "elasticsearch-${nodeVersion}"
break
case 'rpm':
case 'deb':
@@ -199,12 +204,12 @@
return new File(baseDir, path)
}
- static File confDir(File baseDir, String distro) {
+ static File confDir(File baseDir, String distro, String nodeVersion) {
switch (distro) {
case 'integ-test-zip':
case 'zip':
case 'tar':
- return new File(homeDir(baseDir, distro), 'config')
+ return new File(homeDir(baseDir, distro, nodeVersion), 'config')
case 'rpm':
case 'deb':
return new File(baseDir, "${distro}-extracted/etc/elasticsearch")
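The argument handling above replaces -Des.* style flags with the new -E setting syntax and routes test system properties through ES_JAVA_OPTS instead of the elasticsearch command line. A rough illustration with invented values (not taken from the commit):

// Sketch only: how the env map and args list assembled above translate into a node launch.
def env  = ['ES_JAVA_OPTS': '-Dtests.logger.level=DEBUG']                  // from config.systemProperties
def args = ['-E', 'es.node.portsfile=true',                                // was -Des.node.portsfile=true
            '-E', 'es.path.conf=build/cluster/integTest node0/config']     // invented path
println "bin/elasticsearch ${args.join(' ')}   (env: ${env})"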

checkstyle_suppressions.xml

@@ -208,7 +208,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]TransportClearScrollAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]TransportMultiSearchAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]suggest[/\\]SuggestResponse.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]suggest[/\\]TransportSuggestAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]ActionFilter.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]AutoCreateIndex.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]DelegatingActionListener.java" checks="LineLength" />
@@ -259,7 +258,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]update[/\\]UpdateRequest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]update[/\\]UpdateRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]Bootstrap.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]BootstrapCLIParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]JNAKernel32Library.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]JNANatives.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]JVMCheck.java" checks="LineLength" />
@@ -269,14 +267,12 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cache[/\\]recycler[/\\]PageCacheRecycler.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]ElasticsearchClient.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]FilterClient.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]Requests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]node[/\\]NodeClient.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]support[/\\]AbstractClient.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]transport[/\\]TransportClient.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]transport[/\\]TransportClientNodesService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]transport[/\\]support[/\\]TransportProxyClient.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterModule.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterState.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterStateObserver.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterStateUpdateTask.java" checks="LineLength" />
@@ -393,7 +389,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]CollectionUtils.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]ExtensionPoint.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]concurrent[/\\]EsExecutors.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]concurrent[/\\]EsThreadPoolExecutor.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]concurrent[/\\]PrioritizedEsThreadPoolExecutor.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]concurrent[/\\]ThreadBarrier.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]concurrent[/\\]ThreadContext.java" checks="LineLength" />
@@ -464,7 +459,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]codec[/\\]PerFieldMappingPostingFormatCodec.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]engine[/\\]ElasticsearchConcurrentMergeScheduler.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]engine[/\\]Engine.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]engine[/\\]EngineConfig.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]engine[/\\]InternalEngine.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]engine[/\\]LiveVersionMap.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]engine[/\\]ShadowEngine.java" checks="LineLength" />
@@ -617,7 +611,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]similarity[/\\]SimilarityService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]snapshots[/\\]blobstore[/\\]BlobStoreIndexShardRepository.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]snapshots[/\\]blobstore[/\\]BlobStoreIndexShardSnapshots.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]FsDirectoryService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]IndexStore.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]IndexStoreConfig.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]LegacyVerification.java" checks="LineLength" />
@@ -669,10 +662,7 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]monitor[/\\]process[/\\]ProcessService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]node[/\\]Node.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]node[/\\]internal[/\\]InternalSettingsPreparer.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]PercolateContext.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]PercolateDocumentParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]PercolatorQuery.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]PercolatorService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugins[/\\]DummyPluginInfo.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugins[/\\]InstallPluginCommand.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugins[/\\]PluginsService.java" checks="LineLength" />
@@ -896,7 +886,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]SuggestUtils.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]Suggesters.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]CompletionSuggestParser.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]CompletionSuggester.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]context[/\\]CategoryContextMapping.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]context[/\\]ContextMapping.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]context[/\\]GeoContextMapping.java" checks="LineLength" />
@@ -907,12 +896,9 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]LinearInterpoatingScorer.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]NoisyChannelSpellChecker.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]PhraseSuggestParser.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]PhraseSuggester.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]PhraseSuggestionBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]StupidBackoffScorer.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]WordScorer.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]term[/\\]TermSuggestParser.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]term[/\\]TermSuggester.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]RestoreService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SnapshotInfo.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SnapshotShardFailure.java" checks="LineLength" />
@@ -1336,14 +1322,9 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]monitor[/\\]os[/\\]OsProbeTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]nodesinfo[/\\]NodeInfoStreamingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]options[/\\]detailederrors[/\\]DetailedErrorsEnabledIT.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]ConcurrentPercolatorIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]MultiPercolatorIT.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]PercolateDocumentParserTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]PercolatorIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]PercolatorQueryTests.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]PercolatorServiceTests.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]RecoveryPercolatorIT.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]TTLPercolatorIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugins[/\\]PluginInfoTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugins[/\\]PluginsServiceTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]recovery[/\\]FullRollingRestartIT.java" checks="LineLength" />
@@ -1403,7 +1384,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]basic[/\\]SearchWithRandomIOExceptionsIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]basic[/\\]TransportSearchFailuresIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]basic[/\\]TransportTwoNodesSearchIT.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]builder[/\\]SearchSourceBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]child[/\\]ChildQuerySearchIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]child[/\\]ParentFieldLoadingIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchSubPhasePluginIT.java" checks="LineLength" />
@@ -1425,7 +1405,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]query[/\\]ExistsIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]query[/\\]MultiMatchQueryIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]query[/\\]SearchQueryIT.java" checks="LineLength" />
- <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]rescore[/\\]QueryRescoreBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]scroll[/\\]DuelScrollIT.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]scroll[/\\]DuelScrollIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]scroll[/\\]SearchScrollIT.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]scroll[/\\]SearchScrollIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]scroll[/\\]SearchScrollWithFailingNodesIT.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]scroll[/\\]SearchScrollWithFailingNodesIT.java" checks="LineLength" />
@ -1436,12 +1415,10 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]CompletionSuggestSearchIT.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]CompletionSuggestSearchIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]ContextCompletionSuggestSearchIT.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]ContextCompletionSuggestSearchIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]CustomSuggester.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]CustomSuggester.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]CustomSuggesterSearchIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]CategoryContextMappingTests.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]CategoryContextMappingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]GeoContextMappingTests.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]GeoContextMappingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]DirectCandidateGeneratorTests.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]DirectCandidateGeneratorTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]NoisyChannelSpellCheckerTests.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]NoisyChannelSpellCheckerTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]SmoothingModelTestCase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]similarity[/\\]SimilarityIT.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]similarity[/\\]SimilarityIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]AbstractSnapshotIntegTestCase.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]AbstractSnapshotIntegTestCase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]BlobStoreFormatIT.java" checks="LineLength" /> <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]BlobStoreFormatIT.java" checks="LineLength" />
@ -1486,7 +1463,6 @@
<suppress files="modules[/\\]lang-groovy[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]groovy[/\\]GroovyScriptEngineService.java" checks="LineLength" /> <suppress files="modules[/\\]lang-groovy[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]groovy[/\\]GroovyScriptEngineService.java" checks="LineLength" />
<suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]messy[/\\]tests[/\\]BucketScriptTests.java" checks="LineLength" /> <suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]messy[/\\]tests[/\\]BucketScriptTests.java" checks="LineLength" />
<suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]messy[/\\]tests[/\\]BulkTests.java" checks="LineLength" /> <suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]messy[/\\]tests[/\\]BulkTests.java" checks="LineLength" />
<suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]messy[/\\]tests[/\\]DateRangeTests.java" checks="LineLength" />
<suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]messy[/\\]tests[/\\]DoubleTermsTests.java" checks="LineLength" /> <suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]messy[/\\]tests[/\\]DoubleTermsTests.java" checks="LineLength" />
<suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]messy[/\\]tests[/\\]EquivalenceTests.java" checks="LineLength" /> <suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]messy[/\\]tests[/\\]EquivalenceTests.java" checks="LineLength" />
<suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]messy[/\\]tests[/\\]FunctionScoreTests.java" checks="LineLength" /> <suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]messy[/\\]tests[/\\]FunctionScoreTests.java" checks="LineLength" />
@ -1598,7 +1574,6 @@
<suppress files="plugins[/\\]repository-s3[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cloud[/\\]aws[/\\]blobstore[/\\]MockDefaultS3OutputStream.java" checks="LineLength" /> <suppress files="plugins[/\\]repository-s3[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cloud[/\\]aws[/\\]blobstore[/\\]MockDefaultS3OutputStream.java" checks="LineLength" />
<suppress files="plugins[/\\]repository-s3[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]s3[/\\]AbstractS3SnapshotRestoreTest.java" checks="LineLength" /> <suppress files="plugins[/\\]repository-s3[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]s3[/\\]AbstractS3SnapshotRestoreTest.java" checks="LineLength" />
<suppress files="plugins[/\\]store-smb[/\\]src[/\\]main[/\\]java[/\\]org[/\\]apache[/\\]lucene[/\\]store[/\\]SmbDirectoryWrapper.java" checks="LineLength" /> <suppress files="plugins[/\\]store-smb[/\\]src[/\\]main[/\\]java[/\\]org[/\\]apache[/\\]lucene[/\\]store[/\\]SmbDirectoryWrapper.java" checks="LineLength" />
<suppress files="qa[/\\]evil-tests[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]BootstrapCliParserTests.java" checks="LineLength" />
<suppress files="qa[/\\]evil-tests[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]ESPolicyUnitTests.java" checks="LineLength" /> <suppress files="qa[/\\]evil-tests[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]ESPolicyUnitTests.java" checks="LineLength" />
<suppress files="qa[/\\]evil-tests[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]EvilSecurityTests.java" checks="LineLength" /> <suppress files="qa[/\\]evil-tests[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]EvilSecurityTests.java" checks="LineLength" />
<suppress files="qa[/\\]evil-tests[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]cli[/\\]CheckFileCommandTests.java" checks="LineLength" /> <suppress files="qa[/\\]evil-tests[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]cli[/\\]CheckFileCommandTests.java" checks="LineLength" />

View File

@ -0,0 +1,30 @@
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
java.nio.file.Paths @ Use org.elasticsearch.common.io.PathUtils.get() instead.
java.nio.file.FileSystems#getDefault() @ use org.elasticsearch.common.io.PathUtils.getDefaultFileSystem() instead.
java.nio.file.Files#getFileStore(java.nio.file.Path) @ Use org.elasticsearch.env.Environment.getFileStore() instead, impacted by JDK-8034057
java.nio.file.Files#isWritable(java.nio.file.Path) @ Use org.elasticsearch.env.Environment.isWritable() instead, impacted by JDK-8034057
@defaultMessage Use org.elasticsearch.common.Randomness#get for reproducible sources of randomness
java.util.Random#<init>()
java.util.concurrent.ThreadLocalRandom
java.security.MessageDigest#clone() @ use org.elasticsearch.common.hash.MessageDigests
@defaultMessage this should not have been added to lucene in the first place
org.apache.lucene.index.IndexReader#getCombinedCoreAndDeletesKey()
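
Each line in this new signatures file names a JDK class or method that the build's forbidden-APIs check rejects, optionally followed by " @ " and the message shown to the developer; a @defaultMessage line supplies the message for the entries that follow it. As a rough illustration (a hypothetical snippet, not part of this commit; class name and paths are made up), code that trips the Paths and Random bans above would be rewritten roughly like this, using the replacements the messages point to:

    import java.nio.file.Path;
    import java.util.Random;

    import org.elasticsearch.common.Randomness;
    import org.elasticsearch.common.io.PathUtils;

    public class ForbiddenApiFixes {
        // Instead of the banned java.nio.file.Paths.get("config", "elasticsearch.yml"):
        Path config = PathUtils.get("config", "elasticsearch.yml");

        // Instead of the banned new java.util.Random(): a reproducible source of randomness.
        Random random = Randomness.get();
    }
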

View File

@ -41,14 +41,10 @@ org.apache.lucene.index.IndexReader#addReaderClosedListener(org.apache.lucene.in
org.apache.lucene.index.IndexReader#removeReaderClosedListener(org.apache.lucene.index.IndexReader$ReaderClosedListener) org.apache.lucene.index.IndexReader#removeReaderClosedListener(org.apache.lucene.index.IndexReader$ReaderClosedListener)
@defaultMessage Pass the precision step from the mappings explicitly instead @defaultMessage Pass the precision step from the mappings explicitly instead
org.apache.lucene.search.NumericRangeQuery#newDoubleRange(java.lang.String,java.lang.Double,java.lang.Double,boolean,boolean) org.apache.lucene.search.LegacyNumericRangeQuery#newDoubleRange(java.lang.String,java.lang.Double,java.lang.Double,boolean,boolean)
org.apache.lucene.search.NumericRangeQuery#newFloatRange(java.lang.String,java.lang.Float,java.lang.Float,boolean,boolean) org.apache.lucene.search.LegacyNumericRangeQuery#newFloatRange(java.lang.String,java.lang.Float,java.lang.Float,boolean,boolean)
org.apache.lucene.search.NumericRangeQuery#newIntRange(java.lang.String,java.lang.Integer,java.lang.Integer,boolean,boolean) org.apache.lucene.search.LegacyNumericRangeQuery#newIntRange(java.lang.String,java.lang.Integer,java.lang.Integer,boolean,boolean)
org.apache.lucene.search.NumericRangeQuery#newLongRange(java.lang.String,java.lang.Long,java.lang.Long,boolean,boolean) org.apache.lucene.search.LegacyNumericRangeQuery#newLongRange(java.lang.String,java.lang.Long,java.lang.Long,boolean,boolean)
org.apache.lucene.search.NumericRangeFilter#newDoubleRange(java.lang.String,java.lang.Double,java.lang.Double,boolean,boolean)
org.apache.lucene.search.NumericRangeFilter#newFloatRange(java.lang.String,java.lang.Float,java.lang.Float,boolean,boolean)
org.apache.lucene.search.NumericRangeFilter#newIntRange(java.lang.String,java.lang.Integer,java.lang.Integer,boolean,boolean)
org.apache.lucene.search.NumericRangeFilter#newLongRange(java.lang.String,java.lang.Long,java.lang.Long,boolean,boolean)
@defaultMessage Only use wait / notify when really needed try to use concurrency primitives, latches or callbacks instead. @defaultMessage Only use wait / notify when really needed try to use concurrency primitives, latches or callbacks instead.
java.lang.Object#wait() java.lang.Object#wait()
@ -88,9 +84,6 @@ java.util.concurrent.Future#cancel(boolean)
org.elasticsearch.common.io.PathUtils#get(java.lang.String, java.lang.String[]) org.elasticsearch.common.io.PathUtils#get(java.lang.String, java.lang.String[])
org.elasticsearch.common.io.PathUtils#get(java.net.URI) org.elasticsearch.common.io.PathUtils#get(java.net.URI)
@defaultMessage Don't use deprecated Query#setBoost, wrap the query into a BoostQuery instead
org.apache.lucene.search.Query#setBoost(float)
@defaultMessage Constructing a DateTime without a time zone is dangerous @defaultMessage Constructing a DateTime without a time zone is dangerous
org.joda.time.DateTime#<init>() org.joda.time.DateTime#<init>()
org.joda.time.DateTime#<init>(long) org.joda.time.DateTime#<init>(long)
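
The setBoost entry above tracks the Lucene deprecation of mutable query boosts: instead of mutating a query, callers wrap it in a BoostQuery. A minimal illustrative sketch (not code from this commit; the class, field, and term names are hypothetical):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.BoostQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TermQuery;

    class BoostQueryExample {
        Query boosted() {
            Query query = new TermQuery(new Term("field", "value"));
            // query.setBoost(2f) is forbidden above; wrap the query instead:
            return new BoostQuery(query, 2f);
        }
    }
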

View File

@ -33,23 +33,6 @@ java.util.Formatter#<init>(java.lang.String,java.lang.String,java.util.Locale)
java.io.RandomAccessFile java.io.RandomAccessFile
java.nio.file.Path#toFile() java.nio.file.Path#toFile()
@defaultMessage Don't use deprecated lucene apis
org.apache.lucene.index.DocsEnum
org.apache.lucene.index.DocsAndPositionsEnum
org.apache.lucene.queries.TermFilter
org.apache.lucene.queries.TermsFilter
org.apache.lucene.search.Filter
org.apache.lucene.search.FilteredQuery
org.apache.lucene.search.TermRangeFilter
org.apache.lucene.search.NumericRangeFilter
org.apache.lucene.search.PrefixFilter
org.apache.lucene.search.QueryWrapperFilter
org.apache.lucene.search.join.BitDocIdSetCachingWrapperFilter
org.apache.lucene.index.IndexWriter#isLocked(org.apache.lucene.store.Directory)
java.nio.file.Paths @ Use org.elasticsearch.common.io.PathUtils.get() instead.
java.nio.file.FileSystems#getDefault() @ use org.elasticsearch.common.io.PathUtils.getDefaultFileSystem() instead.
@defaultMessage Specify a location for the temp file/directory instead. @defaultMessage Specify a location for the temp file/directory instead.
java.nio.file.Files#createTempDirectory(java.lang.String,java.nio.file.attribute.FileAttribute[]) java.nio.file.Files#createTempDirectory(java.lang.String,java.nio.file.attribute.FileAttribute[])
java.nio.file.Files#createTempFile(java.lang.String,java.lang.String,java.nio.file.attribute.FileAttribute[]) java.nio.file.Files#createTempFile(java.lang.String,java.lang.String,java.nio.file.attribute.FileAttribute[])
@ -62,9 +45,6 @@ java.io.ObjectInput
java.nio.file.Files#isHidden(java.nio.file.Path) @ Dependent on the operating system, use FileSystemUtils.isHidden instead java.nio.file.Files#isHidden(java.nio.file.Path) @ Dependent on the operating system, use FileSystemUtils.isHidden instead
java.nio.file.Files#getFileStore(java.nio.file.Path) @ Use org.elasticsearch.env.Environment.getFileStore() instead, impacted by JDK-8034057
java.nio.file.Files#isWritable(java.nio.file.Path) @ Use org.elasticsearch.env.Environment.isWritable() instead, impacted by JDK-8034057
@defaultMessage Resolve hosts explicitly to the address(es) you want with InetAddress. @defaultMessage Resolve hosts explicitly to the address(es) you want with InetAddress.
java.net.InetSocketAddress#<init>(java.lang.String,int) java.net.InetSocketAddress#<init>(java.lang.String,int)
java.net.Socket#<init>(java.lang.String,int) java.net.Socket#<init>(java.lang.String,int)
@ -103,9 +83,6 @@ java.lang.Class#getDeclaredMethods() @ Do not violate java's access system: Use
java.lang.reflect.AccessibleObject#setAccessible(boolean) java.lang.reflect.AccessibleObject#setAccessible(boolean)
java.lang.reflect.AccessibleObject#setAccessible(java.lang.reflect.AccessibleObject[], boolean) java.lang.reflect.AccessibleObject#setAccessible(java.lang.reflect.AccessibleObject[], boolean)
@defaultMessage this should not have been added to lucene in the first place
org.apache.lucene.index.IndexReader#getCombinedCoreAndDeletesKey()
@defaultMessage this method needs special permission @defaultMessage this method needs special permission
java.lang.Thread#getAllStackTraces() java.lang.Thread#getAllStackTraces()
@ -126,8 +103,3 @@ java.util.Collections#EMPTY_MAP
java.util.Collections#EMPTY_SET java.util.Collections#EMPTY_SET
java.util.Collections#shuffle(java.util.List) @ Use java.util.Collections#shuffle(java.util.List, java.util.Random) with a reproducible source of randomness java.util.Collections#shuffle(java.util.List) @ Use java.util.Collections#shuffle(java.util.List, java.util.Random) with a reproducible source of randomness
@defaultMessage Use org.elasticsearch.common.Randomness#get for reproducible sources of randomness
java.util.Random#<init>()
java.util.concurrent.ThreadLocalRandom
java.security.MessageDigest#clone() @ use org.elasticsearch.common.hash.MessageDigests

View File

@ -1,8 +1,8 @@
elasticsearch = 5.0.0 elasticsearch = 5.0.0
lucene = 5.5.0 lucene = 6.0.0-snapshot-f0aa4fc
# optional dependencies # optional dependencies
spatial4j = 0.5 spatial4j = 0.6
jts = 1.13 jts = 1.13
jackson = 2.7.1 jackson = 2.7.1
log4j = 1.2.17 log4j = 1.2.17

View File

@ -42,13 +42,14 @@ dependencies {
compile "org.apache.lucene:lucene-queryparser:${versions.lucene}" compile "org.apache.lucene:lucene-queryparser:${versions.lucene}"
compile "org.apache.lucene:lucene-sandbox:${versions.lucene}" compile "org.apache.lucene:lucene-sandbox:${versions.lucene}"
compile "org.apache.lucene:lucene-spatial:${versions.lucene}" compile "org.apache.lucene:lucene-spatial:${versions.lucene}"
compile "org.apache.lucene:lucene-spatial-extras:${versions.lucene}"
compile "org.apache.lucene:lucene-spatial3d:${versions.lucene}" compile "org.apache.lucene:lucene-spatial3d:${versions.lucene}"
compile "org.apache.lucene:lucene-suggest:${versions.lucene}" compile "org.apache.lucene:lucene-suggest:${versions.lucene}"
compile 'org.elasticsearch:securesm:1.0' compile 'org.elasticsearch:securesm:1.0'
// utilities // utilities
compile 'commons-cli:commons-cli:1.3.1' compile 'net.sf.jopt-simple:jopt-simple:4.9'
compile 'com.carrotsearch:hppc:0.7.1' compile 'com.carrotsearch:hppc:0.7.1'
// time handling, remove with java 8 time // time handling, remove with java 8 time
@ -71,7 +72,7 @@ dependencies {
compile 'org.hdrhistogram:HdrHistogram:2.1.6' compile 'org.hdrhistogram:HdrHistogram:2.1.6'
// lucene spatial // lucene spatial
compile "com.spatial4j:spatial4j:${versions.spatial4j}", optional compile "org.locationtech.spatial4j:spatial4j:${versions.spatial4j}", optional
compile "com.vividsolutions:jts:${versions.jts}", optional compile "com.vividsolutions:jts:${versions.jts}", optional
// logging // logging
@ -168,11 +169,6 @@ thirdPartyAudit.excludes = [
'org.apache.commons.logging.Log', 'org.apache.commons.logging.Log',
'org.apache.commons.logging.LogFactory', 'org.apache.commons.logging.LogFactory',
// from org.apache.lucene.sandbox.queries.regex.JakartaRegexpCapabilities$JakartaRegexMatcher (lucene-sandbox)
'org.apache.regexp.CharacterIterator',
'org.apache.regexp.RE',
'org.apache.regexp.REProgram',
// from org.jboss.netty.handler.ssl.OpenSslEngine (netty) // from org.jboss.netty.handler.ssl.OpenSslEngine (netty)
'org.apache.tomcat.jni.Buffer', 'org.apache.tomcat.jni.Buffer',
'org.apache.tomcat.jni.Library', 'org.apache.tomcat.jni.Library',
@ -210,7 +206,7 @@ thirdPartyAudit.excludes = [
'org.jboss.marshalling.MarshallingConfiguration', 'org.jboss.marshalling.MarshallingConfiguration',
'org.jboss.marshalling.Unmarshaller', 'org.jboss.marshalling.Unmarshaller',
// from com.spatial4j.core.io.GeoJSONReader (spatial4j) // from org.locationtech.spatial4j.io.GeoJSONReader (spatial4j)
'org.noggit.JSONParser', 'org.noggit.JSONParser',
// from org.jboss.netty.container.osgi.NettyBundleActivator (netty) // from org.jboss.netty.container.osgi.NettyBundleActivator (netty)

View File

@ -33,7 +33,6 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.InPlaceMergeSorter; import org.apache.lucene.util.InPlaceMergeSorter;
import org.apache.lucene.util.ToStringUtils;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
@ -247,14 +246,15 @@ public abstract class BlendedTermQuery extends Query {
if (boosts != null) { if (boosts != null) {
boost = boosts[i]; boost = boosts[i];
} }
builder.append(ToStringUtils.boost(boost)); if (boost != 1f) {
builder.append('^').append(boost);
}
builder.append(", "); builder.append(", ");
} }
if (terms.length > 0) { if (terms.length > 0) {
builder.setLength(builder.length() - 2); builder.setLength(builder.length() - 2);
} }
builder.append("])"); builder.append("])");
builder.append(ToStringUtils.boost(getBoost()));
return builder.toString(); return builder.toString();
} }
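
Lucene 6 drops ToStringUtils.boost, so toString implementations append the caret suffix themselves and, as before, only when the boost differs from the default of 1. A stand-alone sketch of the pattern (a hypothetical helper, assuming nothing beyond the JDK):

    class BoostSuffixExample {
        static String describe(String term, float boost) {
            StringBuilder builder = new StringBuilder(term);
            if (boost != 1f) { // ToStringUtils.boost used to render "" for the default boost
                builder.append('^').append(boost);
            }
            return builder.toString();
        }
    }
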

View File

@ -24,6 +24,7 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.FuzzyQuery;
@ -165,7 +166,7 @@ public class MapperQueryParser extends QueryParser {
} }
if (clauses.size() == 0) // happens for stopwords if (clauses.size() == 0) // happens for stopwords
return null; return null;
return getBooleanQuery(clauses, true); return getBooleanQueryCoordDisabled(clauses);
} }
} else { } else {
return getFieldQuerySingle(field, queryText, quoted); return getFieldQuerySingle(field, queryText, quoted);
@ -267,7 +268,7 @@ public class MapperQueryParser extends QueryParser {
} }
if (clauses.size() == 0) // happens for stopwords if (clauses.size() == 0) // happens for stopwords
return null; return null;
return getBooleanQuery(clauses, true); return getBooleanQueryCoordDisabled(clauses);
} }
} else { } else {
return super.getFieldQuery(field, queryText, slop); return super.getFieldQuery(field, queryText, slop);
@ -318,7 +319,7 @@ public class MapperQueryParser extends QueryParser {
} }
if (clauses.size() == 0) // happens for stopwords if (clauses.size() == 0) // happens for stopwords
return null; return null;
return getBooleanQuery(clauses, true); return getBooleanQueryCoordDisabled(clauses);
} }
} }
@ -380,7 +381,7 @@ public class MapperQueryParser extends QueryParser {
clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD)); clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD));
} }
} }
return getBooleanQuery(clauses, true); return getBooleanQueryCoordDisabled(clauses);
} }
} else { } else {
return getFuzzyQuerySingle(field, termStr, minSimilarity); return getFuzzyQuerySingle(field, termStr, minSimilarity);
@ -445,7 +446,7 @@ public class MapperQueryParser extends QueryParser {
} }
if (clauses.size() == 0) // happens for stopwords if (clauses.size() == 0) // happens for stopwords
return null; return null;
return getBooleanQuery(clauses, true); return getBooleanQueryCoordDisabled(clauses);
} }
} else { } else {
return getPrefixQuerySingle(field, termStr); return getPrefixQuerySingle(field, termStr);
@ -520,7 +521,7 @@ public class MapperQueryParser extends QueryParser {
for (String token : tlist) { for (String token : tlist) {
clauses.add(new BooleanClause(super.getPrefixQuery(field, token), BooleanClause.Occur.SHOULD)); clauses.add(new BooleanClause(super.getPrefixQuery(field, token), BooleanClause.Occur.SHOULD));
} }
return getBooleanQuery(clauses, true); return getBooleanQueryCoordDisabled(clauses);
} }
} }
@ -575,7 +576,7 @@ public class MapperQueryParser extends QueryParser {
} }
if (clauses.size() == 0) // happens for stopwords if (clauses.size() == 0) // happens for stopwords
return null; return null;
return getBooleanQuery(clauses, true); return getBooleanQueryCoordDisabled(clauses);
} }
} else { } else {
return getWildcardQuerySingle(field, termStr); return getWildcardQuerySingle(field, termStr);
@ -704,7 +705,7 @@ public class MapperQueryParser extends QueryParser {
} }
if (clauses.size() == 0) // happens for stopwords if (clauses.size() == 0) // happens for stopwords
return null; return null;
return getBooleanQuery(clauses, true); return getBooleanQueryCoordDisabled(clauses);
} }
} else { } else {
return getRegexpQuerySingle(field, termStr); return getRegexpQuerySingle(field, termStr);
@ -739,10 +740,24 @@ public class MapperQueryParser extends QueryParser {
setAnalyzer(oldAnalyzer); setAnalyzer(oldAnalyzer);
} }
} }
/**
* @deprecated review all use of this, don't rely on coord
*/
@Deprecated
protected Query getBooleanQueryCoordDisabled(List<BooleanClause> clauses) throws ParseException {
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.setDisableCoord(true);
for (BooleanClause clause : clauses) {
builder.add(clause);
}
return fixNegativeQueryIfNeeded(builder.build());
}
@Override @Override
protected Query getBooleanQuery(List<BooleanClause> clauses, boolean disableCoord) throws ParseException { protected Query getBooleanQuery(List<BooleanClause> clauses) throws ParseException {
Query q = super.getBooleanQuery(clauses, disableCoord); Query q = super.getBooleanQuery(clauses);
if (q == null) { if (q == null) {
return null; return null;
} }
@ -769,12 +784,12 @@ public class MapperQueryParser extends QueryParser {
} }
pq = builder.build(); pq = builder.build();
//make sure that the boost hasn't been set beforehand, otherwise we'd lose it //make sure that the boost hasn't been set beforehand, otherwise we'd lose it
assert q.getBoost() == 1f;
assert q instanceof BoostQuery == false; assert q instanceof BoostQuery == false;
return pq; return pq;
} else if (q instanceof MultiPhraseQuery) { } else if (q instanceof MultiPhraseQuery) {
((MultiPhraseQuery) q).setSlop(slop); MultiPhraseQuery.Builder builder = new MultiPhraseQuery.Builder((MultiPhraseQuery) q);
return q; builder.setSlop(slop);
return builder.build();
} else { } else {
return q; return q;
} }
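
Lucene 6 also removes the disableCoord parameter from QueryParser#getBooleanQuery, which is why the parser grows the deprecated getBooleanQueryCoordDisabled helper above. The same pattern as a stand-alone sketch (illustrative class, field, and term names, not from this commit):

    import java.util.Arrays;
    import java.util.List;

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.BooleanClause;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TermQuery;

    class CoordDisabledExample {
        Query build() {
            List<BooleanClause> clauses = Arrays.asList(
                new BooleanClause(new TermQuery(new Term("body", "quick")), BooleanClause.Occur.SHOULD),
                new BooleanClause(new TermQuery(new Term("body", "fox")), BooleanClause.Occur.SHOULD));
            BooleanQuery.Builder builder = new BooleanQuery.Builder();
            builder.setDisableCoord(true); // equivalent of the old getBooleanQuery(clauses, true)
            for (BooleanClause clause : clauses) {
                builder.add(clause);
            }
            return builder.build();
        }
    }
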

View File

@ -26,8 +26,7 @@ import java.io.IOException;
/** /**
* Abstract decorator class of a DocIdSetIterator * Abstract decorator class of a DocIdSetIterator
* implementation that provides on-demand filter/validation * implementation that provides on-demand filter/validation
* mechanism on an underlying DocIdSetIterator. See {@link * mechanism on an underlying DocIdSetIterator.
* FilteredDocIdSet}.
*/ */
public abstract class XFilteredDocIdSetIterator extends DocIdSetIterator { public abstract class XFilteredDocIdSetIterator extends DocIdSetIterator {
protected DocIdSetIterator _innerIter; protected DocIdSetIterator _innerIter;

View File

@ -34,7 +34,6 @@ import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import java.io.IOException; import java.io.IOException;
import java.util.Collection; import java.util.Collection;
import java.util.List;
/** /**
* *
@ -68,7 +67,7 @@ public class CustomFieldQuery extends FieldQuery {
flatten(((FiltersFunctionScoreQuery) sourceQuery).getSubQuery(), reader, flatQueries, boost); flatten(((FiltersFunctionScoreQuery) sourceQuery).getSubQuery(), reader, flatQueries, boost);
} else if (sourceQuery instanceof MultiPhraseQuery) { } else if (sourceQuery instanceof MultiPhraseQuery) {
MultiPhraseQuery q = ((MultiPhraseQuery) sourceQuery); MultiPhraseQuery q = ((MultiPhraseQuery) sourceQuery);
convertMultiPhraseQuery(0, new int[q.getTermArrays().size()], q, q.getTermArrays(), q.getPositions(), reader, flatQueries); convertMultiPhraseQuery(0, new int[q.getTermArrays().length], q, q.getTermArrays(), q.getPositions(), reader, flatQueries);
} else if (sourceQuery instanceof BlendedTermQuery) { } else if (sourceQuery instanceof BlendedTermQuery) {
final BlendedTermQuery blendedTermQuery = (BlendedTermQuery) sourceQuery; final BlendedTermQuery blendedTermQuery = (BlendedTermQuery) sourceQuery;
flatten(blendedTermQuery.rewrite(reader), reader, flatQueries, boost); flatten(blendedTermQuery.rewrite(reader), reader, flatQueries, boost);
@ -77,7 +76,7 @@ public class CustomFieldQuery extends FieldQuery {
} }
} }
private void convertMultiPhraseQuery(int currentPos, int[] termsIdx, MultiPhraseQuery orig, List<Term[]> terms, int[] pos, IndexReader reader, Collection<Query> flatQueries) throws IOException { private void convertMultiPhraseQuery(int currentPos, int[] termsIdx, MultiPhraseQuery orig, Term[][] terms, int[] pos, IndexReader reader, Collection<Query> flatQueries) throws IOException {
if (currentPos == 0) { if (currentPos == 0) {
// if we have more than 16 terms // if we have more than 16 terms
int numTerms = 0; int numTerms = 0;
@ -87,7 +86,7 @@ public class CustomFieldQuery extends FieldQuery {
if (numTerms > 16) { if (numTerms > 16) {
for (Term[] currentPosTerm : terms) { for (Term[] currentPosTerm : terms) {
for (Term term : currentPosTerm) { for (Term term : currentPosTerm) {
super.flatten(new TermQuery(term), reader, flatQueries, orig.getBoost()); super.flatten(new TermQuery(term), reader, flatQueries, 1F);
} }
} }
return; return;
@ -97,16 +96,16 @@ public class CustomFieldQuery extends FieldQuery {
* we walk all possible ways and for each path down the MPQ we create a PhraseQuery; this is what FieldQuery supports. * we walk all possible ways and for each path down the MPQ we create a PhraseQuery; this is what FieldQuery supports.
* It seems expensive but most queries will be pretty small. * It seems expensive but most queries will be pretty small.
*/ */
if (currentPos == terms.size()) { if (currentPos == terms.length) {
PhraseQuery.Builder queryBuilder = new PhraseQuery.Builder(); PhraseQuery.Builder queryBuilder = new PhraseQuery.Builder();
queryBuilder.setSlop(orig.getSlop()); queryBuilder.setSlop(orig.getSlop());
for (int i = 0; i < termsIdx.length; i++) { for (int i = 0; i < termsIdx.length; i++) {
queryBuilder.add(terms.get(i)[termsIdx[i]], pos[i]); queryBuilder.add(terms[i][termsIdx[i]], pos[i]);
} }
Query query = queryBuilder.build(); Query query = queryBuilder.build();
this.flatten(query, reader, flatQueries, orig.getBoost()); this.flatten(query, reader, flatQueries, 1F);
} else { } else {
Term[] t = terms.get(currentPos); Term[] t = terms[currentPos];
for (int i = 0; i < t.length; i++) { for (int i = 0; i < t.length; i++) {
termsIdx[currentPos] = i; termsIdx[currentPos] = i;
convertMultiPhraseQuery(currentPos+1, termsIdx, orig, terms, pos, reader, flatQueries); convertMultiPhraseQuery(currentPos+1, termsIdx, orig, terms, pos, reader, flatQueries);
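
MultiPhraseQuery is immutable in Lucene 6: getTermArrays() now returns Term[][] instead of List<Term[]>, and slop can only be set through MultiPhraseQuery.Builder, which is why the highlighter code above switches to array indexing and builds a copy of the query. A minimal sketch of the new API (illustrative class, field, and term values):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.MultiPhraseQuery;

    class MultiPhraseExample {
        MultiPhraseQuery build() {
            MultiPhraseQuery.Builder builder = new MultiPhraseQuery.Builder();
            builder.add(new Term[] { new Term("body", "quick"), new Term("body", "fast") }, 0);
            builder.add(new Term[] { new Term("body", "fox") }, 1);
            builder.setSlop(2);
            MultiPhraseQuery query = builder.build();
            Term[][] termArrays = query.getTermArrays(); // Term[][] in Lucene 6
            assert termArrays.length == 2;
            return query;
        }
    }
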

View File

@ -645,8 +645,6 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
// 87 used to be for MergeMappingException // 87 used to be for MergeMappingException
INVALID_INDEX_TEMPLATE_EXCEPTION(org.elasticsearch.indices.InvalidIndexTemplateException.class, INVALID_INDEX_TEMPLATE_EXCEPTION(org.elasticsearch.indices.InvalidIndexTemplateException.class,
org.elasticsearch.indices.InvalidIndexTemplateException::new, 88), org.elasticsearch.indices.InvalidIndexTemplateException::new, 88),
PERCOLATE_EXCEPTION(org.elasticsearch.percolator.PercolateException.class,
org.elasticsearch.percolator.PercolateException::new, 89),
REFRESH_FAILED_ENGINE_EXCEPTION(org.elasticsearch.index.engine.RefreshFailedEngineException.class, REFRESH_FAILED_ENGINE_EXCEPTION(org.elasticsearch.index.engine.RefreshFailedEngineException.class,
org.elasticsearch.index.engine.RefreshFailedEngineException::new, 90), org.elasticsearch.index.engine.RefreshFailedEngineException::new, 90),
AGGREGATION_INITIALIZATION_EXCEPTION(org.elasticsearch.search.aggregations.AggregationInitializationException.class, AGGREGATION_INITIALIZATION_EXCEPTION(org.elasticsearch.search.aggregations.AggregationInitializationException.class,

View File

@ -35,212 +35,10 @@ import java.io.IOException;
@SuppressWarnings("deprecation") @SuppressWarnings("deprecation")
public class Version { public class Version {
// The logic for ID is: XXYYZZAA, where XX is major version, YY is minor version, ZZ is revision, and AA is Beta/RC indicator // The logic for ID is: XXYYZZAA, where XX is major version, YY is minor version, ZZ is revision, and AA is alpha/beta/rc indicator
// AA values below 50 are beta builds, and below 99 are RC builds, with 99 indicating a release // AA values below 25 are for alpha builds (since 5.0), and above 25 and below 50 are beta builds, and below 99 are RC builds, with 99 indicating a release
// the (internal) format of the id is there so we can easily do after/before checks on the id // the (internal) format of the id is there so we can easily do after/before checks on the id
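
Spelled out, the XXYYZZAA encoding gives two decimal digits each to major, minor, and revision, with the trailing pair marking the pre-release stage; under the new 5.x scheme alphas take 1-24, betas 26-49 (offset 25), RCs 51-98 (offset 50), and 99 marks a release. A worked example of the arithmetic (version strings chosen for illustration, not part of the diff):

    // 2.3.0        -> 2 * 1000000 + 3 * 10000 + 0 * 100 + 99 = 2030099  (release)
    // 5.0.0-alpha1 -> 5 * 1000000 + 0 * 10000 + 0 * 100 +  1 = 5000001  (alpha, AA < 25)
    // 5.0.0-beta1  -> 5 * 1000000 + 0 * 10000 + 0 * 100 + 26 = 5000026  (beta, offset 25 since 5.0)
    // 5.0.0-rc1    -> 5 * 1000000 + 0 * 10000 + 0 * 100 + 51 = 5000051  (RC, offset 50)
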
// NOTE: indexes created with 3.6 use this constant for e.g. analysis chain emulation (imperfect)
public static final org.apache.lucene.util.Version LUCENE_3_EMULATION_VERSION = org.apache.lucene.util.Version.LUCENE_4_0_0;
public static final int V_0_18_0_ID = /*00*/180099;
public static final Version V_0_18_0 = new Version(V_0_18_0_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_18_1_ID = /*00*/180199;
public static final Version V_0_18_1 = new Version(V_0_18_1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_18_2_ID = /*00*/180299;
public static final Version V_0_18_2 = new Version(V_0_18_2_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_18_3_ID = /*00*/180399;
public static final Version V_0_18_3 = new Version(V_0_18_3_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_18_4_ID = /*00*/180499;
public static final Version V_0_18_4 = new Version(V_0_18_4_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_18_5_ID = /*00*/180599;
public static final Version V_0_18_5 = new Version(V_0_18_5_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_18_6_ID = /*00*/180699;
public static final Version V_0_18_6 = new Version(V_0_18_6_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_18_7_ID = /*00*/180799;
public static final Version V_0_18_7 = new Version(V_0_18_7_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_18_8_ID = /*00*/180899;
public static final Version V_0_18_8 = new Version(V_0_18_8_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_0_RC1_ID = /*00*/190051;
public static final Version V_0_19_0_RC1 = new Version(V_0_19_0_RC1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_0_RC2_ID = /*00*/190052;
public static final Version V_0_19_0_RC2 = new Version(V_0_19_0_RC2_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_0_RC3_ID = /*00*/190053;
public static final Version V_0_19_0_RC3 = new Version(V_0_19_0_RC3_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_0_ID = /*00*/190099;
public static final Version V_0_19_0 = new Version(V_0_19_0_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_1_ID = /*00*/190199;
public static final Version V_0_19_1 = new Version(V_0_19_1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_2_ID = /*00*/190299;
public static final Version V_0_19_2 = new Version(V_0_19_2_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_3_ID = /*00*/190399;
public static final Version V_0_19_3 = new Version(V_0_19_3_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_4_ID = /*00*/190499;
public static final Version V_0_19_4 = new Version(V_0_19_4_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_5_ID = /*00*/190599;
public static final Version V_0_19_5 = new Version(V_0_19_5_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_6_ID = /*00*/190699;
public static final Version V_0_19_6 = new Version(V_0_19_6_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_7_ID = /*00*/190799;
public static final Version V_0_19_7 = new Version(V_0_19_7_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_8_ID = /*00*/190899;
public static final Version V_0_19_8 = new Version(V_0_19_8_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_9_ID = /*00*/190999;
public static final Version V_0_19_9 = new Version(V_0_19_9_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_10_ID = /*00*/191099;
public static final Version V_0_19_10 = new Version(V_0_19_10_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_11_ID = /*00*/191199;
public static final Version V_0_19_11 = new Version(V_0_19_11_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_12_ID = /*00*/191299;
public static final Version V_0_19_12 = new Version(V_0_19_12_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_19_13_ID = /*00*/191399;
public static final Version V_0_19_13 = new Version(V_0_19_13_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_20_0_RC1_ID = /*00*/200051;
public static final Version V_0_20_0_RC1 = new Version(V_0_20_0_RC1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_20_0_ID = /*00*/200099;
public static final Version V_0_20_0 = new Version(V_0_20_0_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_20_1_ID = /*00*/200199;
public static final Version V_0_20_1 = new Version(V_0_20_1_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_20_2_ID = /*00*/200299;
public static final Version V_0_20_2 = new Version(V_0_20_2_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_20_3_ID = /*00*/200399;
public static final Version V_0_20_3 = new Version(V_0_20_3_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_20_4_ID = /*00*/200499;
public static final Version V_0_20_4 = new Version(V_0_20_4_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_20_5_ID = /*00*/200599;
public static final Version V_0_20_5 = new Version(V_0_20_5_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_20_6_ID = /*00*/200699;
public static final Version V_0_20_6 = new Version(V_0_20_6_ID, LUCENE_3_EMULATION_VERSION);
public static final int V_0_90_0_Beta1_ID = /*00*/900001;
public static final Version V_0_90_0_Beta1 = new Version(V_0_90_0_Beta1_ID, org.apache.lucene.util.Version.LUCENE_4_1);
public static final int V_0_90_0_RC1_ID = /*00*/900051;
public static final Version V_0_90_0_RC1 = new Version(V_0_90_0_RC1_ID, org.apache.lucene.util.Version.LUCENE_4_1);
public static final int V_0_90_0_RC2_ID = /*00*/900052;
public static final Version V_0_90_0_RC2 = new Version(V_0_90_0_RC2_ID, org.apache.lucene.util.Version.LUCENE_4_2);
public static final int V_0_90_0_ID = /*00*/900099;
public static final Version V_0_90_0 = new Version(V_0_90_0_ID, org.apache.lucene.util.Version.LUCENE_4_2);
public static final int V_0_90_1_ID = /*00*/900199;
public static final Version V_0_90_1 = new Version(V_0_90_1_ID, org.apache.lucene.util.Version.LUCENE_4_3);
public static final int V_0_90_2_ID = /*00*/900299;
public static final Version V_0_90_2 = new Version(V_0_90_2_ID, org.apache.lucene.util.Version.LUCENE_4_3);
public static final int V_0_90_3_ID = /*00*/900399;
public static final Version V_0_90_3 = new Version(V_0_90_3_ID, org.apache.lucene.util.Version.LUCENE_4_4);
public static final int V_0_90_4_ID = /*00*/900499;
public static final Version V_0_90_4 = new Version(V_0_90_4_ID, org.apache.lucene.util.Version.LUCENE_4_4);
public static final int V_0_90_5_ID = /*00*/900599;
public static final Version V_0_90_5 = new Version(V_0_90_5_ID, org.apache.lucene.util.Version.LUCENE_4_4);
public static final int V_0_90_6_ID = /*00*/900699;
public static final Version V_0_90_6 = new Version(V_0_90_6_ID, org.apache.lucene.util.Version.LUCENE_4_5);
public static final int V_0_90_7_ID = /*00*/900799;
public static final Version V_0_90_7 = new Version(V_0_90_7_ID, org.apache.lucene.util.Version.LUCENE_4_5);
public static final int V_0_90_8_ID = /*00*/900899;
public static final Version V_0_90_8 = new Version(V_0_90_8_ID, org.apache.lucene.util.Version.LUCENE_4_6);
public static final int V_0_90_9_ID = /*00*/900999;
public static final Version V_0_90_9 = new Version(V_0_90_9_ID, org.apache.lucene.util.Version.LUCENE_4_6);
public static final int V_0_90_10_ID = /*00*/901099;
public static final Version V_0_90_10 = new Version(V_0_90_10_ID, org.apache.lucene.util.Version.LUCENE_4_6);
public static final int V_0_90_11_ID = /*00*/901199;
public static final Version V_0_90_11 = new Version(V_0_90_11_ID, org.apache.lucene.util.Version.LUCENE_4_6);
public static final int V_0_90_12_ID = /*00*/901299;
public static final Version V_0_90_12 = new Version(V_0_90_12_ID, org.apache.lucene.util.Version.LUCENE_4_6);
public static final int V_0_90_13_ID = /*00*/901399;
public static final Version V_0_90_13 = new Version(V_0_90_13_ID, org.apache.lucene.util.Version.LUCENE_4_6);
public static final int V_1_0_0_Beta1_ID = 1000001;
public static final Version V_1_0_0_Beta1 = new Version(V_1_0_0_Beta1_ID, org.apache.lucene.util.Version.LUCENE_4_5);
public static final int V_1_0_0_Beta2_ID = 1000002;
public static final Version V_1_0_0_Beta2 = new Version(V_1_0_0_Beta2_ID, org.apache.lucene.util.Version.LUCENE_4_6);
public static final int V_1_0_0_RC1_ID = 1000051;
public static final Version V_1_0_0_RC1 = new Version(V_1_0_0_RC1_ID, org.apache.lucene.util.Version.LUCENE_4_6);
public static final int V_1_0_0_RC2_ID = 1000052;
public static final Version V_1_0_0_RC2 = new Version(V_1_0_0_RC2_ID, org.apache.lucene.util.Version.LUCENE_4_6);
public static final int V_1_0_0_ID = 1000099;
public static final Version V_1_0_0 = new Version(V_1_0_0_ID, org.apache.lucene.util.Version.LUCENE_4_6);
public static final int V_1_0_1_ID = 1000199;
public static final Version V_1_0_1 = new Version(V_1_0_1_ID, org.apache.lucene.util.Version.LUCENE_4_6);
public static final int V_1_0_2_ID = 1000299;
public static final Version V_1_0_2 = new Version(V_1_0_2_ID, org.apache.lucene.util.Version.LUCENE_4_6);
public static final int V_1_0_3_ID = 1000399;
public static final Version V_1_0_3 = new Version(V_1_0_3_ID, org.apache.lucene.util.Version.LUCENE_4_6);
public static final int V_1_1_0_ID = 1010099;
public static final Version V_1_1_0 = new Version(V_1_1_0_ID, org.apache.lucene.util.Version.LUCENE_4_7);
public static final int V_1_1_1_ID = 1010199;
public static final Version V_1_1_1 = new Version(V_1_1_1_ID, org.apache.lucene.util.Version.LUCENE_4_7);
public static final int V_1_1_2_ID = 1010299;
public static final Version V_1_1_2 = new Version(V_1_1_2_ID, org.apache.lucene.util.Version.LUCENE_4_7);
public static final int V_1_2_0_ID = 1020099;
public static final Version V_1_2_0 = new Version(V_1_2_0_ID, org.apache.lucene.util.Version.LUCENE_4_8);
public static final int V_1_2_1_ID = 1020199;
public static final Version V_1_2_1 = new Version(V_1_2_1_ID, org.apache.lucene.util.Version.LUCENE_4_8);
public static final int V_1_2_2_ID = 1020299;
public static final Version V_1_2_2 = new Version(V_1_2_2_ID, org.apache.lucene.util.Version.LUCENE_4_8);
public static final int V_1_2_3_ID = 1020399;
public static final Version V_1_2_3 = new Version(V_1_2_3_ID, org.apache.lucene.util.Version.LUCENE_4_8);
public static final int V_1_2_4_ID = 1020499;
public static final Version V_1_2_4 = new Version(V_1_2_4_ID, org.apache.lucene.util.Version.LUCENE_4_8);
public static final int V_1_3_0_ID = 1030099;
public static final Version V_1_3_0 = new Version(V_1_3_0_ID, org.apache.lucene.util.Version.LUCENE_4_9);
public static final int V_1_3_1_ID = 1030199;
public static final Version V_1_3_1 = new Version(V_1_3_1_ID, org.apache.lucene.util.Version.LUCENE_4_9);
public static final int V_1_3_2_ID = 1030299;
public static final Version V_1_3_2 = new Version(V_1_3_2_ID, org.apache.lucene.util.Version.LUCENE_4_9);
public static final int V_1_3_3_ID = 1030399;
public static final Version V_1_3_3 = new Version(V_1_3_3_ID, org.apache.lucene.util.Version.LUCENE_4_9);
public static final int V_1_3_4_ID = 1030499;
public static final Version V_1_3_4 = new Version(V_1_3_4_ID, org.apache.lucene.util.Version.LUCENE_4_9);
public static final int V_1_3_5_ID = 1030599;
public static final Version V_1_3_5 = new Version(V_1_3_5_ID, org.apache.lucene.util.Version.LUCENE_4_9);
public static final int V_1_3_6_ID = 1030699;
public static final Version V_1_3_6 = new Version(V_1_3_6_ID, org.apache.lucene.util.Version.LUCENE_4_9);
public static final int V_1_3_7_ID = 1030799;
public static final Version V_1_3_7 = new Version(V_1_3_7_ID, org.apache.lucene.util.Version.LUCENE_4_9);
public static final int V_1_3_8_ID = 1030899;
public static final Version V_1_3_8 = new Version(V_1_3_8_ID, org.apache.lucene.util.Version.LUCENE_4_9);
public static final int V_1_3_9_ID = 1030999;
public static final Version V_1_3_9 = new Version(V_1_3_9_ID, org.apache.lucene.util.Version.LUCENE_4_9);
public static final int V_1_4_0_Beta1_ID = 1040001;
public static final Version V_1_4_0_Beta1 = new Version(V_1_4_0_Beta1_ID, org.apache.lucene.util.Version.LUCENE_4_10_1);
public static final int V_1_4_0_ID = 1040099;
public static final Version V_1_4_0 = new Version(V_1_4_0_ID, org.apache.lucene.util.Version.LUCENE_4_10_2);
public static final int V_1_4_1_ID = 1040199;
public static final Version V_1_4_1 = new Version(V_1_4_1_ID, org.apache.lucene.util.Version.LUCENE_4_10_2);
public static final int V_1_4_2_ID = 1040299;
public static final Version V_1_4_2 = new Version(V_1_4_2_ID, org.apache.lucene.util.Version.LUCENE_4_10_2);
public static final int V_1_4_3_ID = 1040399;
public static final Version V_1_4_3 = new Version(V_1_4_3_ID, org.apache.lucene.util.Version.LUCENE_4_10_3);
public static final int V_1_4_4_ID = 1040499;
public static final Version V_1_4_4 = new Version(V_1_4_4_ID, org.apache.lucene.util.Version.LUCENE_4_10_3);
public static final int V_1_4_5_ID = 1040599;
public static final Version V_1_4_5 = new Version(V_1_4_5_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
public static final int V_1_5_0_ID = 1050099;
public static final Version V_1_5_0 = new Version(V_1_5_0_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
public static final int V_1_5_1_ID = 1050199;
public static final Version V_1_5_1 = new Version(V_1_5_1_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
public static final int V_1_5_2_ID = 1050299;
public static final Version V_1_5_2 = new Version(V_1_5_2_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
public static final int V_1_6_0_ID = 1060099;
public static final Version V_1_6_0 = new Version(V_1_6_0_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
public static final int V_1_6_1_ID = 1060199;
public static final Version V_1_6_1 = new Version(V_1_6_1_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
public static final int V_1_6_2_ID = 1060299;
public static final Version V_1_6_2 = new Version(V_1_6_2_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
public static final int V_1_7_0_ID = 1070099;
public static final Version V_1_7_0 = new Version(V_1_7_0_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
public static final int V_1_7_1_ID = 1070199;
public static final Version V_1_7_1 = new Version(V_1_7_1_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
public static final int V_1_7_2_ID = 1070299;
public static final Version V_1_7_2 = new Version(V_1_7_2_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
public static final int V_1_7_3_ID = 1070399;
public static final Version V_1_7_3 = new Version(V_1_7_3_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
public static final int V_1_7_4_ID = 1070499;
public static final Version V_1_7_4 = new Version(V_1_7_4_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
public static final int V_1_7_5_ID = 1070599;
public static final Version V_1_7_5 = new Version(V_1_7_5_ID, org.apache.lucene.util.Version.LUCENE_4_10_4);
public static final int V_2_0_0_beta1_ID = 2000001; public static final int V_2_0_0_beta1_ID = 2000001;
public static final Version V_2_0_0_beta1 = new Version(V_2_0_0_beta1_ID, org.apache.lucene.util.Version.LUCENE_5_2_1); public static final Version V_2_0_0_beta1 = new Version(V_2_0_0_beta1_ID, org.apache.lucene.util.Version.LUCENE_5_2_1);
@ -262,10 +60,12 @@ public class Version {
public static final Version V_2_1_2 = new Version(V_2_1_2_ID, org.apache.lucene.util.Version.LUCENE_5_3_1); public static final Version V_2_1_2 = new Version(V_2_1_2_ID, org.apache.lucene.util.Version.LUCENE_5_3_1);
public static final int V_2_2_0_ID = 2020099; public static final int V_2_2_0_ID = 2020099;
public static final Version V_2_2_0 = new Version(V_2_2_0_ID, org.apache.lucene.util.Version.LUCENE_5_4_1); public static final Version V_2_2_0 = new Version(V_2_2_0_ID, org.apache.lucene.util.Version.LUCENE_5_4_1);
public static final int V_2_2_1_ID = 2020199;
public static final Version V_2_2_1 = new Version(V_2_2_1_ID, org.apache.lucene.util.Version.LUCENE_5_4_1);
public static final int V_2_3_0_ID = 2030099; public static final int V_2_3_0_ID = 2030099;
public static final Version V_2_3_0 = new Version(V_2_3_0_ID, org.apache.lucene.util.Version.LUCENE_5_5_0); public static final Version V_2_3_0 = new Version(V_2_3_0_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
public static final int V_5_0_0_ID = 5000099; public static final int V_5_0_0_ID = 5000099;
public static final Version V_5_0_0 = new Version(V_5_0_0_ID, org.apache.lucene.util.Version.LUCENE_5_5_0); public static final Version V_5_0_0 = new Version(V_5_0_0_ID, org.apache.lucene.util.Version.LUCENE_6_0_0);
public static final Version CURRENT = V_5_0_0; public static final Version CURRENT = V_5_0_0;
static { static {
@ -283,6 +83,8 @@ public class Version {
return V_5_0_0; return V_5_0_0;
case V_2_3_0_ID: case V_2_3_0_ID:
return V_2_3_0; return V_2_3_0;
case V_2_2_1_ID:
return V_2_2_1;
case V_2_2_0_ID: case V_2_2_0_ID:
return V_2_2_0; return V_2_2_0;
case V_2_1_2_ID: case V_2_1_2_ID:
@ -303,198 +105,6 @@ public class Version {
return V_2_0_0_beta2; return V_2_0_0_beta2;
case V_2_0_0_beta1_ID: case V_2_0_0_beta1_ID:
return V_2_0_0_beta1; return V_2_0_0_beta1;
case V_1_7_5_ID:
return V_1_7_5;
case V_1_7_4_ID:
return V_1_7_4;
case V_1_7_3_ID:
return V_1_7_3;
case V_1_7_2_ID:
return V_1_7_2;
case V_1_7_1_ID:
return V_1_7_1;
case V_1_7_0_ID:
return V_1_7_0;
case V_1_6_2_ID:
return V_1_6_2;
case V_1_6_1_ID:
return V_1_6_1;
case V_1_6_0_ID:
return V_1_6_0;
case V_1_5_2_ID:
return V_1_5_2;
case V_1_5_1_ID:
return V_1_5_1;
case V_1_5_0_ID:
return V_1_5_0;
case V_1_4_5_ID:
return V_1_4_5;
case V_1_4_4_ID:
return V_1_4_4;
case V_1_4_3_ID:
return V_1_4_3;
case V_1_4_2_ID:
return V_1_4_2;
case V_1_4_1_ID:
return V_1_4_1;
case V_1_4_0_ID:
return V_1_4_0;
case V_1_4_0_Beta1_ID:
return V_1_4_0_Beta1;
case V_1_3_9_ID:
return V_1_3_9;
case V_1_3_8_ID:
return V_1_3_8;
case V_1_3_7_ID:
return V_1_3_7;
case V_1_3_6_ID:
return V_1_3_6;
case V_1_3_5_ID:
return V_1_3_5;
case V_1_3_4_ID:
return V_1_3_4;
case V_1_3_3_ID:
return V_1_3_3;
case V_1_3_2_ID:
return V_1_3_2;
case V_1_3_1_ID:
return V_1_3_1;
case V_1_3_0_ID:
return V_1_3_0;
case V_1_2_4_ID:
return V_1_2_4;
case V_1_2_3_ID:
return V_1_2_3;
case V_1_2_2_ID:
return V_1_2_2;
case V_1_2_1_ID:
return V_1_2_1;
case V_1_2_0_ID:
return V_1_2_0;
case V_1_1_2_ID:
return V_1_1_2;
case V_1_1_1_ID:
return V_1_1_1;
case V_1_1_0_ID:
return V_1_1_0;
case V_1_0_3_ID:
return V_1_0_3;
case V_1_0_2_ID:
return V_1_0_2;
case V_1_0_1_ID:
return V_1_0_1;
case V_1_0_0_ID:
return V_1_0_0;
case V_1_0_0_RC2_ID:
return V_1_0_0_RC2;
case V_1_0_0_RC1_ID:
return V_1_0_0_RC1;
case V_1_0_0_Beta2_ID:
return V_1_0_0_Beta2;
case V_1_0_0_Beta1_ID:
return V_1_0_0_Beta1;
case V_0_90_13_ID:
return V_0_90_13;
case V_0_90_12_ID:
return V_0_90_12;
case V_0_90_11_ID:
return V_0_90_11;
case V_0_90_10_ID:
return V_0_90_10;
case V_0_90_9_ID:
return V_0_90_9;
case V_0_90_8_ID:
return V_0_90_8;
case V_0_90_7_ID:
return V_0_90_7;
case V_0_90_6_ID:
return V_0_90_6;
case V_0_90_5_ID:
return V_0_90_5;
case V_0_90_4_ID:
return V_0_90_4;
case V_0_90_3_ID:
return V_0_90_3;
case V_0_90_2_ID:
return V_0_90_2;
case V_0_90_1_ID:
return V_0_90_1;
case V_0_90_0_ID:
return V_0_90_0;
case V_0_90_0_RC2_ID:
return V_0_90_0_RC2;
case V_0_90_0_RC1_ID:
return V_0_90_0_RC1;
case V_0_90_0_Beta1_ID:
return V_0_90_0_Beta1;
case V_0_20_6_ID:
return V_0_20_6;
case V_0_20_5_ID:
return V_0_20_5;
case V_0_20_4_ID:
return V_0_20_4;
case V_0_20_3_ID:
return V_0_20_3;
case V_0_20_2_ID:
return V_0_20_2;
case V_0_20_1_ID:
return V_0_20_1;
case V_0_20_0_ID:
return V_0_20_0;
case V_0_20_0_RC1_ID:
return V_0_20_0_RC1;
case V_0_19_0_RC1_ID:
return V_0_19_0_RC1;
case V_0_19_0_RC2_ID:
return V_0_19_0_RC2;
case V_0_19_0_RC3_ID:
return V_0_19_0_RC3;
case V_0_19_0_ID:
return V_0_19_0;
case V_0_19_1_ID:
return V_0_19_1;
case V_0_19_2_ID:
return V_0_19_2;
case V_0_19_3_ID:
return V_0_19_3;
case V_0_19_4_ID:
return V_0_19_4;
case V_0_19_5_ID:
return V_0_19_5;
case V_0_19_6_ID:
return V_0_19_6;
case V_0_19_7_ID:
return V_0_19_7;
case V_0_19_8_ID:
return V_0_19_8;
case V_0_19_9_ID:
return V_0_19_9;
case V_0_19_10_ID:
return V_0_19_10;
case V_0_19_11_ID:
return V_0_19_11;
case V_0_19_12_ID:
return V_0_19_12;
case V_0_19_13_ID:
return V_0_19_13;
case V_0_18_0_ID:
return V_0_18_0;
case V_0_18_1_ID:
return V_0_18_1;
case V_0_18_2_ID:
return V_0_18_2;
case V_0_18_3_ID:
return V_0_18_3;
case V_0_18_4_ID:
return V_0_18_4;
case V_0_18_5_ID:
return V_0_18_5;
case V_0_18_6_ID:
return V_0_18_6;
case V_0_18_7_ID:
return V_0_18_7;
case V_0_18_8_ID:
return V_0_18_8;
default: default:
return new Version(id, org.apache.lucene.util.Version.LATEST); return new Version(id, org.apache.lucene.util.Version.LATEST);
} }
@ -531,15 +141,23 @@ public class Version {
if (!Strings.hasLength(version)) { if (!Strings.hasLength(version)) {
return Version.CURRENT; return Version.CURRENT;
} }
final boolean snapshot; // this is some BWC for 2.x and before indices
if (snapshot = version.endsWith("-SNAPSHOT")) {
version = version.substring(0, version.length() - 9);
}
String[] parts = version.split("\\.|\\-"); String[] parts = version.split("\\.|\\-");
if (parts.length < 3 || parts.length > 4) { if (parts.length < 3 || parts.length > 4) {
throw new IllegalArgumentException("the version needs to contain major, minor, and revision, and optionally the build: " + version); throw new IllegalArgumentException("the version needs to contain major, minor, and revision, and optionally the build: " + version);
} }
try { try {
final int rawMajor = Integer.parseInt(parts[0]);
if (rawMajor >= 5 && snapshot) { // we don't support snapshot as part of the version here anymore
throw new IllegalArgumentException("illegal version format - snapshots are only supported until version 2.x");
}
final int betaOffset = rawMajor < 5 ? 0 : 25;
//we reverse the version id calculation based on some assumption as we can't reliably reverse the modulo //we reverse the version id calculation based on some assumption as we can't reliably reverse the modulo
final int major = Integer.parseInt(parts[0]) * 1000000; final int major = rawMajor * 1000000;
final int minor = Integer.parseInt(parts[1]) * 10000; final int minor = Integer.parseInt(parts[1]) * 10000;
final int revision = Integer.parseInt(parts[2]) * 100; final int revision = Integer.parseInt(parts[2]) * 100;
@ -547,11 +165,17 @@ public class Version {
int build = 99; int build = 99;
if (parts.length == 4) { if (parts.length == 4) {
String buildStr = parts[3]; String buildStr = parts[3];
if (buildStr.startsWith("Beta") || buildStr.startsWith("beta")) { if (buildStr.startsWith("alpha")) {
build = Integer.parseInt(buildStr.substring(4)); assert rawMajor >= 5 : "major must be >= 5 but was " + major;
} build = Integer.parseInt(buildStr.substring(5));
if (buildStr.startsWith("RC") || buildStr.startsWith("rc")) { assert build < 25 : "expected a beta build but " + build + " >= 25";
} else if (buildStr.startsWith("Beta") || buildStr.startsWith("beta")) {
build = betaOffset + Integer.parseInt(buildStr.substring(4));
assert build < 50 : "expected a beta build but " + build + " >= 50";
} else if (buildStr.startsWith("RC") || buildStr.startsWith("rc")) {
build = Integer.parseInt(buildStr.substring(2)) + 50; build = Integer.parseInt(buildStr.substring(2)) + 50;
} else {
throw new IllegalArgumentException("unable to parse version " + version);
} }
} }
@ -614,13 +238,16 @@ public class Version {
public String toString() { public String toString() {
StringBuilder sb = new StringBuilder(); StringBuilder sb = new StringBuilder();
sb.append(major).append('.').append(minor).append('.').append(revision); sb.append(major).append('.').append(minor).append('.').append(revision);
if (isBeta()) { if (isAlpha()) {
sb.append("-alpha");
sb.append(build);
} else if (isBeta()) {
if (major >= 2) { if (major >= 2) {
sb.append("-beta"); sb.append("-beta");
} else { } else {
sb.append(".Beta"); sb.append(".Beta");
} }
sb.append(build); sb.append(major < 5 ? build : build-25);
} else if (build < 99) { } else if (build < 99) {
if (major >= 2) { if (major >= 2) {
sb.append("-rc"); sb.append("-rc");
@ -656,7 +283,16 @@ public class Version {
} }
public boolean isBeta() { public boolean isBeta() {
return build < 50; return major < 5 ? build < 50 : build >= 25 && build < 50;
}
/**
* Returns true iff this version is an alpha version
* Note: This has been introduced in elasticsearch version 5. Previous versions will never
* have an alpha version.
*/
public boolean isAlpha() {
return major < 5 ? false : build < 25;
} }
public boolean isRC() { public boolean isRC() {
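For orientation, here is a minimal illustrative sketch (not part of the diff) of the version-id arithmetic this change relies on: the numeric id is major*1000000 + minor*10000 + revision*100 + build, with 5.x alpha releases occupying build slots 0-24, betas shifted to 25-49 (no offset before 5.0), release candidates starting at 50, and GA releases using build 99. The class and method names below are invented for the example; only the arithmetic is taken from the fromString()/toString() hunks above.

// Illustrative only -- mirrors the id arithmetic and the alpha/beta/RC build slots used above.
public final class VersionIdSketch {
    static int id(int major, int minor, int revision, int build) {
        return major * 1_000_000 + minor * 10_000 + revision * 100 + build;
    }

    public static void main(String[] args) {
        System.out.println(id(5, 0, 0, 99));     // 5000099 == V_5_0_0_ID (GA releases use build 99)
        System.out.println(id(5, 0, 0, 1));      // 5000001 -> "5.0.0-alpha1" (alpha slots are 0..24)
        System.out.println(id(5, 0, 0, 25 + 1)); // 5000026 -> "5.0.0-beta1" (beta offset of 25 from 5.0 on)
        System.out.println(id(2, 3, 0, 1));      // 2030001 -> "2.3.0-beta1" (no beta offset before 5.0)
        System.out.println(id(2, 1, 0, 50 + 1)); // 2010051 -> "2.1.0-rc1" (RC slots start at 50)
    }
}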
View File
@ -165,7 +165,6 @@ import org.elasticsearch.action.percolate.MultiPercolateAction;
import org.elasticsearch.action.percolate.PercolateAction; import org.elasticsearch.action.percolate.PercolateAction;
import org.elasticsearch.action.percolate.TransportMultiPercolateAction; import org.elasticsearch.action.percolate.TransportMultiPercolateAction;
import org.elasticsearch.action.percolate.TransportPercolateAction; import org.elasticsearch.action.percolate.TransportPercolateAction;
import org.elasticsearch.action.percolate.TransportShardMultiPercolateAction;
import org.elasticsearch.action.search.ClearScrollAction; import org.elasticsearch.action.search.ClearScrollAction;
import org.elasticsearch.action.search.MultiSearchAction; import org.elasticsearch.action.search.MultiSearchAction;
import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchAction;
@ -331,7 +330,7 @@ public class ActionModule extends AbstractModule {
registerAction(SearchScrollAction.INSTANCE, TransportSearchScrollAction.class); registerAction(SearchScrollAction.INSTANCE, TransportSearchScrollAction.class);
registerAction(MultiSearchAction.INSTANCE, TransportMultiSearchAction.class); registerAction(MultiSearchAction.INSTANCE, TransportMultiSearchAction.class);
registerAction(PercolateAction.INSTANCE, TransportPercolateAction.class); registerAction(PercolateAction.INSTANCE, TransportPercolateAction.class);
registerAction(MultiPercolateAction.INSTANCE, TransportMultiPercolateAction.class, TransportShardMultiPercolateAction.class); registerAction(MultiPercolateAction.INSTANCE, TransportMultiPercolateAction.class);
registerAction(ExplainAction.INSTANCE, TransportExplainAction.class); registerAction(ExplainAction.INSTANCE, TransportExplainAction.class);
registerAction(ClearScrollAction.INSTANCE, TransportClearScrollAction.class); registerAction(ClearScrollAction.INSTANCE, TransportClearScrollAction.class);
registerAction(RecoveryAction.INSTANCE, TransportRecoveryAction.class); registerAction(RecoveryAction.INSTANCE, TransportRecoveryAction.class);
View File
@ -197,9 +197,7 @@ public class ClusterHealthResponse extends ActionResponse implements StatusToXCo
numberOfPendingTasks = in.readInt(); numberOfPendingTasks = in.readInt();
timedOut = in.readBoolean(); timedOut = in.readBoolean();
numberOfInFlightFetch = in.readInt(); numberOfInFlightFetch = in.readInt();
if (in.getVersion().onOrAfter(Version.V_1_7_0)) { delayedUnassignedShards= in.readInt();
delayedUnassignedShards= in.readInt();
}
taskMaxWaitingTime = TimeValue.readTimeValue(in); taskMaxWaitingTime = TimeValue.readTimeValue(in);
} }
@ -212,9 +210,7 @@ public class ClusterHealthResponse extends ActionResponse implements StatusToXCo
out.writeInt(numberOfPendingTasks); out.writeInt(numberOfPendingTasks);
out.writeBoolean(timedOut); out.writeBoolean(timedOut);
out.writeInt(numberOfInFlightFetch); out.writeInt(numberOfInFlightFetch);
if (out.getVersion().onOrAfter(Version.V_1_7_0)) { out.writeInt(delayedUnassignedShards);
out.writeInt(delayedUnassignedShards);
}
taskMaxWaitingTime.writeTo(out); taskMaxWaitingTime.writeTo(out);
} }
View File
@ -24,7 +24,6 @@ import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.ClusterStateObserver;
import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterStateUpdateTask;
@ -32,6 +31,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
@ -213,7 +213,7 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction<
} }
if (request.indices() != null && request.indices().length > 0) { if (request.indices() != null && request.indices().length > 0) {
try { try {
indexNameExpressionResolver.concreteIndices(clusterState, IndicesOptions.strictExpand(), request.indices()); indexNameExpressionResolver.concreteIndexNames(clusterState, IndicesOptions.strictExpand(), request.indices());
waitForCounter++; waitForCounter++;
} catch (IndexNotFoundException e) { } catch (IndexNotFoundException e) {
response.setStatus(ClusterHealthStatus.RED); // no indices, make sure its RED response.setStatus(ClusterHealthStatus.RED); // no indices, make sure its RED
@ -280,7 +280,7 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction<
String[] concreteIndices; String[] concreteIndices;
try { try {
concreteIndices = indexNameExpressionResolver.concreteIndices(clusterState, request); concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, request);
} catch (IndexNotFoundException e) { } catch (IndexNotFoundException e) {
// one of the specified indices is not there - treat it as RED. // one of the specified indices is not there - treat it as RED.
ClusterHealthResponse response = new ClusterHealthResponse(clusterName.value(), Strings.EMPTY_ARRAY, clusterState, ClusterHealthResponse response = new ClusterHealthResponse(clusterName.value(), Strings.EMPTY_ARRAY, clusterState,
View File
@ -24,8 +24,8 @@ import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.nodes.BaseNodeRequest; import org.elasticsearch.action.support.nodes.BaseNodeRequest;
import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.action.support.nodes.TransportNodesAction;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
View File
@ -28,6 +28,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.http.HttpInfo; import org.elasticsearch.http.HttpInfo;
import org.elasticsearch.ingest.core.IngestInfo;
import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.monitor.os.OsInfo; import org.elasticsearch.monitor.os.OsInfo;
import org.elasticsearch.monitor.process.ProcessInfo; import org.elasticsearch.monitor.process.ProcessInfo;
@ -74,12 +75,15 @@ public class NodeInfo extends BaseNodeResponse {
@Nullable @Nullable
private PluginsAndModules plugins; private PluginsAndModules plugins;
NodeInfo() { @Nullable
private IngestInfo ingest;
public NodeInfo() {
} }
public NodeInfo(Version version, Build build, DiscoveryNode node, @Nullable Map<String, String> serviceAttributes, @Nullable Settings settings, public NodeInfo(Version version, Build build, DiscoveryNode node, @Nullable Map<String, String> serviceAttributes, @Nullable Settings settings,
@Nullable OsInfo os, @Nullable ProcessInfo process, @Nullable JvmInfo jvm, @Nullable ThreadPoolInfo threadPool, @Nullable OsInfo os, @Nullable ProcessInfo process, @Nullable JvmInfo jvm, @Nullable ThreadPoolInfo threadPool,
@Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsAndModules plugins) { @Nullable TransportInfo transport, @Nullable HttpInfo http, @Nullable PluginsAndModules plugins, @Nullable IngestInfo ingest) {
super(node); super(node);
this.version = version; this.version = version;
this.build = build; this.build = build;
@ -92,6 +96,7 @@ public class NodeInfo extends BaseNodeResponse {
this.transport = transport; this.transport = transport;
this.http = http; this.http = http;
this.plugins = plugins; this.plugins = plugins;
this.ingest = ingest;
} }
/** /**
@ -176,6 +181,11 @@ public class NodeInfo extends BaseNodeResponse {
return this.plugins; return this.plugins;
} }
@Nullable
public IngestInfo getIngest() {
return ingest;
}
public static NodeInfo readNodeInfo(StreamInput in) throws IOException { public static NodeInfo readNodeInfo(StreamInput in) throws IOException {
NodeInfo nodeInfo = new NodeInfo(); NodeInfo nodeInfo = new NodeInfo();
nodeInfo.readFrom(in); nodeInfo.readFrom(in);
@ -220,6 +230,9 @@ public class NodeInfo extends BaseNodeResponse {
plugins = new PluginsAndModules(); plugins = new PluginsAndModules();
plugins.readFrom(in); plugins.readFrom(in);
} }
if (in.readBoolean()) {
ingest = new IngestInfo(in);
}
} }
@Override @Override
@ -285,5 +298,11 @@ public class NodeInfo extends BaseNodeResponse {
out.writeBoolean(true); out.writeBoolean(true);
plugins.writeTo(out); plugins.writeTo(out);
} }
if (ingest == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
ingest.writeTo(out);
}
} }
} }
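The new ingest block is serialized with the same optional-section pattern NodeInfo already uses for its other nullable fields: a boolean presence flag, then the payload. A minimal self-contained sketch of that pattern, written against plain java.io streams rather than Elasticsearch's StreamInput/StreamOutput, purely for illustration:

// Illustrative only: presence flag followed by payload, as in NodeInfo.writeTo()/readFrom() above.
import java.io.*;

public class OptionalSectionSketch {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);

        String ingest = args.length > 0 ? args[0] : null;   // stand-in for the optional IngestInfo section
        if (ingest == null) {
            out.writeBoolean(false);                         // absent: write only the flag
        } else {
            out.writeBoolean(true);                          // present: flag, then the section itself
            out.writeUTF(ingest);
        }

        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        String read = in.readBoolean() ? in.readUTF() : null; // mirrors: if (in.readBoolean()) { ingest = new IngestInfo(in); }
        System.out.println(read);
    }
}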
View File
@ -38,6 +38,7 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
private boolean transport = true; private boolean transport = true;
private boolean http = true; private boolean http = true;
private boolean plugins = true; private boolean plugins = true;
private boolean ingest = true;
public NodesInfoRequest() { public NodesInfoRequest() {
} }
@ -62,6 +63,7 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
transport = false; transport = false;
http = false; http = false;
plugins = false; plugins = false;
ingest = false;
return this; return this;
} }
@ -77,6 +79,7 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
transport = true; transport = true;
http = true; http = true;
plugins = true; plugins = true;
ingest = true;
return this; return this;
} }
@ -202,6 +205,22 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
return plugins; return plugins;
} }
/**
* Should information about ingest be returned
* @param ingest true if you want info
*/
public NodesInfoRequest ingest(boolean ingest) {
this.ingest = ingest;
return this;
}
/**
* @return true if information about ingest is requested
*/
public boolean ingest() {
return ingest;
}
@Override @Override
public void readFrom(StreamInput in) throws IOException { public void readFrom(StreamInput in) throws IOException {
super.readFrom(in); super.readFrom(in);
@ -213,6 +232,7 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
transport = in.readBoolean(); transport = in.readBoolean();
http = in.readBoolean(); http = in.readBoolean();
plugins = in.readBoolean(); plugins = in.readBoolean();
ingest = in.readBoolean();
} }
@Override @Override
@ -226,5 +246,6 @@ public class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
out.writeBoolean(transport); out.writeBoolean(transport);
out.writeBoolean(http); out.writeBoolean(http);
out.writeBoolean(plugins); out.writeBoolean(plugins);
out.writeBoolean(ingest);
} }
} }
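A hedged usage sketch of the new flag; only clear(), all(), ingest(boolean) and ingest() are taken from this diff, everything around them is assumed context:

// Illustrative only: narrow a NodesInfoRequest down to the new ingest section.
NodesInfoRequest request = new NodesInfoRequest()
        .clear()         // switch every section off
        .ingest(true);   // then re-enable ingest info alone
assert request.ingest();
// request.all() would turn ingest back on together with the other sections.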
View File
@ -110,4 +110,12 @@ public class NodesInfoRequestBuilder extends NodesOperationRequestBuilder<NodesI
request().plugins(plugins); request().plugins(plugins);
return this; return this;
} }
/**
* Should the node ingest info be returned.
*/
public NodesInfoRequestBuilder setIngest(boolean ingest) {
request().ingest(ingest);
return this;
}
} }
View File
@ -121,6 +121,9 @@ public class NodesInfoResponse extends BaseNodesResponse<NodeInfo> implements To
if (nodeInfo.getPlugins() != null) { if (nodeInfo.getPlugins() != null) {
nodeInfo.getPlugins().toXContent(builder, params); nodeInfo.getPlugins().toXContent(builder, params);
} }
if (nodeInfo.getIngest() != null) {
nodeInfo.getIngest().toXContent(builder, params);
}
builder.endObject(); builder.endObject();
} }
View File
@ -23,8 +23,8 @@ import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.nodes.BaseNodeRequest; import org.elasticsearch.action.support.nodes.BaseNodeRequest;
import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.action.support.nodes.TransportNodesAction;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
@ -80,7 +80,7 @@ public class TransportNodesInfoAction extends TransportNodesAction<NodesInfoRequ
protected NodeInfo nodeOperation(NodeInfoRequest nodeRequest) { protected NodeInfo nodeOperation(NodeInfoRequest nodeRequest) {
NodesInfoRequest request = nodeRequest.request; NodesInfoRequest request = nodeRequest.request;
return nodeService.info(request.settings(), request.os(), request.process(), request.jvm(), request.threadPool(), return nodeService.info(request.settings(), request.os(), request.process(), request.jvm(), request.threadPool(),
request.transport(), request.http(), request.plugins()); request.transport(), request.http(), request.plugins(), request.ingest());
} }
@Override @Override
@ -95,7 +95,7 @@ public class TransportNodesInfoAction extends TransportNodesAction<NodesInfoRequ
public NodeInfoRequest() { public NodeInfoRequest() {
} }
NodeInfoRequest(String nodeId, NodesInfoRequest request) { public NodeInfoRequest(String nodeId, NodesInfoRequest request) {
super(nodeId); super(nodeId);
this.request = request; this.request = request;
} }
View File
@ -20,7 +20,7 @@
package org.elasticsearch.action.admin.cluster.node.liveness; package org.elasticsearch.action.admin.cluster.node.liveness;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportChannel;
View File
@ -31,6 +31,7 @@ import org.elasticsearch.discovery.DiscoveryStats;
import org.elasticsearch.http.HttpStats; import org.elasticsearch.http.HttpStats;
import org.elasticsearch.indices.NodeIndicesStats; import org.elasticsearch.indices.NodeIndicesStats;
import org.elasticsearch.indices.breaker.AllCircuitBreakerStats; import org.elasticsearch.indices.breaker.AllCircuitBreakerStats;
import org.elasticsearch.ingest.IngestStats;
import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.monitor.fs.FsInfo;
import org.elasticsearch.monitor.jvm.JvmStats; import org.elasticsearch.monitor.jvm.JvmStats;
import org.elasticsearch.monitor.os.OsStats; import org.elasticsearch.monitor.os.OsStats;
@ -81,6 +82,9 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
@Nullable @Nullable
private DiscoveryStats discoveryStats; private DiscoveryStats discoveryStats;
@Nullable
private IngestStats ingestStats;
NodeStats() { NodeStats() {
} }
@ -89,7 +93,8 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
@Nullable FsInfo fs, @Nullable TransportStats transport, @Nullable HttpStats http, @Nullable FsInfo fs, @Nullable TransportStats transport, @Nullable HttpStats http,
@Nullable AllCircuitBreakerStats breaker, @Nullable AllCircuitBreakerStats breaker,
@Nullable ScriptStats scriptStats, @Nullable ScriptStats scriptStats,
@Nullable DiscoveryStats discoveryStats) { @Nullable DiscoveryStats discoveryStats,
@Nullable IngestStats ingestStats) {
super(node); super(node);
this.timestamp = timestamp; this.timestamp = timestamp;
this.indices = indices; this.indices = indices;
@ -103,6 +108,7 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
this.breaker = breaker; this.breaker = breaker;
this.scriptStats = scriptStats; this.scriptStats = scriptStats;
this.discoveryStats = discoveryStats; this.discoveryStats = discoveryStats;
this.ingestStats = ingestStats;
} }
public long getTimestamp() { public long getTimestamp() {
@ -187,6 +193,11 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
return this.discoveryStats; return this.discoveryStats;
} }
@Nullable
public IngestStats getIngestStats() {
return ingestStats;
}
public static NodeStats readNodeStats(StreamInput in) throws IOException { public static NodeStats readNodeStats(StreamInput in) throws IOException {
NodeStats nodeInfo = new NodeStats(); NodeStats nodeInfo = new NodeStats();
nodeInfo.readFrom(in); nodeInfo.readFrom(in);
@ -224,7 +235,7 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
breaker = AllCircuitBreakerStats.readOptionalAllCircuitBreakerStats(in); breaker = AllCircuitBreakerStats.readOptionalAllCircuitBreakerStats(in);
scriptStats = in.readOptionalStreamable(ScriptStats::new); scriptStats = in.readOptionalStreamable(ScriptStats::new);
discoveryStats = in.readOptionalStreamable(() -> new DiscoveryStats(null)); discoveryStats = in.readOptionalStreamable(() -> new DiscoveryStats(null));
ingestStats = in.readOptionalWritable(IngestStats::new);
} }
@Override @Override
@ -282,6 +293,7 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
out.writeOptionalStreamable(breaker); out.writeOptionalStreamable(breaker);
out.writeOptionalStreamable(scriptStats); out.writeOptionalStreamable(scriptStats);
out.writeOptionalStreamable(discoveryStats); out.writeOptionalStreamable(discoveryStats);
out.writeOptionalWriteable(ingestStats);
} }
@Override @Override
@ -337,6 +349,10 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
getDiscoveryStats().toXContent(builder, params); getDiscoveryStats().toXContent(builder, params);
} }
if (getIngestStats() != null) {
getIngestStats().toXContent(builder, params);
}
return builder; return builder;
} }
} }
View File
@ -42,6 +42,7 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
private boolean breaker; private boolean breaker;
private boolean script; private boolean script;
private boolean discovery; private boolean discovery;
private boolean ingest;
public NodesStatsRequest() { public NodesStatsRequest() {
} }
@ -69,6 +70,7 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
this.breaker = true; this.breaker = true;
this.script = true; this.script = true;
this.discovery = true; this.discovery = true;
this.ingest = true;
return this; return this;
} }
@ -87,6 +89,7 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
this.breaker = false; this.breaker = false;
this.script = false; this.script = false;
this.discovery = false; this.discovery = false;
this.ingest = false;
return this; return this;
} }
@ -250,6 +253,17 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
return this; return this;
} }
public boolean ingest() {
return ingest;
}
/**
* Should ingest statistics be returned.
*/
public NodesStatsRequest ingest(boolean ingest) {
this.ingest = ingest;
return this;
}
@Override @Override
public void readFrom(StreamInput in) throws IOException { public void readFrom(StreamInput in) throws IOException {
@ -265,6 +279,7 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
breaker = in.readBoolean(); breaker = in.readBoolean();
script = in.readBoolean(); script = in.readBoolean();
discovery = in.readBoolean(); discovery = in.readBoolean();
ingest = in.readBoolean();
} }
@Override @Override
@ -281,6 +296,7 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
out.writeBoolean(breaker); out.writeBoolean(breaker);
out.writeBoolean(script); out.writeBoolean(script);
out.writeBoolean(discovery); out.writeBoolean(discovery);
out.writeBoolean(ingest);
} }
} }
View File
@ -137,4 +137,12 @@ public class NodesStatsRequestBuilder extends NodesOperationRequestBuilder<Nodes
request.discovery(discovery); request.discovery(discovery);
return this; return this;
} }
/**
* Should ingest statistics be returned.
*/
public NodesStatsRequestBuilder ingest(boolean ingest) {
request.ingest(ingest);
return this;
}
} }
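The stats side mirrors the info side through the builder method added above. A hedged sketch; ingest(boolean) on NodesStatsRequestBuilder and NodeStats.getIngestStats() come from this diff, while the client plumbing (client.admin().cluster().prepareNodesStats(), .get()) is assumed, not shown in this change:

// Illustrative only: request ingest statistics through the builder and read them off a node's stats.
NodesStatsResponse response = client.admin().cluster().prepareNodesStats()
        .ingest(true)    // NodesStatsRequestBuilder.ingest(boolean) from this change
        .get();
// Each NodeStats in the response now exposes the optional ingest section via getIngestStats().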
View File
@ -23,8 +23,8 @@ import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.nodes.BaseNodeRequest; import org.elasticsearch.action.support.nodes.BaseNodeRequest;
import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.action.support.nodes.TransportNodesAction;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
@ -80,7 +80,8 @@ public class TransportNodesStatsAction extends TransportNodesAction<NodesStatsRe
protected NodeStats nodeOperation(NodeStatsRequest nodeStatsRequest) { protected NodeStats nodeOperation(NodeStatsRequest nodeStatsRequest) {
NodesStatsRequest request = nodeStatsRequest.request; NodesStatsRequest request = nodeStatsRequest.request;
return nodeService.stats(request.indices(), request.os(), request.process(), request.jvm(), request.threadPool(), return nodeService.stats(request.indices(), request.os(), request.process(), request.jvm(), request.threadPool(),
request.fs(), request.transport(), request.http(), request.breaker(), request.script(), request.discovery()); request.fs(), request.transport(), request.http(), request.breaker(), request.script(), request.discovery(),
request.ingest());
} }
@Override @Override
View File
@ -53,12 +53,18 @@ public class CancelTasksRequest extends BaseTasksRequest<CancelTasksRequest> {
return super.match(task) && task instanceof CancellableTask; return super.match(task) && task instanceof CancellableTask;
} }
public CancelTasksRequest reason(String reason) { /**
* Set the reason for canceling the task.
*/
public CancelTasksRequest setReason(String reason) {
this.reason = reason; this.reason = reason;
return this; return this;
} }
public String reason() { /**
* The reason for canceling the task.
*/
public String getReason() {
return reason; return reason;
} }
} }
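A hedged sketch of the renamed accessors; setReason() and getReason() are from this diff, the no-arg constructor is assumed context:

// Illustrative only: the reason set here is what setBanOnNodes() later propagates to child nodes.
CancelTasksRequest request = new CancelTasksRequest();
request.setReason("node is shutting down");
assert "node is shutting down".equals(request.getReason());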
View File
@ -26,10 +26,10 @@ import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.action.support.tasks.TransportTasksAction;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
@ -84,21 +84,21 @@ public class TransportCancelTasksAction extends TransportTasksAction<Cancellable
} }
protected void processTasks(CancelTasksRequest request, Consumer<CancellableTask> operation) { protected void processTasks(CancelTasksRequest request, Consumer<CancellableTask> operation) {
if (request.taskId().isSet() == false) { if (request.getTaskId().isSet() == false) {
// we are only checking one task, we can optimize it // we are only checking one task, we can optimize it
CancellableTask task = taskManager.getCancellableTask(request.taskId().getId()); CancellableTask task = taskManager.getCancellableTask(request.getTaskId().getId());
if (task != null) { if (task != null) {
if (request.match(task)) { if (request.match(task)) {
operation.accept(task); operation.accept(task);
} else { } else {
throw new IllegalArgumentException("task [" + request.taskId() + "] doesn't support this operation"); throw new IllegalArgumentException("task [" + request.getTaskId() + "] doesn't support this operation");
} }
} else { } else {
if (taskManager.getTask(request.taskId().getId()) != null) { if (taskManager.getTask(request.getTaskId().getId()) != null) {
// The task exists, but doesn't support cancellation // The task exists, but doesn't support cancellation
throw new IllegalArgumentException("task [" + request.taskId() + "] doesn't support cancellation"); throw new IllegalArgumentException("task [" + request.getTaskId() + "] doesn't support cancellation");
} else { } else {
throw new ResourceNotFoundException("task [{}] doesn't support cancellation", request.taskId()); throw new ResourceNotFoundException("task [{}] doesn't support cancellation", request.getTaskId());
} }
} }
} else { } else {
@ -113,14 +113,14 @@ public class TransportCancelTasksAction extends TransportTasksAction<Cancellable
@Override @Override
protected synchronized TaskInfo taskOperation(CancelTasksRequest request, CancellableTask cancellableTask) { protected synchronized TaskInfo taskOperation(CancelTasksRequest request, CancellableTask cancellableTask) {
final BanLock banLock = new BanLock(nodes -> removeBanOnNodes(cancellableTask, nodes)); final BanLock banLock = new BanLock(nodes -> removeBanOnNodes(cancellableTask, nodes));
Set<String> childNodes = taskManager.cancel(cancellableTask, request.reason(), banLock::onTaskFinished); Set<String> childNodes = taskManager.cancel(cancellableTask, request.getReason(), banLock::onTaskFinished);
if (childNodes != null) { if (childNodes != null) {
if (childNodes.isEmpty()) { if (childNodes.isEmpty()) {
logger.trace("cancelling task {} with no children", cancellableTask.getId()); logger.trace("cancelling task {} with no children", cancellableTask.getId());
return cancellableTask.taskInfo(clusterService.localNode(), false); return cancellableTask.taskInfo(clusterService.localNode(), false);
} else { } else {
logger.trace("cancelling task {} with children on nodes [{}]", cancellableTask.getId(), childNodes); logger.trace("cancelling task {} with children on nodes [{}]", cancellableTask.getId(), childNodes);
setBanOnNodes(request.reason(), cancellableTask, childNodes, banLock); setBanOnNodes(request.getReason(), cancellableTask, childNodes, banLock);
return cancellableTask.taskInfo(clusterService.localNode(), false); return cancellableTask.taskInfo(clusterService.localNode(), false);
} }
} else { } else {

View File
public class ListTasksRequest extends BaseTasksRequest<ListTasksRequest> { public class ListTasksRequest extends BaseTasksRequest<ListTasksRequest> {
private boolean detailed = false; private boolean detailed = false;
private boolean waitForCompletion = false;
/** /**
* Should the detailed task information be returned. * Should the detailed task information be returned.
*/ */
public boolean detailed() { public boolean getDetailed() {
return this.detailed; return this.detailed;
} }
/** /**
* Should the detailed task information be returned. * Should the detailed task information be returned.
*/ */
public ListTasksRequest detailed(boolean detailed) { public ListTasksRequest setDetailed(boolean detailed) {
this.detailed = detailed; this.detailed = detailed;
return this; return this;
} }
/**
* Should this request wait for all found tasks to complete?
*/
public boolean getWaitForCompletion() {
return waitForCompletion;
}
/**
* Should this request wait for all found tasks to complete?
*/
public ListTasksRequest setWaitForCompletion(boolean waitForCompletion) {
this.waitForCompletion = waitForCompletion;
return this;
}
@Override @Override
public void readFrom(StreamInput in) throws IOException { public void readFrom(StreamInput in) throws IOException {
super.readFrom(in); super.readFrom(in);
detailed = in.readBoolean(); detailed = in.readBoolean();
waitForCompletion = in.readBoolean();
} }
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out); super.writeTo(out);
out.writeBoolean(detailed); out.writeBoolean(detailed);
out.writeBoolean(waitForCompletion);
} }
} }
View File
@ -35,7 +35,15 @@ public class ListTasksRequestBuilder extends TasksRequestBuilder<ListTasksReques
* Should detailed task information be returned. * Should detailed task information be returned.
*/ */
public ListTasksRequestBuilder setDetailed(boolean detailed) { public ListTasksRequestBuilder setDetailed(boolean detailed) {
request.detailed(detailed); request.setDetailed(detailed);
return this;
}
/**
* Should this request wait for all found tasks to complete?
*/
public final ListTasksRequestBuilder setWaitForCompletion(boolean waitForCompletion) {
request.setWaitForCompletion(waitForCompletion);
return this; return this;
} }
} }
View File
@ -19,28 +19,36 @@
package org.elasticsearch.action.admin.cluster.node.tasks.list; package org.elasticsearch.action.admin.cluster.node.tasks.list;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.action.support.tasks.TransportTasksAction;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportService;
import java.io.IOException; import java.io.IOException;
import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.function.Consumer;
import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
/** /**
* *
*/ */
public class TransportListTasksAction extends TransportTasksAction<Task, ListTasksRequest, ListTasksResponse, TaskInfo> { public class TransportListTasksAction extends TransportTasksAction<Task, ListTasksRequest, ListTasksResponse, TaskInfo> {
private static final TimeValue WAIT_FOR_COMPLETION_POLL = timeValueMillis(100);
private static final TimeValue DEFAULT_WAIT_FOR_COMPLETION_TIMEOUT = timeValueSeconds(30);
@Inject @Inject
public TransportListTasksAction(Settings settings, ClusterName clusterName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { public TransportListTasksAction(Settings settings, ClusterName clusterName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
@ -59,7 +67,34 @@ public class TransportListTasksAction extends TransportTasksAction<Task, ListTas
@Override @Override
protected TaskInfo taskOperation(ListTasksRequest request, Task task) { protected TaskInfo taskOperation(ListTasksRequest request, Task task) {
return task.taskInfo(clusterService.localNode(), request.detailed()); return task.taskInfo(clusterService.localNode(), request.getDetailed());
}
@Override
protected void processTasks(ListTasksRequest request, Consumer<Task> operation) {
if (false == request.getWaitForCompletion()) {
super.processTasks(request, operation);
return;
}
// If we should wait for completion then we have to intercept every found task and wait for it to leave the manager.
TimeValue timeout = request.getTimeout();
if (timeout == null) {
timeout = DEFAULT_WAIT_FOR_COMPLETION_TIMEOUT;
}
long timeoutTime = System.nanoTime() + timeout.nanos();
super.processTasks(request, operation.andThen((Task t) -> {
while (System.nanoTime() - timeoutTime < 0) {
if (taskManager.getTask(t.getId()) == null) {
return;
}
try {
Thread.sleep(WAIT_FOR_COMPLETION_POLL.millis());
} catch (InterruptedException e) {
throw new ElasticsearchException("Interrupted waiting for completion of [{}]", e, t);
}
}
throw new ElasticsearchTimeoutException("Timed out waiting for completion of [{}]", t);
}));
} }
@Override @Override
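Tying the new flag to the polling logic above: with setWaitForCompletion(true) the action re-checks each matched task every 100 ms until it disappears from the task manager, giving up after the request timeout (30 seconds when none is set). A hedged client-side sketch; setDetailed() and setWaitForCompletion() are from this diff, the rest is assumed context:

// Illustrative only: list matching tasks and block until they finish or the wait times out.
ListTasksRequest request = new ListTasksRequest()
        .setDetailed(true)             // include per-task detail in the response
        .setWaitForCompletion(true);   // poll every 100 ms, up to the request timeout (default 30 s here)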
View File
@ -22,12 +22,12 @@ package org.elasticsearch.action.admin.cluster.repositories.delete;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.RepositoriesService;
View File
@ -22,7 +22,6 @@ package org.elasticsearch.action.admin.cluster.repositories.get;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
@ -30,6 +29,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.metadata.RepositoriesMetaData; import org.elasticsearch.cluster.metadata.RepositoriesMetaData;
import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
View File
@ -22,12 +22,12 @@ package org.elasticsearch.action.admin.cluster.repositories.put;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.RepositoriesService;
View File
@ -23,11 +23,11 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.RepositoriesService;
View File
@ -23,7 +23,6 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
@ -31,6 +30,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.routing.allocation.RoutingExplanations; import org.elasticsearch.cluster.routing.allocation.RoutingExplanations;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Priority; import org.elasticsearch.common.Priority;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
View File
@ -24,7 +24,6 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
@ -33,6 +32,7 @@ import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Priority; import org.elasticsearch.common.Priority;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
View File
@ -67,7 +67,7 @@ public class ClusterSearchShardsGroup implements Streamable, ToXContent {
@Override @Override
public void readFrom(StreamInput in) throws IOException { public void readFrom(StreamInput in) throws IOException {
index = Index.readIndex(in); index = new Index(in);
shardId = in.readVInt(); shardId = in.readVInt();
shards = new ShardRouting[in.readVInt()]; shards = new ShardRouting[in.readVInt()];
for (int i = 0; i < shards.length; i++) { for (int i = 0; i < shards.length; i++) {
View File
@ -22,7 +22,6 @@ package org.elasticsearch.action.admin.cluster.shards;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
@ -31,6 +30,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index; import org.elasticsearch.index.Index;
@ -59,7 +59,7 @@ public class TransportClusterSearchShardsAction extends TransportMasterNodeReadA
@Override @Override
protected ClusterBlockException checkBlock(ClusterSearchShardsRequest request, ClusterState state) { protected ClusterBlockException checkBlock(ClusterSearchShardsRequest request, ClusterState state) {
return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndices(state, request)); return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndexNames(state, request));
} }
@Override @Override
@ -70,7 +70,7 @@ public class TransportClusterSearchShardsAction extends TransportMasterNodeReadA
@Override @Override
protected void masterOperation(final ClusterSearchShardsRequest request, final ClusterState state, final ActionListener<ClusterSearchShardsResponse> listener) { protected void masterOperation(final ClusterSearchShardsRequest request, final ClusterState state, final ActionListener<ClusterSearchShardsResponse> listener) {
ClusterState clusterState = clusterService.state(); ClusterState clusterState = clusterService.state();
String[] concreteIndices = indexNameExpressionResolver.concreteIndices(clusterState, request); String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, request);
Map<String, Set<String>> routingMap = indexNameExpressionResolver.resolveSearchRouting(state, request.routing(), request.indices()); Map<String, Set<String>> routingMap = indexNameExpressionResolver.resolveSearchRouting(state, request.routing(), request.indices());
Set<String> nodeIds = new HashSet<>(); Set<String> nodeIds = new HashSet<>();
GroupShardsIterator groupShardsIterator = clusterService.operationRouting().searchShards(clusterState, concreteIndices, routingMap, request.preference()); GroupShardsIterator groupShardsIterator = clusterService.operationRouting().searchShards(clusterState, concreteIndices, routingMap, request.preference());
View File
@ -22,12 +22,12 @@ package org.elasticsearch.action.admin.cluster.snapshots.create;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.metadata.SnapshotId;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.snapshots.SnapshotInfo;
@ -66,7 +66,7 @@ public class TransportCreateSnapshotAction extends TransportMasterNodeAction<Cre
if (clusterBlockException != null) { if (clusterBlockException != null) {
return clusterBlockException; return clusterBlockException;
} }
return state.blocks().indicesBlockedException(ClusterBlockLevel.READ, indexNameExpressionResolver.concreteIndices(state, request)); return state.blocks().indicesBlockedException(ClusterBlockLevel.READ, indexNameExpressionResolver.concreteIndexNames(state, request));
} }
@Override @Override
View File
@ -22,12 +22,12 @@ package org.elasticsearch.action.admin.cluster.snapshots.delete;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.metadata.SnapshotId;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.snapshots.SnapshotsService;


@ -22,12 +22,12 @@ package org.elasticsearch.action.admin.cluster.snapshots.get;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.metadata.SnapshotId;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;


@ -22,12 +22,12 @@ package org.elasticsearch.action.admin.cluster.snapshots.restore;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.metadata.SnapshotId;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.snapshots.RestoreInfo; import org.elasticsearch.snapshots.RestoreInfo;


@ -28,10 +28,10 @@ import org.elasticsearch.action.support.nodes.BaseNodesRequest;
import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.action.support.nodes.BaseNodesResponse;
import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.action.support.nodes.TransportNodesAction;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.metadata.SnapshotId;
import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;


@ -24,13 +24,13 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.cluster.SnapshotsInProgress;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.metadata.SnapshotId;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;


@ -24,7 +24,6 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
@ -32,6 +31,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.metadata.MetaData.Custom; import org.elasticsearch.cluster.metadata.MetaData.Custom;
import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
@ -106,7 +106,7 @@ public class TransportClusterStateAction extends TransportMasterNodeReadAction<C
} }
if (request.indices().length > 0) { if (request.indices().length > 0) {
String[] indices = indexNameExpressionResolver.concreteIndices(currentState, request); String[] indices = indexNameExpressionResolver.concreteIndexNames(currentState, request);
for (String filteredIndex : indices) { for (String filteredIndex : indices) {
IndexMetaData indexMetaData = currentState.metaData().index(filteredIndex); IndexMetaData indexMetaData = currentState.metaData().index(filteredIndex);
if (indexMetaData != null) { if (indexMetaData != null) {


@ -31,7 +31,7 @@ import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.index.cache.query.QueryCacheStats; import org.elasticsearch.index.cache.query.QueryCacheStats;
import org.elasticsearch.index.engine.SegmentsStats; import org.elasticsearch.index.engine.SegmentsStats;
import org.elasticsearch.index.fielddata.FieldDataStats; import org.elasticsearch.index.fielddata.FieldDataStats;
import org.elasticsearch.index.percolator.PercolateStats; import org.elasticsearch.index.percolator.PercolatorQueryCacheStats;
import org.elasticsearch.index.shard.DocsStats; import org.elasticsearch.index.shard.DocsStats;
import org.elasticsearch.index.store.StoreStats; import org.elasticsearch.index.store.StoreStats;
import org.elasticsearch.search.suggest.completion.CompletionStats; import org.elasticsearch.search.suggest.completion.CompletionStats;
@ -48,7 +48,7 @@ public class ClusterStatsIndices implements ToXContent, Streamable {
private QueryCacheStats queryCache; private QueryCacheStats queryCache;
private CompletionStats completion; private CompletionStats completion;
private SegmentsStats segments; private SegmentsStats segments;
private PercolateStats percolate; private PercolatorQueryCacheStats percolatorCache;
private ClusterStatsIndices() { private ClusterStatsIndices() {
} }
@ -62,7 +62,7 @@ public class ClusterStatsIndices implements ToXContent, Streamable {
this.queryCache = new QueryCacheStats(); this.queryCache = new QueryCacheStats();
this.completion = new CompletionStats(); this.completion = new CompletionStats();
this.segments = new SegmentsStats(); this.segments = new SegmentsStats();
this.percolate = new PercolateStats(); this.percolatorCache = new PercolatorQueryCacheStats();
for (ClusterStatsNodeResponse r : nodeResponses) { for (ClusterStatsNodeResponse r : nodeResponses) {
for (org.elasticsearch.action.admin.indices.stats.ShardStats shardStats : r.shardsStats()) { for (org.elasticsearch.action.admin.indices.stats.ShardStats shardStats : r.shardsStats()) {
@ -85,7 +85,7 @@ public class ClusterStatsIndices implements ToXContent, Streamable {
queryCache.add(shardCommonStats.queryCache); queryCache.add(shardCommonStats.queryCache);
completion.add(shardCommonStats.completion); completion.add(shardCommonStats.completion);
segments.add(shardCommonStats.segments); segments.add(shardCommonStats.segments);
percolate.add(shardCommonStats.percolate); percolatorCache.add(shardCommonStats.percolatorCache);
} }
} }
@ -128,8 +128,8 @@ public class ClusterStatsIndices implements ToXContent, Streamable {
return segments; return segments;
} }
public PercolateStats getPercolate() { public PercolatorQueryCacheStats getPercolatorCache() {
return percolate; return percolatorCache;
} }
@Override @Override
@ -142,7 +142,7 @@ public class ClusterStatsIndices implements ToXContent, Streamable {
queryCache = QueryCacheStats.readQueryCacheStats(in); queryCache = QueryCacheStats.readQueryCacheStats(in);
completion = CompletionStats.readCompletionStats(in); completion = CompletionStats.readCompletionStats(in);
segments = SegmentsStats.readSegmentsStats(in); segments = SegmentsStats.readSegmentsStats(in);
percolate = PercolateStats.readPercolateStats(in); percolatorCache = PercolatorQueryCacheStats.readPercolateStats(in);
} }
@Override @Override
@ -155,7 +155,7 @@ public class ClusterStatsIndices implements ToXContent, Streamable {
queryCache.writeTo(out); queryCache.writeTo(out);
completion.writeTo(out); completion.writeTo(out);
segments.writeTo(out); segments.writeTo(out);
percolate.writeTo(out); percolatorCache.writeTo(out);
} }
public static ClusterStatsIndices readIndicesStats(StreamInput in) throws IOException { public static ClusterStatsIndices readIndicesStats(StreamInput in) throws IOException {
@ -178,7 +178,7 @@ public class ClusterStatsIndices implements ToXContent, Streamable {
queryCache.toXContent(builder, params); queryCache.toXContent(builder, params);
completion.toXContent(builder, params); completion.toXContent(builder, params);
segments.toXContent(builder, params); segments.toXContent(builder, params);
percolate.toXContent(builder, params); percolatorCache.toXContent(builder, params);
return builder; return builder;
} }
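The ClusterStatsIndices hunks above swap PercolateStats for PercolatorQueryCacheStats while keeping the same accumulate/serialize shape. As a rough sketch of that shape (the class and field names below are illustrative only and are not part of this commit), such a stats holder typically looks like:

import java.io.IOException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

class ExampleCacheStats {
    private long count;
    private long sizeInBytes;

    // Fold one shard's stats into the running cluster-wide totals,
    // mirroring the per-shard add(...) calls in ClusterStatsIndices.
    void add(ExampleCacheStats other) {
        count += other.count;
        sizeInBytes += other.sizeInBytes;
    }

    void readFrom(StreamInput in) throws IOException {
        count = in.readVLong();
        sizeInBytes = in.readVLong();
    }

    void writeTo(StreamOutput out) throws IOException {
        out.writeVLong(count);
        out.writeVLong(sizeInBytes);
    }
}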


@ -28,10 +28,10 @@ import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.nodes.BaseNodeRequest; import org.elasticsearch.action.support.nodes.BaseNodeRequest;
import org.elasticsearch.action.support.nodes.TransportNodesAction; import org.elasticsearch.action.support.nodes.TransportNodesAction;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.health.ClusterStateHealth; import org.elasticsearch.cluster.health.ClusterStateHealth;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
@ -56,7 +56,7 @@ public class TransportClusterStatsAction extends TransportNodesAction<ClusterSta
private static final CommonStatsFlags SHARD_STATS_FLAGS = new CommonStatsFlags(CommonStatsFlags.Flag.Docs, CommonStatsFlags.Flag.Store, private static final CommonStatsFlags SHARD_STATS_FLAGS = new CommonStatsFlags(CommonStatsFlags.Flag.Docs, CommonStatsFlags.Flag.Store,
CommonStatsFlags.Flag.FieldData, CommonStatsFlags.Flag.QueryCache, CommonStatsFlags.Flag.Completion, CommonStatsFlags.Flag.Segments, CommonStatsFlags.Flag.FieldData, CommonStatsFlags.Flag.QueryCache, CommonStatsFlags.Flag.Completion, CommonStatsFlags.Flag.Segments,
CommonStatsFlags.Flag.Percolate); CommonStatsFlags.Flag.PercolatorCache);
private final NodeService nodeService; private final NodeService nodeService;
private final IndicesService indicesService; private final IndicesService indicesService;
@ -98,14 +98,14 @@ public class TransportClusterStatsAction extends TransportNodesAction<ClusterSta
@Override @Override
protected ClusterStatsNodeResponse nodeOperation(ClusterStatsNodeRequest nodeRequest) { protected ClusterStatsNodeResponse nodeOperation(ClusterStatsNodeRequest nodeRequest) {
NodeInfo nodeInfo = nodeService.info(false, true, false, true, false, true, false, true); NodeInfo nodeInfo = nodeService.info(false, true, false, true, false, true, false, true, false);
NodeStats nodeStats = nodeService.stats(CommonStatsFlags.NONE, false, true, true, false, true, false, false, false, false, false); NodeStats nodeStats = nodeService.stats(CommonStatsFlags.NONE, false, true, true, false, true, false, false, false, false, false, false);
List<ShardStats> shardsStats = new ArrayList<>(); List<ShardStats> shardsStats = new ArrayList<>();
for (IndexService indexService : indicesService) { for (IndexService indexService : indicesService) {
for (IndexShard indexShard : indexService) { for (IndexShard indexShard : indexService) {
if (indexShard.routingEntry() != null && indexShard.routingEntry().active()) { if (indexShard.routingEntry() != null && indexShard.routingEntry().active()) {
// only report on fully started shards // only report on fully started shards
shardsStats.add(new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexShard, SHARD_STATS_FLAGS), indexShard.commitStats())); shardsStats.add(new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexService.cache().getPercolatorQueryCache(), indexShard, SHARD_STATS_FLAGS), indexShard.commitStats()));
} }
} }
} }
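The nodeOperation above only includes shards whose routing entry is active, so initializing or relocating copies never skew cluster stats. Stripped of the CommonStats plumbing (the buildShardStats helper below is hypothetical), the filtering loop amounts to:

List<ShardStats> shardsStats = new ArrayList<>();
for (IndexService indexService : indicesService) {
    for (IndexShard indexShard : indexService) {
        // Only fully started shards are reported; unassigned or recovering copies are skipped.
        if (indexShard.routingEntry() != null && indexShard.routingEntry().active()) {
            shardsStats.add(buildShardStats(indexShard)); // hypothetical helper, not in this commit
        }
    }
}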


@ -22,11 +22,11 @@ package org.elasticsearch.action.admin.cluster.tasks;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.cluster.service.PendingClusterTask; import org.elasticsearch.cluster.service.PendingClusterTask;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;


@ -23,7 +23,6 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
@ -31,6 +30,7 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.AliasAction; import org.elasticsearch.cluster.metadata.AliasAction;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaDataIndexAliasesService; import org.elasticsearch.cluster.metadata.MetaDataIndexAliasesService;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.action.admin.indices.alias.delete.AliasesNotFoundException; import org.elasticsearch.rest.action.admin.indices.alias.delete.AliasesNotFoundException;
@ -90,11 +90,11 @@ public class TransportIndicesAliasesAction extends TransportMasterNodeAction<Ind
Set<String> aliases = new HashSet<>(); Set<String> aliases = new HashSet<>();
for (AliasActions action : actions) { for (AliasActions action : actions) {
//expand indices //expand indices
String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request.indicesOptions(), action.indices()); String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request.indicesOptions(), action.indices());
//collect the aliases //collect the aliases
Collections.addAll(aliases, action.aliases()); Collections.addAll(aliases, action.aliases());
for (String index : concreteIndices) { for (String index : concreteIndices) {
for (String alias : action.concreteAliases(state.metaData(), index)) { for (String alias : action.concreteAliases(state.metaData(), index)) {
AliasAction finalAction = new AliasAction(action.aliasAction()); AliasAction finalAction = new AliasAction(action.aliasAction());
finalAction.index(index); finalAction.index(index);
finalAction.alias(alias); finalAction.alias(alias);


@ -22,11 +22,11 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
@ -50,7 +50,7 @@ public class TransportAliasesExistAction extends TransportMasterNodeReadAction<G
@Override @Override
protected ClusterBlockException checkBlock(GetAliasesRequest request, ClusterState state) { protected ClusterBlockException checkBlock(GetAliasesRequest request, ClusterState state) {
return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndices(state, request)); return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndexNames(state, request));
} }
@Override @Override
@ -60,7 +60,7 @@ public class TransportAliasesExistAction extends TransportMasterNodeReadAction<G
@Override @Override
protected void masterOperation(GetAliasesRequest request, ClusterState state, ActionListener<AliasesExistResponse> listener) { protected void masterOperation(GetAliasesRequest request, ClusterState state, ActionListener<AliasesExistResponse> listener) {
String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request);
boolean result = state.metaData().hasAliases(request.aliases(), concreteIndices); boolean result = state.metaData().hasAliases(request.aliases(), concreteIndices);
listener.onResponse(new AliasesExistResponse(result)); listener.onResponse(new AliasesExistResponse(result));
} }


@ -21,12 +21,12 @@ package org.elasticsearch.action.admin.indices.alias.get;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.AliasMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
@ -53,7 +53,7 @@ public class TransportGetAliasesAction extends TransportMasterNodeReadAction<Get
@Override @Override
protected ClusterBlockException checkBlock(GetAliasesRequest request, ClusterState state) { protected ClusterBlockException checkBlock(GetAliasesRequest request, ClusterState state) {
return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndices(state, request)); return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndexNames(state, request));
} }
@Override @Override
@ -63,7 +63,7 @@ public class TransportGetAliasesAction extends TransportMasterNodeReadAction<Get
@Override @Override
protected void masterOperation(GetAliasesRequest request, ClusterState state, ActionListener<GetAliasesResponse> listener) { protected void masterOperation(GetAliasesRequest request, ClusterState state, ActionListener<GetAliasesResponse> listener) {
String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request);
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
ImmutableOpenMap<String, List<AliasMetaData>> result = (ImmutableOpenMap) state.metaData().findAliases(request.aliases(), concreteIndices); ImmutableOpenMap<String, List<AliasMetaData>> result = (ImmutableOpenMap) state.metaData().findAliases(request.aliases(), concreteIndices);
listener.onResponse(new GetAliasesResponse(result)); listener.onResponse(new GetAliasesResponse(result));


@ -32,11 +32,11 @@ import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.routing.ShardsIterator; import org.elasticsearch.cluster.routing.ShardsIterator;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.FastStringReader; import org.elasticsearch.common.io.FastStringReader;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;


@ -22,13 +22,13 @@ package org.elasticsearch.action.admin.indices.cache.clear;
import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardsIterator; import org.elasticsearch.cluster.routing.ShardsIterator;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
@ -77,7 +77,7 @@ public class TransportClearIndicesCacheAction extends TransportBroadcastByNodeAc
@Override @Override
protected EmptyResult shardOperation(ClearIndicesCacheRequest request, ShardRouting shardRouting) { protected EmptyResult shardOperation(ClearIndicesCacheRequest request, ShardRouting shardRouting) {
IndexService service = indicesService.indexService(shardRouting.getIndexName()); IndexService service = indicesService.indexService(shardRouting.index());
if (service != null) { if (service != null) {
IndexShard shard = service.getShardOrNull(shardRouting.id()); IndexShard shard = service.getShardOrNull(shardRouting.id());
boolean clearedAtLeastOne = false; boolean clearedAtLeastOne = false;


@ -23,17 +23,19 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.action.support.DestructiveOperations;
import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaDataIndexStateService; import org.elasticsearch.cluster.metadata.MetaDataIndexStateService;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportService;
@ -46,7 +48,8 @@ public class TransportCloseIndexAction extends TransportMasterNodeAction<CloseIn
private final MetaDataIndexStateService indexStateService; private final MetaDataIndexStateService indexStateService;
private final DestructiveOperations destructiveOperations; private final DestructiveOperations destructiveOperations;
private volatile boolean closeIndexEnabled; private volatile boolean closeIndexEnabled;
public static final Setting<Boolean> CLUSTER_INDICES_CLOSE_ENABLE_SETTING = Setting.boolSetting("cluster.indices.close.enable", true, true, Setting.Scope.CLUSTER); public static final Setting<Boolean> CLUSTER_INDICES_CLOSE_ENABLE_SETTING =
Setting.boolSetting("cluster.indices.close.enable", true, Property.Dynamic, Property.NodeScope);
@Inject @Inject
public TransportCloseIndexAction(Settings settings, TransportService transportService, ClusterService clusterService, public TransportCloseIndexAction(Settings settings, TransportService transportService, ClusterService clusterService,
@ -86,12 +89,12 @@ public class TransportCloseIndexAction extends TransportMasterNodeAction<CloseIn
@Override @Override
protected ClusterBlockException checkBlock(CloseIndexRequest request, ClusterState state) { protected ClusterBlockException checkBlock(CloseIndexRequest request, ClusterState state) {
return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, indexNameExpressionResolver.concreteIndices(state, request)); return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, indexNameExpressionResolver.concreteIndexNames(state, request));
} }
@Override @Override
protected void masterOperation(final CloseIndexRequest request, final ClusterState state, final ActionListener<CloseIndexResponse> listener) { protected void masterOperation(final CloseIndexRequest request, final ClusterState state, final ActionListener<CloseIndexResponse> listener) {
final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); final Index[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request);
CloseIndexClusterStateUpdateRequest updateRequest = new CloseIndexClusterStateUpdateRequest() CloseIndexClusterStateUpdateRequest updateRequest = new CloseIndexClusterStateUpdateRequest()
.ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout()) .ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout())
.indices(concreteIndices); .indices(concreteIndices);
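The setting definition above moves from the old boolean-flag/Scope form to the Property-based form. A minimal usage sketch, assuming the same setting and a plain Settings instance (the surrounding class is omitted):

// Dynamic: updatable at runtime via cluster settings; NodeScope: read from node-level settings.
public static final Setting<Boolean> CLUSTER_INDICES_CLOSE_ENABLE_SETTING =
        Setting.boolSetting("cluster.indices.close.enable", true, Property.Dynamic, Property.NodeScope);

// Resolving the current value (falls back to the default of true when unset):
boolean closeIndexEnabled = CLUSTER_INDICES_CLOSE_ENABLE_SETTING.get(settings);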


@ -22,13 +22,13 @@ package org.elasticsearch.action.admin.indices.create;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService; import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.indices.IndexAlreadyExistsException; import org.elasticsearch.indices.IndexAlreadyExistsException;


@ -23,18 +23,23 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.action.support.DestructiveOperations;
import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaDataDeleteIndexService; import org.elasticsearch.cluster.metadata.MetaDataDeleteIndexService;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportService;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
/** /**
* Delete index action. * Delete index action.
*/ */
@ -70,13 +75,13 @@ public class TransportDeleteIndexAction extends TransportMasterNodeAction<Delete
@Override @Override
protected ClusterBlockException checkBlock(DeleteIndexRequest request, ClusterState state) { protected ClusterBlockException checkBlock(DeleteIndexRequest request, ClusterState state) {
return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, indexNameExpressionResolver.concreteIndices(state, request)); return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, indexNameExpressionResolver.concreteIndexNames(state, request));
} }
@Override @Override
protected void masterOperation(final DeleteIndexRequest request, final ClusterState state, final ActionListener<DeleteIndexResponse> listener) { protected void masterOperation(final DeleteIndexRequest request, final ClusterState state, final ActionListener<DeleteIndexResponse> listener) {
final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request); final Set<Index> concreteIndices = new HashSet<>(Arrays.asList(indexNameExpressionResolver.concreteIndices(state, request)));
if (concreteIndices.length == 0) { if (concreteIndices.isEmpty()) {
listener.onResponse(new DeleteIndexResponse(true)); listener.onResponse(new DeleteIndexResponse(true));
return; return;
} }
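This file shows the split this commit makes between name-based and object-based resolution: concreteIndexNames(...) keeps returning String[] for block checks, while concreteIndices(...) now returns Index[] objects, which also carry the index UUID, for the actual state update. A side-by-side sketch, assuming a resolver and cluster state as in the hunks above:

// Plain names are enough for the block check.
String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request);
ClusterBlockException blocked =
        state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, indexNames);

// The delete itself works on Index objects (name plus UUID).
Set<Index> concreteIndices =
        new HashSet<>(Arrays.asList(indexNameExpressionResolver.concreteIndices(state, request)));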


@ -23,11 +23,11 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexNotFoundException;
@ -60,7 +60,7 @@ public class TransportIndicesExistsAction extends TransportMasterNodeReadAction<
protected ClusterBlockException checkBlock(IndicesExistsRequest request, ClusterState state) { protected ClusterBlockException checkBlock(IndicesExistsRequest request, ClusterState state) {
//make sure through indices options that the concrete indices call never throws IndexMissingException //make sure through indices options that the concrete indices call never throws IndexMissingException
IndicesOptions indicesOptions = IndicesOptions.fromOptions(true, true, request.indicesOptions().expandWildcardsOpen(), request.indicesOptions().expandWildcardsClosed()); IndicesOptions indicesOptions = IndicesOptions.fromOptions(true, true, request.indicesOptions().expandWildcardsOpen(), request.indicesOptions().expandWildcardsClosed());
return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndices(state, indicesOptions, request.indices())); return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndexNames(state, indicesOptions, request.indices()));
} }
@Override @Override
@ -68,7 +68,7 @@ public class TransportIndicesExistsAction extends TransportMasterNodeReadAction<
boolean exists; boolean exists;
try { try {
// Similar as the previous behaviour, but now also aliases and wildcards are supported. // Similar as the previous behaviour, but now also aliases and wildcards are supported.
indexNameExpressionResolver.concreteIndices(state, request); indexNameExpressionResolver.concreteIndexNames(state, request);
exists = true; exists = true;
} catch (IndexNotFoundException e) { } catch (IndexNotFoundException e) {
exists = false; exists = false;
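The block check above deliberately builds lenient IndicesOptions so that resolving a missing index cannot throw there; existence is then derived from whether concreteIndexNames(...) throws IndexNotFoundException. For readability, the four positional booleans in that fromOptions(...) call decode as follows (ordering per the IndicesOptions factory method):

IndicesOptions lenientOptions = IndicesOptions.fromOptions(
        true,                                             // ignoreUnavailable: missing concrete indices are not an error
        true,                                             // allowNoIndices: a wildcard matching nothing is not an error
        request.indicesOptions().expandWildcardsOpen(),   // expand wildcards to open indices?
        request.indicesOptions().expandWildcardsClosed()  // expand wildcards to closed indices?
);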


@ -21,12 +21,12 @@ package org.elasticsearch.action.admin.indices.exists.types;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
@ -57,12 +57,12 @@ public class TransportTypesExistsAction extends TransportMasterNodeReadAction<Ty
@Override @Override
protected ClusterBlockException checkBlock(TypesExistsRequest request, ClusterState state) { protected ClusterBlockException checkBlock(TypesExistsRequest request, ClusterState state) {
return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndices(state, request)); return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndexNames(state, request));
} }
@Override @Override
protected void masterOperation(final TypesExistsRequest request, final ClusterState state, final ActionListener<TypesExistsResponse> listener) { protected void masterOperation(final TypesExistsRequest request, final ClusterState state, final ActionListener<TypesExistsResponse> listener) {
String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request.indicesOptions(), request.indices()); String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request.indicesOptions(), request.indices());
if (concreteIndices.length == 0) { if (concreteIndices.length == 0) {
listener.onResponse(new TypesExistsResponse(false)); listener.onResponse(new TypesExistsResponse(false));
return; return;


@ -23,8 +23,8 @@ import org.elasticsearch.action.ReplicationResponse;
import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.replication.TransportBroadcastReplicationAction; import org.elasticsearch.action.support.replication.TransportBroadcastReplicationAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardId;


@ -22,12 +22,11 @@ package org.elasticsearch.action.admin.indices.flush;
import org.elasticsearch.action.ReplicationResponse; import org.elasticsearch.action.ReplicationResponse;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.replication.TransportReplicationAction; import org.elasticsearch.action.support.replication.TransportReplicationAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
@ -46,10 +45,9 @@ public class TransportShardFlushAction extends TransportReplicationAction<ShardF
@Inject @Inject
public TransportShardFlushAction(Settings settings, TransportService transportService, ClusterService clusterService, public TransportShardFlushAction(Settings settings, TransportService transportService, ClusterService clusterService,
IndicesService indicesService, ThreadPool threadPool, ShardStateAction shardStateAction, IndicesService indicesService, ThreadPool threadPool, ShardStateAction shardStateAction,
MappingUpdatedAction mappingUpdatedAction, ActionFilters actionFilters, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
IndexNameExpressionResolver indexNameExpressionResolver) { super(settings, NAME, transportService, clusterService, indicesService, threadPool, shardStateAction,
super(settings, NAME, transportService, clusterService, indicesService, threadPool, shardStateAction, mappingUpdatedAction, actionFilters, indexNameExpressionResolver, ShardFlushRequest::new, ShardFlushRequest::new, ThreadPool.Names.FLUSH);
actionFilters, indexNameExpressionResolver, ShardFlushRequest::new, ShardFlushRequest::new, ThreadPool.Names.FLUSH);
} }
@Override @Override


@ -22,13 +22,13 @@ package org.elasticsearch.action.admin.indices.forcemerge;
import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardsIterator; import org.elasticsearch.cluster.routing.ShardsIterator;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;


@ -24,7 +24,6 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.get.GetIndexRequest.Feature; import org.elasticsearch.action.admin.indices.get.GetIndexRequest.Feature;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.info.TransportClusterInfoAction; import org.elasticsearch.action.support.master.info.TransportClusterInfoAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
@ -32,6 +31,7 @@ import org.elasticsearch.cluster.metadata.AliasMetaData;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
@ -60,7 +60,7 @@ public class TransportGetIndexAction extends TransportClusterInfoAction<GetIndex
@Override @Override
protected ClusterBlockException checkBlock(GetIndexRequest request, ClusterState state) { protected ClusterBlockException checkBlock(GetIndexRequest request, ClusterState state) {
return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndices(state, request)); return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndexNames(state, request));
} }
@Override @Override


@ -22,9 +22,9 @@ package org.elasticsearch.action.admin.indices.mapping.get;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
@ -56,7 +56,7 @@ public class TransportGetFieldMappingsAction extends HandledTransportAction<GetF
@Override @Override
protected void doExecute(GetFieldMappingsRequest request, final ActionListener<GetFieldMappingsResponse> listener) { protected void doExecute(GetFieldMappingsRequest request, final ActionListener<GetFieldMappingsResponse> listener) {
ClusterState clusterState = clusterService.state(); ClusterState clusterState = clusterService.state();
String[] concreteIndices = indexNameExpressionResolver.concreteIndices(clusterState, request); String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, request);
final AtomicInteger indexCounter = new AtomicInteger(); final AtomicInteger indexCounter = new AtomicInteger();
final AtomicInteger completionCounter = new AtomicInteger(concreteIndices.length); final AtomicInteger completionCounter = new AtomicInteger(concreteIndices.length);
final AtomicReferenceArray<Object> indexResponses = new AtomicReferenceArray<>(concreteIndices.length); final AtomicReferenceArray<Object> indexResponses = new AtomicReferenceArray<>(concreteIndices.length);


@ -23,12 +23,12 @@ import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.routing.ShardsIterator; import org.elasticsearch.cluster.routing.ShardsIterator;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.regex.Regex;


@@ -22,12 +22,12 @@ package org.elasticsearch.action.admin.indices.mapping.get;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.info.TransportClusterInfoAction;
-import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.MappingMetaData;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
@@ -52,7 +52,7 @@ public class TransportGetMappingsAction extends TransportClusterInfoAction<GetMa
     @Override
     protected ClusterBlockException checkBlock(GetMappingsRequest request, ClusterState state) {
-        return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndices(state, request));
+        return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndexNames(state, request));
     }
     @Override

View File

@@ -32,9 +32,12 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.Index;
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.Map;
+import java.util.Objects;
 import static org.elasticsearch.action.ValidateActions.addValidationError;
@@ -65,6 +68,7 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
     private String source;
     private boolean updateAllTypes = false;
+    private Index concreteIndex;
     public PutMappingRequest() {
     }
@@ -90,6 +94,10 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
         } else if (source.isEmpty()) {
             validationException = addValidationError("mapping source is empty", validationException);
         }
+        if (concreteIndex != null && (indices != null && indices.length > 0)) {
+            validationException = addValidationError("either concrete index or unresolved indices can be set, concrete index: ["
+                + concreteIndex + "] and indices: " + Arrays.asList(indices) , validationException);
+        }
         return validationException;
     }
@@ -102,6 +110,22 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
         return this;
     }
+    /**
+     * Sets a concrete index for this put mapping request.
+     */
+    public PutMappingRequest setConcreteIndex(Index index) {
+        Objects.requireNonNull(indices, "index must not be null");
+        this.concreteIndex = index;
+        return this;
+    }
+    /**
+     * Returns a concrete index for this mapping or <code>null</code> if no concrete index is defined
+     */
+    public Index getConcreteIndex() {
+        return concreteIndex;
+    }
     /**
      * The indices the mappings will be put.
      */
@@ -259,6 +283,7 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
         source = in.readString();
         updateAllTypes = in.readBoolean();
         readTimeout(in);
+        concreteIndex = in.readOptionalWritable(Index::new);
     }
     @Override
@@ -270,5 +295,6 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
         out.writeString(source);
         out.writeBoolean(updateAllTypes);
         writeTimeout(out);
+        out.writeOptionalWriteable(concreteIndex);
     }
 }
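For readers following the new concrete-index path on PutMappingRequest above, here is a minimal caller-side sketch (not part of this commit; the index name, UUID placeholder, and mapping JSON are made up for illustration). Per validate(), a request may carry either unresolved index expressions or a single concrete Index, but not both:

    // Hypothetical usage sketch; values are illustrative only.
    String mappingSource = "{\"properties\":{\"message\":{\"type\":\"string\"}}}";

    // Option 1: unresolved index expressions, resolved later on the master node.
    PutMappingRequest byExpression = new PutMappingRequest("logs-*")
            .type("event")
            .source(mappingSource);

    // Option 2: an already-resolved concrete index (e.g. handed over by another action);
    // indices must stay unset here or validate() adds an error.
    Index resolved = new Index("logs-2016.03.21", "_na_");   // uuid value is a placeholder
    PutMappingRequest byConcreteIndex = new PutMappingRequest()
            .type("event")
            .source(mappingSource);
    byConcreteIndex.setConcreteIndex(resolved);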

View File

@@ -23,6 +23,7 @@ import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.index.Index;
 import java.util.Map;
@@ -40,6 +41,11 @@ public class PutMappingRequestBuilder extends AcknowledgedRequestBuilder<PutMapp
         return this;
     }
+    public PutMappingRequestBuilder setConcreteIndex(Index index) {
+        request.setConcreteIndex(index);
+        return this;
+    }
     /**
      * Specifies what type of requested indices to ignore and wildcard indices expressions.
      * <p>

View File

@@ -22,15 +22,16 @@ package org.elasticsearch.action.admin.indices.mapping.put;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.TransportMasterNodeAction;
-import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.MetaDataMappingService;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexNotFoundException;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
@@ -63,13 +64,19 @@ public class TransportPutMappingAction extends TransportMasterNodeAction<PutMapp
     @Override
     protected ClusterBlockException checkBlock(PutMappingRequest request, ClusterState state) {
-        return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, indexNameExpressionResolver.concreteIndices(state, request));
+        String[] indices;
+        if (request.getConcreteIndex() == null) {
+            indices = indexNameExpressionResolver.concreteIndexNames(state, request);
+        } else {
+            indices = new String[] {request.getConcreteIndex().getName()};
+        }
+        return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, indices);
     }
     @Override
     protected void masterOperation(final PutMappingRequest request, final ClusterState state, final ActionListener<PutMappingResponse> listener) {
         try {
-            final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request);
+            final Index[] concreteIndices = request.getConcreteIndex() == null ? indexNameExpressionResolver.concreteIndices(state, request) : new Index[] {request.getConcreteIndex()};
             PutMappingClusterStateUpdateRequest updateRequest = new PutMappingClusterStateUpdateRequest()
                 .ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout())
                 .indices(concreteIndices).type(request.type())
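The checkBlock() change above reduces to: trust the request's concrete index when one is set, otherwise resolve the expression against the cluster state. A standalone restatement of that branch, assuming the same types as in the hunk (the helper name is ours, not from the commit):

    // Hypothetical helper mirroring the block-check branch above.
    static String[] indicesForBlockCheck(PutMappingRequest request, ClusterState state,
                                         IndexNameExpressionResolver resolver) {
        if (request.getConcreteIndex() == null) {
            // no concrete index on the request: resolve wildcards/aliases as before
            return resolver.concreteIndexNames(state, request);
        }
        // concrete index already chosen upstream: use its name directly
        return new String[] {request.getConcreteIndex().getName()};
    }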

View File

@@ -23,15 +23,16 @@ import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.DestructiveOperations;
 import org.elasticsearch.action.support.master.TransportMasterNodeAction;
-import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.MetaDataIndexStateService;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.Index;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
@@ -73,12 +74,12 @@ public class TransportOpenIndexAction extends TransportMasterNodeAction<OpenInde
     @Override
     protected ClusterBlockException checkBlock(OpenIndexRequest request, ClusterState state) {
-        return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, indexNameExpressionResolver.concreteIndices(state, request));
+        return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, indexNameExpressionResolver.concreteIndexNames(state, request));
     }
     @Override
     protected void masterOperation(final OpenIndexRequest request, final ClusterState state, final ActionListener<OpenIndexResponse> listener) {
-        final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request);
+        final Index[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request);
         OpenIndexClusterStateUpdateRequest updateRequest = new OpenIndexClusterStateUpdateRequest()
             .ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout())
             .indices(concreteIndices);

View File

@@ -22,13 +22,13 @@ package org.elasticsearch.action.admin.indices.recovery;
 import org.elasticsearch.action.ShardOperationFailedException;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction;
-import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.ShardsIterator;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.settings.Settings;

View File

@@ -24,8 +24,8 @@ import org.elasticsearch.action.ShardOperationFailedException;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.replication.BasicReplicationRequest;
 import org.elasticsearch.action.support.replication.TransportBroadcastReplicationAction;
-import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.shard.ShardId;

View File

@@ -23,12 +23,11 @@ import org.elasticsearch.action.ReplicationResponse;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.replication.BasicReplicationRequest;
 import org.elasticsearch.action.support.replication.TransportReplicationAction;
-import org.elasticsearch.cluster.ClusterService;
-import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
 import org.elasticsearch.cluster.action.shard.ShardStateAction;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
@@ -48,10 +47,9 @@ public class TransportShardRefreshAction extends TransportReplicationAction<Basi
     @Inject
     public TransportShardRefreshAction(Settings settings, TransportService transportService, ClusterService clusterService,
                                        IndicesService indicesService, ThreadPool threadPool, ShardStateAction shardStateAction,
-                                       MappingUpdatedAction mappingUpdatedAction, ActionFilters actionFilters,
-                                       IndexNameExpressionResolver indexNameExpressionResolver) {
-        super(settings, NAME, transportService, clusterService, indicesService, threadPool, shardStateAction, mappingUpdatedAction,
-            actionFilters, indexNameExpressionResolver, BasicReplicationRequest::new, BasicReplicationRequest::new, ThreadPool.Names.REFRESH);
+                                       ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
+        super(settings, NAME, transportService, clusterService, indicesService, threadPool, shardStateAction,
+            actionFilters, indexNameExpressionResolver, BasicReplicationRequest::new, BasicReplicationRequest::new, ThreadPool.Names.REFRESH);
     }
     @Override

View File

@@ -22,13 +22,13 @@ package org.elasticsearch.action.admin.indices.segments;
 import org.elasticsearch.action.ShardOperationFailedException;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction;
-import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.ShardsIterator;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.settings.Settings;
@@ -93,7 +93,7 @@ public class TransportIndicesSegmentsAction extends TransportBroadcastByNodeActi
     @Override
     protected ShardSegments shardOperation(IndicesSegmentsRequest request, ShardRouting shardRouting) {
-        IndexService indexService = indicesService.indexServiceSafe(shardRouting.getIndexName());
+        IndexService indexService = indicesService.indexServiceSafe(shardRouting.index());
         IndexShard indexShard = indexService.getShard(shardRouting.id());
         return new ShardSegments(indexShard.routingEntry(), indexShard.segments(request.verbose()));
     }

View File

@@ -22,18 +22,19 @@ package org.elasticsearch.action.admin.indices.settings.get;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
-import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.settings.SettingsFilter;
 import org.elasticsearch.common.util.CollectionUtils;
+import org.elasticsearch.index.Index;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
@@ -61,7 +62,7 @@ public class TransportGetSettingsAction extends TransportMasterNodeReadAction<Ge
     @Override
     protected ClusterBlockException checkBlock(GetSettingsRequest request, ClusterState state) {
-        return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndices(state, request));
+        return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndexNames(state, request));
     }
@@ -72,9 +73,9 @@ public class TransportGetSettingsAction extends TransportMasterNodeReadAction<Ge
     @Override
     protected void masterOperation(GetSettingsRequest request, ClusterState state, ActionListener<GetSettingsResponse> listener) {
-        String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request);
+        Index[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request);
         ImmutableOpenMap.Builder<String, Settings> indexToSettingsBuilder = ImmutableOpenMap.builder();
-        for (String concreteIndex : concreteIndices) {
+        for (Index concreteIndex : concreteIndices) {
             IndexMetaData indexMetaData = state.getMetaData().index(concreteIndex);
             if (indexMetaData == null) {
                 continue;
@@ -93,7 +94,7 @@ public class TransportGetSettingsAction extends TransportMasterNodeReadAction<Ge
                 }
                 settings = settingsBuilder.build();
             }
-            indexToSettingsBuilder.put(concreteIndex, settings);
+            indexToSettingsBuilder.put(concreteIndex.getName(), settings);
         }
         listener.onResponse(new GetSettingsResponse(indexToSettingsBuilder.build()));
     }
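Several hunks in this commit split the resolver API: concreteIndexNames(...) keeps returning String[] names (used in the block checks), while concreteIndices(...) now returns Index[] objects, and call sites that need a string key call getName() on each element, as in the settings loop above. A minimal hedged sketch of that pattern (variable names are ours, not from the commit):

    // Sketch only: iterate resolved Index objects, key the result map by index name.
    Index[] resolved = indexNameExpressionResolver.concreteIndices(state, request);
    ImmutableOpenMap.Builder<String, Settings> perIndexSettings = ImmutableOpenMap.builder();
    for (Index index : resolved) {
        IndexMetaData indexMetaData = state.getMetaData().index(index);
        if (indexMetaData == null) {
            continue;                      // index vanished between resolution and lookup
        }
        perIndexSettings.put(index.getName(), indexMetaData.getSettings());
    }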

View File

@@ -22,7 +22,6 @@ package org.elasticsearch.action.admin.indices.settings.put;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.TransportMasterNodeAction;
-import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
 import org.elasticsearch.cluster.block.ClusterBlockException;
@@ -30,8 +29,10 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.MetaDataUpdateSettingsService;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.Index;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
@@ -65,7 +66,7 @@ public class TransportUpdateSettingsAction extends TransportMasterNodeAction<Upd
         if (request.settings().getAsMap().size() == 1 && IndexMetaData.INDEX_BLOCKS_METADATA_SETTING.exists(request.settings()) || IndexMetaData.INDEX_READ_ONLY_SETTING.exists(request.settings())) {
             return null;
         }
-        return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, indexNameExpressionResolver.concreteIndices(state, request));
+        return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, indexNameExpressionResolver.concreteIndexNames(state, request));
     }
     @Override
@@ -75,7 +76,7 @@ public class TransportUpdateSettingsAction extends TransportMasterNodeAction<Upd
     @Override
     protected void masterOperation(final UpdateSettingsRequest request, final ClusterState state, final ActionListener<UpdateSettingsResponse> listener) {
-        final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request);
+        final Index[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request);
         UpdateSettingsClusterStateUpdateRequest clusterStateUpdateRequest = new UpdateSettingsClusterStateUpdateRequest()
             .indices(concreteIndices)
             .settings(request.settings())

View File

@@ -23,7 +23,6 @@ import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.FailedNodeException;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
-import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
@@ -38,6 +37,7 @@ import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
 import org.elasticsearch.cluster.routing.RoutingNodes;
 import org.elasticsearch.cluster.routing.RoutingTable;
 import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.collect.ImmutableOpenIntMap;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.inject.Inject;
@@ -87,7 +87,7 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc
     protected void masterOperation(IndicesShardStoresRequest request, ClusterState state, ActionListener<IndicesShardStoresResponse> listener) {
         final RoutingTable routingTables = state.routingTable();
         final RoutingNodes routingNodes = state.getRoutingNodes();
-        final String[] concreteIndices = indexNameExpressionResolver.concreteIndices(state, request);
+        final String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(state, request);
         final Set<ShardId> shardIdsToFetch = new HashSet<>();
         logger.trace("using cluster state version [{}] to determine shards", state.version());
@@ -115,7 +115,7 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc
     @Override
     protected ClusterBlockException checkBlock(IndicesShardStoresRequest request, ClusterState state) {
-        return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndices(state, request));
+        return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_READ, indexNameExpressionResolver.concreteIndexNames(state, request));
     }
     private class AsyncShardStoresInfoFetches {

View File

@@ -32,9 +32,10 @@ import org.elasticsearch.index.engine.SegmentsStats;
 import org.elasticsearch.index.fielddata.FieldDataStats;
 import org.elasticsearch.index.flush.FlushStats;
 import org.elasticsearch.index.get.GetStats;
+import org.elasticsearch.index.percolator.PercolatorQueryCache;
 import org.elasticsearch.index.shard.IndexingStats;
 import org.elasticsearch.index.merge.MergeStats;
-import org.elasticsearch.index.percolator.PercolateStats;
+import org.elasticsearch.index.percolator.PercolatorQueryCacheStats;
 import org.elasticsearch.index.recovery.RecoveryStats;
 import org.elasticsearch.index.refresh.RefreshStats;
 import org.elasticsearch.index.search.stats.SearchStats;
@@ -101,8 +102,8 @@ public class CommonStats implements Streamable, ToXContent {
                 case Segments:
                     segments = new SegmentsStats();
                     break;
-                case Percolate:
-                    percolate = new PercolateStats();
+                case PercolatorCache:
+                    percolatorCache = new PercolatorQueryCacheStats();
                     break;
                 case Translog:
                     translog = new TranslogStats();
@@ -123,7 +124,8 @@ public class CommonStats implements Streamable, ToXContent {
     }
-    public CommonStats(IndicesQueryCache indicesQueryCache, IndexShard indexShard, CommonStatsFlags flags) {
+    public CommonStats(IndicesQueryCache indicesQueryCache, PercolatorQueryCache percolatorQueryCache,
+                       IndexShard indexShard, CommonStatsFlags flags) {
         CommonStatsFlags.Flag[] setFlags = flags.getFlags();
@@ -168,8 +170,8 @@ public class CommonStats implements Streamable, ToXContent {
                 case Segments:
                     segments = indexShard.segmentStats(flags.includeSegmentFileSizes());
                     break;
-                case Percolate:
-                    percolate = indexShard.percolateStats();
+                case PercolatorCache:
+                    percolatorCache = percolatorQueryCache.getStats(indexShard.shardId());
                     break;
                 case Translog:
                     translog = indexShard.translogStats();
@@ -223,7 +225,7 @@ public class CommonStats implements Streamable, ToXContent {
     public FieldDataStats fieldData;
     @Nullable
-    public PercolateStats percolate;
+    public PercolatorQueryCacheStats percolatorCache;
     @Nullable
     public CompletionStats completion;
@@ -333,13 +335,13 @@ public class CommonStats implements Streamable, ToXContent {
         } else {
             fieldData.add(stats.getFieldData());
         }
-        if (percolate == null) {
-            if (stats.getPercolate() != null) {
-                percolate = new PercolateStats();
-                percolate.add(stats.getPercolate());
+        if (percolatorCache == null) {
+            if (stats.getPercolatorCache() != null) {
+                percolatorCache = new PercolatorQueryCacheStats();
+                percolatorCache.add(stats.getPercolatorCache());
             }
         } else {
-            percolate.add(stats.getPercolate());
+            percolatorCache.add(stats.getPercolatorCache());
         }
         if (completion == null) {
             if (stats.getCompletion() != null) {
@@ -447,8 +449,8 @@ public class CommonStats implements Streamable, ToXContent {
     }
     @Nullable
-    public PercolateStats getPercolate() {
-        return percolate;
+    public PercolatorQueryCacheStats getPercolatorCache() {
+        return percolatorCache;
     }
     @Nullable
@@ -489,7 +491,7 @@ public class CommonStats implements Streamable, ToXContent {
     /**
     * Utility method which computes total memory by adding
-    * FieldData, Percolate, Segments (memory, index writer, version map)
+    * FieldData, PercolatorCache, Segments (memory, index writer, version map)
     */
     public ByteSizeValue getTotalMemory() {
         long size = 0;
@@ -499,9 +501,6 @@ public class CommonStats implements Streamable, ToXContent {
         if (this.getQueryCache() != null) {
            size += this.getQueryCache().getMemorySizeInBytes();
         }
-        if (this.getPercolate() != null) {
-            size += this.getPercolate().getMemorySizeInBytes();
-        }
         if (this.getSegments() != null) {
             size += this.getSegments().getMemoryInBytes() +
                 this.getSegments().getIndexWriterMemoryInBytes() +
@@ -547,7 +546,7 @@ public class CommonStats implements Streamable, ToXContent {
             fieldData = FieldDataStats.readFieldDataStats(in);
         }
         if (in.readBoolean()) {
-            percolate = PercolateStats.readPercolateStats(in);
+            percolatorCache = PercolatorQueryCacheStats.readPercolateStats(in);
         }
         if (in.readBoolean()) {
             completion = CompletionStats.readCompletionStats(in);
@@ -629,11 +628,11 @@ public class CommonStats implements Streamable, ToXContent {
             out.writeBoolean(true);
             fieldData.writeTo(out);
         }
-        if (percolate == null) {
+        if (percolatorCache == null) {
             out.writeBoolean(false);
         } else {
             out.writeBoolean(true);
-            percolate.writeTo(out);
+            percolatorCache.writeTo(out);
         }
         if (completion == null) {
             out.writeBoolean(false);
@@ -689,8 +688,8 @@ public class CommonStats implements Streamable, ToXContent {
         if (fieldData != null) {
             fieldData.toXContent(builder, params);
         }
-        if (percolate != null) {
-            percolate.toXContent(builder, params);
+        if (percolatorCache != null) {
+            percolatorCache.toXContent(builder, params);
         }
         if (completion != null) {
             completion.toXContent(builder, params);

View File

@@ -240,7 +240,7 @@ public class CommonStatsFlags implements Streamable, Cloneable {
         FieldData("fielddata"),
         Docs("docs"),
         Warmer("warmer"),
-        Percolate("percolate"),
+        PercolatorCache("percolator_cache"),
         Completion("completion"),
         Segments("segments"),
         Translog("translog"),

View File

@@ -185,12 +185,12 @@ public class IndicesStatsRequest extends BroadcastRequest<IndicesStatsRequest> {
     }
     public IndicesStatsRequest percolate(boolean percolate) {
-        flags.set(Flag.Percolate, percolate);
+        flags.set(Flag.PercolatorCache, percolate);
         return this;
     }
     public boolean percolate() {
-        return flags.isSet(Flag.Percolate);
+        return flags.isSet(Flag.PercolatorCache);
     }
     public IndicesStatsRequest segments(boolean segments) {
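On the request side the public method name stays percolate(boolean); only the flag it toggles is renamed to Flag.PercolatorCache. A small hedged sketch of asking for those stats (the client, listener, and index name are assumed, not from this commit):

    // Hypothetical stats call; 'client' is an assumed org.elasticsearch.client.Client,
    // 'listener' an assumed ActionListener<IndicesStatsResponse>.
    IndicesStatsRequest statsRequest = new IndicesStatsRequest()
            .clear()                 // start from no flags set
            .percolate(true);        // internally sets CommonStatsFlags.Flag.PercolatorCache
    statsRequest.indices("my-index");
    client.admin().indices().stats(statsRequest, listener);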

View File

@@ -22,13 +22,13 @@ package org.elasticsearch.action.admin.indices.stats;
 import org.elasticsearch.action.ShardOperationFailedException;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction;
-import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.ShardsIterator;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.settings.Settings;
@@ -140,7 +140,7 @@ public class TransportIndicesStatsAction extends TransportBroadcastByNodeAction<
             flags.fieldDataFields(request.fieldDataFields());
         }
         if (request.percolate()) {
-            flags.set(CommonStatsFlags.Flag.Percolate);
+            flags.set(CommonStatsFlags.Flag.PercolatorCache);
         }
         if (request.segments()) {
             flags.set(CommonStatsFlags.Flag.Segments);
@@ -163,6 +163,6 @@ public class TransportIndicesStatsAction extends TransportBroadcastByNodeAction<
             flags.set(CommonStatsFlags.Flag.Recovery);
         }
-        return new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexShard, flags), indexShard.commitStats());
+        return new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexService.cache().getPercolatorQueryCache(), indexShard, flags), indexShard.commitStats());
     }
 }

View File

@@ -21,12 +21,12 @@ package org.elasticsearch.action.admin.indices.template.delete;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.TransportMasterNodeAction;
-import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.MetaDataIndexTemplateService;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.threadpool.ThreadPool;

View File

@@ -22,12 +22,12 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
-import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.IndexTemplateMetaData;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.settings.Settings;

View File

@@ -21,13 +21,13 @@ package org.elasticsearch.action.admin.indices.template.put;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.TransportMasterNodeAction;
-import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.MetaDataIndexTemplateService;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;

View File

@@ -23,13 +23,13 @@ import org.elasticsearch.Version;
 import org.elasticsearch.action.ShardOperationFailedException;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction;
-import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.ShardsIterator;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.settings.Settings;

View File

@@ -25,7 +25,6 @@ import org.elasticsearch.action.PrimaryMissingActionException;
 import org.elasticsearch.action.ShardOperationFailedException;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction;
-import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
@@ -35,6 +34,7 @@ import org.elasticsearch.cluster.routing.IndexRoutingTable;
 import org.elasticsearch.cluster.routing.RoutingTable;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.ShardsIterator;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;

View File

@@ -22,13 +22,13 @@ package org.elasticsearch.action.admin.indices.upgrade.post;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.master.TransportMasterNodeAction;
-import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.MetaDataUpdateSettingsService;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.threadpool.ThreadPool;

View File

@@ -29,13 +29,13 @@ import org.elasticsearch.action.support.DefaultShardOperationFailedException;
 import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException;
 import org.elasticsearch.action.support.broadcast.TransportBroadcastAction;
 import org.elasticsearch.cache.recycler.PageCacheRecycler;
-import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Randomness;
 import org.elasticsearch.common.inject.Inject;

Some files were not shown because too many files have changed in this diff.