Merge branch 'master' into pr/attachments-add-test-forced-values

David Pilato 2016-04-29 14:55:42 +02:00
commit 2636703afa
1695 changed files with 45291 additions and 29518 deletions

.github/PULL_REQUEST_TEMPLATE.md (new file, 13 lines)

@@ -0,0 +1,13 @@
<!--
Thank you for your interest in contributing to Elasticsearch! There
are a few simple things to check before submitting your pull request
that can help with the review process. You should delete these items
from your submission; they are here to bring them to your
attention.
-->
- Have you signed the [contributor license agreement](https://www.elastic.co/contributor-agreement)?
- Have you followed the [contributor guidelines](https://github.com/elastic/elasticsearch/blob/master/.github/CONTRIBUTING.md)?
- If submitting code, have you built your changes locally prior to submission with `gradle check`?
- If submitting code, is your pull request against master? Unless there is a good reason otherwise, we prefer pull requests against master and will backport as needed.
- If submitting code, have you checked that your submission is for an [OS that we support](https://www.elastic.co/support/matrix#show_os)?

CONTRIBUTING.md

@@ -95,7 +95,7 @@ cd elasticsearch/
gradle assemble
```
You will find the newly built packages under: `./distribution/build/distributions/`.
You will find the newly built packages under: `./distribution/(deb|rpm|tar|zip)/build/distributions/`.
Before submitting your changes, run the test suite to make sure that nothing is broken, with:

NOTICE.txt

@@ -1,5 +1,5 @@
Elasticsearch
Copyright 2009-2015 Elasticsearch
Copyright 2009-2016 Elasticsearch
This product includes software developed by The Apache Software
Foundation (http://www.apache.org/).

README.textile

@@ -9,7 +9,7 @@ Elasticsearch is a distributed RESTful search engine built for the cloud. Featur
* Distributed and Highly Available Search Engine.
** Each index is fully sharded with a configurable number of shards.
** Each shard can have one or more replicas.
** Read / Search operations performed on either one of the replica shard.
** Read / Search operations performed on any of the replica shards.
* Multi Tenant with Multi Types.
** Support for more than one index.
** Support for more than one type per index.
@@ -222,7 +222,7 @@ h1. License
<pre>
This software is licensed under the Apache License, version 2 ("ALv2"), quoted below.
Copyright 2009-2015 Elasticsearch <https://www.elastic.co>
Copyright 2009-2016 Elasticsearch <https://www.elastic.co>
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of

build.gradle

@@ -18,7 +18,10 @@
*/
import com.bmuschko.gradle.nexus.NexusPlugin
import org.eclipse.jgit.lib.Repository
import org.eclipse.jgit.lib.RepositoryBuilder
import org.gradle.plugins.ide.eclipse.model.SourceFolder
import org.apache.tools.ant.taskdefs.condition.Os
// common maven publishing configuration
subprojects {
@@ -50,21 +53,37 @@ subprojects {
javadoc = true
tests = false
}
nexus {
String buildSnapshot = System.getProperty('build.snapshot', 'true')
if (buildSnapshot == 'false') {
Repository repo = new RepositoryBuilder().findGitDir(new File('.')).build()
String shortHash = repo.resolve('HEAD')?.name?.substring(0,7)
repositoryUrl = project.hasProperty('build.repository') ? project.property('build.repository') : "file://${System.getenv('HOME')}/elasticsearch-releases/${version}-${shortHash}/"
}
}
// we have our own username/password prompts so that they only happen once
// TODO: add gpg signing prompts
// TODO: add gpg signing prompts, which is tricky, as the buildDeb/buildRpm tasks are executed before this code block
project.gradle.taskGraph.whenReady { taskGraph ->
if (taskGraph.allTasks.any { it.name == 'uploadArchives' }) {
Console console = System.console()
if (project.hasProperty('nexusUsername') == false) {
String nexusUsername = console.readLine('\nNexus username: ')
// no need for username/password on local deploy
if (project.nexus.repositoryUrl.startsWith('file://')) {
project.rootProject.allprojects.each {
it.ext.nexusUsername = nexusUsername
it.ext.nexusUsername = 'foo'
it.ext.nexusPassword = 'bar'
}
}
if (project.hasProperty('nexusPassword') == false) {
String nexusPassword = new String(console.readPassword('\nNexus password: '))
project.rootProject.allprojects.each {
it.ext.nexusPassword = nexusPassword
} else {
if (project.hasProperty('nexusUsername') == false) {
String nexusUsername = console.readLine('\nNexus username: ')
project.rootProject.allprojects.each {
it.ext.nexusUsername = nexusUsername
}
}
if (project.hasProperty('nexusPassword') == false) {
String nexusPassword = new String(console.readPassword('\nNexus password: '))
project.rootProject.allprojects.each {
it.ext.nexusPassword = nexusPassword
}
}
}
}
@@ -181,6 +200,10 @@ allprojects {
outputDir = file('build-idea/classes/main')
testOutputDir = file('build-idea/classes/test')
// also ignore other possible build dirs
excludeDirs += file('build')
excludeDirs += file('build-eclipse')
iml {
// fix so that Gradle idea plugin properly generates support for resource folders
// see also https://issues.gradle.org/browse/GRADLE-2975
@@ -227,6 +250,9 @@ allprojects {
// Name all the non-root projects after their path so that paths get grouped together when imported into eclipse.
if (path != ':') {
eclipse.project.name = path
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
eclipse.project.name = eclipse.project.name.replace(':', '_')
}
}
plugins.withType(JavaBasePlugin) {

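Taken together, the build.gradle changes above make a non-snapshot build (`-Dbuild.snapshot=false`) deploy to a local `file://` repository named after the version and short git hash, and skip the interactive Nexus prompts for such local deploys. A condensed sketch of that flow, assuming the `nexus` extension contributed by the com.bmuschko NexusPlugin imported above:

```groovy
// Sketch: prompt for Nexus credentials only when uploadArchives will run,
// and substitute dummy credentials for a local file:// deploy.
project.gradle.taskGraph.whenReady { taskGraph ->
    if (taskGraph.allTasks.any { it.name == 'uploadArchives' }) {
        if (project.nexus.repositoryUrl.startsWith('file://')) {
            project.rootProject.allprojects.each {
                it.ext.nexusUsername = 'foo'   // dummy values; never sent anywhere
                it.ext.nexusPassword = 'bar'
            }
        } else if (project.hasProperty('nexusUsername') == false) {
            // remote deploy: prompt once and share the value across all projects
            String nexusUsername = System.console().readLine('\nNexus username: ')
            project.rootProject.allprojects.each { it.ext.nexusUsername = nexusUsername }
        }
    }
}
```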
buildSrc/build.gradle

@@ -80,6 +80,7 @@ dependencies {
File tempPropertiesFile = new File(project.buildDir, "version.properties")
task writeVersionProperties {
inputs.properties(props)
outputs.file(tempPropertiesFile)
doLast {
OutputStream stream = Files.newOutputStream(tempPropertiesFile.toPath());
try {

BuildPlugin.groovy

@@ -100,9 +100,12 @@ class BuildPlugin implements Plugin<Project> {
println " OS Info : ${System.getProperty('os.name')} ${System.getProperty('os.version')} (${System.getProperty('os.arch')})"
if (gradleJavaVersionDetails != javaVersionDetails) {
println " JDK Version (gradle) : ${gradleJavaVersionDetails}"
println " JAVA_HOME (gradle) : ${gradleJavaHome}"
println " JDK Version (compile) : ${javaVersionDetails}"
println " JAVA_HOME (compile) : ${javaHome}"
} else {
println " JDK Version : ${gradleJavaVersionDetails}"
println " JAVA_HOME : ${gradleJavaHome}"
}
// enforce gradle version
@@ -352,6 +355,11 @@ class BuildPlugin implements Plugin<Project> {
}
options.encoding = 'UTF-8'
//options.incremental = true
// gradle ignores target/source compatibility when it considers it "unnecessary", but when we
// compile with java 9 while gradle itself runs on java 8, it incorrectly deems the flags unnecessary
assert minimumJava == JavaVersion.VERSION_1_8
options.compilerArgs << '-target' << '1.8' << '-source' << '1.8'
}
}
}

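The new compiler arguments above pin javac to Java 8 even when Gradle would consider the flags unnecessary. In a standalone build script the same trick looks roughly like this (a sketch, not the plugin's exact wiring):

```groovy
tasks.withType(JavaCompile) {
    options.encoding = 'UTF-8'
    // put -source/-target on the javac command line unconditionally, so the
    // flags survive even when the Gradle JVM matches the target version
    options.compilerArgs << '-target' << '1.8' << '-source' << '1.8'
}
```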
PluginPropertiesTask.groovy

@@ -57,11 +57,13 @@ class PluginPropertiesTask extends Copy {
// configure property substitution
from(templateFile)
into(generatedResourcesDir)
expand(generateSubstitutions())
Map<String, String> properties = generateSubstitutions()
expand(properties)
inputs.properties(properties)
}
}
Map generateSubstitutions() {
Map<String, String> generateSubstitutions() {
def stringSnap = { version ->
if (version.endsWith("-SNAPSHOT")) {
return version.substring(0, version.length() - 9)

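Registering the substitution map via `inputs.properties` is what makes the task properly incremental: Gradle re-runs the copy only when a substituted value changes. A minimal standalone example of the same pattern (the task name and template path here are hypothetical):

```groovy
task generatePluginProperties(type: Copy) {
    Map<String, String> substitutions = [version: project.version.toString()]
    from 'src/main/templates/plugin-descriptor.properties'  // hypothetical template
    into "${buildDir}/generated-resources"
    expand(substitutions)
    inputs.properties(substitutions)  // re-run whenever any substituted value changes
}
```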
JarHellTask.groovy

@@ -21,7 +21,6 @@ package org.elasticsearch.gradle.precommit
import org.elasticsearch.gradle.LoggedExec
import org.gradle.api.file.FileCollection
import org.gradle.api.tasks.InputFile
import org.gradle.api.tasks.OutputFile
/**
@@ -35,14 +34,12 @@ public class JarHellTask extends LoggedExec {
* inputs (ie the jars/class files).
*/
@OutputFile
public File successMarker = new File(project.buildDir, 'markers/jarHell')
/** The classpath to run jarhell check on, defaults to the test runtime classpath */
@InputFile
public FileCollection classpath = project.sourceSets.test.runtimeClasspath
File successMarker = new File(project.buildDir, 'markers/jarHell')
public JarHellTask() {
project.afterEvaluate {
FileCollection classpath = project.sourceSets.test.runtimeClasspath
inputs.files(classpath)
dependsOn(classpath)
description = "Runs CheckJarHell on ${classpath}"
executable = new File(project.javaHome, 'bin/java')

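The `successMarker` here is the usual Gradle idiom for check tasks that produce no real artifact: declare the inputs, declare an empty marker file as the output, and touch it on success so up-to-date checking works. A generic sketch of the idiom (names are hypothetical; assumes the java plugin for the classpath):

```groovy
task myCheck {
    File marker = new File(buildDir, 'markers/myCheck')
    inputs.files(sourceSets.test.runtimeClasspath)  // what the check depends on
    outputs.file(marker)                            // compared against the inputs
    doLast {
        // ... run the actual check here ...
        marker.parentFile.mkdirs()
        marker.setText('', 'UTF-8')                 // touch the marker on success
    }
}
```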
LicenseHeadersTask.groovy

@@ -22,6 +22,8 @@ import org.apache.rat.anttasks.Report
import org.apache.rat.anttasks.SubstringLicenseMatcher
import org.apache.rat.license.SimpleLicenseFamily
import org.elasticsearch.gradle.AntTask
import org.gradle.api.file.FileCollection
import org.gradle.api.tasks.OutputFile
import org.gradle.api.tasks.SourceSet
import java.nio.file.Files
@@ -33,8 +35,22 @@ import java.nio.file.Files
*/
public class LicenseHeadersTask extends AntTask {
@OutputFile
File reportFile = new File(project.buildDir, 'reports/licenseHeaders/rat.log')
/**
* The list of java files to check. protected so the afterEvaluate closure in the
* constructor can write to it.
*/
protected List<FileCollection> javaFiles
LicenseHeadersTask() {
description = "Checks sources for missing, incorrect, or unacceptable license headers"
// Delay resolving the dependencies until after evaluation so we pick up generated sources
project.afterEvaluate {
javaFiles = project.sourceSets.collect({it.allJava})
inputs.files(javaFiles)
}
}
@Override
@@ -43,17 +59,13 @@ public class LicenseHeadersTask extends AntTask {
ant.project.addDataTypeDefinition('substringMatcher', SubstringLicenseMatcher)
ant.project.addDataTypeDefinition('approvedLicense', SimpleLicenseFamily)
// create a file for the log to go to under reports/
File reportDir = new File(project.buildDir, "reports/licenseHeaders")
reportDir.mkdirs()
File reportFile = new File(reportDir, "rat.log")
Files.deleteIfExists(reportFile.toPath())
// run rat, going to the file
List<FileCollection> input = javaFiles
ant.ratReport(reportFile: reportFile.absolutePath, addDefaultLicenseMatchers: true) {
// checks all the java sources (allJava)
for (SourceSet set : project.sourceSets) {
for (File dir : set.allJava.srcDirs) {
for (FileCollection dirSet : input) {
for (File dir: dirSet.srcDirs) {
// sometimes these dirs don't exist, e.g. site-plugin has no actual java src/main...
if (dir.exists()) {
ant.fileset(dir: dir)
@@ -85,12 +97,12 @@ public class LicenseHeadersTask extends AntTask {
// parsers generated by antlr
pattern(substring: "ANTLR GENERATED CODE")
}
// approved categories
approvedLicense(familyName: "Apache")
approvedLicense(familyName: "Generated")
}
// check the license file for any errors, this should be fast.
boolean zeroUnknownLicenses = false
boolean foundProblemsWithFiles = false
@@ -98,12 +110,12 @@
if (line.startsWith("0 Unknown Licenses")) {
zeroUnknownLicenses = true
}
if (line.startsWith(" !")) {
foundProblemsWithFiles = true
}
}
if (zeroUnknownLicenses == false || foundProblemsWithFiles) {
// print the unapproved license section, usually its all you need to fix problems.
int sectionNumber = 0

PrecommitTasks.groovy

@@ -87,16 +87,43 @@ class PrecommitTasks {
}
private static Task configureCheckstyle(Project project) {
// Always copy the checkstyle configuration files to 'buildDir/checkstyle' since the resources could be located in a jar
// file. If the resources are located in a jar, Gradle will fail when it tries to turn the URL into a file
URL checkstyleConfUrl = PrecommitTasks.getResource("/checkstyle.xml")
URL checkstyleSuppressionsUrl = PrecommitTasks.getResource("/checkstyle_suppressions.xml")
File checkstyleDir = new File(project.buildDir, "checkstyle")
File checkstyleSuppressions = new File(checkstyleDir, "checkstyle_suppressions.xml")
File checkstyleConf = new File(checkstyleDir, "checkstyle.xml");
Task copyCheckstyleConf = project.tasks.create("copyCheckstyleConf")
// configure inputs and outputs so up to date works properly
copyCheckstyleConf.outputs.files(checkstyleSuppressions, checkstyleConf)
if ("jar".equals(checkstyleConfUrl.getProtocol())) {
JarURLConnection jarURLConnection = (JarURLConnection) checkstyleConfUrl.openConnection()
copyCheckstyleConf.inputs.file(jarURLConnection.getJarFileURL())
} else if ("file".equals(checkstyleConfUrl.getProtocol())) {
copyCheckstyleConf.inputs.files(checkstyleConfUrl.getFile(), checkstyleSuppressionsUrl.getFile())
}
copyCheckstyleConf.doLast {
checkstyleDir.mkdirs()
// withStream will close the output stream and IOGroovyMethods#getBytes reads the InputStream fully and closes it
new FileOutputStream(checkstyleConf).withStream {
it.write(checkstyleConfUrl.openStream().getBytes())
}
new FileOutputStream(checkstyleSuppressions).withStream {
it.write(checkstyleSuppressionsUrl.openStream().getBytes())
}
}
Task checkstyleTask = project.tasks.create('checkstyle')
// Apply the checkstyle plugin to create `checkstyleMain` and `checkstyleTest`. It only
// creates them if there is main or test code to check and it makes `check` depend
// on them. But we want `precommit` to depend on `checkstyle` which depends on them so
// we have to swap them.
project.pluginManager.apply('checkstyle')
URL checkstyleSuppressions = PrecommitTasks.getResource('/checkstyle_suppressions.xml')
project.checkstyle {
config = project.resources.text.fromFile(
PrecommitTasks.getResource('/checkstyle.xml'), 'UTF-8')
config = project.resources.text.fromFile(checkstyleConf, 'UTF-8')
configProperties = [
suppressions: checkstyleSuppressions
]
@@ -106,6 +133,7 @@ class PrecommitTasks {
if (task != null) {
project.tasks['check'].dependsOn.remove(task)
checkstyleTask.dependsOn(task)
task.dependsOn(copyCheckstyleConf)
task.inputs.file(checkstyleSuppressions)
}
}

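The copy step above exists because `PrecommitTasks.getResource(...)` can return a `jar:` URL once buildSrc is packaged, and checkstyle needs real files on disk. The core of the workaround, as a standalone sketch:

```groovy
// Copy a classpath resource out to buildDir so it can be handed around as a
// File, even when the resource lives inside a jar.
URL confUrl = getClass().getResource('/checkstyle.xml')
File confFile = new File(buildDir, 'checkstyle/checkstyle.xml')
confFile.parentFile.mkdirs()
new FileOutputStream(confFile).withStream {
    // getBytes reads the InputStream fully and closes it; withStream closes the output
    it.write(confUrl.openStream().getBytes())
}
```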
ThirdPartyAuditTask.groovy

@@ -27,6 +27,9 @@ import org.apache.tools.ant.Project;
import org.elasticsearch.gradle.AntTask;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.file.FileCollection;
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.InputFiles
import org.gradle.api.tasks.OutputFile
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
@@ -40,17 +43,62 @@ import java.util.regex.Pattern;
* Basic static checking to keep tabs on third party JARs
*/
public class ThirdPartyAuditTask extends AntTask {
// patterns for classes to exclude, because we understand their issues
private String[] excludes = new String[0];
private List<String> excludes = [];
/**
* Input for the task. See javadoc for {@link #getJars} for more. Protected
* so the afterEvaluate closure in the constructor can write it.
*/
protected FileCollection jars;
/**
* Classpath against which to run the third party audit. Protected so the
* afterEvaluate closure in the constructor can write it.
*/
protected FileCollection classpath;
/**
* We use a simple "marker" file that we touch when the task succeeds
* as the task output. This is compared against the modified time of the
* inputs (ie the jars/class files).
*/
@OutputFile
File successMarker = new File(project.buildDir, 'markers/thirdPartyAudit')
ThirdPartyAuditTask() {
// we depend on this because it's the only reliable configuration
// this probably makes the build slower: gradle you suck here when it comes to configurations, you pay the price.
dependsOn(project.configurations.testCompile);
description = "Checks third party JAR bytecode for missing classes, use of internal APIs, and other horrors";
project.afterEvaluate {
Configuration configuration = project.configurations.findByName('runtime');
if (configuration == null) {
// some projects apparently do not have 'runtime'? what a nice inconsistency,
// basically only serves to waste time in build logic!
configuration = project.configurations.findByName('testCompile');
}
assert configuration != null;
classpath = configuration
// we only want third party dependencies.
jars = configuration.fileCollection({ dependency ->
dependency.group.startsWith("org.elasticsearch") == false
});
// we don't want provided dependencies, which we have already scanned. e.g. don't
// scan ES core's dependencies for every single plugin
Configuration provided = project.configurations.findByName('provided')
if (provided != null) {
jars -= provided
}
inputs.files(jars)
onlyIf { jars.isEmpty() == false }
}
}
/**
* classes that should be excluded from the scan,
* e.g. because we know what sheisty stuff those particular classes are up to.
@@ -61,21 +109,22 @@ public class ThirdPartyAuditTask extends AntTask {
throw new IllegalArgumentException("illegal third party audit exclusion: '" + s + "', wildcards are not permitted!");
}
}
excludes = classes;
excludes = classes.sort();
}
/**
* Returns current list of exclusions.
*/
public String[] getExcludes() {
@Input
public List<String> getExcludes() {
return excludes;
}
// yes, we parse Uwe Schindler's errors to find missing classes, and to keep a continuous audit. Just don't let him know!
static final Pattern MISSING_CLASS_PATTERN =
Pattern.compile(/WARNING: The referenced class '(.*)' cannot be loaded\. Please fix the classpath\!/);
static final Pattern VIOLATION_PATTERN =
Pattern.compile(/\s\sin ([a-zA-Z0-9\$\.]+) \(.*\)/);
// we log everything and capture errors and handle them with our whitelist
@@ -124,32 +173,8 @@
@Override
protected void runAnt(AntBuilder ant) {
Configuration configuration = project.configurations.findByName('runtime');
if (configuration == null) {
// some projects apparently do not have 'runtime'? what a nice inconsistency,
// basically only serves to waste time in build logic!
configuration = project.configurations.findByName('testCompile');
}
assert configuration != null;
ant.project.addTaskDefinition('thirdPartyAudit', de.thetaphi.forbiddenapis.ant.AntTask);
// we only want third party dependencies.
FileCollection jars = configuration.fileCollection({ dependency ->
dependency.group.startsWith("org.elasticsearch") == false
});
// we don't want provided dependencies, which we have already scanned. e.g. don't
// scan ES core's dependencies for every single plugin
Configuration provided = project.configurations.findByName('provided');
if (provided != null) {
jars -= provided;
}
// no dependencies matched, we are done
if (jars.isEmpty()) {
return;
}
// print which jars we are going to scan, always
// this is not the time to try to be succinct! Forbidden will print plenty on its own!
Set<String> names = new TreeSet<>();
@@ -171,26 +196,23 @@
}
// convert exclusion class names to binary file names
String[] excludedFiles = new String[excludes.length];
for (int i = 0; i < excludes.length; i++) {
excludedFiles[i] = excludes[i].replace('.', '/') + ".class";
}
Set<String> excludedSet = new TreeSet<>(Arrays.asList(excludedFiles));
List<String> excludedFiles = excludes.collect {it.replace('.', '/') + ".class"}
Set<String> excludedSet = new TreeSet<>(excludedFiles);
// jarHellReprise
Set<String> sheistySet = getSheistyClasses(tmpDir.toPath());
try {
ant.thirdPartyAudit(internalRuntimeForbidden: false,
failOnUnsupportedJava: false,
failOnMissingClasses: false,
signaturesFile: new File(getClass().getResource('/forbidden/third-party-audit.txt').toURI()),
classpath: configuration.asPath) {
classpath: classpath.asPath) {
fileset(dir: tmpDir)
}
} catch (BuildException ignore) {}
EvilLogger evilLogger = null;
for (BuildListener listener : ant.project.getBuildListeners()) {
if (listener instanceof EvilLogger) {
evilLogger = (EvilLogger) listener;
@@ -228,6 +250,8 @@
// clean up our mess (if we succeed)
ant.delete(dir: tmpDir.getAbsolutePath());
successMarker.setText("", 'UTF-8')
}
/**
@@ -235,11 +259,11 @@
*/
private Set<String> getSheistyClasses(Path root) {
// system.parent = extensions loader.
// note: for jigsaw, this evilness will need modifications (e.g. use jrt filesystem!).
// but groovy/gradle needs to work at all first!
ClassLoader ext = ClassLoader.getSystemClassLoader().getParent();
assert ext != null;
Set<String> sheistySet = new TreeSet<>();
Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
@Override

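Moving the jar selection into `afterEvaluate` lets the task declare its inputs up front instead of resolving them at execution time. The selection itself, in isolation (a sketch; the configuration names match the diff):

```groovy
// Keep only third-party dependencies: drop anything in an org.elasticsearch group.
FileCollection jars = project.configurations.testCompile.fileCollection { dep ->
    dep.group.startsWith('org.elasticsearch') == false
}
// Subtract 'provided' dependencies, which have already been scanned elsewhere.
Configuration provided = project.configurations.findByName('provided')
if (provided != null) {
    jars -= provided
}
```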
ClusterFormationTasks.groovy

@@ -451,7 +451,7 @@ class ClusterFormationTasks {
// gradle task options are not processed until the end of the configuration phase
if (node.config.debug) {
println 'Running elasticsearch in debug mode, suspending until connected on port 8000'
node.env['JAVA_OPTS'] = '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000'
node.env['ES_JAVA_OPTS'] = '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000'
}
node.getCommandString().eachLine { line -> logger.info(line) }

NodeInfo.groovy

@@ -19,7 +19,6 @@
package org.elasticsearch.gradle.test
import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.VersionProperties
import org.gradle.api.InvalidUserDataException
import org.gradle.api.Project
import org.gradle.api.Task
@@ -129,18 +128,18 @@ class NodeInfo {
args.add("${esScript}")
}
env = [
'JAVA_HOME' : project.javaHome,
'ES_GC_OPTS': config.jvmArgs // we pass these with the undocumented gc opts so the argline can set gc, etc
]
env = [ 'JAVA_HOME' : project.javaHome ]
args.addAll("-E", "es.node.portsfile=true")
env.put('ES_JAVA_OPTS', config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" "))
String collectedSystemProperties = config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ")
String esJavaOpts = config.jvmArgs.isEmpty() ? collectedSystemProperties : collectedSystemProperties + " " + config.jvmArgs
env.put('ES_JAVA_OPTS', esJavaOpts)
for (Map.Entry<String, String> property : System.properties.entrySet()) {
if (property.getKey().startsWith('es.')) {
args.add("-E")
args.add("${property.getKey()}=${property.getValue()}")
}
}
env.put('ES_JVM_OPTIONS', new File(confDir, 'jvm.options'))
args.addAll("-E", "es.path.conf=${confDir}")
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
args.add('"') // end the entire command, quoted

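Net effect of the NodeInfo change: the undocumented ES_GC_OPTS variable is gone, and both system properties and extra JVM args now travel in a single ES_JAVA_OPTS string. The composition in isolation (hypothetical values):

```groovy
Map<String, String> systemProperties = ['es.foo': 'bar']   // hypothetical, mirrors config.systemProperties
String jvmArgs = '-Xmx512m'                                 // hypothetical, mirrors config.jvmArgs
Map<String, Object> env = ['JAVA_HOME': System.getenv('JAVA_HOME')]

String collected = systemProperties.collect { key, value -> "-D${key}=${value}" }.join(' ')
env.put('ES_JAVA_OPTS', jvmArgs.isEmpty() ? collected : collected + ' ' + jvmArgs)
```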
checkstyle_suppressions.xml

@@ -14,10 +14,8 @@
files start to pass. -->
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]apache[/\\]lucene[/\\]queries[/\\]BlendedTermQuery.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]apache[/\\]lucene[/\\]queries[/\\]ExtendedCommonTermsQuery.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]apache[/\\]lucene[/\\]queryparser[/\\]classic[/\\]MapperQueryParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]apache[/\\]lucene[/\\]search[/\\]postingshighlight[/\\]CustomPostingsHighlighter.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]apache[/\\]lucene[/\\]search[/\\]vectorhighlight[/\\]CustomFieldQuery.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]Version.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]Action.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ActionModule.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ActionRequestBuilder.java" checks="LineLength" />
@@ -31,8 +29,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]node[/\\]info[/\\]TransportNodesInfoAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]node[/\\]stats[/\\]NodesStatsRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]node[/\\]stats[/\\]TransportNodesStatsAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]node[/\\]tasks[/\\]list[/\\]ListTasksResponse.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]node[/\\]tasks[/\\]list[/\\]TransportListTasksAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]delete[/\\]DeleteRepositoryRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]delete[/\\]TransportDeleteRepositoryAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]get[/\\]GetRepositoriesRequestBuilder.java" checks="LineLength" />
@@ -157,11 +153,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]delete[/\\]DeleteRequest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]delete[/\\]TransportDeleteAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]explain[/\\]TransportExplainAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]fieldstats[/\\]FieldStats.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]fieldstats[/\\]FieldStatsRequest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]fieldstats[/\\]FieldStatsRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]fieldstats[/\\]FieldStatsResponse.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]fieldstats[/\\]TransportFieldStatsTransportAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]get[/\\]GetRequest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]get[/\\]MultiGetRequest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]get[/\\]TransportGetAction.java" checks="LineLength" />
@@ -234,14 +225,11 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]replication[/\\]ReplicationRequest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]replication[/\\]ReplicationRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]replication[/\\]TransportBroadcastReplicationAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]replication[/\\]TransportReplicationAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]single[/\\]instance[/\\]InstanceShardOperationRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]single[/\\]instance[/\\]TransportInstanceSingleOperationAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]single[/\\]shard[/\\]SingleShardOperationRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]single[/\\]shard[/\\]SingleShardRequest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]single[/\\]shard[/\\]TransportSingleShardAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]tasks[/\\]TasksRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]tasks[/\\]TransportTasksAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]MultiTermVectorsRequest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]MultiTermVectorsRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]TermVectorsRequest.java" checks="LineLength" />
@@ -343,7 +331,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]io[/\\]Channels.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]joda[/\\]Joda.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]Lucene.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]all[/\\]AllTermQuery.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]index[/\\]ElasticsearchDirectoryReader.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]index[/\\]FilterableTermsEnum.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]index[/\\]FreqTermsEnum.java" checks="LineLength" />
@@ -357,12 +344,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]network[/\\]NetworkService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]recycler[/\\]Recyclers.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]rounding[/\\]Rounding.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]settings[/\\]AbstractScopedSettings.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]settings[/\\]ClusterSettings.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]settings[/\\]IndexScopedSettings.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]settings[/\\]Setting.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]settings[/\\]Settings.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]settings[/\\]loader[/\\]XContentSettingsLoader.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]unit[/\\]ByteSizeValue.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]unit[/\\]TimeValue.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]BigArrays.java" checks="LineLength" />
@@ -406,11 +387,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]PrimaryShardAllocator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]ReplicaShardAllocator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]TransportNodesListGatewayMetaState.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]TransportNodesListGatewayStartedShards.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]http[/\\]HttpTransportSettings.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]http[/\\]netty[/\\]HttpRequestHandler.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]http[/\\]netty[/\\]NettyHttpChannel.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]http[/\\]netty[/\\]NettyHttpServerTransport.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]AlreadyExpiredException.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]CompositeIndexEventListener.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexSettings.java" checks="LineLength" />
@@ -474,18 +450,17 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]ParseContext.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]ParsedDocument.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]core[/\\]CompletionFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]core[/\\]DateFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]core[/\\]DoubleFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]core[/\\]FloatFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]core[/\\]NumberFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]core[/\\]LegacyDateFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]core[/\\]LegacyDoubleFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]core[/\\]LegacyFloatFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]core[/\\]LegacyNumberFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]core[/\\]StringFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]core[/\\]TokenCountFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]core[/\\]LegacyTokenCountFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]core[/\\]TypeParsers.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]geo[/\\]BaseGeoPointFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]geo[/\\]GeoPointFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]geo[/\\]GeoPointFieldMapperLegacy.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]geo[/\\]GeoShapeFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]internal[/\\]AllFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]internal[/\\]FieldNamesFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]internal[/\\]IdFieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]internal[/\\]IndexFieldMapper.java" checks="LineLength" />
@@ -510,7 +485,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]QueryBuilders.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]QueryShardContext.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]QueryValidationException.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]SimpleQueryParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]support[/\\]InnerHitsQueryParserHelper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]support[/\\]QueryParsers.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]MatchQuery.java" checks="LineLength" />
@@ -627,7 +601,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestPendingClusterTasksAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestShardsAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestThreadPoolAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]fieldstats[/\\]RestFieldStatsAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]get[/\\]RestMultiGetAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]index[/\\]RestIndexAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]main[/\\]RestMainAction.java" checks="LineLength" />
@@ -733,7 +706,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]percentiles[/\\]tdigest[/\\]TDigestPercentilesAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]scripted[/\\]InternalScriptedMetric.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]scripted[/\\]ScriptedMetricAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]scripted[/\\]ScriptedMetricParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]stats[/\\]StatsAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]stats[/\\]extended[/\\]ExtendedStatsAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]stats[/\\]extended[/\\]ExtendedStatsParser.java" checks="LineLength" />
@@ -750,7 +722,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]AggregationContext.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]AggregationPath.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]GeoPointParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]ValueType.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]ValuesSourceParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]format[/\\]ValueFormat.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]format[/\\]ValueParser.java" checks="LineLength" />
@@ -787,11 +758,9 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]lookup[/\\]FieldLookup.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]lookup[/\\]LeafDocLookup.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]lookup[/\\]LeafFieldsLookup.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]profile[/\\]ProfileResult.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]query[/\\]QueryPhase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]rescore[/\\]QueryRescorer.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]rescore[/\\]RescoreParseElement.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]searchafter[/\\]SearchAfterBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]sort[/\\]GeoDistanceSortParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]sort[/\\]ScriptSortParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]sort[/\\]SortParseElement.java" checks="LineLength" />
@@ -814,12 +783,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SnapshotShardsService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SnapshotsService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]threadpool[/\\]ThreadPool.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]PlainTransportFuture.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]RequestHandlerRegistry.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]Transport.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]TransportChannelResponseHandler.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]TransportService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]netty[/\\]NettyTransport.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]apache[/\\]lucene[/\\]queries[/\\]BlendedTermQueryTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]apache[/\\]lucene[/\\]search[/\\]postingshighlight[/\\]CustomPostingsHighlighterTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]ESExceptionTests.java" checks="LineLength" />
@@ -829,8 +792,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]RejectionActionIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]HotThreadsIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]health[/\\]ClusterHealthResponsesTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]node[/\\]tasks[/\\]TasksIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]node[/\\]tasks[/\\]TransportTasksActionTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]RepositoryBlocksIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]settings[/\\]SettingsUpdaterTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]snapshots[/\\]SnapshotBlocksIT.java" checks="LineLength" />
@@ -848,7 +809,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]BulkProcessorIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]BulkRequestTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]RetryTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]fieldstats[/\\]FieldStatsRequestTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]get[/\\]MultiGetShardRequestTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ingest[/\\]BulkRequestModifierTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ingest[/\\]IngestProxyActionFilterTests.java" checks="LineLength" />
@@ -866,7 +826,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]broadcast[/\\]node[/\\]TransportBroadcastByNodeActionTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]master[/\\]TransportMasterNodeActionTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]replication[/\\]BroadcastReplicationTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]replication[/\\]TransportReplicationActionTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]single[/\\]instance[/\\]TransportInstanceSingleOperationActionTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]AbstractTermVectorsTestCase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]GetTermVectorsCheckDocFreqIT.java" checks="LineLength" />
@@ -882,9 +841,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bwcompat[/\\]RecoveryWithUnsupportedIndicesIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bwcompat[/\\]RestoreBackwardsCompatIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]AbstractClientHeadersTestCase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]transport[/\\]FailAndRetryMockTransport.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]transport[/\\]TransportClientIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]transport[/\\]TransportClientRetryIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterHealthIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterInfoServiceIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterModuleTests.java" checks="LineLength" />
@@ -981,9 +937,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]uid[/\\]VersionsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]network[/\\]CidrsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]rounding[/\\]TimeZoneRoundingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]settings[/\\]ScopedSettingsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]settings[/\\]SettingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]transport[/\\]BoundTransportAddressTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]unit[/\\]DistanceUnitTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]unit[/\\]FuzzinessTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]BigArraysTests.java" checks="LineLength" />
@@ -1005,7 +958,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]ZenFaultDetectionTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]ZenUnicastDiscoveryIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]zen[/\\]NodeJoinControllerTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]zen[/\\]ZenDiscoveryIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]zen[/\\]ZenDiscoveryUnitTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]zen[/\\]ping[/\\]unicast[/\\]UnicastZenPingIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]discovery[/\\]zen[/\\]publish[/\\]PublishClusterStateActionTests.java" checks="LineLength" />
@@ -1013,8 +965,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]env[/\\]EnvironmentTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]env[/\\]NodeEnvironmentTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]explain[/\\]ExplainActionIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]fieldstats[/\\]FieldStatsIntegrationIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]fieldstats[/\\]FieldStatsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]GatewayModuleTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]GatewayServiceTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]GatewayTests.java" checks="LineLength" />
@@ -1030,8 +980,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]ReusePeerRecoverySharedTest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]get[/\\]GetActionIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]http[/\\]netty[/\\]NettyHttpServerPipeliningTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]http[/\\]netty[/\\]NettyPipeliningDisabledIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]http[/\\]netty[/\\]NettyPipeliningEnabledIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexModuleTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexServiceTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexWithShadowReplicasIT.java" checks="LineLength" />
@@ -1068,7 +1016,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]DynamicMappingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]FieldTypeTestCase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]MapperServiceTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]all[/\\]SimpleAllMapperTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]binary[/\\]BinaryMappingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]boost[/\\]CustomBoostMappingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]boost[/\\]FieldLevelBoostTests.java" checks="LineLength" />
@@ -1078,8 +1025,8 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]core[/\\]BooleanFieldMapperTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]core[/\\]CompletionFieldTypeTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]core[/\\]MultiFieldCopyToMapperTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]core[/\\]TokenCountFieldMapperTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]date[/\\]SimpleDateMappingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]core[/\\]LegacyTokenCountFieldMapperTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]date[/\\]LegacyDateMappingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]dynamictemplate[/\\]genericstore[/\\]GenericStoreDynamicTemplateTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]dynamictemplate[/\\]pathmatch[/\\]PathMatchDynamicTemplateTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]dynamictemplate[/\\]simple[/\\]SimpleDynamicTemplatesTests.java" checks="LineLength" />
@ -1095,12 +1042,12 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]index[/\\]IndexTypeMapperTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]internal[/\\]FieldNamesFieldMapperTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]internal[/\\]TypeFieldMapperTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]ip[/\\]SimpleIpMappingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]ip[/\\]LegacyIpMappingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]merge[/\\]TestMergeMapperTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]multifield[/\\]MultiFieldTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]multifield[/\\]merge[/\\]JavaMultiFieldMergeTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]nested[/\\]NestedMappingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]numeric[/\\]SimpleNumericTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]numeric[/\\]LegacyNumericTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]object[/\\]NullValueObjectMappingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]object[/\\]SimpleObjectMappingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]parent[/\\]ParentMappingTests.java" checks="LineLength" />
@ -1126,7 +1073,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]HasParentQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]MoreLikeThisQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]MultiMatchQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]QueryStringQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]RandomQueryBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]RangeQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]ScoreModeTests.java" checks="LineLength" />
@ -1160,7 +1106,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]IndicesLifecycleListenerIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]IndicesLifecycleListenerSingleNodeTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]IndicesOptionsIntegrationIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]IndicesServiceTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]analysis[/\\]PreBuiltAnalyzerIntegrationIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]analyze[/\\]AnalyzeActionIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]analyze[/\\]HunspellServiceIT.java" checks="LineLength" />
@ -1290,7 +1235,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]scroll[/\\]DuelScrollIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]scroll[/\\]SearchScrollIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]scroll[/\\]SearchScrollWithFailingNodesIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]searchafter[/\\]SearchAfterBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]searchafter[/\\]SearchAfterIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]simple[/\\]SimpleSearchIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]sort[/\\]SortParserTests.java" checks="LineLength" />
@ -1316,15 +1260,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]threadpool[/\\]ThreadPoolSerializationTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]threadpool[/\\]UpdateThreadPoolSettingsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]timestamp[/\\]SimpleTimestampIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]AbstractSimpleTransportTestCase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]ActionNamesIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]ContextAndHeaderTransportIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]NettySizeHeaderFrameDecoderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]local[/\\]SimpleLocalTransportTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]netty[/\\]NettyScheduledPingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]netty[/\\]NettyTransportIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]netty[/\\]NettyTransportMultiPortIntegrationIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]netty[/\\]NettyTransportMultiPortTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]tribe[/\\]TribeIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]ttl[/\\]SimpleTTLIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]update[/\\]UpdateIT.java" checks="LineLength" />
@ -1491,7 +1426,6 @@
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]engine[/\\]MockInternalEngine.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]hamcrest[/\\]ElasticsearchAssertions.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]junit[/\\]listeners[/\\]LoggingListener.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]junit[/\\]rule[/\\]RepeatOnExceptionRule.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]rest[/\\]ESRestTestCase.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]rest[/\\]RestTestExecutionContext.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]rest[/\\]client[/\\]RestClient.java" checks="LineLength" />
@ -1514,24 +1448,16 @@
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]rest[/\\]support[/\\]FileUtils.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]store[/\\]MockFSDirectoryService.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]store[/\\]MockFSIndexStore.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]transport[/\\]AssertingLocalTransport.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]transport[/\\]CapturingTransport.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]transport[/\\]MockTransportService.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]rest[/\\]test[/\\]FileUtilsTests.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]rest[/\\]test[/\\]JsonPathTests.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]rest[/\\]test[/\\]RestTestParserTests.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]test[/\\]InternalTestClusterTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]node[/\\]tasks[/\\]list[/\\]TaskInfo.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]cli[/\\]CliTool.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]settings[/\\]SettingsModule.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]admin[/\\]indices[/\\]settings[/\\]RestGetSettingsAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]tribe[/\\]TribeService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]transport[/\\]TransportModuleTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]sort[/\\]GeoDistanceSortBuilderIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]CorsNotSetIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]settings[/\\]SettingsModuleTests.java" checks="LineLength" />
<suppress files="plugins[/\\]store-smb[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]SmbDirectoryWrapper.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]tasks[/\\]MockTaskManager.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]inject[/\\]ModuleTestCase.java" checks="LineLength" />
</suppressions>

View File

@ -28,3 +28,8 @@ java.security.MessageDigest#clone() @ use org.elasticsearch.common.hash.MessageD
@defaultMessage this should not have been added to lucene in the first place
org.apache.lucene.index.IndexReader#getCombinedCoreAndDeletesKey()
@defaultMessage Soon to be removed
org.apache.lucene.document.FieldType#numericType()
org.apache.lucene.document.InetAddressPoint#newPrefixQuery(java.lang.String, java.net.InetAddress, int) @LUCENE-7232

View File

@ -1,5 +1,5 @@
elasticsearch = 5.0.0-alpha1
lucene = 6.0.0-snapshot-f0aa4fc
elasticsearch = 5.0.0-alpha2
lucene = 6.0.0
# optional dependencies
spatial4j = 0.6

View File

@ -219,7 +219,7 @@ h1. License
<pre>
This software is licensed under the Apache License, version 2 ("ALv2"), quoted below.
Copyright 2009-2015 Elasticsearch <https://www.elastic.co>
Copyright 2009-2016 Elasticsearch <https://www.elastic.co>
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of

View File

@ -0,0 +1,117 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.document;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Arrays;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.common.SuppressForbidden;
/**
* Forked utility methods from Lucene's InetAddressPoint until LUCENE-7232 and
* LUCENE-7234 are released.
*/
// TODO: remove me when we upgrade to Lucene 6.1
@SuppressForbidden(reason="uses InetAddress.getHostAddress")
public final class XInetAddressPoint {
private XInetAddressPoint() {}
/** The minimum value that an ip address can hold. */
public static final InetAddress MIN_VALUE;
/** The maximum value that an ip address can hold. */
public static final InetAddress MAX_VALUE;
static {
MIN_VALUE = InetAddressPoint.decode(new byte[InetAddressPoint.BYTES]);
byte[] maxValueBytes = new byte[InetAddressPoint.BYTES];
Arrays.fill(maxValueBytes, (byte) 0xFF);
MAX_VALUE = InetAddressPoint.decode(maxValueBytes);
}
/**
* Return the {@link InetAddress} that compares immediately greater than
* {@code address}.
* @throws ArithmeticException if the provided address is the
* {@link #MAX_VALUE maximum ip address}
*/
public static InetAddress nextUp(InetAddress address) {
if (address.equals(MAX_VALUE)) {
throw new ArithmeticException("Overflow: there is no greater InetAddress than "
+ address.getHostAddress());
}
byte[] delta = new byte[InetAddressPoint.BYTES];
delta[InetAddressPoint.BYTES-1] = 1;
byte[] nextUpBytes = new byte[InetAddressPoint.BYTES];
NumericUtils.add(InetAddressPoint.BYTES, 0, InetAddressPoint.encode(address), delta, nextUpBytes);
return InetAddressPoint.decode(nextUpBytes);
}
/**
* Return the {@link InetAddress} that compares immediately less than
* {@code address}.
* @throws ArithmeticException if the provided address is the
* {@link #MIN_VALUE minimum ip address}
*/
public static InetAddress nextDown(InetAddress address) {
if (address.equals(MIN_VALUE)) {
throw new ArithmeticException("Underflow: there is no smaller InetAddress than "
+ address.getHostAddress());
}
byte[] delta = new byte[InetAddressPoint.BYTES];
delta[InetAddressPoint.BYTES-1] = 1;
byte[] nextDownBytes = new byte[InetAddressPoint.BYTES];
NumericUtils.subtract(InetAddressPoint.BYTES, 0, InetAddressPoint.encode(address), delta, nextDownBytes);
return InetAddressPoint.decode(nextDownBytes);
}
/**
* Create a prefix query for matching a CIDR network range.
*
* @param field field name. must not be {@code null}.
* @param value any host address
* @param prefixLength the network prefix length for this address. This is also known as the subnet mask in the context of IPv4
* addresses.
* @throws IllegalArgumentException if {@code field} is null, or prefixLength is invalid.
* @return a query matching documents with addresses contained within this network
*/
// TODO: remove me when we upgrade to Lucene 6.0.1
public static Query newPrefixQuery(String field, InetAddress value, int prefixLength) {
if (value == null) {
throw new IllegalArgumentException("InetAddress must not be null");
}
if (prefixLength < 0 || prefixLength > 8 * value.getAddress().length) {
throw new IllegalArgumentException("illegal prefixLength '" + prefixLength
+ "'. Must be 0-32 for IPv4 ranges, 0-128 for IPv6 ranges");
}
// create the lower value by zeroing out the host portion, upper value by filling it with all ones.
byte lower[] = value.getAddress();
byte upper[] = value.getAddress();
for (int i = prefixLength; i < 8 * lower.length; i++) {
int m = 1 << (7 - (i & 7));
lower[i >> 3] &= ~m;
upper[i >> 3] |= m;
}
try {
return InetAddressPoint.newRangeQuery(field, InetAddress.getByAddress(lower), InetAddress.getByAddress(upper));
} catch (UnknownHostException e) {
throw new AssertionError(e); // values are coming from InetAddress
}
}
}
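A quick worked example of the masking loop in newPrefixQuery above: for an IPv4 address with a /24 prefix, the loop clears the last 8 bits in lower and sets them in upper, so the prefix query degenerates to an inclusive range over the whole subnet. A minimal standalone sketch; the concrete address and prefix length are made up for illustration:
import java.net.InetAddress;
public class PrefixRangeDemo {
    public static void main(String[] args) throws Exception {
        byte[] lower = InetAddress.getByName("192.168.1.137").getAddress();
        byte[] upper = lower.clone();
        int prefixLength = 24;
        // zero out the host portion in lower, fill it with ones in upper
        for (int i = prefixLength; i < 8 * lower.length; i++) {
            int m = 1 << (7 - (i & 7));
            lower[i >> 3] &= ~m;
            upper[i >> 3] |= m;
        }
        System.out.println(InetAddress.getByAddress(lower).getHostAddress()); // 192.168.1.0
        System.out.println(InetAddress.getByAddress(upper).getHostAddress()); // 192.168.1.255
    }
}
So 192.168.1.137/24 becomes the inclusive range [192.168.1.0, 192.168.1.255], which is exactly what the InetAddressPoint.newRangeQuery call at the end of the method receives.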

View File

@ -0,0 +1,130 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lucene.index;
import org.apache.lucene.util.StringHelper;
import java.io.IOException;
/**
* Forked utility methods from Lucene's PointValues until LUCENE-7257 is released.
*/
public class XPointValues {
/** Return the cumulated number of points across all leaves of the given
* {@link IndexReader}. Leaves that do not have points for the given field
* are ignored.
* @see PointValues#size(String) */
public static long size(IndexReader reader, String field) throws IOException {
long size = 0;
for (LeafReaderContext ctx : reader.leaves()) {
FieldInfo info = ctx.reader().getFieldInfos().fieldInfo(field);
if (info == null || info.getPointDimensionCount() == 0) {
continue;
}
PointValues values = ctx.reader().getPointValues();
size += values.size(field);
}
return size;
}
/** Return the cumulated number of docs that have points across all leaves
* of the given {@link IndexReader}. Leaves that do not have points for the
* given field are ignored.
* @see PointValues#getDocCount(String) */
public static int getDocCount(IndexReader reader, String field) throws IOException {
int count = 0;
for (LeafReaderContext ctx : reader.leaves()) {
FieldInfo info = ctx.reader().getFieldInfos().fieldInfo(field);
if (info == null || info.getPointDimensionCount() == 0) {
continue;
}
PointValues values = ctx.reader().getPointValues();
count += values.getDocCount(field);
}
return count;
}
/** Return the minimum packed values across all leaves of the given
* {@link IndexReader}. Leaves that do not have points for the given field
* are ignored.
* @see PointValues#getMinPackedValue(String) */
public static byte[] getMinPackedValue(IndexReader reader, String field) throws IOException {
byte[] minValue = null;
for (LeafReaderContext ctx : reader.leaves()) {
FieldInfo info = ctx.reader().getFieldInfos().fieldInfo(field);
if (info == null || info.getPointDimensionCount() == 0) {
continue;
}
PointValues values = ctx.reader().getPointValues();
byte[] leafMinValue = values.getMinPackedValue(field);
if (leafMinValue == null) {
continue;
}
if (minValue == null) {
minValue = leafMinValue.clone();
} else {
final int numDimensions = values.getNumDimensions(field);
final int numBytesPerDimension = values.getBytesPerDimension(field);
for (int i = 0; i < numDimensions; ++i) {
int offset = i * numBytesPerDimension;
if (StringHelper.compare(numBytesPerDimension, leafMinValue, offset, minValue, offset) < 0) {
System.arraycopy(leafMinValue, offset, minValue, offset, numBytesPerDimension);
}
}
}
}
return minValue;
}
/** Return the maximum packed values across all leaves of the given
* {@link IndexReader}. Leaves that do not have points for the given field
* are ignored.
* @see PointValues#getMaxPackedValue(String) */
public static byte[] getMaxPackedValue(IndexReader reader, String field) throws IOException {
byte[] maxValue = null;
for (LeafReaderContext ctx : reader.leaves()) {
FieldInfo info = ctx.reader().getFieldInfos().fieldInfo(field);
if (info == null || info.getPointDimensionCount() == 0) {
continue;
}
PointValues values = ctx.reader().getPointValues();
byte[] leafMaxValue = values.getMaxPackedValue(field);
if (leafMaxValue == null) {
continue;
}
if (maxValue == null) {
maxValue = leafMaxValue.clone();
} else {
final int numDimensions = values.getNumDimensions(field);
final int numBytesPerDimension = values.getBytesPerDimension(field);
for (int i = 0; i < numDimensions; ++i) {
int offset = i * numBytesPerDimension;
if (StringHelper.compare(numBytesPerDimension, leafMaxValue, offset, maxValue, offset) > 0) {
System.arraycopy(leafMaxValue, offset, maxValue, offset, numBytesPerDimension);
}
}
}
}
return maxValue;
}
/** Utility class, no instances. */
private XPointValues() {
}
}
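A hedged usage sketch for the helpers above; the field name "timestamp" is an assumption, any point-indexed field works the same way:
import java.io.IOException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.XPointValues;
public class XPointValuesDemo {
    // Summarize per-field point statistics aggregated across all leaves of a reader.
    static String summarize(IndexReader reader, String field) throws IOException {
        long points = XPointValues.size(reader, field);     // total indexed point values
        int docs = XPointValues.getDocCount(reader, field); // docs with at least one point
        return field + ": " + points + " points in " + docs + " docs";
    }
}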

View File

@ -22,6 +22,7 @@ package org.apache.lucene.queryparser.classic;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
@ -32,6 +33,7 @@ import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.MultiPhraseQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SynonymQuery;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.automaton.RegExp;
import org.elasticsearch.common.lucene.search.Queries;
@ -39,6 +41,7 @@ import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.LegacyDateFieldMapper;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.support.QueryParsers;
@ -105,7 +108,8 @@ public class MapperQueryParser extends QueryParser {
}
/**
* We override this one so we can get the fuzzy part to be treated as string, so people can do: "age:10~5" or "timestamp:2012-10-10~5d"
* We override this one so we can get the fuzzy part to be treated as a string,
* so people can do: "age:10~5" or "timestamp:2012-10-10~5d"
*/
@Override
Query handleBareFuzzy(String qfield, Token fuzzySlop, String termImage) throws ParseException {
@ -164,8 +168,7 @@ public class MapperQueryParser extends QueryParser {
clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
return null;
if (clauses.isEmpty()) return null; // happens for stopwords
return getBooleanQueryCoordDisabled(clauses);
}
} else {
@ -215,7 +218,8 @@ public class MapperQueryParser extends QueryParser {
}
if (currentFieldType != null) {
Query query = null;
if (currentFieldType.useTermQueryWithQueryString()) {
if (currentFieldType.tokenized() == false) {
// this might be a structured field like a numeric
try {
query = currentFieldType.termQuery(queryText, context);
} catch (RuntimeException e) {
@ -266,8 +270,7 @@ public class MapperQueryParser extends QueryParser {
clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
return null;
if (clauses.isEmpty()) return null; // happens for stopwords
return getBooleanQueryCoordDisabled(clauses);
}
} else {
@ -276,7 +279,8 @@ public class MapperQueryParser extends QueryParser {
}
@Override
protected Query getRangeQuery(String field, String part1, String part2, boolean startInclusive, boolean endInclusive) throws ParseException {
protected Query getRangeQuery(String field, String part1, String part2,
boolean startInclusive, boolean endInclusive) throws ParseException {
if ("*".equals(part1)) {
part1 = null;
}
@ -317,23 +321,26 @@ public class MapperQueryParser extends QueryParser {
clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
return null;
if (clauses.isEmpty()) return null; // happens for stopwords
return getBooleanQueryCoordDisabled(clauses);
}
}
private Query getRangeQuerySingle(String field, String part1, String part2, boolean startInclusive, boolean endInclusive) {
private Query getRangeQuerySingle(String field, String part1, String part2,
boolean startInclusive, boolean endInclusive) {
currentFieldType = context.fieldMapper(field);
if (currentFieldType != null) {
if (lowercaseExpandedTerms && !currentFieldType.isNumeric()) {
if (lowercaseExpandedTerms && currentFieldType.tokenized()) {
part1 = part1 == null ? null : part1.toLowerCase(locale);
part2 = part2 == null ? null : part2.toLowerCase(locale);
}
try {
Query rangeQuery;
if (currentFieldType instanceof DateFieldMapper.DateFieldType && settings.timeZone() != null) {
if (currentFieldType instanceof LegacyDateFieldMapper.DateFieldType && settings.timeZone() != null) {
LegacyDateFieldMapper.DateFieldType dateFieldType = (LegacyDateFieldMapper.DateFieldType) this.currentFieldType;
rangeQuery = dateFieldType.rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null);
} else if (currentFieldType instanceof DateFieldMapper.DateFieldType && settings.timeZone() != null) {
DateFieldMapper.DateFieldType dateFieldType = (DateFieldMapper.DateFieldType) this.currentFieldType;
rangeQuery = dateFieldType.rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null);
} else {
@ -392,7 +399,8 @@ public class MapperQueryParser extends QueryParser {
currentFieldType = context.fieldMapper(field);
if (currentFieldType != null) {
try {
return currentFieldType.fuzzyQuery(termStr, Fuzziness.build(minSimilarity), fuzzyPrefixLength, settings.fuzzyMaxExpansions(), FuzzyQuery.defaultTranspositions);
return currentFieldType.fuzzyQuery(termStr, Fuzziness.build(minSimilarity),
fuzzyPrefixLength, settings.fuzzyMaxExpansions(), FuzzyQuery.defaultTranspositions);
} catch (RuntimeException e) {
if (settings.lenient()) {
return null;
@ -407,7 +415,8 @@ public class MapperQueryParser extends QueryParser {
protected Query newFuzzyQuery(Term term, float minimumSimilarity, int prefixLength) {
String text = term.text();
int numEdits = FuzzyQuery.floatToEdits(minimumSimilarity, text.codePointCount(0, text.length()));
FuzzyQuery query = new FuzzyQuery(term, numEdits, prefixLength, settings.fuzzyMaxExpansions(), FuzzyQuery.defaultTranspositions);
FuzzyQuery query = new FuzzyQuery(term, numEdits, prefixLength,
settings.fuzzyMaxExpansions(), FuzzyQuery.defaultTranspositions);
QueryParsers.setRewriteMethod(query, settings.fuzzyRewriteMethod());
return query;
}
@ -444,8 +453,7 @@ public class MapperQueryParser extends QueryParser {
clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
return null;
if (clauses.isEmpty()) return null; // happens for stopwords
return getBooleanQueryCoordDisabled(clauses);
}
} else {
@ -463,7 +471,7 @@ public class MapperQueryParser extends QueryParser {
setAnalyzer(context.getSearchAnalyzer(currentFieldType));
}
Query query = null;
if (currentFieldType.useTermQueryWithQueryString()) {
if (currentFieldType.tokenized() == false) {
query = currentFieldType.prefixQuery(termStr, multiTermRewriteMethod, context);
}
if (query == null) {
@ -486,7 +494,7 @@ public class MapperQueryParser extends QueryParser {
if (!settings.analyzeWildcard()) {
return super.getPrefixQuery(field, termStr);
}
List<String> tlist;
List<List<String>> tlist;
// get Analyzer from superclass and tokenize the term
TokenStream source = null;
try {
@ -497,7 +505,9 @@ public class MapperQueryParser extends QueryParser {
return super.getPrefixQuery(field, termStr);
}
tlist = new ArrayList<>();
List<String> currentPos = new ArrayList<>();
CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class);
PositionIncrementAttribute posAtt = source.addAttribute(PositionIncrementAttribute.class);
while (true) {
try {
@ -505,7 +515,14 @@ public class MapperQueryParser extends QueryParser {
} catch (IOException e) {
break;
}
tlist.add(termAtt.toString());
if (currentPos.isEmpty() == false && posAtt.getPositionIncrement() > 0) {
tlist.add(currentPos);
currentPos = new ArrayList<>();
}
currentPos.add(termAtt.toString());
}
if (currentPos.isEmpty() == false) {
tlist.add(currentPos);
}
} finally {
if (source != null) {
@ -513,16 +530,45 @@ public class MapperQueryParser extends QueryParser {
}
}
if (tlist.size() == 1) {
return super.getPrefixQuery(field, tlist.get(0));
} else {
// build a boolean query with prefix on each one...
List<BooleanClause> clauses = new ArrayList<>();
for (String token : tlist) {
clauses.add(new BooleanClause(super.getPrefixQuery(field, token), BooleanClause.Occur.SHOULD));
}
return getBooleanQueryCoordDisabled(clauses);
if (tlist.size() == 0) {
return null;
}
if (tlist.size() == 1 && tlist.get(0).size() == 1) {
return super.getPrefixQuery(field, tlist.get(0).get(0));
}
// build a boolean query with prefix on the last position only.
List<BooleanClause> clauses = new ArrayList<>();
for (int pos = 0; pos < tlist.size(); pos++) {
List<String> plist = tlist.get(pos);
boolean isLastPos = (pos == tlist.size() - 1);
Query posQuery;
if (plist.size() == 1) {
if (isLastPos) {
posQuery = super.getPrefixQuery(field, plist.get(0));
} else {
posQuery = newTermQuery(new Term(field, plist.get(0)));
}
} else if (isLastPos == false) {
// build a synonym query for terms in the same position.
Term[] terms = new Term[plist.size()];
for (int i = 0; i < plist.size(); i++) {
terms[i] = new Term(field, plist.get(i));
}
posQuery = new SynonymQuery(terms);
} else {
List<BooleanClause> innerClauses = new ArrayList<>();
for (String token : plist) {
innerClauses.add(new BooleanClause(super.getPrefixQuery(field, token),
BooleanClause.Occur.SHOULD));
}
posQuery = getBooleanQueryCoordDisabled(innerClauses);
}
clauses.add(new BooleanClause(posQuery,
getDefaultOperator() == Operator.AND ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD));
}
return getBooleanQuery(clauses);
}
@Override
@ -574,8 +620,7 @@ public class MapperQueryParser extends QueryParser {
clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
return null;
if (clauses.isEmpty()) return null; // happens for stopwords
return getBooleanQueryCoordDisabled(clauses);
}
} else {
@ -703,8 +748,7 @@ public class MapperQueryParser extends QueryParser {
clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD));
}
}
if (clauses.size() == 0) // happens for stopwords
return null;
if (clauses.isEmpty()) return null; // happens for stopwords
return getBooleanQueryCoordDisabled(clauses);
}
} else {
@ -722,8 +766,9 @@ public class MapperQueryParser extends QueryParser {
setAnalyzer(context.getSearchAnalyzer(currentFieldType));
}
Query query = null;
if (currentFieldType.useTermQueryWithQueryString()) {
query = currentFieldType.regexpQuery(termStr, RegExp.ALL, maxDeterminizedStates, multiTermRewriteMethod, context);
if (currentFieldType.tokenized() == false) {
query = currentFieldType.regexpQuery(termStr, RegExp.ALL,
maxDeterminizedStates, multiTermRewriteMethod, context);
}
if (query == null) {
query = super.getRegexpQuery(field, termStr);
@ -740,7 +785,7 @@ public class MapperQueryParser extends QueryParser {
setAnalyzer(oldAnalyzer);
}
}
/**
* @deprecated review all use of this, don't rely on coord
*/
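The reworked getPrefixQuery above first tokenizes the prefix text and groups the analyzed tokens by position, so that only the last position is expanded as a prefix while earlier positions become term (or synonym) queries. A minimal standalone sketch of just that grouping step, using Lucene's StandardAnalyzer with a made-up field name "f" and input "wi-fi" purely for illustration:
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
public class PositionGroupingDemo {
    public static void main(String[] args) throws IOException {
        Analyzer analyzer = new StandardAnalyzer();
        List<List<String>> tlist = new ArrayList<>();
        List<String> currentPos = new ArrayList<>();
        try (TokenStream source = analyzer.tokenStream("f", "wi-fi")) {
            CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class);
            PositionIncrementAttribute posAtt = source.addAttribute(PositionIncrementAttribute.class);
            source.reset();
            while (source.incrementToken()) {
                // a position increment > 0 starts a new position; increments of 0 stack synonyms
                if (currentPos.isEmpty() == false && posAtt.getPositionIncrement() > 0) {
                    tlist.add(currentPos);
                    currentPos = new ArrayList<>();
                }
                currentPos.add(termAtt.toString());
            }
            source.end();
        }
        if (currentPos.isEmpty() == false) {
            tlist.add(currentPos);
        }
        System.out.println(tlist); // [[wi], [fi]]
    }
}
For a prefix like wi-fi* the parser would then emit a term query for "wi" and a prefix query on "fi" only, joined according to the default operator.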

View File

@ -0,0 +1,267 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lucene.search.suggest.analyzing;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStreamToAutomaton;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IntsRef;
import org.apache.lucene.util.UnicodeUtil;
import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.FiniteStringsIterator;
import org.apache.lucene.util.automaton.LevenshteinAutomata;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.UTF32ToUTF8;
import org.apache.lucene.util.fst.FST;
import org.apache.lucene.util.fst.PairOutputs;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static org.apache.lucene.util.automaton.Operations.DEFAULT_MAX_DETERMINIZED_STATES;
/**
* Implements a fuzzy {@link AnalyzingSuggester}. The similarity measurement is
* based on the Damerau-Levenshtein (optimal string alignment) algorithm, though
* you can explicitly choose classic Levenshtein by passing <code>false</code>
* for the <code>transpositions</code> parameter.
* <p>
* At most, this query will match terms up to
* {@value org.apache.lucene.util.automaton.LevenshteinAutomata#MAXIMUM_SUPPORTED_DISTANCE}
* edits. Higher distances are not supported. Note that the
* fuzzy distance is measured in "byte space" on the bytes
* returned by the {@link org.apache.lucene.analysis.TokenStream}'s {@link
* org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute}, usually UTF8. By default
* the analyzed bytes must be at least 3 {@link
* #DEFAULT_MIN_FUZZY_LENGTH} bytes before any edits are
* considered. Furthermore, the first 1 {@link
* #DEFAULT_NON_FUZZY_PREFIX} byte is not allowed to be
* edited. We allow up to 1 {@link
* #DEFAULT_MAX_EDITS} edit.
* If the {@link #unicodeAware} parameter in the constructor is set to true, maxEdits,
* minFuzzyLength, transpositions and nonFuzzyPrefix are measured in Unicode code
* points (actual letters) instead of bytes.
*
* <p>
* NOTE: This suggester does not boost suggestions that
* required no edits over suggestions that did require
* edits. This is a known limitation.
*
* <p>
* Note: complex query analyzers can have a significant impact on the lookup
* performance. It's recommended to not use analyzers that drop or inject terms
* like synonyms to keep the complexity of the prefix intersection low for good
* lookup performance. At index time, complex analyzers can safely be used.
* </p>
*/
public final class XFuzzySuggester extends XAnalyzingSuggester {
private final int maxEdits;
private final boolean transpositions;
private final int nonFuzzyPrefix;
private final int minFuzzyLength;
private final boolean unicodeAware;
/**
* Measure maxEdits, minFuzzyLength, transpositions and nonFuzzyPrefix
* parameters in Unicode code points (actual letters)
* instead of bytes.
*/
public static final boolean DEFAULT_UNICODE_AWARE = false;
/**
* The default minimum length of the key passed to {@link
* #lookup} before any edits are allowed.
*/
public static final int DEFAULT_MIN_FUZZY_LENGTH = 3;
/**
* The default prefix length where edits are not allowed.
*/
public static final int DEFAULT_NON_FUZZY_PREFIX = 1;
/**
* The default maximum number of edits for fuzzy
* suggestions.
*/
public static final int DEFAULT_MAX_EDITS = 1;
/**
* The default transposition value passed to {@link org.apache.lucene.util.automaton.LevenshteinAutomata}
*/
public static final boolean DEFAULT_TRANSPOSITIONS = true;
/**
* Creates a {@link FuzzySuggester} instance initialized with default values.
*
* @param analyzer the analyzer used for this suggester
*/
public XFuzzySuggester(Analyzer analyzer) {
this(analyzer, analyzer);
}
/**
* Creates a {@link FuzzySuggester} instance with an index &amp; a query analyzer initialized with default values.
*
* @param indexAnalyzer
* Analyzer that will be used for analyzing suggestions while building the index.
* @param queryAnalyzer
* Analyzer that will be used for analyzing query text during lookup
*/
public XFuzzySuggester(Analyzer indexAnalyzer, Analyzer queryAnalyzer) {
this(indexAnalyzer, null, queryAnalyzer, EXACT_FIRST | PRESERVE_SEP, 256, -1,
DEFAULT_MAX_EDITS, DEFAULT_TRANSPOSITIONS,
DEFAULT_NON_FUZZY_PREFIX, DEFAULT_MIN_FUZZY_LENGTH, DEFAULT_UNICODE_AWARE,
null, false, 0, SEP_LABEL, PAYLOAD_SEP, END_BYTE, HOLE_CHARACTER);
}
/**
* Creates a {@link FuzzySuggester} instance.
*
* @param indexAnalyzer Analyzer that will be used for
* analyzing suggestions while building the index.
* @param queryAnalyzer Analyzer that will be used for
* analyzing query text during lookup
* @param options see {@link #EXACT_FIRST}, {@link #PRESERVE_SEP}
* @param maxSurfaceFormsPerAnalyzedForm Maximum number of
* surface forms to keep for a single analyzed form.
* When there are too many surface forms we discard the
* lowest weighted ones.
* @param maxGraphExpansions Maximum number of graph paths
* to expand from the analyzed form. Set this to -1 for
* no limit.
* @param maxEdits must be &gt;= 0 and &lt;= {@link org.apache.lucene.util.automaton.LevenshteinAutomata#MAXIMUM_SUPPORTED_DISTANCE} .
* @param transpositions <code>true</code> if transpositions should be treated as a primitive
* edit operation. If this is false, comparisons will implement the classic
* Levenshtein algorithm.
* @param nonFuzzyPrefix length of common (non-fuzzy) prefix (see default {@link #DEFAULT_NON_FUZZY_PREFIX}
* @param minFuzzyLength minimum length of lookup key before any edits are allowed (see default {@link #DEFAULT_MIN_FUZZY_LENGTH})
* @param sepLabel separation label
* @param payloadSep payload separator byte
* @param endByte end byte marker byte
*/
public XFuzzySuggester(Analyzer indexAnalyzer, Automaton queryPrefix, Analyzer queryAnalyzer,
int options, int maxSurfaceFormsPerAnalyzedForm, int maxGraphExpansions,
int maxEdits, boolean transpositions, int nonFuzzyPrefix, int minFuzzyLength,
boolean unicodeAware, FST<PairOutputs.Pair<Long, BytesRef>> fst, boolean hasPayloads,
int maxAnalyzedPathsForOneInput, int sepLabel, int payloadSep, int endByte, int holeCharacter) {
super(indexAnalyzer, queryPrefix, queryAnalyzer, options, maxSurfaceFormsPerAnalyzedForm, maxGraphExpansions,
true, fst, hasPayloads, maxAnalyzedPathsForOneInput, sepLabel, payloadSep, endByte, holeCharacter);
if (maxEdits < 0 || maxEdits > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE) {
throw new IllegalArgumentException(
"maxEdits must be between 0 and " + LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE);
}
if (nonFuzzyPrefix < 0) {
throw new IllegalArgumentException("nonFuzzyPrefix must not be >= 0 (got " + nonFuzzyPrefix + ")");
}
if (minFuzzyLength < 0) {
throw new IllegalArgumentException("minFuzzyLength must not be >= 0 (got " + minFuzzyLength + ")");
}
this.maxEdits = maxEdits;
this.transpositions = transpositions;
this.nonFuzzyPrefix = nonFuzzyPrefix;
this.minFuzzyLength = minFuzzyLength;
this.unicodeAware = unicodeAware;
}
@Override
protected List<FSTUtil.Path<PairOutputs.Pair<Long,BytesRef>>> getFullPrefixPaths(
List<FSTUtil.Path<PairOutputs.Pair<Long,BytesRef>>> prefixPaths, Automaton lookupAutomaton,
FST<PairOutputs.Pair<Long,BytesRef>> fst)
throws IOException {
// TODO: right now there's no penalty for fuzzy/edits,
// ie a completion whose prefix matched exactly what the
// user typed gets no boost over completions that
// required an edit, which get no boost over completions
// requiring two edits. I suspect a multiplicative
// factor is appropriate (eg, say a fuzzy match must be at
// least 2X better weight than the non-fuzzy match to
// "compete") ... in which case I think the wFST needs
// to be log weights or something ...
Automaton levA = convertAutomaton(toLevenshteinAutomata(lookupAutomaton));
/*
Writer w = new OutputStreamWriter(new FileOutputStream("out.dot"), "UTF-8");
w.write(levA.toDot());
w.close();
System.out.println("Wrote LevA to out.dot");
*/
return FSTUtil.intersectPrefixPaths(levA, fst);
}
@Override
protected Automaton convertAutomaton(Automaton a) {
if (unicodeAware) {
// FLORIAN EDIT: get converted Automaton from superclass
Automaton utf8automaton = new UTF32ToUTF8().convert(super.convertAutomaton(a));
// This automaton should not blow up during determinize:
utf8automaton = Operations.determinize(utf8automaton, Integer.MAX_VALUE);
return utf8automaton;
} else {
return super.convertAutomaton(a);
}
}
@Override
public TokenStreamToAutomaton getTokenStreamToAutomaton() {
final TokenStreamToAutomaton tsta = super.getTokenStreamToAutomaton();
tsta.setUnicodeArcs(unicodeAware);
return tsta;
}
Automaton toLevenshteinAutomata(Automaton automaton) {
List<Automaton> subs = new ArrayList<>();
FiniteStringsIterator finiteStrings = new FiniteStringsIterator(automaton);
for (IntsRef string; (string = finiteStrings.next()) != null;) {
if (string.length <= nonFuzzyPrefix || string.length < minFuzzyLength) {
subs.add(Automata.makeString(string.ints, string.offset, string.length));
} else {
int ints[] = new int[string.length-nonFuzzyPrefix];
System.arraycopy(string.ints, string.offset+nonFuzzyPrefix, ints, 0, ints.length);
// TODO: maybe add alphaMin to LevenshteinAutomata,
// and pass 1 instead of 0? We probably don't want
// to allow the trailing dedup bytes to be
// edited... but then 0 byte is "in general" allowed
// on input (but not in UTF8).
LevenshteinAutomata lev = new LevenshteinAutomata(
ints, unicodeAware ? Character.MAX_CODE_POINT : 255, transpositions);
subs.add(lev.toAutomaton(maxEdits, UnicodeUtil.newString(string.ints, string.offset, nonFuzzyPrefix)));
}
}
if (subs.isEmpty()) {
// automaton is empty, there are no accepted paths through it
return Automata.makeEmpty(); // matches nothing
} else if (subs.size() == 1) {
// no synonyms or anything: just a single path through the tokenstream
return subs.get(0);
} else {
// multiple paths: this is really scary! is it slow?
// maybe we should not do this and throw UOE?
Automaton a = Operations.union(subs);
// TODO: we could call toLevenshteinAutomata() before det?
// this only happens if you have multiple paths anyway (e.g. synonyms)
return Operations.determinize(a, DEFAULT_MAX_DETERMINIZED_STATES);
}
}
}
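To make the default limits above concrete: with DEFAULT_MIN_FUZZY_LENGTH = 3, DEFAULT_NON_FUZZY_PREFIX = 1 and DEFAULT_MAX_EDITS = 1, a lookup key shorter than three units matches exactly, the first unit must always match, and a single edit is allowed elsewhere. A small sketch of the guard used in toLevenshteinAutomata; lengths here are in whatever units the suggester measures (bytes by default, code points when unicodeAware):
public class FuzzyDefaultsDemo {
    static final int MIN_FUZZY_LENGTH = 3; // XFuzzySuggester.DEFAULT_MIN_FUZZY_LENGTH
    static final int NON_FUZZY_PREFIX = 1; // XFuzzySuggester.DEFAULT_NON_FUZZY_PREFIX

    // Mirrors the branch in toLevenshteinAutomata: short strings get exact matching only.
    static boolean editsAllowed(int length) {
        return length > NON_FUZZY_PREFIX && length >= MIN_FUZZY_LENGTH;
    }

    public static void main(String[] args) {
        System.out.println(editsAllowed(2)); // false: below MIN_FUZZY_LENGTH, exact match only
        System.out.println(editsAllowed(3)); // true: one edit allowed after the first unit
    }
}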

View File

@ -0,0 +1,124 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.spatial.geopoint.search;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.geopoint.document.GeoPointField.TermEncoding;
/** Implements a point distance range query on a GeoPoint field. This is based on
* {@code org.apache.lucene.spatial.geopoint.search.GeoPointDistanceQuery} and is implemented using a
* {@code org.apache.lucene.search.BooleanClause.MUST_NOT} clause to exclude any points that fall within
* minRadiusMeters from the provided point.
* <p>
* NOTE: this query does not correctly support multi-value docs (see: https://issues.apache.org/jira/browse/LUCENE-7126)
* <br>
* TODO: remove this per ISSUE #17658
**/
public final class XGeoPointDistanceRangeQuery extends GeoPointDistanceQuery {
/** minimum distance range (in meters) from lat, lon center location, maximum is inherited */
protected final double minRadiusMeters;
/**
* Constructs a query for all {@link org.apache.lucene.spatial.geopoint.document.GeoPointField} types within a minimum / maximum
* distance (in meters) range from a given point
*/
public XGeoPointDistanceRangeQuery(final String field, final double centerLat, final double centerLon,
final double minRadiusMeters, final double maxRadiusMeters) {
this(field, TermEncoding.PREFIX, centerLat, centerLon, minRadiusMeters, maxRadiusMeters);
}
/**
* Constructs a query for all {@link org.apache.lucene.spatial.geopoint.document.GeoPointField} types within a minimum / maximum
* distance (in meters) range from a given point. Accepts an optional
* {@link org.apache.lucene.spatial.geopoint.document.GeoPointField.TermEncoding}
*/
public XGeoPointDistanceRangeQuery(final String field, final TermEncoding termEncoding, final double centerLat, final double centerLon,
final double minRadiusMeters, final double maxRadius) {
super(field, termEncoding, centerLat, centerLon, maxRadius);
this.minRadiusMeters = minRadiusMeters;
}
@Override
public Query rewrite(IndexReader reader) {
Query q = super.rewrite(reader);
if (minRadiusMeters == 0.0) {
return q;
}
// add an exclusion query
BooleanQuery.Builder bqb = new BooleanQuery.Builder();
// create a new exclusion query
GeoPointDistanceQuery exclude = new GeoPointDistanceQuery(field, termEncoding, centerLat, centerLon, minRadiusMeters);
// full map search
// if (radiusMeters >= GeoProjectionUtils.SEMIMINOR_AXIS) {
// bqb.add(new BooleanClause(new GeoPointInBBoxQuery(this.field, -180.0, -90.0, 180.0, 90.0), BooleanClause.Occur.MUST));
// } else {
bqb.add(new BooleanClause(q, BooleanClause.Occur.MUST));
// }
bqb.add(new BooleanClause(exclude, BooleanClause.Occur.MUST_NOT));
return bqb.build();
}
@Override
public String toString(String field) {
final StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName());
sb.append(':');
if (!this.field.equals(field)) {
sb.append(" field=");
sb.append(this.field);
sb.append(':');
}
return sb.append( " Center: [")
.append(centerLat)
.append(',')
.append(centerLon)
.append(']')
.append(" From Distance: ")
.append(minRadiusMeters)
.append(" m")
.append(" To Distance: ")
.append(radiusMeters)
.append(" m")
.append(" Lower Left: [")
.append(minLat)
.append(',')
.append(minLon)
.append(']')
.append(" Upper Right: [")
.append(maxLat)
.append(',')
.append(maxLon)
.append("]")
.toString();
}
/** getter method for minimum distance */
public double getMinRadiusMeters() {
return this.minRadiusMeters;
}
/** getter method for maximum distance */
public double getMaxRadiusMeters() {
return this.radiusMeters;
}
}
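A hedged construction sketch for the query above; the field name "location" and the coordinates are made up. The rewrite then produces MUST(points within 5 km) plus MUST_NOT(points within 1 km), i.e. an annulus around the center:
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.geopoint.document.GeoPointField.TermEncoding;
import org.apache.lucene.spatial.geopoint.search.XGeoPointDistanceRangeQuery;
public class DistanceRangeDemo {
    // Match points between 1 km and 5 km from the given center.
    static Query ringQuery() {
        return new XGeoPointDistanceRangeQuery("location", TermEncoding.PREFIX,
                52.52, 13.40, 1_000.0, 5_000.0);
    }
}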

View File

@ -19,6 +19,7 @@
package org.elasticsearch;
import org.elasticsearch.action.support.replication.ReplicationOperation;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
@ -412,7 +413,8 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
if (simpleName.startsWith("Elasticsearch")) {
simpleName = simpleName.substring("Elasticsearch".length());
}
return Strings.toUnderscoreCase(simpleName);
// TODO: do we really need to make the exception name in underscore casing?
return toUnderscoreCase(simpleName);
}
@Override
@ -595,8 +597,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
org.elasticsearch.common.util.concurrent.EsRejectedExecutionException::new, 59),
EARLY_TERMINATION_EXCEPTION(org.elasticsearch.common.lucene.Lucene.EarlyTerminationException.class,
org.elasticsearch.common.lucene.Lucene.EarlyTerminationException::new, 60),
ROUTING_VALIDATION_EXCEPTION(org.elasticsearch.cluster.routing.RoutingValidationException.class,
org.elasticsearch.cluster.routing.RoutingValidationException::new, 61),
// 61 used to be for RoutingValidationException
NOT_SERIALIZABLE_EXCEPTION_WRAPPER(org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper.class,
org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper::new, 62),
ALIAS_FILTER_PARSING_EXCEPTION(org.elasticsearch.indices.AliasFilterParsingException.class,
@ -696,8 +697,8 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
org.elasticsearch.index.translog.TranslogException::new, 115),
PROCESS_CLUSTER_EVENT_TIMEOUT_EXCEPTION(org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException.class,
org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException::new, 116),
RETRY_ON_PRIMARY_EXCEPTION(org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnPrimaryException.class,
org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnPrimaryException::new, 117),
RETRY_ON_PRIMARY_EXCEPTION(ReplicationOperation.RetryOnPrimaryException.class,
ReplicationOperation.RetryOnPrimaryException::new, 117),
ELASTICSEARCH_TIMEOUT_EXCEPTION(org.elasticsearch.ElasticsearchTimeoutException.class,
org.elasticsearch.ElasticsearchTimeoutException::new, 118),
QUERY_PHASE_EXECUTION_EXCEPTION(org.elasticsearch.search.query.QueryPhaseExecutionException.class,
@ -845,4 +846,39 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
interface FunctionThatThrowsIOException<T, R> {
R apply(T t) throws IOException;
}
// lower cases and adds underscores to transitions in a name
private static String toUnderscoreCase(String value) {
StringBuilder sb = new StringBuilder();
boolean changed = false;
for (int i = 0; i < value.length(); i++) {
char c = value.charAt(i);
if (Character.isUpperCase(c)) {
if (!changed) {
// copy it over here
for (int j = 0; j < i; j++) {
sb.append(value.charAt(j));
}
changed = true;
if (i == 0) {
sb.append(Character.toLowerCase(c));
} else {
sb.append('_');
sb.append(Character.toLowerCase(c));
}
} else {
sb.append('_');
sb.append(Character.toLowerCase(c));
}
} else {
if (changed) {
sb.append(c);
}
}
}
if (!changed) {
return value;
}
return sb.toString();
}
}
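For intuition, the private helper above turns CamelCase exception names into underscore form. A behaviorally similar stand-in for experimentation (not the helper itself; the regex variant is an assumption that matches it on typical exception names):
import java.util.Locale;
public class UnderscoreCaseDemo {
    // Insert '_' before each interior uppercase letter, then lower-case everything.
    static String underscoreCase(String s) {
        return s.replaceAll("(?<=.)(?=\\p{Upper})", "_").toLowerCase(Locale.ROOT);
    }

    public static void main(String[] args) {
        System.out.println(underscoreCase("IndexNotFoundException")); // index_not_found_exception
        System.out.println(underscoreCase("timeout"));                // timeout (unchanged)
    }
}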

View File

@ -34,12 +34,11 @@ import java.io.IOException;
*/
@SuppressWarnings("deprecation")
public class Version {
// The logic for ID is: XXYYZZAA, where XX is major version, YY is minor version, ZZ is revision, and AA is alpha/beta/rc indicator
// AA values below 25 are for alpha builder (since 5.0), and above 25 and below 50 are beta builds, and below 99 are RC builds, with 99 indicating a release
// the (internal) format of the id is there so we can easily do after/before checks on the id
/*
* The logic for ID is: XXYYZZAA, where XX is the major version, YY is the minor version, ZZ is the revision, and AA is the
* alpha/beta/rc indicator. AA values below 25 are for alpha builds (since 5.0), values above 25 and below 50 are beta builds, and
* values below 99 are RC builds, with 99 indicating a release. The (internal) format of the id is there so we can easily do
* after/before checks on the id.
*/
public static final int V_2_0_0_beta1_ID = 2000001;
public static final Version V_2_0_0_beta1 = new Version(V_2_0_0_beta1_ID, org.apache.lucene.util.Version.LUCENE_5_2_1);
public static final int V_2_0_0_beta2_ID = 2000002;
@ -68,9 +67,13 @@ public class Version {
public static final Version V_2_3_0 = new Version(V_2_3_0_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
public static final int V_2_3_1_ID = 2030199;
public static final Version V_2_3_1 = new Version(V_2_3_1_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
public static final int V_2_3_2_ID = 2030299;
public static final Version V_2_3_2 = new Version(V_2_3_2_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
public static final int V_5_0_0_alpha1_ID = 5000001;
public static final Version V_5_0_0_alpha1 = new Version(V_5_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_6_0_0);
public static final Version CURRENT = V_5_0_0_alpha1;
public static final int V_5_0_0_alpha2_ID = 5000002;
public static final Version V_5_0_0_alpha2 = new Version(V_5_0_0_alpha2_ID, org.apache.lucene.util.Version.LUCENE_6_0_0);
public static final Version CURRENT = V_5_0_0_alpha2;
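// A worked decode of the XXYYZZAA scheme documented above (illustration only, not part of this class):
//   V_5_0_0_alpha2_ID = 5000002
//     XX = 5000002 / 1000000       = 5  (major)
//     YY = (5000002 / 10000) % 100 = 0  (minor)
//     ZZ = (5000002 / 100) % 100   = 0  (revision)
//     AA = 5000002 % 100           = 2  (below 25, so an alpha build: alpha2)
//   Likewise V_2_3_2_ID = 2030299 decodes to 2.3.2 with AA = 99, i.e. a release.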
static {
assert CURRENT.luceneVersion.equals(org.apache.lucene.util.Version.LATEST) : "Version must be upgraded to ["
@ -83,8 +86,12 @@ public class Version {
public static Version fromId(int id) {
switch (id) {
case V_5_0_0_alpha2_ID:
return V_5_0_0_alpha2;
case V_5_0_0_alpha1_ID:
return V_5_0_0_alpha1;
case V_2_3_2_ID:
return V_2_3_2;
case V_2_3_1_ID:
return V_2_3_1;
case V_2_3_0_ID:
@ -121,12 +128,15 @@ public class Version {
/**
* Return the {@link Version} of Elasticsearch that has been used to create an index given its settings.
*
* @throws IllegalStateException if the given index settings doesn't contain a value for the key {@value IndexMetaData#SETTING_VERSION_CREATED}
* @throws IllegalStateException if the given index settings doesn't contain a value for the key
* {@value IndexMetaData#SETTING_VERSION_CREATED}
*/
public static Version indexCreated(Settings indexSettings) {
final Version indexVersion = indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, null);
if (indexVersion == null) {
throw new IllegalStateException("[" + IndexMetaData.SETTING_VERSION_CREATED + "] is not present in the index settings for index with uuid: [" + indexSettings.get(IndexMetaData.SETTING_INDEX_UUID) + "]");
throw new IllegalStateException(
"[" + IndexMetaData.SETTING_VERSION_CREATED + "] is not present in the index settings for index with uuid: ["
+ indexSettings.get(IndexMetaData.SETTING_INDEX_UUID) + "]");
}
return indexVersion;
}
@ -155,7 +165,8 @@ public class Version {
}
String[] parts = version.split("\\.|\\-");
if (parts.length < 3 || parts.length > 4) {
throw new IllegalArgumentException("the version needs to contain major, minor, and revision, and optionally the build: " + version);
throw new IllegalArgumentException(
"the version needs to contain major, minor, and revision, and optionally the build: " + version);
}
try {
@ -239,7 +250,8 @@ public class Version {
@SuppressForbidden(reason = "System.out.*")
public static void main(String[] args) {
System.out.println("Version: " + Version.CURRENT + ", Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date() + ", JVM: " + JvmInfo.jvmInfo().version());
System.out.println("Version: " + Version.CURRENT + ", Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date() + ", JVM: "
+ JvmInfo.jvmInfo().version());
}
@Override

View File

@ -24,16 +24,21 @@ import org.elasticsearch.transport.BaseTransportResponseHandler;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportResponse;
import java.util.Objects;
import java.util.function.Supplier;
/**
* A simple base class for action response listeners, defaulting to using the SAME executor (as it's
* very common on response handlers).
*/
public abstract class ActionListenerResponseHandler<Response extends TransportResponse> extends BaseTransportResponseHandler<Response> {
public class ActionListenerResponseHandler<Response extends TransportResponse> extends BaseTransportResponseHandler<Response> {
private final ActionListener<Response> listener;
private final Supplier<Response> responseSupplier;
public ActionListenerResponseHandler(ActionListener<Response> listener) {
this.listener = listener;
public ActionListenerResponseHandler(ActionListener<Response> listener, Supplier<Response> responseSupplier) {
this.listener = Objects.requireNonNull(listener);
this.responseSupplier = Objects.requireNonNull(responseSupplier);
}
@Override
@ -46,6 +51,11 @@ public abstract class ActionListenerResponseHandler<Response extends TransportRe
listener.onFailure(e);
}
@Override
public Response newInstance() {
return responseSupplier.get();
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
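Callers no longer subclass the handler to override newInstance(); they hand it a response Supplier instead, and the TransportActionNodeProxy hunk later in this commit adopts exactly that shape. A minimal sketch of the new call pattern (node, action, request and transportOptions are stand-ins borrowed from that hunk):

// The method reference supplies a fresh Response instance for deserialization.
transportService.sendRequest(node, action.name(), request, transportOptions,
        new ActionListenerResponseHandler<>(listener, action::newResponse));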

View File

@ -147,12 +147,12 @@ import org.elasticsearch.action.get.TransportMultiGetAction;
import org.elasticsearch.action.get.TransportShardMultiGetAction;
import org.elasticsearch.action.index.IndexAction;
import org.elasticsearch.action.index.TransportIndexAction;
import org.elasticsearch.action.indexedscripts.delete.DeleteIndexedScriptAction;
import org.elasticsearch.action.indexedscripts.delete.TransportDeleteIndexedScriptAction;
import org.elasticsearch.action.indexedscripts.get.GetIndexedScriptAction;
import org.elasticsearch.action.indexedscripts.get.TransportGetIndexedScriptAction;
import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptAction;
import org.elasticsearch.action.indexedscripts.put.TransportPutIndexedScriptAction;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptAction;
import org.elasticsearch.action.admin.cluster.storedscripts.TransportDeleteStoredScriptAction;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptAction;
import org.elasticsearch.action.admin.cluster.storedscripts.TransportGetStoredScriptAction;
import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptAction;
import org.elasticsearch.action.admin.cluster.storedscripts.TransportPutStoredScriptAction;
import org.elasticsearch.action.ingest.IngestActionFilter;
import org.elasticsearch.action.ingest.IngestProxyActionFilter;
import org.elasticsearch.action.ingest.DeletePipelineAction;
@ -340,9 +340,9 @@ public class ActionModule extends AbstractModule {
registerAction(RenderSearchTemplateAction.INSTANCE, TransportRenderSearchTemplateAction.class);
//Indexed scripts
registerAction(PutIndexedScriptAction.INSTANCE, TransportPutIndexedScriptAction.class);
registerAction(GetIndexedScriptAction.INSTANCE, TransportGetIndexedScriptAction.class);
registerAction(DeleteIndexedScriptAction.INSTANCE, TransportDeleteIndexedScriptAction.class);
registerAction(PutStoredScriptAction.INSTANCE, TransportPutStoredScriptAction.class);
registerAction(GetStoredScriptAction.INSTANCE, TransportGetStoredScriptAction.class);
registerAction(DeleteStoredScriptAction.INSTANCE, TransportDeleteStoredScriptAction.class);
registerAction(FieldStatsAction.INSTANCE, TransportFieldStatsTransportAction.class);

View File

@ -22,7 +22,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.StatusToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.rest.RestStatus;
@ -110,10 +109,10 @@ public abstract class DocWriteResponse extends ReplicationResponse implements St
}
static final class Fields {
static final XContentBuilderString _INDEX = new XContentBuilderString("_index");
static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
static final XContentBuilderString _ID = new XContentBuilderString("_id");
static final XContentBuilderString _VERSION = new XContentBuilderString("_version");
static final String _INDEX = "_index";
static final String _TYPE = "_type";
static final String _ID = "_id";
static final String _VERSION = "_version";
}
@Override
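The same XContentBuilderString-to-String migration repeats below in ReplicationResponse, ClusterHealthResponse and VerifyRepositoryResponse: XContentBuilder accepts plain String names directly, so the wrapper type (and the FieldCaseConversion argument removed in later hunks) disappears with it. A minimal sketch of the resulting call pattern (field values are illustrative):

// Fields._INDEX and Fields._VERSION are now plain String constants.
builder.startObject();
builder.field(Fields._INDEX, "my-index");
builder.field(Fields._VERSION, 1L);
builder.endObject();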

View File

@ -26,10 +26,8 @@ package org.elasticsearch.action;
public interface RealtimeRequest {
/**
* @param realtime Controls whether this request should be realtime by reading from the translog. If <code>null</code>
* is specified then whether the operation will be realtime depends on the api of the concrete request
* subclass.
* @param realtime Controls whether this request should be realtime by reading from the translog.
*/
<R extends RealtimeRequest> R realtime(Boolean realtime);
<R extends RealtimeRequest> R realtime(boolean realtime);
}
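Making the parameter a primitive removes the null tri-state, so every caller now opts in or out explicitly. A minimal sketch, assuming GetRequest (one implementer of this interface) and its usual index/type/id constructor:

// Explicitly disable realtime; with Boolean gone, "unset" is no longer representable.
GetRequest request = new GetRequest("my-index", "my-type", "1").realtime(false);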

View File

@ -28,7 +28,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.rest.RestStatus;
@ -280,24 +279,24 @@ public class ReplicationResponse extends ActionResponse {
private static class Fields {
private static final XContentBuilderString _INDEX = new XContentBuilderString("_index");
private static final XContentBuilderString _SHARD = new XContentBuilderString("_shard");
private static final XContentBuilderString _NODE = new XContentBuilderString("_node");
private static final XContentBuilderString REASON = new XContentBuilderString("reason");
private static final XContentBuilderString STATUS = new XContentBuilderString("status");
private static final XContentBuilderString PRIMARY = new XContentBuilderString("primary");
private static final String _INDEX = "_index";
private static final String _SHARD = "_shard";
private static final String _NODE = "_node";
private static final String REASON = "reason";
private static final String STATUS = "status";
private static final String PRIMARY = "primary";
}
}
private static class Fields {
private static final XContentBuilderString _SHARDS = new XContentBuilderString("_shards");
private static final XContentBuilderString TOTAL = new XContentBuilderString("total");
private static final XContentBuilderString SUCCESSFUL = new XContentBuilderString("successful");
private static final XContentBuilderString PENDING = new XContentBuilderString("pending");
private static final XContentBuilderString FAILED = new XContentBuilderString("failed");
private static final XContentBuilderString FAILURES = new XContentBuilderString("failures");
private static final String _SHARDS = "_shards";
private static final String TOTAL = "total";
private static final String SUCCESSFUL = "successful";
private static final String PENDING = "pending";
private static final String FAILED = "failed";
private static final String FAILURES = "failures";
}
}

View File

@ -37,7 +37,7 @@ import static org.elasticsearch.ExceptionsHelper.detailedMessage;
*
* The class is final due to serialization limitations
*/
public final class TaskOperationFailure implements Writeable<TaskOperationFailure>, ToXContent {
public final class TaskOperationFailure implements Writeable, ToXContent {
private final String nodeId;
@ -47,6 +47,16 @@ public final class TaskOperationFailure implements Writeable<TaskOperationFailur
private final RestStatus status;
public TaskOperationFailure(String nodeId, long taskId, Throwable t) {
this.nodeId = nodeId;
this.taskId = taskId;
this.reason = t;
status = ExceptionsHelper.status(t);
}
/**
* Read from a stream.
*/
public TaskOperationFailure(StreamInput in) throws IOException {
nodeId = in.readString();
taskId = in.readLong();
@ -54,11 +64,12 @@ public final class TaskOperationFailure implements Writeable<TaskOperationFailur
status = RestStatus.readFrom(in);
}
public TaskOperationFailure(String nodeId, long taskId, Throwable t) {
this.nodeId = nodeId;
this.taskId = taskId;
this.reason = t;
status = ExceptionsHelper.status(t);
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(nodeId);
out.writeLong(taskId);
out.writeThrowable(reason);
RestStatus.writeTo(out, status);
}
public String getNodeId() {
@ -81,19 +92,6 @@ public final class TaskOperationFailure implements Writeable<TaskOperationFailur
return reason;
}
@Override
public TaskOperationFailure readFrom(StreamInput in) throws IOException {
return new TaskOperationFailure(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(nodeId);
out.writeLong(taskId);
out.writeThrowable(reason);
RestStatus.writeTo(out, status);
}
@Override
public String toString() {
return "[" + nodeId + "][" + taskId + "] failed, reason [" + getReason() + "]";

View File

@ -49,11 +49,7 @@ public class TransportActionNodeProxy<Request extends ActionRequest, Response ex
listener.onFailure(validationException);
return;
}
transportService.sendRequest(node, action.name(), request, transportOptions, new ActionListenerResponseHandler<Response>(listener) {
@Override
public Response newInstance() {
return action.newResponse();
}
});
transportService.sendRequest(node, action.name(), request, transportOptions,
new ActionListenerResponseHandler<>(listener, action::newResponse));
}
}

View File

@ -24,14 +24,14 @@ import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.master.MasterNodeRequest;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParseFieldMatcherSupplier;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.action.ValidateActions.addValidationError;
@ -40,7 +40,8 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
*/
public class ClusterAllocationExplainRequest extends MasterNodeRequest<ClusterAllocationExplainRequest> {
private static ObjectParser<ClusterAllocationExplainRequest, Void> PARSER = new ObjectParser("cluster/allocation/explain");
private static ObjectParser<ClusterAllocationExplainRequest, ParseFieldMatcherSupplier> PARSER = new ObjectParser(
"cluster/allocation/explain");
static {
PARSER.declareString(ClusterAllocationExplainRequest::setIndex, new ParseField("index"));
PARSER.declareInt(ClusterAllocationExplainRequest::setShard, new ParseField("shard"));
@ -148,7 +149,7 @@ public class ClusterAllocationExplainRequest extends MasterNodeRequest<ClusterAl
}
public static ClusterAllocationExplainRequest parse(XContentParser parser) throws IOException {
ClusterAllocationExplainRequest req = PARSER.parse(parser, new ClusterAllocationExplainRequest());
ClusterAllocationExplainRequest req = PARSER.parse(parser, new ClusterAllocationExplainRequest(), () -> ParseFieldMatcher.STRICT);
Exception e = req.validate();
if (e != null) {
throw new ElasticsearchParseException("'index', 'shard', and 'primary' must be specified in allocation explain request", e);
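Typing the parser context as ParseFieldMatcherSupplier moves the strictness decision to parse time; the parse() call above supplies ParseFieldMatcher.STRICT. A lenient caller would differ only in the supplier (a sketch, assuming ParseFieldMatcher.EMPTY is the lenient matcher):

ClusterAllocationExplainRequest req =
        PARSER.parse(parser, new ClusterAllocationExplainRequest(), () -> ParseFieldMatcher.EMPTY);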

View File

@ -21,7 +21,6 @@ package org.elasticsearch.action.admin.cluster.allocation;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -32,7 +31,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.shard.ShardId;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
@ -40,60 +38,61 @@ import java.util.Map;
* A {@code ClusterAllocationExplanation} is an explanation of why a shard may or may not be allocated to nodes. It also includes weights
* for where the shard is likely to be assigned. It is an immutable class.
*/
public final class ClusterAllocationExplanation implements ToXContent, Writeable<ClusterAllocationExplanation> {
public final class ClusterAllocationExplanation implements ToXContent, Writeable {
private final ShardId shard;
private final boolean primary;
private final String assignedNodeId;
private final Map<DiscoveryNode, Decision> nodeToDecision;
private final Map<DiscoveryNode, Float> nodeWeights;
private final UnassignedInfo unassignedInfo;
private final long remainingDelayNanos;
private final long remainingDelayMillis;
private final Map<DiscoveryNode, NodeExplanation> nodeExplanations;
public ClusterAllocationExplanation(ShardId shard, boolean primary, @Nullable String assignedNodeId, long remainingDelayMillis,
@Nullable UnassignedInfo unassignedInfo, Map<DiscoveryNode, NodeExplanation> nodeExplanations) {
this.shard = shard;
this.primary = primary;
this.assignedNodeId = assignedNodeId;
this.unassignedInfo = unassignedInfo;
this.remainingDelayMillis = remainingDelayMillis;
this.nodeExplanations = nodeExplanations;
}
public ClusterAllocationExplanation(StreamInput in) throws IOException {
this.shard = ShardId.readShardId(in);
this.primary = in.readBoolean();
this.assignedNodeId = in.readOptionalString();
this.unassignedInfo = in.readOptionalWriteable(UnassignedInfo::new);
this.remainingDelayMillis = in.readVLong();
Map<DiscoveryNode, Decision> ntd = null;
int size = in.readVInt();
ntd = new HashMap<>(size);
for (int i = 0; i < size; i++) {
DiscoveryNode dn = new DiscoveryNode(in);
Decision decision = Decision.readFrom(in);
ntd.put(dn, decision);
int mapSize = in.readVInt();
Map<DiscoveryNode, NodeExplanation> nodeToExplanation = new HashMap<>(mapSize);
for (int i = 0; i < mapSize; i++) {
NodeExplanation nodeExplanation = new NodeExplanation(in);
nodeToExplanation.put(nodeExplanation.getNode(), nodeExplanation);
}
this.nodeToDecision = ntd;
Map<DiscoveryNode, Float> ntw = null;
size = in.readVInt();
ntw = new HashMap<>(size);
for (int i = 0; i < size; i++) {
DiscoveryNode dn = new DiscoveryNode(in);
float weight = in.readFloat();
ntw.put(dn, weight);
}
this.nodeWeights = ntw;
remainingDelayNanos = in.readVLong();
this.nodeExplanations = nodeToExplanation;
}
public ClusterAllocationExplanation(ShardId shard, boolean primary, @Nullable String assignedNodeId,
UnassignedInfo unassignedInfo, Map<DiscoveryNode, Decision> nodeToDecision,
Map<DiscoveryNode, Float> nodeWeights, long remainingDelayNanos) {
this.shard = shard;
this.primary = primary;
this.assignedNodeId = assignedNodeId;
this.unassignedInfo = unassignedInfo;
this.nodeToDecision = nodeToDecision == null ? Collections.emptyMap() : nodeToDecision;
this.nodeWeights = nodeWeights == null ? Collections.emptyMap() : nodeWeights;
this.remainingDelayNanos = remainingDelayNanos;
@Override
public void writeTo(StreamOutput out) throws IOException {
this.getShard().writeTo(out);
out.writeBoolean(this.isPrimary());
out.writeOptionalString(this.getAssignedNodeId());
out.writeOptionalWriteable(this.getUnassignedInfo());
out.writeVLong(remainingDelayMillis);
out.writeVInt(this.nodeExplanations.size());
for (NodeExplanation explanation : this.nodeExplanations.values()) {
explanation.writeTo(out);
}
}
/** Return the shard that the explanation is about */
public ShardId getShard() {
return this.shard;
}
/** Return true if the explained shard is primary, false otherwise */
public boolean isPrimary() {
return this.primary;
}
@ -115,22 +114,14 @@ public final class ClusterAllocationExplanation implements ToXContent, Writeable
return this.unassignedInfo;
}
/** Return a map of node to decision for shard allocation */
public Map<DiscoveryNode, Decision> getNodeDecisions() {
return this.nodeToDecision;
/** Return the remaining allocation delay for this shard in milliseconds */
public long getRemainingDelayMillis() {
return this.remainingDelayMillis;
}
/**
* Return a map of node to balancer "weight" for allocation. Higher weights mean the balancer wants to allocate the shard to that node
* more
*/
public Map<DiscoveryNode, Float> getNodeWeights() {
return this.nodeWeights;
}
/** Return the remaining allocation delay for this shard in nanoseconds */
public long getRemainingDelayNanos() {
return this.remainingDelayNanos;
/** Return a map of node to the explanation for that node */
public Map<DiscoveryNode, NodeExplanation> getNodeExplanations() {
return this.nodeExplanations;
}
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
@ -151,32 +142,12 @@ public final class ClusterAllocationExplanation implements ToXContent, Writeable
if (unassignedInfo != null) {
unassignedInfo.toXContent(builder, params);
long delay = unassignedInfo.getLastComputedLeftDelayNanos();
builder.field("allocation_delay", TimeValue.timeValueNanos(delay));
builder.field("allocation_delay_ms", TimeValue.timeValueNanos(delay).millis());
builder.field("remaining_delay", TimeValue.timeValueNanos(remainingDelayNanos));
builder.field("remaining_delay_ms", TimeValue.timeValueNanos(remainingDelayNanos).millis());
builder.timeValueField("allocation_delay_in_millis", "allocation_delay", TimeValue.timeValueNanos(delay));
builder.timeValueField("remaining_delay_in_millis", "remaining_delay", TimeValue.timeValueMillis(remainingDelayMillis));
}
builder.startObject("nodes");
for (Map.Entry<DiscoveryNode, Float> entry : nodeWeights.entrySet()) {
DiscoveryNode node = entry.getKey();
builder.startObject(node.getId()); {
builder.field("node_name", node.getName());
builder.startObject("node_attributes"); {
for (Map.Entry<String, String> attrEntry : node.getAttributes().entrySet()) {
builder.field(attrEntry.getKey(), attrEntry.getValue());
}
}
builder.endObject(); // end attributes
Decision d = nodeToDecision.get(node);
if (node.getId().equals(assignedNodeId)) {
builder.field("final_decision", "CURRENTLY_ASSIGNED");
} else {
builder.field("final_decision", d.type().toString());
}
builder.field("weight", entry.getValue());
d.toXContent(builder, params);
}
builder.endObject(); // end node <uuid>
for (NodeExplanation explanation : nodeExplanations.values()) {
explanation.toXContent(builder, params);
}
builder.endObject(); // end nodes
}
@ -184,30 +155,105 @@ public final class ClusterAllocationExplanation implements ToXContent, Writeable
return builder;
}
@Override
public ClusterAllocationExplanation readFrom(StreamInput in) throws IOException {
return new ClusterAllocationExplanation(in);
/** An Enum representing the final decision for a shard allocation on a node */
public enum FinalDecision {
// Yes, the shard can be assigned
YES((byte) 0),
// No, the shard cannot be assigned
NO((byte) 1),
// The shard is already assigned to this node
ALREADY_ASSIGNED((byte) 2);
private final byte id;
FinalDecision(byte id) {
this.id = id;
}
private static FinalDecision fromId(byte id) {
switch (id) {
case 0: return YES;
case 1: return NO;
case 2: return ALREADY_ASSIGNED;
default:
throw new IllegalArgumentException("unknown id for final decision: [" + id + "]");
}
}
@Override
public String toString() {
switch (id) {
case 0: return "YES";
case 1: return "NO";
case 2: return "ALREADY_ASSIGNED";
default:
throw new IllegalArgumentException("unknown id for final decision: [" + id + "]");
}
}
static FinalDecision readFrom(StreamInput in) throws IOException {
return fromId(in.readByte());
}
void writeTo(StreamOutput out) throws IOException {
out.writeByte(id);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
this.getShard().writeTo(out);
out.writeBoolean(this.isPrimary());
out.writeOptionalString(this.getAssignedNodeId());
out.writeOptionalWriteable(this.getUnassignedInfo());
/** An Enum representing the state of the shard store's copy of the data on a node */
public enum StoreCopy {
// No data for this shard is on the node
NONE((byte) 0),
// A copy of the data is available on this node
AVAILABLE((byte) 1),
// The copy of the data on the node is corrupt
CORRUPT((byte) 2),
// There was an error reading this node's copy of the data
IO_ERROR((byte) 3),
// The copy of the data on the node is stale
STALE((byte) 4),
// It's unknown what the copy of the data is
UNKNOWN((byte) 5);
Map<DiscoveryNode, Decision> ntd = this.getNodeDecisions();
out.writeVInt(ntd.size());
for (Map.Entry<DiscoveryNode, Decision> entry : ntd.entrySet()) {
entry.getKey().writeTo(out);
Decision.writeTo(entry.getValue(), out);
private final byte id;
StoreCopy(byte id) {
this.id = id;
}
Map<DiscoveryNode, Float> ntw = this.getNodeWeights();
out.writeVInt(ntw.size());
for (Map.Entry<DiscoveryNode, Float> entry : ntw.entrySet()) {
entry.getKey().writeTo(out);
out.writeFloat(entry.getValue());
private static StoreCopy fromId(byte id) {
switch (id) {
case 0: return NONE;
case 1: return AVAILABLE;
case 2: return CORRUPT;
case 3: return IO_ERROR;
case 4: return STALE;
case 5: return UNKNOWN;
default:
throw new IllegalArgumentException("unknown id for store copy: [" + id + "]");
}
}
@Override
public String toString() {
switch (id) {
case 0: return "NONE";
case 1: return "AVAILABLE";
case 2: return "CORRUPT";
case 3: return "IO_ERROR";
case 4: return "STALE";
case 5: return "UNKNOWN";
default:
throw new IllegalArgumentException("unknown id for store copy: [" + id + "]");
}
}
static StoreCopy readFrom(StreamInput in) throws IOException {
return fromId(in.readByte());
}
void writeTo(StreamOutput out) throws IOException {
out.writeByte(id);
}
out.writeVLong(remainingDelayNanos);
}
}
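Both enums travel over the wire as a single byte id, with the package-private readFrom/writeTo pair keeping the encoding in one place. A minimal round-trip sketch (same-package code, paired streams assumed):

ClusterAllocationExplanation.FinalDecision.NO.writeTo(out);       // writes (byte) 1
ClusterAllocationExplanation.FinalDecision decision =
        ClusterAllocationExplanation.FinalDecision.readFrom(in);  // fromId(in.readByte()) yields NO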

View File

@ -0,0 +1,145 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.allocation;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Map;
/** The cluster allocation explanation for a single node */
public class NodeExplanation implements Writeable, ToXContent {
private final DiscoveryNode node;
private final Decision nodeDecision;
private final Float nodeWeight;
private final IndicesShardStoresResponse.StoreStatus storeStatus;
private final ClusterAllocationExplanation.FinalDecision finalDecision;
private final ClusterAllocationExplanation.StoreCopy storeCopy;
private final String finalExplanation;
public NodeExplanation(final DiscoveryNode node, final Decision nodeDecision, final Float nodeWeight,
final @Nullable IndicesShardStoresResponse.StoreStatus storeStatus,
final ClusterAllocationExplanation.FinalDecision finalDecision,
final String finalExplanation,
final ClusterAllocationExplanation.StoreCopy storeCopy) {
this.node = node;
this.nodeDecision = nodeDecision;
this.nodeWeight = nodeWeight;
this.storeStatus = storeStatus;
this.finalDecision = finalDecision;
this.finalExplanation = finalExplanation;
this.storeCopy = storeCopy;
}
public NodeExplanation(StreamInput in) throws IOException {
this.node = new DiscoveryNode(in);
this.nodeDecision = Decision.readFrom(in);
this.nodeWeight = in.readFloat();
if (in.readBoolean()) {
this.storeStatus = IndicesShardStoresResponse.StoreStatus.readStoreStatus(in);
} else {
this.storeStatus = null;
}
this.finalDecision = ClusterAllocationExplanation.FinalDecision.readFrom(in);
this.finalExplanation = in.readString();
this.storeCopy = ClusterAllocationExplanation.StoreCopy.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
node.writeTo(out);
Decision.writeTo(nodeDecision, out);
out.writeFloat(nodeWeight);
if (storeStatus == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
storeStatus.writeTo(out);
}
finalDecision.writeTo(out);
out.writeString(finalExplanation);
storeCopy.writeTo(out);
}
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(node.getId()); {
builder.field("node_name", node.getName());
builder.startObject("node_attributes"); {
for (Map.Entry<String, String> attrEntry : node.getAttributes().entrySet()) {
builder.field(attrEntry.getKey(), attrEntry.getValue());
}
}
builder.endObject(); // end attributes
builder.startObject("store"); {
builder.field("shard_copy", storeCopy.toString());
if (storeStatus != null) {
final Throwable storeErr = storeStatus.getStoreException();
if (storeErr != null) {
builder.field("store_exception", ExceptionsHelper.detailedMessage(storeErr));
}
}
}
builder.endObject(); // end store
builder.field("final_decision", finalDecision.toString());
builder.field("final_explanation", finalExplanation.toString());
builder.field("weight", nodeWeight);
nodeDecision.toXContent(builder, params);
}
builder.endObject(); // end node <uuid>
return builder;
}
public DiscoveryNode getNode() {
return this.node;
}
public Decision getDecision() {
return this.nodeDecision;
}
public Float getWeight() {
return this.nodeWeight;
}
@Nullable
public IndicesShardStoresResponse.StoreStatus getStoreStatus() {
return this.storeStatus;
}
public ClusterAllocationExplanation.FinalDecision getFinalDecision() {
return this.finalDecision;
}
public String getFinalExplanation() {
return this.finalExplanation;
}
public ClusterAllocationExplanation.StoreCopy getStoreCopy() {
return this.storeCopy;
}
}

View File

@ -20,8 +20,13 @@
package org.elasticsearch.action.admin.cluster.allocation;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.apache.lucene.index.CorruptIndexException;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresRequest;
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse;
import org.elasticsearch.action.admin.indices.shards.TransportIndicesShardStoresAction;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.ClusterInfoService;
@ -47,8 +52,10 @@ import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator;
import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.collect.ImmutableOpenIntMap;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@ -56,6 +63,7 @@ import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* The {@code TransportClusterAllocationExplainAction} is responsible for actually executing the explanation of a shard's allocation on the
@ -68,19 +76,22 @@ public class TransportClusterAllocationExplainAction
private final ClusterInfoService clusterInfoService;
private final AllocationDeciders allocationDeciders;
private final ShardsAllocator shardAllocator;
private final TransportIndicesShardStoresAction shardStoresAction;
@Inject
public TransportClusterAllocationExplainAction(Settings settings, TransportService transportService, ClusterService clusterService,
ThreadPool threadPool, ActionFilters actionFilters,
IndexNameExpressionResolver indexNameExpressionResolver,
AllocationService allocationService, ClusterInfoService clusterInfoService,
AllocationDeciders allocationDeciders, ShardsAllocator shardAllocator) {
AllocationDeciders allocationDeciders, ShardsAllocator shardAllocator,
TransportIndicesShardStoresAction shardStoresAction) {
super(settings, ClusterAllocationExplainAction.NAME, transportService, clusterService, threadPool, actionFilters,
indexNameExpressionResolver, ClusterAllocationExplainRequest::new);
this.allocationService = allocationService;
this.clusterInfoService = clusterInfoService;
this.allocationDeciders = allocationDeciders;
this.shardAllocator = shardAllocator;
this.shardStoresAction = shardStoresAction;
}
@Override
@ -118,12 +129,86 @@ public class TransportClusterAllocationExplainAction
}
}
/**
* Construct a {@code NodeExplanation} object for the given shard, given all the metadata. This also attempts to construct the
* human-readable FinalDecision and final explanation as part of the explanation.
*/
public static NodeExplanation calculateNodeExplanation(ShardRouting shard,
IndexMetaData indexMetaData,
DiscoveryNode node,
Decision nodeDecision,
Float nodeWeight,
IndicesShardStoresResponse.StoreStatus storeStatus,
String assignedNodeId,
Set<String> activeAllocationIds) {
final ClusterAllocationExplanation.FinalDecision finalDecision;
final ClusterAllocationExplanation.StoreCopy storeCopy;
final String finalExplanation;
if (storeStatus == null) {
// No copies of the data
storeCopy = ClusterAllocationExplanation.StoreCopy.NONE;
} else {
final Throwable storeErr = storeStatus.getStoreException();
if (storeErr != null) {
if (ExceptionsHelper.unwrapCause(storeErr) instanceof CorruptIndexException) {
storeCopy = ClusterAllocationExplanation.StoreCopy.CORRUPT;
} else {
storeCopy = ClusterAllocationExplanation.StoreCopy.IO_ERROR;
}
} else if (activeAllocationIds.isEmpty()) {
// The ids are only empty if dealing with a legacy index
// TODO: fetch the shard state versions and display here?
storeCopy = ClusterAllocationExplanation.StoreCopy.UNKNOWN;
} else if (activeAllocationIds.contains(storeStatus.getAllocationId())) {
storeCopy = ClusterAllocationExplanation.StoreCopy.AVAILABLE;
} else {
// Otherwise, this is a stale copy of the data (allocation ids don't match)
storeCopy = ClusterAllocationExplanation.StoreCopy.STALE;
}
}
if (node.getId().equals(assignedNodeId)) {
finalDecision = ClusterAllocationExplanation.FinalDecision.ALREADY_ASSIGNED;
finalExplanation = "the shard is already assigned to this node";
} else if (shard.primary() && shard.unassigned() && shard.allocatedPostIndexCreate(indexMetaData) &&
storeCopy == ClusterAllocationExplanation.StoreCopy.STALE) {
finalExplanation = "the copy of the shard is stale, allocation ids do not match";
finalDecision = ClusterAllocationExplanation.FinalDecision.NO;
} else if (shard.primary() && shard.unassigned() && shard.allocatedPostIndexCreate(indexMetaData) &&
storeCopy == ClusterAllocationExplanation.StoreCopy.NONE) {
finalExplanation = "there is no copy of the shard available";
finalDecision = ClusterAllocationExplanation.FinalDecision.NO;
} else if (shard.primary() && shard.unassigned() && storeCopy == ClusterAllocationExplanation.StoreCopy.CORRUPT) {
finalExplanation = "the copy of the shard is corrupt";
finalDecision = ClusterAllocationExplanation.FinalDecision.NO;
} else if (shard.primary() && shard.unassigned() && storeCopy == ClusterAllocationExplanation.StoreCopy.IO_ERROR) {
finalExplanation = "the copy of the shard cannot be read";
finalDecision = ClusterAllocationExplanation.FinalDecision.NO;
} else {
if (nodeDecision.type() == Decision.Type.NO) {
finalDecision = ClusterAllocationExplanation.FinalDecision.NO;
finalExplanation = "the shard cannot be assigned because one or more allocation decider returns a 'NO' decision";
} else {
finalDecision = ClusterAllocationExplanation.FinalDecision.YES;
if (storeCopy == ClusterAllocationExplanation.StoreCopy.AVAILABLE) {
finalExplanation = "the shard can be assigned and the node contains a valid copy of the shard data";
} else {
finalExplanation = "the shard can be assigned";
}
}
}
return new NodeExplanation(node, nodeDecision, nodeWeight, storeStatus, finalDecision, finalExplanation, storeCopy);
}
/**
* For the given {@code ShardRouting}, return the explanation of the allocation for that shard on all nodes. If {@code
* includeYesDecisions} is true, returns all decisions, otherwise returns only 'NO' and 'THROTTLE' decisions.
*/
public static ClusterAllocationExplanation explainShard(ShardRouting shard, RoutingAllocation allocation, RoutingNodes routingNodes,
boolean includeYesDecisions, ShardsAllocator shardAllocator) {
boolean includeYesDecisions, ShardsAllocator shardAllocator,
List<IndicesShardStoresResponse.StoreStatus> shardStores) {
// don't short circuit deciders, we want a full explanation
allocation.debugDecision(true);
// get the existing unassigned info if available
@ -139,14 +224,35 @@ public class TransportClusterAllocationExplainAction
nodeToDecision.put(discoNode, d);
}
}
long remainingDelayNanos = 0;
long remainingDelayMillis = 0;
final MetaData metadata = allocation.metaData();
final IndexMetaData indexMetaData = metadata.index(shard.index());
if (ui != null) {
final MetaData metadata = allocation.metaData();
final Settings indexSettings = metadata.index(shard.index()).getSettings();
remainingDelayNanos = ui.getRemainingDelay(System.nanoTime(), metadata.settings(), indexSettings);
final Settings indexSettings = indexMetaData.getSettings();
long remainingDelayNanos = ui.getRemainingDelay(System.nanoTime(), metadata.settings(), indexSettings);
remainingDelayMillis = TimeValue.timeValueNanos(remainingDelayNanos).millis();
}
return new ClusterAllocationExplanation(shard.shardId(), shard.primary(), shard.currentNodeId(), ui, nodeToDecision,
shardAllocator.weighShard(allocation, shard), remainingDelayNanos);
// Calculate weights for each of the nodes
Map<DiscoveryNode, Float> weights = shardAllocator.weighShard(allocation, shard);
Map<DiscoveryNode, IndicesShardStoresResponse.StoreStatus> nodeToStatus = new HashMap<>(shardStores.size());
for (IndicesShardStoresResponse.StoreStatus status : shardStores) {
nodeToStatus.put(status.getNode(), status);
}
Map<DiscoveryNode, NodeExplanation> explanations = new HashMap<>(shardStores.size());
for (Map.Entry<DiscoveryNode, Decision> entry : nodeToDecision.entrySet()) {
DiscoveryNode node = entry.getKey();
Decision decision = entry.getValue();
Float weight = weights.get(node);
IndicesShardStoresResponse.StoreStatus storeStatus = nodeToStatus.get(node);
NodeExplanation nodeExplanation = calculateNodeExplanation(shard, indexMetaData, node, decision, weight,
storeStatus, shard.currentNodeId(), indexMetaData.activeAllocationIds(shard.getId()));
explanations.put(node, nodeExplanation);
}
return new ClusterAllocationExplanation(shard.shardId(), shard.primary(),
shard.currentNodeId(), remainingDelayMillis, ui, explanations);
}
@Override
@ -156,30 +262,30 @@ public class TransportClusterAllocationExplainAction
final RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, routingNodes, state.nodes(),
clusterInfoService.getClusterInfo(), System.nanoTime());
ShardRouting shardRouting = null;
ShardRouting foundShard = null;
if (request.useAnyUnassignedShard()) {
// If we can use any shard, just pick the first unassigned one (if there are any)
RoutingNodes.UnassignedShards.UnassignedIterator ui = routingNodes.unassigned().iterator();
if (ui.hasNext()) {
shardRouting = ui.next();
foundShard = ui.next();
}
} else {
String index = request.getIndex();
int shard = request.getShard();
if (request.isPrimary()) {
// If we're looking for the primary shard, there's only one copy, so pick it directly
shardRouting = allocation.routingTable().shardRoutingTable(index, shard).primaryShard();
foundShard = allocation.routingTable().shardRoutingTable(index, shard).primaryShard();
} else {
// If looking for a replica, go through all the replica shards
List<ShardRouting> replicaShardRoutings = allocation.routingTable().shardRoutingTable(index, shard).replicaShards();
if (replicaShardRoutings.size() > 0) {
// Pick the first replica at the very least
shardRouting = replicaShardRoutings.get(0);
foundShard = replicaShardRoutings.get(0);
// In case there are multiple replicas where some are assigned and some aren't,
// try to find one that is unassigned at least
for (ShardRouting replica : replicaShardRoutings) {
if (replica.unassigned()) {
shardRouting = replica;
foundShard = replica;
break;
}
}
@ -187,14 +293,34 @@ public class TransportClusterAllocationExplainAction
}
}
if (shardRouting == null) {
if (foundShard == null) {
listener.onFailure(new ElasticsearchException("unable to find any shards to explain [{}] in the routing table", request));
return;
}
final ShardRouting shardRouting = foundShard;
logger.debug("explaining the allocation for [{}], found shard [{}]", request, shardRouting);
ClusterAllocationExplanation cae = explainShard(shardRouting, allocation, routingNodes,
request.includeYesDecisions(), shardAllocator);
listener.onResponse(new ClusterAllocationExplainResponse(cae));
getShardStores(shardRouting, new ActionListener<IndicesShardStoresResponse>() {
@Override
public void onResponse(IndicesShardStoresResponse shardStoreResponse) {
ImmutableOpenIntMap<List<IndicesShardStoresResponse.StoreStatus>> shardStatuses =
shardStoreResponse.getStoreStatuses().get(shardRouting.getIndexName());
List<IndicesShardStoresResponse.StoreStatus> shardStoreStatus = shardStatuses.get(shardRouting.id());
ClusterAllocationExplanation cae = explainShard(shardRouting, allocation, routingNodes,
request.includeYesDecisions(), shardAllocator, shardStoreStatus);
listener.onResponse(new ClusterAllocationExplainResponse(cae));
}
@Override
public void onFailure(Throwable e) {
listener.onFailure(e);
}
});
}
private void getShardStores(ShardRouting shard, final ActionListener<IndicesShardStoresResponse> listener) {
IndicesShardStoresRequest request = new IndicesShardStoresRequest(shard.getIndexName());
request.shardStatuses("all");
shardStoresAction.execute(request, listener);
}
}
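Reading the branches in calculateNodeExplanation as a worked example: an unassigned primary whose store copy carries a CorruptIndexException resolves to StoreCopy.CORRUPT and FinalDecision.NO, regardless of the decider verdict. A hypothetical invocation (shard, indexMetaData, node, corruptStoreStatus and activeAllocationIds are all stand-ins):

NodeExplanation ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(
        shard, indexMetaData, node, Decision.YES, 0.5f,
        corruptStoreStatus, null, activeAllocationIds);
assert ne.getStoreCopy() == ClusterAllocationExplanation.StoreCopy.CORRUPT;
assert ne.getFinalDecision() == ClusterAllocationExplanation.FinalDecision.NO;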

View File

@ -19,7 +19,6 @@
package org.elasticsearch.action.admin.cluster.health;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
@ -30,12 +29,10 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.StatusToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
import java.util.List;
import java.util.Locale;
import java.util.Map;
@ -83,14 +80,6 @@ public class ClusterHealthResponse extends ActionResponse implements StatusToXCo
return clusterStateHealth;
}
/**
* The validation failures on the cluster level (without index validation failures).
*/
public List<String> getValidationFailures() {
return clusterStateHealth.getValidationFailures();
}
public int getActiveShards() {
return clusterStateHealth.getActiveShards();
}
@ -234,25 +223,24 @@ public class ClusterHealthResponse extends ActionResponse implements StatusToXCo
}
static final class Fields {
static final XContentBuilderString CLUSTER_NAME = new XContentBuilderString("cluster_name");
static final XContentBuilderString STATUS = new XContentBuilderString("status");
static final XContentBuilderString TIMED_OUT = new XContentBuilderString("timed_out");
static final XContentBuilderString NUMBER_OF_NODES = new XContentBuilderString("number_of_nodes");
static final XContentBuilderString NUMBER_OF_DATA_NODES = new XContentBuilderString("number_of_data_nodes");
static final XContentBuilderString NUMBER_OF_PENDING_TASKS = new XContentBuilderString("number_of_pending_tasks");
static final XContentBuilderString NUMBER_OF_IN_FLIGHT_FETCH = new XContentBuilderString("number_of_in_flight_fetch");
static final XContentBuilderString DELAYED_UNASSIGNED_SHARDS = new XContentBuilderString("delayed_unassigned_shards");
static final XContentBuilderString TASK_MAX_WAIT_TIME_IN_QUEUE = new XContentBuilderString("task_max_waiting_in_queue");
static final XContentBuilderString TASK_MAX_WAIT_TIME_IN_QUEUE_IN_MILLIS = new XContentBuilderString("task_max_waiting_in_queue_millis");
static final XContentBuilderString ACTIVE_SHARDS_PERCENT_AS_NUMBER = new XContentBuilderString("active_shards_percent_as_number");
static final XContentBuilderString ACTIVE_SHARDS_PERCENT = new XContentBuilderString("active_shards_percent");
static final XContentBuilderString ACTIVE_PRIMARY_SHARDS = new XContentBuilderString("active_primary_shards");
static final XContentBuilderString ACTIVE_SHARDS = new XContentBuilderString("active_shards");
static final XContentBuilderString RELOCATING_SHARDS = new XContentBuilderString("relocating_shards");
static final XContentBuilderString INITIALIZING_SHARDS = new XContentBuilderString("initializing_shards");
static final XContentBuilderString UNASSIGNED_SHARDS = new XContentBuilderString("unassigned_shards");
static final XContentBuilderString VALIDATION_FAILURES = new XContentBuilderString("validation_failures");
static final XContentBuilderString INDICES = new XContentBuilderString("indices");
static final String CLUSTER_NAME = "cluster_name";
static final String STATUS = "status";
static final String TIMED_OUT = "timed_out";
static final String NUMBER_OF_NODES = "number_of_nodes";
static final String NUMBER_OF_DATA_NODES = "number_of_data_nodes";
static final String NUMBER_OF_PENDING_TASKS = "number_of_pending_tasks";
static final String NUMBER_OF_IN_FLIGHT_FETCH = "number_of_in_flight_fetch";
static final String DELAYED_UNASSIGNED_SHARDS = "delayed_unassigned_shards";
static final String TASK_MAX_WAIT_TIME_IN_QUEUE = "task_max_waiting_in_queue";
static final String TASK_MAX_WAIT_TIME_IN_QUEUE_IN_MILLIS = "task_max_waiting_in_queue_millis";
static final String ACTIVE_SHARDS_PERCENT_AS_NUMBER = "active_shards_percent_as_number";
static final String ACTIVE_SHARDS_PERCENT = "active_shards_percent";
static final String ACTIVE_PRIMARY_SHARDS = "active_primary_shards";
static final String ACTIVE_SHARDS = "active_shards";
static final String RELOCATING_SHARDS = "relocating_shards";
static final String INITIALIZING_SHARDS = "initializing_shards";
static final String UNASSIGNED_SHARDS = "unassigned_shards";
static final String INDICES = "indices";
}
@Override
@ -276,36 +264,10 @@ public class ClusterHealthResponse extends ActionResponse implements StatusToXCo
String level = params.param("level", "cluster");
boolean outputIndices = "indices".equals(level) || "shards".equals(level);
if (!getValidationFailures().isEmpty()) {
builder.startArray(Fields.VALIDATION_FAILURES);
for (String validationFailure : getValidationFailures()) {
builder.value(validationFailure);
}
// if we don't print index level information, still print the index validation failures
// so we know why the status is red
if (!outputIndices) {
for (ClusterIndexHealth indexHealth : clusterStateHealth.getIndices().values()) {
builder.startObject(indexHealth.getIndex());
if (!indexHealth.getValidationFailures().isEmpty()) {
builder.startArray(Fields.VALIDATION_FAILURES);
for (String validationFailure : indexHealth.getValidationFailures()) {
builder.value(validationFailure);
}
builder.endArray();
}
builder.endObject();
}
}
builder.endArray();
}
if (outputIndices) {
builder.startObject(Fields.INDICES);
for (ClusterIndexHealth indexHealth : clusterStateHealth.getIndices().values()) {
builder.startObject(indexHealth.getIndex(), XContentBuilder.FieldCaseConversion.NONE);
builder.startObject(indexHealth.getIndex());
indexHealth.toXContent(builder, params);
builder.endObject();
}

View File

@ -64,23 +64,23 @@ public class NodesInfoResponse extends BaseNodesResponse<NodeInfo> implements To
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field("cluster_name", getClusterName().value(), XContentBuilder.FieldCaseConversion.NONE);
builder.field("cluster_name", getClusterName().value());
builder.startObject("nodes");
for (NodeInfo nodeInfo : this) {
builder.startObject(nodeInfo.getNode().getId(), XContentBuilder.FieldCaseConversion.NONE);
builder.startObject(nodeInfo.getNode().getId());
builder.field("name", nodeInfo.getNode().getName(), XContentBuilder.FieldCaseConversion.NONE);
builder.field("name", nodeInfo.getNode().getName());
builder.field("transport_address", nodeInfo.getNode().getAddress().toString());
builder.field("host", nodeInfo.getNode().getHostName(), XContentBuilder.FieldCaseConversion.NONE);
builder.field("ip", nodeInfo.getNode().getHostAddress(), XContentBuilder.FieldCaseConversion.NONE);
builder.field("host", nodeInfo.getNode().getHostName());
builder.field("ip", nodeInfo.getNode().getHostAddress());
builder.field("version", nodeInfo.getVersion());
builder.field("build_hash", nodeInfo.getBuild().shortHash());
if (nodeInfo.getServiceAttributes() != null) {
for (Map.Entry<String, String> nodeAttribute : nodeInfo.getServiceAttributes().entrySet()) {
builder.field(nodeAttribute.getKey(), nodeAttribute.getValue(), XContentBuilder.FieldCaseConversion.NONE);
builder.field(nodeAttribute.getKey(), nodeAttribute.getValue());
}
}
@ -93,7 +93,7 @@ public class NodesInfoResponse extends BaseNodesResponse<NodeInfo> implements To
if (!nodeInfo.getNode().getAttributes().isEmpty()) {
builder.startObject("attributes");
for (Map.Entry<String, String> entry : nodeInfo.getNode().getAttributes().entrySet()) {
builder.field(entry.getKey(), entry.getValue(), XContentBuilder.FieldCaseConversion.NONE);
builder.field(entry.getKey(), entry.getValue());
}
builder.endObject();
}

View File

@ -224,7 +224,7 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
threadPool = ThreadPoolStats.readThreadPoolStats(in);
}
if (in.readBoolean()) {
fs = FsInfo.readFsInfo(in);
fs = new FsInfo(in);
}
if (in.readBoolean()) {
transport = TransportStats.readTransportStats(in);
@ -299,10 +299,10 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (!params.param("node_info_format", "default").equals("none")) {
builder.field("name", getNode().getName(), XContentBuilder.FieldCaseConversion.NONE);
builder.field("transport_address", getNode().getAddress().toString(), XContentBuilder.FieldCaseConversion.NONE);
builder.field("host", getNode().getHostName(), XContentBuilder.FieldCaseConversion.NONE);
builder.field("ip", getNode().getAddress(), XContentBuilder.FieldCaseConversion.NONE);
builder.field("name", getNode().getName());
builder.field("transport_address", getNode().getAddress().toString());
builder.field("host", getNode().getHostName());
builder.field("ip", getNode().getAddress());
builder.startArray("roles");
for (DiscoveryNode.Role role : getNode().getRoles()) {
@ -313,7 +313,7 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
if (!getNode().getAttributes().isEmpty()) {
builder.startObject("attributes");
for (Map.Entry<String, String> attrEntry : getNode().getAttributes().entrySet()) {
builder.field(attrEntry.getKey(), attrEntry.getValue(), XContentBuilder.FieldCaseConversion.NONE);
builder.field(attrEntry.getKey(), attrEntry.getValue());
}
builder.endObject();
}

View File

@ -65,7 +65,7 @@ public class NodesStatsResponse extends BaseNodesResponse<NodeStats> implements
builder.startObject("nodes");
for (NodeStats nodeStats : this) {
builder.startObject(nodeStats.getNode().getId(), XContentBuilder.FieldCaseConversion.NONE);
builder.startObject(nodeStats.getNode().getId());
builder.field("timestamp", nodeStats.getTimestamp());
nodeStats.toXContent(builder, params);
@ -88,4 +88,4 @@ public class NodesStatsResponse extends BaseNodesResponse<NodeStats> implements
return "{ \"error\" : \"" + e.getMessage() + "\"}";
}
}
}
}

View File

@ -56,7 +56,7 @@ import java.util.function.Consumer;
* Transport action that can be used to cancel currently running cancellable tasks.
* <p>
* For a task to be cancellable it has to return an instance of
* {@link CancellableTask} from {@link TransportRequest#createTask(long, String, String)}
* {@link CancellableTask} from {@link TransportRequest#createTask(long, String, String, TaskId)}
*/
public class TransportCancelTasksAction extends TransportTasksAction<CancellableTask, CancelTasksRequest, CancelTasksResponse, TaskInfo> {
@ -251,7 +251,7 @@ public class TransportCancelTasksAction extends TransportTasksAction<Cancellable
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
parentTaskId = new TaskId(in);
parentTaskId = TaskId.readFromStream(in);
ban = in.readBoolean();
if (ban) {
reason = in.readString();

View File

@ -23,11 +23,11 @@ import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.action.support.tasks.BaseTasksResponse;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.tasks.TaskId;
import java.io.IOException;
@ -54,7 +54,8 @@ public class ListTasksResponse extends BaseTasksResponse implements ToXContent {
public ListTasksResponse() {
}
public ListTasksResponse(List<TaskInfo> tasks, List<TaskOperationFailure> taskFailures, List<? extends FailedNodeException> nodeFailures) {
public ListTasksResponse(List<TaskInfo> tasks, List<TaskOperationFailure> taskFailures,
List<? extends FailedNodeException> nodeFailures) {
super(taskFailures, nodeFailures);
this.tasks = tasks == null ? Collections.emptyList() : Collections.unmodifiableList(new ArrayList<>(tasks));
}
@ -163,7 +164,7 @@ public class ListTasksResponse extends BaseTasksResponse implements ToXContent {
builder.startObject("nodes");
for (Map.Entry<DiscoveryNode, List<TaskInfo>> entry : getPerNodeTasks().entrySet()) {
DiscoveryNode node = entry.getKey();
builder.startObject(node.getId(), XContentBuilder.FieldCaseConversion.NONE);
builder.startObject(node.getId());
builder.field("name", node.getName());
builder.field("transport_address", node.getAddress().toString());
builder.field("host", node.getHostName());
@ -178,13 +179,13 @@ public class ListTasksResponse extends BaseTasksResponse implements ToXContent {
if (!node.getAttributes().isEmpty()) {
builder.startObject("attributes");
for (Map.Entry<String, String> attrEntry : node.getAttributes().entrySet()) {
builder.field(attrEntry.getKey(), attrEntry.getValue(), XContentBuilder.FieldCaseConversion.NONE);
builder.field(attrEntry.getKey(), attrEntry.getValue());
}
builder.endObject();
}
builder.startObject("tasks");
for(TaskInfo task : entry.getValue()) {
builder.startObject(task.getTaskId().toString(), XContentBuilder.FieldCaseConversion.NONE);
builder.startObject(task.getTaskId().toString());
task.toXContent(builder, params);
builder.endObject();
}
@ -194,7 +195,7 @@ public class ListTasksResponse extends BaseTasksResponse implements ToXContent {
} else if ("parents".equals(groupBy)) {
builder.startObject("tasks");
for (TaskGroup group : getTaskGroups()) {
builder.startObject(group.getTaskInfo().getTaskId().toString(), XContentBuilder.FieldCaseConversion.NONE);
builder.startObject(group.getTaskInfo().getTaskId().toString());
group.toXContent(builder, params);
builder.endObject();
}
@ -205,14 +206,6 @@ public class ListTasksResponse extends BaseTasksResponse implements ToXContent {
@Override
public String toString() {
try {
XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
builder.startObject();
toXContent(builder, EMPTY_PARAMS);
builder.endObject();
return builder.string();
} catch (IOException e) {
return "{ \"error\" : \"" + e.getMessage() + "\"}";
}
return Strings.toString(this);
}
}
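Strings.toString(this) stands in for the removed hand-rolled builder; per the deleted lines above it is roughly equivalent to:

XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
builder.startObject();
toXContent(builder, EMPTY_PARAMS);
builder.endObject();
return builder.string();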

View File

@ -39,7 +39,7 @@ import java.util.concurrent.TimeUnit;
* and use in APIs. Instead, immutable and streamable TaskInfo objects are used to represent
* snapshot information about currently running tasks.
*/
public class TaskInfo implements Writeable<TaskInfo>, ToXContent {
public class TaskInfo implements Writeable, ToXContent {
private final DiscoveryNode node;
@ -75,21 +75,34 @@ public class TaskInfo implements Writeable<TaskInfo>, ToXContent {
this.parentTaskId = parentTaskId;
}
/**
* Read from a stream.
*/
public TaskInfo(StreamInput in) throws IOException {
node = new DiscoveryNode(in);
taskId = new TaskId(node.getId(), in.readLong());
type = in.readString();
action = in.readString();
description = in.readOptionalString();
if (in.readBoolean()) {
status = in.readTaskStatus();
} else {
status = null;
}
status = in.readOptionalNamedWriteable(Task.Status.class);
startTime = in.readLong();
runningTimeNanos = in.readLong();
cancellable = in.readBoolean();
parentTaskId = new TaskId(in);
parentTaskId = TaskId.readFromStream(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
node.writeTo(out);
out.writeLong(taskId.getId());
out.writeString(type);
out.writeString(action);
out.writeOptionalString(description);
out.writeOptionalNamedWriteable(status);
out.writeLong(startTime);
out.writeLong(runningTimeNanos);
out.writeBoolean(cancellable);
parentTaskId.writeTo(out);
}
public TaskId getTaskId() {
@ -152,30 +165,6 @@ public class TaskInfo implements Writeable<TaskInfo>, ToXContent {
return parentTaskId;
}
@Override
public TaskInfo readFrom(StreamInput in) throws IOException {
return new TaskInfo(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
node.writeTo(out);
out.writeLong(taskId.getId());
out.writeString(type);
out.writeString(action);
out.writeOptionalString(description);
if (status != null) {
out.writeBoolean(true);
out.writeTaskStatus(status);
} else {
out.writeBoolean(false);
}
out.writeLong(startTime);
out.writeLong(runningTimeNanos);
out.writeBoolean(cancellable);
parentTaskId.writeTo(out);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field("node", node.getId());

View File

@ -51,12 +51,15 @@ public class TransportListTasksAction extends TransportTasksAction<Task, ListTas
private static final TimeValue DEFAULT_WAIT_FOR_COMPLETION_TIMEOUT = timeValueSeconds(30);
@Inject
public TransportListTasksAction(Settings settings, ClusterName clusterName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
super(settings, ListTasksAction.NAME, clusterName, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, ListTasksRequest::new, ListTasksResponse::new, ThreadPool.Names.MANAGEMENT);
public TransportListTasksAction(Settings settings, ClusterName clusterName, ThreadPool threadPool, ClusterService clusterService,
TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
super(settings, ListTasksAction.NAME, clusterName, threadPool, clusterService, transportService, actionFilters,
indexNameExpressionResolver, ListTasksRequest::new, ListTasksResponse::new, ThreadPool.Names.MANAGEMENT);
}
@Override
protected ListTasksResponse newResponse(ListTasksRequest request, List<TaskInfo> tasks, List<TaskOperationFailure> taskOperationFailures, List<FailedNodeException> failedNodeExceptions) {
protected ListTasksResponse newResponse(ListTasksRequest request, List<TaskInfo> tasks,
List<TaskOperationFailure> taskOperationFailures, List<FailedNodeException> failedNodeExceptions) {
return new ListTasksResponse(tasks, taskOperationFailures, failedNodeExceptions);
}

View File

@ -148,7 +148,7 @@ public class PutRepositoryRequest extends AcknowledgedRequest<PutRepositoryReque
* @return this request
*/
public PutRepositoryRequest settings(String source) {
this.settings = Settings.settingsBuilder().loadFromSource(source).build();
this.settings = Settings.builder().loadFromSource(source).build();
return this;
}

View File

@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.common.xcontent.XContentHelper;
import java.io.IOException;
@ -78,16 +77,16 @@ public class VerifyRepositoryResponse extends ActionResponse implements ToXConte
}
static final class Fields {
static final XContentBuilderString NODES = new XContentBuilderString("nodes");
static final XContentBuilderString NAME = new XContentBuilderString("name");
static final String NODES = "nodes";
static final String NAME = "name";
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(Fields.NODES);
for (DiscoveryNode node : nodes) {
builder.startObject(node.getId(), XContentBuilder.FieldCaseConversion.NONE);
builder.field(Fields.NAME, node.getName(), XContentBuilder.FieldCaseConversion.NONE);
builder.startObject(node.getId());
builder.field(Fields.NAME, node.getName());
builder.endObject();
}
builder.endObject();
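
This hunk shows the second recurring migration: XContentBuilderString constants become plain String constants, and the FieldCaseConversion argument disappears because field names are now emitted exactly as written. A condensed sketch of the new shape, using the names from the hunk above:

import java.io.IOException;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.xcontent.XContentBuilder;

static final class Fields {
    static final String NAME = "name"; // was: new XContentBuilderString("name")
}

static void nodeToXContent(XContentBuilder builder, DiscoveryNode node) throws IOException {
    builder.startObject(node.getId());          // no FieldCaseConversion argument any more
    builder.field(Fields.NAME, node.getName()); // the name is written verbatim
    builder.endObject();
}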

View File

@ -23,7 +23,9 @@ import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.cluster.routing.allocation.command.AllocationCommand;
import org.elasticsearch.cluster.routing.allocation.command.AllocationCommandRegistry;
import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -36,7 +38,6 @@ import java.io.IOException;
* Request to submit cluster reroute allocation commands
*/
public class ClusterRerouteRequest extends AcknowledgedRequest<ClusterRerouteRequest> {
AllocationCommands commands = new AllocationCommands();
boolean dryRun;
boolean explain;
@ -87,10 +88,19 @@ public class ClusterRerouteRequest extends AcknowledgedRequest<ClusterRerouteReq
return this.explain;
}
/**
* Set the allocation commands to execute.
*/
public ClusterRerouteRequest commands(AllocationCommand... commands) {
this.commands = new AllocationCommands(commands);
return this;
}
/**
* Sets the source for the request.
*/
public ClusterRerouteRequest source(BytesReference source) throws Exception {
public ClusterRerouteRequest source(BytesReference source, AllocationCommandRegistry registry, ParseFieldMatcher parseFieldMatcher)
throws Exception {
try (XContentParser parser = XContentHelper.createParser(source)) {
XContentParser.Token token;
String currentFieldName = null;
@ -99,7 +109,7 @@ public class ClusterRerouteRequest extends AcknowledgedRequest<ClusterRerouteReq
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_ARRAY) {
if ("commands".equals(currentFieldName)) {
this.commands = AllocationCommands.fromXContent(parser);
this.commands = AllocationCommands.fromXContent(parser, parseFieldMatcher, registry);
} else {
throw new ElasticsearchParseException("failed to parse reroute request, got start array with wrong field name [{}]", currentFieldName);
}

View File

@ -61,10 +61,10 @@ public class ClusterRerouteRequestBuilder extends AcknowledgedRequestBuilder<Clu
}
/**
* Sets the source for the request
* Sets the commands for the request to execute.
*/
public ClusterRerouteRequestBuilder setSource(BytesReference source) throws Exception {
request.source(source);
public ClusterRerouteRequestBuilder setCommands(AllocationCommand... commands) throws Exception {
request.commands(commands);
return this;
}
}
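
Together, the request and builder hunks above replace raw JSON reroute bodies with typed commands. A hedged usage sketch, assuming a connected Client; the index, shard and node names are placeholders, and the MoveAllocationCommand constructor shape is assumed for this era of the codebase:

import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand;

static void rerouteExample(Client client) throws Exception {
    client.admin().cluster().prepareReroute()
        // move shard 0 of my_index from node1 to node2 (all placeholders)
        .setCommands(new MoveAllocationCommand("my_index", 0, "node1", "node2"))
        .setDryRun(true)
        .get();
}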

View File

@ -85,7 +85,7 @@ public class ClusterUpdateSettingsRequest extends AcknowledgedRequest<ClusterUpd
* Sets the source containing the transient settings to be updated. They will not survive a full cluster restart
*/
public ClusterUpdateSettingsRequest transientSettings(String source) {
this.transientSettings = Settings.settingsBuilder().loadFromSource(source).build();
this.transientSettings = Settings.builder().loadFromSource(source).build();
return this;
}
@ -124,7 +124,7 @@ public class ClusterUpdateSettingsRequest extends AcknowledgedRequest<ClusterUpd
* Sets the source containing the persistent settings to be updated. They will get applied cross restarts
*/
public ClusterUpdateSettingsRequest persistentSettings(String source) {
this.persistentSettings = Settings.settingsBuilder().loadFromSource(source).build();
this.persistentSettings = Settings.builder().loadFromSource(source).build();
return this;
}
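
The Settings change here recurs mechanically through the rest of this diff: Settings.settingsBuilder() is renamed to Settings.builder() with no behavioral change. One hedged sketch of the new spelling; the setting key and value are placeholders:

import org.elasticsearch.common.settings.Settings;

// the builder entry point is now Settings.builder(); loadFromSource still takes a source string
static Settings exampleSettings() {
    return Settings.builder()
        .loadFromSource("{\"cluster.routing.allocation.enable\": \"none\"}")
        .build();
}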

View File

@ -32,8 +32,8 @@ import static org.elasticsearch.cluster.ClusterState.builder;
* due to the update.
*/
final class SettingsUpdater {
final Settings.Builder transientUpdates = Settings.settingsBuilder();
final Settings.Builder persistentUpdates = Settings.settingsBuilder();
final Settings.Builder transientUpdates = Settings.builder();
final Settings.Builder persistentUpdates = Settings.builder();
private final ClusterSettings clusterSettings;
SettingsUpdater(ClusterSettings clusterSettings) {
@ -50,11 +50,11 @@ final class SettingsUpdater {
synchronized ClusterState updateSettings(final ClusterState currentState, Settings transientToApply, Settings persistentToApply) {
boolean changed = false;
Settings.Builder transientSettings = Settings.settingsBuilder();
Settings.Builder transientSettings = Settings.builder();
transientSettings.put(currentState.metaData().transientSettings());
changed |= clusterSettings.updateDynamicSettings(transientToApply, transientSettings, transientUpdates, "transient");
Settings.Builder persistentSettings = Settings.settingsBuilder();
Settings.Builder persistentSettings = Settings.builder();
persistentSettings.put(currentState.metaData().persistentSettings());
changed |= clusterSettings.updateDynamicSettings(persistentToApply, persistentSettings, persistentUpdates, "persistent");

View File

@ -299,7 +299,7 @@ public class CreateSnapshotRequest extends MasterNodeRequest<CreateSnapshotReque
* @return this request
*/
public CreateSnapshotRequest settings(String source) {
this.settings = Settings.settingsBuilder().loadFromSource(source).build();
this.settings = Settings.builder().loadFromSource(source).build();
return this;
}

View File

@ -25,7 +25,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.snapshots.SnapshotInfo;
@ -83,8 +82,8 @@ public class CreateSnapshotResponse extends ActionResponse implements ToXContent
}
static final class Fields {
static final XContentBuilderString SNAPSHOT = new XContentBuilderString("snapshot");
static final XContentBuilderString ACCEPTED = new XContentBuilderString("accepted");
static final String SNAPSHOT = "snapshot";
static final String ACCEPTED = "accepted";
}
@Override

View File

@ -24,7 +24,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.snapshots.SnapshotInfo;
import java.io.IOException;
@ -76,7 +75,7 @@ public class GetSnapshotsResponse extends ActionResponse implements ToXContent {
}
static final class Fields {
static final XContentBuilderString SNAPSHOTS = new XContentBuilderString("snapshots");
static final String SNAPSHOTS = "snapshots";
}
@Override

View File

@ -324,7 +324,7 @@ public class RestoreSnapshotRequest extends MasterNodeRequest<RestoreSnapshotReq
* @return this request
*/
public RestoreSnapshotRequest settings(String source) {
this.settings = Settings.settingsBuilder().loadFromSource(source).build();
this.settings = Settings.builder().loadFromSource(source).build();
return this;
}
@ -441,7 +441,7 @@ public class RestoreSnapshotRequest extends MasterNodeRequest<RestoreSnapshotReq
* Sets settings that should be added/changed in all restored indices
*/
public RestoreSnapshotRequest indexSettings(String source) {
this.indexSettings = Settings.settingsBuilder().loadFromSource(source).build();
this.indexSettings = Settings.builder().loadFromSource(source).build();
return this;
}

View File

@ -25,7 +25,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.snapshots.RestoreInfo;
@ -75,8 +74,8 @@ public class RestoreSnapshotResponse extends ActionResponse implements ToXConten
}
static final class Fields {
static final XContentBuilderString SNAPSHOT = new XContentBuilderString("snapshot");
static final XContentBuilderString ACCEPTED = new XContentBuilderString("accepted");
static final String SNAPSHOT = "snapshot";
static final String ACCEPTED = "accepted";
}
@Override

View File

@ -24,7 +24,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus;
@ -135,9 +134,9 @@ public class SnapshotIndexShardStatus extends BroadcastShardResponse implements
}
static final class Fields {
static final XContentBuilderString STAGE = new XContentBuilderString("stage");
static final XContentBuilderString REASON = new XContentBuilderString("reason");
static final XContentBuilderString NODE = new XContentBuilderString("node");
static final String STAGE = "stage";
static final String REASON = "reason";
static final String NODE = "node";
}
@Override

View File

@ -21,7 +21,6 @@ package org.elasticsearch.action.admin.cluster.snapshots.status;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import java.io.IOException;
import java.util.Collection;
@ -91,12 +90,12 @@ public class SnapshotIndexStatus implements Iterable<SnapshotIndexShardStatus>,
}
static final class Fields {
static final XContentBuilderString SHARDS = new XContentBuilderString("shards");
static final String SHARDS = "shards";
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(getIndex(), XContentBuilder.FieldCaseConversion.NONE);
builder.startObject(getIndex());
shardsStats.toXContent(builder, params);
stats.toXContent(builder, params);
builder.startObject(Fields.SHARDS);

View File

@ -21,7 +21,6 @@ package org.elasticsearch.action.admin.cluster.snapshots.status;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import java.io.IOException;
import java.util.Collection;
@ -106,13 +105,13 @@ public class SnapshotShardsStats implements ToXContent {
}
static final class Fields {
static final XContentBuilderString SHARDS_STATS = new XContentBuilderString("shards_stats");
static final XContentBuilderString INITIALIZING = new XContentBuilderString("initializing");
static final XContentBuilderString STARTED = new XContentBuilderString("started");
static final XContentBuilderString FINALIZING = new XContentBuilderString("finalizing");
static final XContentBuilderString DONE = new XContentBuilderString("done");
static final XContentBuilderString FAILED = new XContentBuilderString("failed");
static final XContentBuilderString TOTAL = new XContentBuilderString("total");
static final String SHARDS_STATS = "shards_stats";
static final String INITIALIZING = "initializing";
static final String STARTED = "started";
static final String FINALIZING = "finalizing";
static final String DONE = "done";
static final String FAILED = "failed";
static final String TOTAL = "total";
}
@Override

View File

@ -24,7 +24,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus;
import java.io.IOException;
@ -130,16 +129,16 @@ public class SnapshotStats implements Streamable, ToXContent {
}
static final class Fields {
static final XContentBuilderString STATS = new XContentBuilderString("stats");
static final XContentBuilderString NUMBER_OF_FILES = new XContentBuilderString("number_of_files");
static final XContentBuilderString PROCESSED_FILES = new XContentBuilderString("processed_files");
static final XContentBuilderString TOTAL_SIZE_IN_BYTES = new XContentBuilderString("total_size_in_bytes");
static final XContentBuilderString TOTAL_SIZE = new XContentBuilderString("total_size");
static final XContentBuilderString PROCESSED_SIZE_IN_BYTES = new XContentBuilderString("processed_size_in_bytes");
static final XContentBuilderString PROCESSED_SIZE = new XContentBuilderString("processed_size");
static final XContentBuilderString START_TIME_IN_MILLIS = new XContentBuilderString("start_time_in_millis");
static final XContentBuilderString TIME_IN_MILLIS = new XContentBuilderString("time_in_millis");
static final XContentBuilderString TIME = new XContentBuilderString("time");
static final String STATS = "stats";
static final String NUMBER_OF_FILES = "number_of_files";
static final String PROCESSED_FILES = "processed_files";
static final String TOTAL_SIZE_IN_BYTES = "total_size_in_bytes";
static final String TOTAL_SIZE = "total_size";
static final String PROCESSED_SIZE_IN_BYTES = "processed_size_in_bytes";
static final String PROCESSED_SIZE = "processed_size";
static final String START_TIME_IN_MILLIS = "start_time_in_millis";
static final String TIME_IN_MILLIS = "time_in_millis";
static final String TIME = "time";
}
@Override

View File

@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.common.xcontent.XContentFactory;
import java.io.IOException;
@ -180,10 +179,10 @@ public class SnapshotStatus implements ToXContent, Streamable {
}
static final class Fields {
static final XContentBuilderString SNAPSHOT = new XContentBuilderString("snapshot");
static final XContentBuilderString REPOSITORY = new XContentBuilderString("repository");
static final XContentBuilderString STATE = new XContentBuilderString("state");
static final XContentBuilderString INDICES = new XContentBuilderString("indices");
static final String SNAPSHOT = "snapshot";
static final String REPOSITORY = "repository";
static final String STATE = "state";
static final String INDICES = "indices";
}
@Override

View File

@ -24,7 +24,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import java.io.IOException;
import java.util.ArrayList;
@ -75,7 +74,7 @@ public class SnapshotsStatusResponse extends ActionResponse implements ToXConten
}
static final class Fields {
static final XContentBuilderString SNAPSHOTS = new XContentBuilderString("snapshots");
static final String SNAPSHOTS = "snapshots";
}
@Override

View File

@ -27,7 +27,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.index.cache.query.QueryCacheStats;
import org.elasticsearch.index.engine.SegmentsStats;
import org.elasticsearch.index.fielddata.FieldDataStats;
@ -165,7 +164,7 @@ public class ClusterStatsIndices implements ToXContent, Streamable {
}
static final class Fields {
static final XContentBuilderString COUNT = new XContentBuilderString("count");
static final String COUNT = "count";
}
@Override
@ -362,17 +361,17 @@ public class ClusterStatsIndices implements ToXContent, Streamable {
}
static final class Fields {
static final XContentBuilderString SHARDS = new XContentBuilderString("shards");
static final XContentBuilderString TOTAL = new XContentBuilderString("total");
static final XContentBuilderString PRIMARIES = new XContentBuilderString("primaries");
static final XContentBuilderString REPLICATION = new XContentBuilderString("replication");
static final XContentBuilderString MIN = new XContentBuilderString("min");
static final XContentBuilderString MAX = new XContentBuilderString("max");
static final XContentBuilderString AVG = new XContentBuilderString("avg");
static final XContentBuilderString INDEX = new XContentBuilderString("index");
static final String SHARDS = "shards";
static final String TOTAL = "total";
static final String PRIMARIES = "primaries";
static final String REPLICATION = "replication";
static final String MIN = "min";
static final String MAX = "max";
static final String AVG = "avg";
static final String INDEX = "index";
}
private void addIntMinMax(XContentBuilderString field, int min, int max, double avg, XContentBuilder builder) throws IOException {
private void addIntMinMax(String field, int min, int max, double avg, XContentBuilder builder) throws IOException {
builder.startObject(field);
builder.field(Fields.MIN, min);
builder.field(Fields.MAX, max);
@ -380,7 +379,7 @@ public class ClusterStatsIndices implements ToXContent, Streamable {
builder.endObject();
}
private void addDoubleMinMax(XContentBuilderString field, double min, double max, double avg, XContentBuilder builder) throws IOException {
private void addDoubleMinMax(String field, double min, double max, double avg, XContentBuilder builder) throws IOException {
builder.startObject(field);
builder.field(Fields.MIN, min);
builder.field(Fields.MAX, max);

View File

@ -21,13 +21,13 @@ package org.elasticsearch.action.admin.cluster.stats;
import com.carrotsearch.hppc.ObjectIntHashMap;
import com.carrotsearch.hppc.cursors.ObjectIntCursor;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
@ -35,7 +35,6 @@ import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.monitor.fs.FsInfo;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.plugins.PluginInfo;
@ -49,7 +48,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNodes> {
public class ClusterStatsNodes implements ToXContent, Writeable {
private final Counts counts;
private final Set<Version> versions;
@ -71,7 +70,7 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
this.os = new OsStats(in);
this.process = new ProcessStats(in);
this.jvm = new JvmStats(in);
this.fs = FsInfo.Path.readInfoFrom(in);
this.fs = new FsInfo.Path(in);
size = in.readVInt();
this.plugins = new HashSet<>(size);
@ -141,12 +140,6 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
return plugins;
}
@Override
public ClusterStatsNodes readFrom(StreamInput in) throws IOException {
return new ClusterStatsNodes(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
counts.writeTo(out);
@ -163,13 +156,13 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
}
static final class Fields {
static final XContentBuilderString COUNT = new XContentBuilderString("count");
static final XContentBuilderString VERSIONS = new XContentBuilderString("versions");
static final XContentBuilderString OS = new XContentBuilderString("os");
static final XContentBuilderString PROCESS = new XContentBuilderString("process");
static final XContentBuilderString JVM = new XContentBuilderString("jvm");
static final XContentBuilderString FS = new XContentBuilderString("fs");
static final XContentBuilderString PLUGINS = new XContentBuilderString("plugins");
static final String COUNT = "count";
static final String VERSIONS = "versions";
static final String OS = "os";
static final String PROCESS = "process";
static final String JVM = "jvm";
static final String FS = "fs";
static final String PLUGINS = "plugins";
}
@Override
@ -207,7 +200,7 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
return builder;
}
public static class Counts implements Writeable<Counts>, ToXContent {
public static class Counts implements Writeable, ToXContent {
static final String COORDINATING_ONLY = "coordinating_only";
private final int total;
@ -250,11 +243,6 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
return roles;
}
@Override
public Counts readFrom(StreamInput in) throws IOException {
return new Counts(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(total);
@ -262,7 +250,7 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
}
static final class Fields {
static final XContentBuilderString TOTAL = new XContentBuilderString("total");
static final String TOTAL = "total";
}
@Override
@ -275,22 +263,14 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
}
}
public static class OsStats implements ToXContent, Writeable<OsStats> {
public static class OsStats implements ToXContent, Writeable {
final int availableProcessors;
final int allocatedProcessors;
final ObjectIntHashMap<String> names;
@SuppressWarnings("unchecked")
private OsStats(StreamInput in) throws IOException {
this.availableProcessors = in.readVInt();
this.allocatedProcessors = in.readVInt();
int size = in.readVInt();
this.names = new ObjectIntHashMap<>();
for (int i = 0; i < size; i++) {
names.addTo(in.readString(), in.readVInt());
}
}
/**
* Build the stats from information about each node.
*/
private OsStats(List<NodeInfo> nodeInfos) {
this.names = new ObjectIntHashMap<>();
int availableProcessors = 0;
@ -307,17 +287,17 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
this.allocatedProcessors = allocatedProcessors;
}
public int getAvailableProcessors() {
return availableProcessors;
}
public int getAllocatedProcessors() {
return allocatedProcessors;
}
@Override
public OsStats readFrom(StreamInput in) throws IOException {
return new OsStats(in);
/**
* Read from a stream.
*/
private OsStats(StreamInput in) throws IOException {
this.availableProcessors = in.readVInt();
this.allocatedProcessors = in.readVInt();
int size = in.readVInt();
this.names = new ObjectIntHashMap<>();
for (int i = 0; i < size; i++) {
names.addTo(in.readString(), in.readVInt());
}
}
@Override
@ -331,12 +311,20 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
}
}
public int getAvailableProcessors() {
return availableProcessors;
}
public int getAllocatedProcessors() {
return allocatedProcessors;
}
static final class Fields {
static final XContentBuilderString AVAILABLE_PROCESSORS = new XContentBuilderString("available_processors");
static final XContentBuilderString ALLOCATED_PROCESSORS = new XContentBuilderString("allocated_processors");
static final XContentBuilderString NAME = new XContentBuilderString("name");
static final XContentBuilderString NAMES = new XContentBuilderString("names");
static final XContentBuilderString COUNT = new XContentBuilderString("count");
static final String AVAILABLE_PROCESSORS = "available_processors";
static final String ALLOCATED_PROCESSORS = "allocated_processors";
static final String NAME = "name";
static final String NAMES = "names";
static final String COUNT = "count";
}
@Override
@ -355,7 +343,7 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
}
}
public static class ProcessStats implements ToXContent, Writeable<ProcessStats> {
public static class ProcessStats implements ToXContent, Writeable {
final int count;
final int cpuPercent;
@ -363,14 +351,9 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
final long minOpenFileDescriptors;
final long maxOpenFileDescriptors;
private ProcessStats(StreamInput in) throws IOException {
this.count = in.readVInt();
this.cpuPercent = in.readVInt();
this.totalOpenFileDescriptors = in.readVLong();
this.minOpenFileDescriptors = in.readLong();
this.maxOpenFileDescriptors = in.readLong();
}
/**
* Build from looking at a list of node statistics.
*/
private ProcessStats(List<NodeStats> nodeStatsList) {
int count = 0;
int cpuPercent = 0;
@ -401,6 +384,27 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
this.maxOpenFileDescriptors = maxOpenFileDescriptors;
}
/**
* Read from a stream.
*/
private ProcessStats(StreamInput in) throws IOException {
this.count = in.readVInt();
this.cpuPercent = in.readVInt();
this.totalOpenFileDescriptors = in.readVLong();
this.minOpenFileDescriptors = in.readLong();
this.maxOpenFileDescriptors = in.readLong();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(count);
out.writeVInt(cpuPercent);
out.writeVLong(totalOpenFileDescriptors);
out.writeLong(minOpenFileDescriptors);
out.writeLong(maxOpenFileDescriptors);
}
/**
* CPU usage in percent, where 100 is one fully used core.
*/
@ -429,27 +433,13 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
return minOpenFileDescriptors;
}
@Override
public ProcessStats readFrom(StreamInput in) throws IOException {
return new ProcessStats(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(count);
out.writeVInt(cpuPercent);
out.writeVLong(totalOpenFileDescriptors);
out.writeLong(minOpenFileDescriptors);
out.writeLong(maxOpenFileDescriptors);
}
static final class Fields {
static final XContentBuilderString CPU = new XContentBuilderString("cpu");
static final XContentBuilderString PERCENT = new XContentBuilderString("percent");
static final XContentBuilderString OPEN_FILE_DESCRIPTORS = new XContentBuilderString("open_file_descriptors");
static final XContentBuilderString MIN = new XContentBuilderString("min");
static final XContentBuilderString MAX = new XContentBuilderString("max");
static final XContentBuilderString AVG = new XContentBuilderString("avg");
static final String CPU = "cpu";
static final String PERCENT = "percent";
static final String OPEN_FILE_DESCRIPTORS = "open_file_descriptors";
static final String MIN = "min";
static final String MAX = "max";
static final String AVG = "avg";
}
@Override
@ -466,7 +456,7 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
}
}
public static class JvmStats implements Writeable<JvmStats>, ToXContent {
public static class JvmStats implements Writeable, ToXContent {
private final ObjectIntHashMap<JvmVersion> versions;
private final long threads;
@ -474,18 +464,9 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
private final long heapUsed;
private final long heapMax;
private JvmStats(StreamInput in) throws IOException {
int size = in.readVInt();
this.versions = new ObjectIntHashMap<>(size);
for (int i = 0; i < size; i++) {
this.versions.addTo(JvmVersion.readJvmVersion(in), in.readVInt());
}
this.threads = in.readVLong();
this.maxUptime = in.readVLong();
this.heapUsed = in.readVLong();
this.heapMax = in.readVLong();
}
/**
* Build from lists of information about each node.
*/
private JvmStats(List<NodeInfo> nodeInfos, List<NodeStats> nodeStatsList) {
this.versions = new ObjectIntHashMap<>();
long threads = 0;
@ -516,6 +497,34 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
this.heapMax = heapMax;
}
/**
* Read from a stream.
*/
private JvmStats(StreamInput in) throws IOException {
int size = in.readVInt();
this.versions = new ObjectIntHashMap<>(size);
for (int i = 0; i < size; i++) {
this.versions.addTo(new JvmVersion(in), in.readVInt());
}
this.threads = in.readVLong();
this.maxUptime = in.readVLong();
this.heapUsed = in.readVLong();
this.heapMax = in.readVLong();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(versions.size());
for (ObjectIntCursor<JvmVersion> v : versions) {
v.key.writeTo(out);
out.writeVInt(v.value);
}
out.writeVLong(threads);
out.writeVLong(maxUptime);
out.writeVLong(heapUsed);
out.writeVLong(heapMax);
}
public ObjectIntHashMap<JvmVersion> getVersions() {
return versions;
}
@ -548,39 +557,21 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
return new ByteSizeValue(heapMax);
}
@Override
public JvmStats readFrom(StreamInput in) throws IOException {
return new JvmStats(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(versions.size());
for (ObjectIntCursor<JvmVersion> v : versions) {
v.key.writeTo(out);
out.writeVInt(v.value);
}
out.writeVLong(threads);
out.writeVLong(maxUptime);
out.writeVLong(heapUsed);
out.writeVLong(heapMax);
}
static final class Fields {
static final XContentBuilderString VERSIONS = new XContentBuilderString("versions");
static final XContentBuilderString VERSION = new XContentBuilderString("version");
static final XContentBuilderString VM_NAME = new XContentBuilderString("vm_name");
static final XContentBuilderString VM_VERSION = new XContentBuilderString("vm_version");
static final XContentBuilderString VM_VENDOR = new XContentBuilderString("vm_vendor");
static final XContentBuilderString COUNT = new XContentBuilderString("count");
static final XContentBuilderString THREADS = new XContentBuilderString("threads");
static final XContentBuilderString MAX_UPTIME = new XContentBuilderString("max_uptime");
static final XContentBuilderString MAX_UPTIME_IN_MILLIS = new XContentBuilderString("max_uptime_in_millis");
static final XContentBuilderString MEM = new XContentBuilderString("mem");
static final XContentBuilderString HEAP_USED = new XContentBuilderString("heap_used");
static final XContentBuilderString HEAP_USED_IN_BYTES = new XContentBuilderString("heap_used_in_bytes");
static final XContentBuilderString HEAP_MAX = new XContentBuilderString("heap_max");
static final XContentBuilderString HEAP_MAX_IN_BYTES = new XContentBuilderString("heap_max_in_bytes");
static final String VERSIONS = "versions";
static final String VERSION = "version";
static final String VM_NAME = "vm_name";
static final String VM_VERSION = "vm_version";
static final String VM_VENDOR = "vm_vendor";
static final String COUNT = "count";
static final String THREADS = "threads";
static final String MAX_UPTIME = "max_uptime";
static final String MAX_UPTIME_IN_MILLIS = "max_uptime_in_millis";
static final String MEM = "mem";
static final String HEAP_USED = "heap_used";
static final String HEAP_USED_IN_BYTES = "heap_used_in_bytes";
static final String HEAP_MAX = "heap_max";
static final String HEAP_MAX_IN_BYTES = "heap_max_in_bytes";
}
@Override
@ -607,7 +598,7 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
}
}
public static class JvmVersion implements Streamable {
public static class JvmVersion implements Writeable {
String version;
String vmName;
String vmVersion;
@ -620,6 +611,24 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
vmVendor = jvmInfo.getVmVendor();
}
/**
* Read from a stream.
*/
JvmVersion(StreamInput in) throws IOException {
version = in.readString();
vmName = in.readString();
vmVersion = in.readString();
vmVendor = in.readString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(version);
out.writeString(vmName);
out.writeString(vmVersion);
out.writeString(vmVendor);
}
JvmVersion() {
}
@ -641,27 +650,5 @@ public class ClusterStatsNodes implements ToXContent, Writeable<ClusterStatsNode
public int hashCode() {
return vmVersion.hashCode();
}
public static JvmVersion readJvmVersion(StreamInput in) throws IOException {
JvmVersion jvm = new JvmVersion();
jvm.readFrom(in);
return jvm;
}
@Override
public void readFrom(StreamInput in) throws IOException {
version = in.readString();
vmName = in.readString();
vmVersion = in.readString();
vmVendor = in.readString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(version);
out.writeString(vmName);
out.writeString(vmVersion);
out.writeString(vmVendor);
}
}
}
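
Beyond the constructor reshuffle, the JvmStats read/write pair above documents the idiom for serializing a keyed counter map: a vint length prefix, then each key via its own writeTo followed by its count, rebuilt on read with addTo. A condensed sketch of just that idiom, lifted from the surrounding hunks; it assumes same-package access to the package-private JvmVersion stream constructor:

import com.carrotsearch.hppc.ObjectIntHashMap;
import com.carrotsearch.hppc.cursors.ObjectIntCursor;
import java.io.IOException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

static void writeVersions(StreamOutput out, ObjectIntHashMap<JvmVersion> versions) throws IOException {
    out.writeVInt(versions.size());
    for (ObjectIntCursor<JvmVersion> v : versions) {
        v.key.writeTo(out);     // each key serializes itself
        out.writeVInt(v.value); // followed by its count
    }
}

static ObjectIntHashMap<JvmVersion> readVersions(StreamInput in) throws IOException {
    int size = in.readVInt();
    ObjectIntHashMap<JvmVersion> versions = new ObjectIntHashMap<>(size);
    for (int i = 0; i < size; i++) {
        versions.addTo(new JvmVersion(in), in.readVInt()); // addTo accumulates equal keys
    }
    return versions;
}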

View File

@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.common.xcontent.XContentFactory;
import java.io.IOException;
@ -130,11 +129,11 @@ public class ClusterStatsResponse extends BaseNodesResponse<ClusterStatsNodeResp
}
static final class Fields {
static final XContentBuilderString NODES = new XContentBuilderString("nodes");
static final XContentBuilderString INDICES = new XContentBuilderString("indices");
static final XContentBuilderString UUID = new XContentBuilderString("uuid");
static final XContentBuilderString CLUSTER_NAME = new XContentBuilderString("cluster_name");
static final XContentBuilderString STATUS = new XContentBuilderString("status");
static final String NODES = "nodes";
static final String INDICES = "indices";
static final String UUID = "uuid";
static final String CLUSTER_NAME = "cluster_name";
static final String STATUS = "status";
}
@Override

View File

@ -17,29 +17,30 @@
* under the License.
*/
package org.elasticsearch.action.indexedscripts.delete;
package org.elasticsearch.action.admin.cluster.storedscripts;
import org.elasticsearch.action.Action;
import org.elasticsearch.client.ElasticsearchClient;
/**
*/
public class DeleteIndexedScriptAction extends Action<DeleteIndexedScriptRequest, DeleteIndexedScriptResponse, DeleteIndexedScriptRequestBuilder> {
public class DeleteStoredScriptAction extends Action<DeleteStoredScriptRequest, DeleteStoredScriptResponse,
DeleteStoredScriptRequestBuilder> {
public static final DeleteIndexedScriptAction INSTANCE = new DeleteIndexedScriptAction();
public static final String NAME = "indices:data/write/script/delete";
public static final DeleteStoredScriptAction INSTANCE = new DeleteStoredScriptAction();
public static final String NAME = "cluster:admin/script/delete";
private DeleteIndexedScriptAction() {
private DeleteStoredScriptAction() {
super(NAME);
}
@Override
public DeleteIndexedScriptResponse newResponse() {
return new DeleteIndexedScriptResponse();
public DeleteStoredScriptResponse newResponse() {
return new DeleteStoredScriptResponse();
}
@Override
public DeleteIndexedScriptRequestBuilder newRequestBuilder(ElasticsearchClient client) {
return new DeleteIndexedScriptRequestBuilder(client, this);
public DeleteStoredScriptRequestBuilder newRequestBuilder(ElasticsearchClient client) {
return new DeleteStoredScriptRequestBuilder(client, this);
}
}

View File

@ -0,0 +1,96 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.storedscripts;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;
import static org.elasticsearch.action.ValidateActions.addValidationError;
public class DeleteStoredScriptRequest extends AcknowledgedRequest<DeleteStoredScriptRequest> {
private String id;
private String scriptLang;
DeleteStoredScriptRequest() {
}
public DeleteStoredScriptRequest(String scriptLang, String id) {
this.scriptLang = scriptLang;
this.id = id;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (id == null) {
validationException = addValidationError("id is missing", validationException);
} else if (id.contains("#")) {
validationException = addValidationError("id can't contain: '#'", validationException);
}
if (scriptLang == null) {
validationException = addValidationError("lang is missing", validationException);
} else if (scriptLang.contains("#")) {
validationException = addValidationError("lang can't contain: '#'", validationException);
}
return validationException;
}
public String scriptLang() {
return scriptLang;
}
public DeleteStoredScriptRequest scriptLang(String type) {
this.scriptLang = type;
return this;
}
public String id() {
return id;
}
public DeleteStoredScriptRequest id(String id) {
this.id = id;
return this;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
scriptLang = in.readString();
id = in.readString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(scriptLang);
out.writeString(id);
}
@Override
public String toString() {
return "delete script {[" + scriptLang + "][" + id + "]}";
}
}
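
Because validation is deferred to validate(), a caller can probe a request before sending it. A hedged sketch exercising the '#' restriction above; the lang value is a placeholder:

import org.elasticsearch.action.ActionRequestValidationException;

static void validationExample() {
    DeleteStoredScriptRequest request = new DeleteStoredScriptRequest("groovy", "bad#id");
    ActionRequestValidationException e = request.validate();
    assert e != null; // the id contains '#', which validate() above rejects
}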

View File

@ -17,29 +17,26 @@
* under the License.
*/
package org.elasticsearch.common.xcontent;
package org.elasticsearch.action.admin.cluster.storedscripts;
import org.elasticsearch.common.Strings;
import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder;
import org.elasticsearch.client.ElasticsearchClient;
/**
*
*/
public class XContentBuilderString {
public class DeleteStoredScriptRequestBuilder extends AcknowledgedRequestBuilder<DeleteStoredScriptRequest,
DeleteStoredScriptResponse, DeleteStoredScriptRequestBuilder> {
private final XContentString underscore;
private final XContentString camelCase;
public XContentBuilderString(String value) {
underscore = new XContentString(Strings.toUnderscoreCase(value));
camelCase = new XContentString(Strings.toCamelCase(value));
public DeleteStoredScriptRequestBuilder(ElasticsearchClient client, DeleteStoredScriptAction action) {
super(client, action, new DeleteStoredScriptRequest());
}
public XContentString underscore() {
return underscore;
public DeleteStoredScriptRequestBuilder setScriptLang(String scriptLang) {
request.scriptLang(scriptLang);
return this;
}
public XContentString camelCase() {
return camelCase;
public DeleteStoredScriptRequestBuilder setId(String id) {
request.id(id);
return this;
}
}

View File

@ -17,48 +17,32 @@
* under the License.
*/
package org.elasticsearch.action.support;
package org.elasticsearch.action.admin.cluster.storedscripts;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.transport.TransportRequest;
import java.io.IOException;
/**
* Base class for requests that can have associated child tasks
*/
public class ChildTaskRequest extends TransportRequest {
public class DeleteStoredScriptResponse extends AcknowledgedResponse {
private TaskId parentTaskId = TaskId.EMPTY_TASK_ID;
protected ChildTaskRequest() {
DeleteStoredScriptResponse() {
}
public void setParentTask(String parentTaskNode, long parentTaskId) {
this.parentTaskId = new TaskId(parentTaskNode, parentTaskId);
public DeleteStoredScriptResponse(boolean acknowledged) {
super(acknowledged);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
parentTaskId = new TaskId(in);
readAcknowledged(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
parentTaskId.writeTo(out);
}
@Override
public final Task createTask(long id, String type, String action) {
return createTask(id, type, action, parentTaskId);
}
public Task createTask(long id, String type, String action, TaskId parentTaskId) {
return new Task(id, type, action, getDescription(), parentTaskId);
writeAcknowledged(out);
}
}
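
DeleteStoredScriptResponse carries no state of its own; the acknowledged flag read and written by the helpers above is the entire wire format. A hedged caller-side sketch, assuming the usual isAcknowledged() accessor inherited from AcknowledgedResponse:

static void ensureAcknowledged(DeleteStoredScriptResponse response) {
    // the acknowledged flag is the response's only payload
    if (response.isAcknowledged() == false) {
        throw new IllegalStateException("delete stored script was not acknowledged");
    }
}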

View File

@ -17,30 +17,31 @@
* under the License.
*/
package org.elasticsearch.action.indexedscripts.get;
package org.elasticsearch.action.admin.cluster.storedscripts;
import org.elasticsearch.action.Action;
import org.elasticsearch.client.ElasticsearchClient;
/**
*/
public class GetIndexedScriptAction extends Action<GetIndexedScriptRequest, GetIndexedScriptResponse, GetIndexedScriptRequestBuilder> {
public class GetStoredScriptAction extends Action<GetStoredScriptRequest, GetStoredScriptResponse,
GetStoredScriptRequestBuilder> {
public static final GetIndexedScriptAction INSTANCE = new GetIndexedScriptAction();
public static final String NAME = "indices:data/read/script/get";
public static final GetStoredScriptAction INSTANCE = new GetStoredScriptAction();
public static final String NAME = "cluster:admin/script/get";
private GetIndexedScriptAction() {
private GetStoredScriptAction() {
super(NAME);
}
@Override
public GetIndexedScriptResponse newResponse() {
return new GetIndexedScriptResponse();
public GetStoredScriptResponse newResponse() {
return new GetStoredScriptResponse();
}
@Override
public GetIndexedScriptRequestBuilder newRequestBuilder(ElasticsearchClient client) {
return new GetIndexedScriptRequestBuilder(client, this);
public GetStoredScriptRequestBuilder newRequestBuilder(ElasticsearchClient client) {
return new GetStoredScriptRequestBuilder(client, this);
}
}

View File

@ -0,0 +1,93 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.storedscripts;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ValidateActions;
import org.elasticsearch.action.support.master.MasterNodeReadRequest;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;
public class GetStoredScriptRequest extends MasterNodeReadRequest<GetStoredScriptRequest> {
protected String id;
protected String lang;
GetStoredScriptRequest() {
}
public GetStoredScriptRequest(String lang, String id) {
this.lang = lang;
this.id = id;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (lang == null) {
validationException = ValidateActions.addValidationError("lang is missing", validationException);
}
if (id == null) {
validationException = ValidateActions.addValidationError("id is missing", validationException);
}
return validationException;
}
public GetStoredScriptRequest lang(@Nullable String type) {
this.lang = type;
return this;
}
public GetStoredScriptRequest id(String id) {
this.id = id;
return this;
}
public String lang() {
return lang;
}
public String id() {
return id;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
lang = in.readString();
id = in.readString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(lang);
out.writeString(id);
}
@Override
public String toString() {
return "get script [" + lang + "][" + id + "]";
}
}
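
A hedged construction sketch for the read-side request; the lang and id values are placeholders:

static GetStoredScriptRequest getRequestExample() {
    GetStoredScriptRequest request = new GetStoredScriptRequest("mustache", "my_template");
    assert request.validate() == null; // both lang and id are set, so validation passes
    return request;                    // toString(): get script [mustache][my_template]
}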

View File

@ -0,0 +1,44 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.storedscripts;
import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.Nullable;
public class GetStoredScriptRequestBuilder extends MasterNodeReadOperationRequestBuilder<GetStoredScriptRequest,
GetStoredScriptResponse, GetStoredScriptRequestBuilder> {
public GetStoredScriptRequestBuilder(ElasticsearchClient client, GetStoredScriptAction action) {
super(client, action, new GetStoredScriptRequest());
}
public GetStoredScriptRequestBuilder setLang(@Nullable String lang) {
request.lang(lang);
return this;
}
public GetStoredScriptRequestBuilder setId(String id) {
request.id(id);
return this;
}
}

View File

@ -17,50 +17,51 @@
* under the License.
*/
package org.elasticsearch.action.support;
package org.elasticsearch.action.admin.cluster.storedscripts;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.script.Script;
import java.io.IOException;
/**
* Base class for action requests that can have associated child tasks
*/
public abstract class ChildTaskActionRequest<Request extends ActionRequest<Request>> extends ActionRequest<Request> {
public class GetStoredScriptResponse extends ActionResponse implements ToXContent {
private TaskId parentTaskId = TaskId.EMPTY_TASK_ID;
protected ChildTaskActionRequest() {
private String storedScript;
GetStoredScriptResponse() {
}
public void setParentTask(String parentTaskNode, long parentTaskId) {
this.parentTaskId = new TaskId(parentTaskNode, parentTaskId);
GetStoredScriptResponse(String storedScript) {
this.storedScript = storedScript;
}
/**
* @return the stored script, or <code>null</code> if it was not found
*/
public String getStoredScript() {
return storedScript;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.value(storedScript);
return builder;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
parentTaskId = new TaskId(in);
storedScript = in.readOptionalString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
parentTaskId.writeTo(out);
out.writeOptionalString(storedScript);
}
@Override
public final Task createTask(long id, String type, String action) {
return createTask(id, type, action, parentTaskId);
}
public Task createTask(long id, String type, String action, TaskId parentTaskId) {
return new Task(id, type, action, getDescription(), parentTaskId);
}
}
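
On the read side, the response carries at most one string. A hedged sketch of consuming it, mirroring the null contract documented above:

static String scriptOrDefault(GetStoredScriptResponse response, String fallback) {
    // getStoredScript() returns null when nothing is stored under that lang/id
    String script = response.getStoredScript();
    return script != null ? script : fallback;
}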

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.indexedscripts.put;
package org.elasticsearch.action.admin.cluster.storedscripts;
import org.elasticsearch.action.Action;
import org.elasticsearch.client.ElasticsearchClient;
@ -25,24 +25,25 @@ import org.elasticsearch.client.ElasticsearchClient;
/**
*/
public class PutIndexedScriptAction extends Action<PutIndexedScriptRequest, PutIndexedScriptResponse, PutIndexedScriptRequestBuilder> {
public class PutStoredScriptAction extends Action<PutStoredScriptRequest, PutStoredScriptResponse,
PutStoredScriptRequestBuilder> {
public static final PutIndexedScriptAction INSTANCE = new PutIndexedScriptAction();
public static final String NAME = "indices:data/write/script/put";
public static final PutStoredScriptAction INSTANCE = new PutStoredScriptAction();
public static final String NAME = "cluster:admin/script/put";
private PutIndexedScriptAction() {
private PutStoredScriptAction() {
super(NAME);
}
@Override
public PutIndexedScriptResponse newResponse() {
return new PutIndexedScriptResponse();
public PutStoredScriptResponse newResponse() {
return new PutStoredScriptResponse();
}
@Override
public PutIndexedScriptRequestBuilder newRequestBuilder(ElasticsearchClient client) {
return new PutIndexedScriptRequestBuilder(client, this);
public PutStoredScriptRequestBuilder newRequestBuilder(ElasticsearchClient client) {
return new PutStoredScriptRequestBuilder(client, this);
}
}

View File

@ -0,0 +1,126 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.storedscripts;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentHelper;
import java.io.IOException;
import static org.elasticsearch.action.ValidateActions.addValidationError;
public class PutStoredScriptRequest extends AcknowledgedRequest<PutStoredScriptRequest> {
private String id;
private String scriptLang;
private BytesReference script;
public PutStoredScriptRequest() {
super();
}
public PutStoredScriptRequest(String scriptLang) {
super();
this.scriptLang = scriptLang;
}
public PutStoredScriptRequest(String scriptLang, String id) {
super();
this.scriptLang = scriptLang;
this.id = id;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (id == null) {
validationException = addValidationError("id is missing", validationException);
} else if (id.contains("#")) {
validationException = addValidationError("id can't contain: '#'", validationException);
}
if (scriptLang == null) {
validationException = addValidationError("lang is missing", validationException);
} else if (scriptLang.contains("#")) {
validationException = addValidationError("lang can't contain: '#'", validationException);
}
if (script == null) {
validationException = addValidationError("script is missing", validationException);
}
return validationException;
}
public String scriptLang() {
return scriptLang;
}
public PutStoredScriptRequest scriptLang(String scriptLang) {
this.scriptLang = scriptLang;
return this;
}
public String id() {
return id;
}
public PutStoredScriptRequest id(String id) {
this.id = id;
return this;
}
public BytesReference script() {
return script;
}
public PutStoredScriptRequest script(BytesReference source) {
this.script = source;
return this;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
scriptLang = in.readString();
id = in.readOptionalString();
script = in.readBytesReference();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(scriptLang);
out.writeOptionalString(id);
out.writeBytesReference(script);
}
@Override
public String toString() {
String sSource = "_na_";
try {
sSource = XContentHelper.convertToJson(script, false);
} catch (Exception e) {
// ignore
}
return "put script {[" + id + "][" + scriptLang + "], script[" + sSource + "]}";
}
}
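
A hedged sketch of building the write-side request; lang, id and body are placeholders, and the JSON shape is assumed rather than taken from this commit:

import org.elasticsearch.common.bytes.BytesArray;

static PutStoredScriptRequest putRequestExample() {
    PutStoredScriptRequest request = new PutStoredScriptRequest("mustache", "my_template")
        .script(new BytesArray("{\"template\": {\"query\": {\"match_all\": {}}}}"));
    assert request.validate() == null; // lang, id and script are all present
    return request;
}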

View File

@ -0,0 +1,48 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.storedscripts;
import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.bytes.BytesReference;
public class PutStoredScriptRequestBuilder extends AcknowledgedRequestBuilder<PutStoredScriptRequest,
PutStoredScriptResponse, PutStoredScriptRequestBuilder> {
public PutStoredScriptRequestBuilder(ElasticsearchClient client, PutStoredScriptAction action) {
super(client, action, new PutStoredScriptRequest());
}
public PutStoredScriptRequestBuilder setScriptLang(String scriptLang) {
request.scriptLang(scriptLang);
return this;
}
public PutStoredScriptRequestBuilder setId(String id) {
request.id(id);
return this;
}
public PutStoredScriptRequestBuilder setSource(BytesReference source) {
request.script(source);
return this;
}
}
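
A sketch of how the builder might be driven (not from this commit). It assumes an `ElasticsearchClient` is already in scope and that `PutStoredScriptAction` exposes the conventional static `INSTANCE`; the script values are illustrative:

```
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.bytes.BytesArray;

static PutStoredScriptResponse storeTemplate(ElasticsearchClient client) {
    return new PutStoredScriptRequestBuilder(client, PutStoredScriptAction.INSTANCE)
            .setScriptLang("mustache")   // illustrative values
            .setId("my_template")
            .setSource(new BytesArray("{\"template\": {\"query\": {\"match_all\": {}}}}"))
            .get();                      // blocks until the master acknowledges the update
}
```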


@ -17,38 +17,33 @@
* under the License.
*/
-package org.elasticsearch.cluster.routing;
+package org.elasticsearch.action.admin.cluster.storedscripts;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;
-/**
- * This class defines {@link RoutingException}s related to
- * the validation of routing
- */
-public class RoutingValidationException extends RoutingException {
+public class PutStoredScriptResponse extends AcknowledgedResponse {
-private final RoutingTableValidation validation;
-public RoutingValidationException(RoutingTableValidation validation) {
-super(validation.toString());
-this.validation = validation;
+PutStoredScriptResponse() {
}
-public RoutingValidationException(StreamInput in) throws IOException {
-super(in);
-validation = in.readOptionalStreamable(RoutingTableValidation::new);
+public PutStoredScriptResponse(boolean acknowledged) {
+super(acknowledged);
}
+@Override
+public void readFrom(StreamInput in) throws IOException {
+super.readFrom(in);
+readAcknowledged(in);
+}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
-out.writeOptionalStreamable(validation);
+writeAcknowledged(out);
}
-public RoutingTableValidation validation() {
-return this.validation;
-}
-}
+}
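
Callers typically only inspect the acknowledged flag inherited from `AcknowledgedResponse`. A sketch, reusing the hypothetical `storeTemplate` helper from the builder example above:

```
PutStoredScriptResponse response = storeTemplate(client);
if (response.isAcknowledged() == false) {
    throw new IllegalStateException("stored script was not acknowledged by the master");
}
```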


@ -0,0 +1,71 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.storedscripts;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
public class TransportDeleteStoredScriptAction extends TransportMasterNodeAction<DeleteStoredScriptRequest,
DeleteStoredScriptResponse> {
private final ScriptService scriptService;
@Inject
public TransportDeleteStoredScriptAction(Settings settings, TransportService transportService, ClusterService clusterService,
ThreadPool threadPool, ActionFilters actionFilters,
IndexNameExpressionResolver indexNameExpressionResolver, ScriptService scriptService) {
super(settings, DeleteStoredScriptAction.NAME, transportService, clusterService, threadPool, actionFilters,
indexNameExpressionResolver, DeleteStoredScriptRequest::new);
this.scriptService = scriptService;
}
@Override
protected String executor() {
return ThreadPool.Names.SAME;
}
@Override
protected DeleteStoredScriptResponse newResponse() {
return new DeleteStoredScriptResponse();
}
@Override
protected void masterOperation(DeleteStoredScriptRequest request, ClusterState state,
ActionListener<DeleteStoredScriptResponse> listener) throws Exception {
scriptService.deleteStoredScript(clusterService, request, listener);
}
@Override
protected ClusterBlockException checkBlock(DeleteStoredScriptRequest request, ClusterState state) {
return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
}
}


@ -0,0 +1,71 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.storedscripts;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeReadAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
public class TransportGetStoredScriptAction extends TransportMasterNodeReadAction<GetStoredScriptRequest,
GetStoredScriptResponse> {
private final ScriptService scriptService;
@Inject
public TransportGetStoredScriptAction(Settings settings, TransportService transportService, ClusterService clusterService,
ThreadPool threadPool, ActionFilters actionFilters,
IndexNameExpressionResolver indexNameExpressionResolver, ScriptService scriptService) {
super(settings, GetStoredScriptAction.NAME, transportService, clusterService, threadPool, actionFilters,
indexNameExpressionResolver, GetStoredScriptRequest::new);
this.scriptService = scriptService;
}
@Override
protected String executor() {
return ThreadPool.Names.SAME;
}
@Override
protected GetStoredScriptResponse newResponse() {
return new GetStoredScriptResponse();
}
@Override
protected void masterOperation(GetStoredScriptRequest request, ClusterState state,
ActionListener<GetStoredScriptResponse> listener) throws Exception {
listener.onResponse(new GetStoredScriptResponse(scriptService.getStoredScript(state, request)));
}
@Override
protected ClusterBlockException checkBlock(GetStoredScriptRequest request, ClusterState state) {
return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ);
}
}


@ -0,0 +1,70 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.storedscripts;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
public class TransportPutStoredScriptAction extends TransportMasterNodeAction<PutStoredScriptRequest, PutStoredScriptResponse> {
private final ScriptService scriptService;
@Inject
public TransportPutStoredScriptAction(Settings settings, TransportService transportService, ClusterService clusterService,
ThreadPool threadPool, ActionFilters actionFilters,
IndexNameExpressionResolver indexNameExpressionResolver, ScriptService scriptService) {
super(settings, PutStoredScriptAction.NAME, transportService, clusterService, threadPool, actionFilters,
indexNameExpressionResolver, PutStoredScriptRequest::new);
this.scriptService = scriptService;
}
@Override
protected String executor() {
return ThreadPool.Names.SAME;
}
@Override
protected PutStoredScriptResponse newResponse() {
return new PutStoredScriptResponse();
}
@Override
protected void masterOperation(PutStoredScriptRequest request, ClusterState state,
ActionListener<PutStoredScriptResponse> listener) throws Exception {
scriptService.storeScript(clusterService, request, listener);
}
@Override
protected ClusterBlockException checkBlock(PutStoredScriptRequest request, ClusterState state) {
return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
}
}
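
The three transport actions above follow the standard master-node pattern: a `SAME`-thread executor, a metadata-level cluster-block check, and a `masterOperation` that delegates to `ScriptService`. Their registration is not shown in this diff; assuming the usual `ActionModule` wiring, it would look roughly like this sketch:

```
// Inside ActionModule (hypothetical; not part of this diff):
registerAction(PutStoredScriptAction.INSTANCE, TransportPutStoredScriptAction.class);
registerAction(GetStoredScriptAction.INSTANCE, TransportGetStoredScriptAction.class);
registerAction(DeleteStoredScriptAction.INSTANCE, TransportDeleteStoredScriptAction.class);
```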


@ -25,7 +25,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.common.xcontent.XContentFactory;
import java.io.IOException;
@ -103,13 +102,13 @@ public class PendingClusterTasksResponse extends ActionResponse implements Itera
static final class Fields {
-static final XContentBuilderString TASKS = new XContentBuilderString("tasks");
-static final XContentBuilderString EXECUTING = new XContentBuilderString("executing");
-static final XContentBuilderString INSERT_ORDER = new XContentBuilderString("insert_order");
-static final XContentBuilderString PRIORITY = new XContentBuilderString("priority");
-static final XContentBuilderString SOURCE = new XContentBuilderString("source");
-static final XContentBuilderString TIME_IN_QUEUE_MILLIS = new XContentBuilderString("time_in_queue_millis");
-static final XContentBuilderString TIME_IN_QUEUE = new XContentBuilderString("time_in_queue");
+static final String TASKS = "tasks";
+static final String EXECUTING = "executing";
+static final String INSERT_ORDER = "insert_order";
+static final String PRIORITY = "priority";
+static final String SOURCE = "source";
+static final String TIME_IN_QUEUE_MILLIS = "time_in_queue_millis";
+static final String TIME_IN_QUEUE = "time_in_queue";
}
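
`XContentBuilderString` existed to pre-compute camelCase/underscore variants of a field name; with field-case conversion removed, `XContentBuilder` takes plain `String` names directly. A sketch of how the constants above are consumed (builder and values illustrative):

```
builder.startObject();
builder.field(Fields.INSERT_ORDER, 42);        // plain String field names
builder.field(Fields.PRIORITY, "HIGH");
builder.field(Fields.SOURCE, "create-index [test]");
builder.field(Fields.TIME_IN_QUEUE_MILLIS, 12);
builder.endObject();
```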


@ -63,7 +63,7 @@ public class RenderSearchTemplateRequest extends ActionRequest<RenderSearchTempl
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
if (in.readBoolean()) {
-template = Template.readTemplate(in);
+template = new Template(in);
}
}
}


@ -22,7 +22,9 @@ package org.elasticsearch.action.admin.cluster.validate.template;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
+import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
@ -38,12 +40,15 @@ import java.util.Collections;
public class TransportRenderSearchTemplateAction extends HandledTransportAction<RenderSearchTemplateRequest, RenderSearchTemplateResponse> {
private final ScriptService scriptService;
+private final ClusterService clusterService;
@Inject
public TransportRenderSearchTemplateAction(ScriptService scriptService, Settings settings, ThreadPool threadPool,
-TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
+TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
+ClusterService clusterService) {
super(settings, RenderSearchTemplateAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, RenderSearchTemplateRequest::new);
this.scriptService = scriptService;
+this.clusterService = clusterService;
}
@Override
@ -57,7 +62,8 @@ public class TransportRenderSearchTemplateAction extends HandledTransportAction<
@Override
protected void doRun() throws Exception {
-ExecutableScript executable = scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, Collections.emptyMap());
+ExecutableScript executable = scriptService.executable(request.template(), ScriptContext.Standard.SEARCH,
+Collections.emptyMap(), clusterService.state());
BytesReference processedTemplate = (BytesReference) executable.run();
RenderSearchTemplateResponse response = new RenderSearchTemplateResponse();
response.source(processedTemplate);


@ -25,7 +25,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentBuilderString;
import java.io.IOException;
import java.util.ArrayList;
@ -204,12 +203,12 @@ public class AnalyzeResponse extends ActionResponse implements Iterable<AnalyzeR
}
static final class Fields {
-static final XContentBuilderString TOKENS = new XContentBuilderString("tokens");
-static final XContentBuilderString TOKEN = new XContentBuilderString("token");
-static final XContentBuilderString START_OFFSET = new XContentBuilderString("start_offset");
-static final XContentBuilderString END_OFFSET = new XContentBuilderString("end_offset");
-static final XContentBuilderString TYPE = new XContentBuilderString("type");
-static final XContentBuilderString POSITION = new XContentBuilderString("position");
-static final XContentBuilderString DETAIL = new XContentBuilderString("detail");
+static final String TOKENS = "tokens";
+static final String TOKEN = "token";
+static final String START_OFFSET = "start_offset";
+static final String END_OFFSET = "end_offset";
+static final String TYPE = "type";
+static final String POSITION = "position";
+static final String DETAIL = "detail";
}
}


@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentBuilderString;
import java.io.IOException;
@ -132,13 +131,13 @@ public class DetailAnalyzeResponse implements Streamable, ToXContent {
}
static final class Fields {
-static final XContentBuilderString NAME = new XContentBuilderString("name");
-static final XContentBuilderString FILTERED_TEXT = new XContentBuilderString("filtered_text");
-static final XContentBuilderString CUSTOM_ANALYZER = new XContentBuilderString("custom_analyzer");
-static final XContentBuilderString ANALYZER = new XContentBuilderString("analyzer");
-static final XContentBuilderString CHARFILTERS = new XContentBuilderString("charfilters");
-static final XContentBuilderString TOKENIZER = new XContentBuilderString("tokenizer");
-static final XContentBuilderString TOKENFILTERS = new XContentBuilderString("tokenfilters");
+static final String NAME = "name";
+static final String FILTERED_TEXT = "filtered_text";
+static final String CUSTOM_ANALYZER = "custom_analyzer";
+static final String ANALYZER = "analyzer";
+static final String CHARFILTERS = "charfilters";
+static final String TOKENIZER = "tokenizer";
+static final String TOKENFILTERS = "tokenfilters";
}
@Override


@ -128,8 +128,8 @@ public class TransportAnalyzeAction extends TransportSingleShardAction<AnalyzeRe
}
MappedFieldType fieldType = indexService.mapperService().fullName(request.field());
if (fieldType != null) {
-if (fieldType.isNumeric()) {
-throw new IllegalArgumentException("Can't process field [" + request.field() + "], Analysis requests are not supported on numeric fields");
+if (fieldType.tokenized() == false) {
+throw new IllegalArgumentException("Can't process field [" + request.field() + "], Analysis requests are only supported on tokenized fields");
}
analyzer = fieldType.indexAnalyzer();
field = fieldType.name();


@ -21,7 +21,6 @@ package org.elasticsearch.action.admin.indices.create;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.admin.indices.alias.Alias;
@ -49,9 +48,9 @@ import java.util.Map;
import java.util.Set;
import static org.elasticsearch.action.ValidateActions.addValidationError;
-import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
+import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
/**
* A request to create an index. Best created with {@link org.elasticsearch.client.Requests#createIndexRequest(String)}.
@ -169,7 +168,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
* The settings to create the index with (either json/yaml/properties format)
*/
public CreateIndexRequest settings(String source) {
-this.settings = Settings.settingsBuilder().loadFromSource(source).build();
+this.settings = Settings.builder().loadFromSource(source).build();
return this;
}
@ -305,8 +304,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
* Sets the aliases that will be associated with the index when it gets created
*/
public CreateIndexRequest aliases(BytesReference source) {
-try {
-XContentParser parser = XContentHelper.createParser(source);
+try (XContentParser parser = XContentHelper.createParser(source)) {
//move to the first alias
parser.nextToken();
while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {
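
The aliases hunk above switches to try-with-resources so the parser is closed even when alias parsing throws. The general shape of the pattern (sketch):

```
try (XContentParser parser = XContentHelper.createParser(source)) {
    parser.nextToken();          // move to the first alias before consuming tokens
    // ... consume alias objects ...
}                                // parser.close() runs on success and failure alike
```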


@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.indices.flush.ShardsSyncedFlushResult;
import org.elasticsearch.indices.flush.SyncedFlushService;
import org.elasticsearch.rest.RestStatus;
@ -181,14 +180,14 @@ public class SyncedFlushResponse extends ActionResponse implements ToXContent {
}
static final class Fields {
-static final XContentBuilderString _SHARDS = new XContentBuilderString("_shards");
-static final XContentBuilderString TOTAL = new XContentBuilderString("total");
-static final XContentBuilderString SUCCESSFUL = new XContentBuilderString("successful");
-static final XContentBuilderString FAILED = new XContentBuilderString("failed");
-static final XContentBuilderString FAILURES = new XContentBuilderString("failures");
-static final XContentBuilderString SHARD = new XContentBuilderString("shard");
-static final XContentBuilderString ROUTING = new XContentBuilderString("routing");
-static final XContentBuilderString REASON = new XContentBuilderString("reason");
+static final String _SHARDS = "_shards";
+static final String TOTAL = "total";
+static final String SUCCESSFUL = "successful";
+static final String FAILED = "failed";
+static final String FAILURES = "failures";
+static final String SHARD = "shard";
+static final String ROUTING = "routing";
+static final String REASON = "reason";
}
@Override


@ -25,7 +25,6 @@ import org.elasticsearch.action.support.replication.TransportReplicationAction;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
-import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
@ -56,7 +55,7 @@ public class TransportShardFlushAction extends TransportReplicationAction<ShardF
}
@Override
-protected Tuple<ReplicationResponse, ShardFlushRequest> shardOperationOnPrimary(MetaData metaData, ShardFlushRequest shardRequest) {
+protected Tuple<ReplicationResponse, ShardFlushRequest> shardOperationOnPrimary(ShardFlushRequest shardRequest) {
IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId().getIndex()).getShard(shardRequest.shardId().id());
indexShard.flush(shardRequest.getRequest());
logger.trace("{} flush request executed on primary", indexShard.shardId());


@ -74,10 +74,10 @@ public class GetFieldMappingsResponse extends ActionResponse implements ToXConte
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
for (Map.Entry<String, Map<String, Map<String, FieldMappingMetaData>>> indexEntry : mappings.entrySet()) {
-builder.startObject(indexEntry.getKey(), XContentBuilder.FieldCaseConversion.NONE);
+builder.startObject(indexEntry.getKey());
builder.startObject("mappings");
for (Map.Entry<String, Map<String, FieldMappingMetaData>> typeEntry : indexEntry.getValue().entrySet()) {
-builder.startObject(typeEntry.getKey(), XContentBuilder.FieldCaseConversion.NONE);
+builder.startObject(typeEntry.getKey());
for (Map.Entry<String, FieldMappingMetaData> fieldEntry : typeEntry.getValue().entrySet()) {
builder.startObject(fieldEntry.getKey());
fieldEntry.getValue().toXContent(builder, params);


@ -26,7 +26,6 @@ import org.elasticsearch.action.support.replication.TransportReplicationAction;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
-import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
@ -58,7 +57,7 @@ public class TransportShardRefreshAction extends TransportReplicationAction<Basi
}
@Override
-protected Tuple<ReplicationResponse, BasicReplicationRequest> shardOperationOnPrimary(MetaData metaData, BasicReplicationRequest shardRequest) {
+protected Tuple<ReplicationResponse, BasicReplicationRequest> shardOperationOnPrimary(BasicReplicationRequest shardRequest) {
IndexShard indexShard = indicesService.indexServiceSafe(shardRequest.shardId().getIndex()).getShard(shardRequest.shardId().id());
indexShard.refresh("api");
logger.trace("{} refresh request executed on primary", indexShard.shardId());


@ -27,7 +27,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.index.engine.Segment;
import java.io.IOException;
@ -102,7 +101,7 @@ public class IndicesSegmentResponse extends BroadcastResponse implements ToXCont
builder.startObject(Fields.INDICES);
for (IndexSegments indexSegments : getIndices().values()) {
-builder.startObject(indexSegments.getIndex(), XContentBuilder.FieldCaseConversion.NONE);
+builder.startObject(indexSegments.getIndex());
builder.startObject(Fields.SHARDS);
for (IndexShardSegments indexSegment : indexSegments) {
@ -164,7 +163,7 @@ public class IndicesSegmentResponse extends BroadcastResponse implements ToXCont
builder.endObject();
return builder;
}
static void toXContent(XContentBuilder builder, Accountable tree) throws IOException {
builder.startObject();
builder.field(Fields.DESCRIPTION, tree.toString());
@ -181,31 +180,31 @@ public class IndicesSegmentResponse extends BroadcastResponse implements ToXCont
}
static final class Fields {
-static final XContentBuilderString INDICES = new XContentBuilderString("indices");
-static final XContentBuilderString SHARDS = new XContentBuilderString("shards");
-static final XContentBuilderString ROUTING = new XContentBuilderString("routing");
-static final XContentBuilderString STATE = new XContentBuilderString("state");
-static final XContentBuilderString PRIMARY = new XContentBuilderString("primary");
-static final XContentBuilderString NODE = new XContentBuilderString("node");
-static final XContentBuilderString RELOCATING_NODE = new XContentBuilderString("relocating_node");
+static final String INDICES = "indices";
+static final String SHARDS = "shards";
+static final String ROUTING = "routing";
+static final String STATE = "state";
+static final String PRIMARY = "primary";
+static final String NODE = "node";
+static final String RELOCATING_NODE = "relocating_node";
-static final XContentBuilderString SEGMENTS = new XContentBuilderString("segments");
-static final XContentBuilderString GENERATION = new XContentBuilderString("generation");
-static final XContentBuilderString NUM_COMMITTED_SEGMENTS = new XContentBuilderString("num_committed_segments");
-static final XContentBuilderString NUM_SEARCH_SEGMENTS = new XContentBuilderString("num_search_segments");
-static final XContentBuilderString NUM_DOCS = new XContentBuilderString("num_docs");
-static final XContentBuilderString DELETED_DOCS = new XContentBuilderString("deleted_docs");
-static final XContentBuilderString SIZE = new XContentBuilderString("size");
-static final XContentBuilderString SIZE_IN_BYTES = new XContentBuilderString("size_in_bytes");
-static final XContentBuilderString COMMITTED = new XContentBuilderString("committed");
-static final XContentBuilderString SEARCH = new XContentBuilderString("search");
-static final XContentBuilderString VERSION = new XContentBuilderString("version");
-static final XContentBuilderString COMPOUND = new XContentBuilderString("compound");
-static final XContentBuilderString MERGE_ID = new XContentBuilderString("merge_id");
-static final XContentBuilderString MEMORY = new XContentBuilderString("memory");
-static final XContentBuilderString MEMORY_IN_BYTES = new XContentBuilderString("memory_in_bytes");
-static final XContentBuilderString RAM_TREE = new XContentBuilderString("ram_tree");
-static final XContentBuilderString DESCRIPTION = new XContentBuilderString("description");
-static final XContentBuilderString CHILDREN = new XContentBuilderString("children");
+static final String SEGMENTS = "segments";
+static final String GENERATION = "generation";
+static final String NUM_COMMITTED_SEGMENTS = "num_committed_segments";
+static final String NUM_SEARCH_SEGMENTS = "num_search_segments";
+static final String NUM_DOCS = "num_docs";
+static final String DELETED_DOCS = "deleted_docs";
+static final String SIZE = "size";
+static final String SIZE_IN_BYTES = "size_in_bytes";
+static final String COMMITTED = "committed";
+static final String SEARCH = "search";
+static final String VERSION = "version";
+static final String COMPOUND = "compound";
+static final String MERGE_ID = "merge_id";
+static final String MEMORY = "memory";
+static final String MEMORY_IN_BYTES = "memory_in_bytes";
+static final String RAM_TREE = "ram_tree";
+static final String DESCRIPTION = "description";
+static final String CHILDREN = "children";
}
}
}


@ -124,7 +124,7 @@ public class UpdateSettingsRequest extends AcknowledgedRequest<UpdateSettingsReq
* Sets the settings to be updated (either json/yaml/properties format)
*/
public UpdateSettingsRequest settings(String source) {
-this.settings = Settings.settingsBuilder().loadFromSource(source).build();
+this.settings = Settings.builder().loadFromSource(source).build();
return this;
}
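
Several hunks in this commit rename the `Settings.settingsBuilder()` entry point to `Settings.builder()`; behavior is unchanged. A sketch of the new spelling (the source string is illustrative):

```
Settings settings = Settings.builder()
        .loadFromSource("{\"index.refresh_interval\": \"5s\"}")
        .build();
```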


@ -33,7 +33,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.index.shard.ShardStateMetaData;
import java.io.IOException;
@ -165,7 +164,7 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon
return allocationStatus;
}
-static StoreStatus readStoreStatus(StreamInput in) throws IOException {
+public static StoreStatus readStoreStatus(StreamInput in) throws IOException {
StoreStatus storeStatus = new StoreStatus();
storeStatus.readFrom(in);
return storeStatus;
@ -401,14 +400,14 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon
}
static final class Fields {
-static final XContentBuilderString INDICES = new XContentBuilderString("indices");
-static final XContentBuilderString SHARDS = new XContentBuilderString("shards");
-static final XContentBuilderString FAILURES = new XContentBuilderString("failures");
-static final XContentBuilderString STORES = new XContentBuilderString("stores");
+static final String INDICES = "indices";
+static final String SHARDS = "shards";
+static final String FAILURES = "failures";
+static final String STORES = "stores";
// StoreStatus fields
-static final XContentBuilderString LEGACY_VERSION = new XContentBuilderString("legacy_version");
-static final XContentBuilderString ALLOCATION_ID = new XContentBuilderString("allocation_id");
-static final XContentBuilderString STORE_EXCEPTION = new XContentBuilderString("store_exception");
-static final XContentBuilderString ALLOCATED = new XContentBuilderString("allocation");
+static final String LEGACY_VERSION = "legacy_version";
+static final String ALLOCATION_ID = "allocation_id";
+static final String STORE_EXCEPTION = "store_exception";
+static final String ALLOCATED = "allocation";
}
}


@ -29,7 +29,6 @@ import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.health.ClusterShardHealth;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
-import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
@ -110,7 +109,7 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc
// we could fetch all shard store info from every node once (nNodes requests)
// we have to implement a TransportNodesAction instead of using TransportNodesListGatewayStartedShards
// for fetching shard stores info, that operates on a list of shards instead of a single shard
-new AsyncShardStoresInfoFetches(state.nodes(), routingNodes, state.metaData(), shardIdsToFetch, listener).start();
+new AsyncShardStoresInfoFetches(state.nodes(), routingNodes, shardIdsToFetch, listener).start();
}
@Override
@ -121,16 +120,14 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc
private class AsyncShardStoresInfoFetches {
private final DiscoveryNodes nodes;
private final RoutingNodes routingNodes;
-private final MetaData metaData;
private final Set<ShardId> shardIds;
private final ActionListener<IndicesShardStoresResponse> listener;
private CountDown expectedOps;
private final Queue<InternalAsyncFetch.Response> fetchResponses;
-AsyncShardStoresInfoFetches(DiscoveryNodes nodes, RoutingNodes routingNodes, MetaData metaData, Set<ShardId> shardIds, ActionListener<IndicesShardStoresResponse> listener) {
+AsyncShardStoresInfoFetches(DiscoveryNodes nodes, RoutingNodes routingNodes, Set<ShardId> shardIds, ActionListener<IndicesShardStoresResponse> listener) {
this.nodes = nodes;
this.routingNodes = routingNodes;
-this.metaData = metaData;
this.shardIds = shardIds;
this.listener = listener;
this.fetchResponses = new ConcurrentLinkedQueue<>();
@ -143,7 +140,7 @@ public class TransportIndicesShardStoresAction extends TransportMasterNodeReadAc
} else {
for (ShardId shardId : shardIds) {
InternalAsyncFetch fetch = new InternalAsyncFetch(logger, "shard_stores", shardId, listShardStoresInfo);
-fetch.fetchData(nodes, metaData, Collections.<String>emptySet());
+fetch.fetchData(nodes, Collections.<String>emptySet());
}
}
}


@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.common.xcontent.XContentFactory;
import java.io.IOException;
@ -176,7 +175,7 @@ public class IndicesStatsResponse extends BroadcastResponse implements ToXConten
if ("indices".equalsIgnoreCase(level) || "shards".equalsIgnoreCase(level)) {
builder.startObject(Fields.INDICES);
for (IndexStats indexStats : getIndices().values()) {
-builder.startObject(indexStats.getIndex(), XContentBuilder.FieldCaseConversion.NONE);
+builder.startObject(indexStats.getIndex());
builder.startObject("primaries");
indexStats.getPrimaries().toXContent(builder, params);
@ -208,8 +207,8 @@ public class IndicesStatsResponse extends BroadcastResponse implements ToXConten
}
static final class Fields {
-static final XContentBuilderString INDICES = new XContentBuilderString("indices");
-static final XContentBuilderString SHARDS = new XContentBuilderString("shards");
+static final String INDICES = "indices";
+static final String SHARDS = "shards";
}
@Override


@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.index.engine.CommitStats;
import org.elasticsearch.index.shard.ShardPath;
@ -132,14 +131,14 @@ public class ShardStats implements Streamable, ToXContent {
}
static final class Fields {
-static final XContentBuilderString ROUTING = new XContentBuilderString("routing");
-static final XContentBuilderString STATE = new XContentBuilderString("state");
-static final XContentBuilderString STATE_PATH = new XContentBuilderString("state_path");
-static final XContentBuilderString DATA_PATH = new XContentBuilderString("data_path");
-static final XContentBuilderString IS_CUSTOM_DATA_PATH = new XContentBuilderString("is_custom_data_path");
-static final XContentBuilderString SHARD_PATH = new XContentBuilderString("shard_path");
-static final XContentBuilderString PRIMARY = new XContentBuilderString("primary");
-static final XContentBuilderString NODE = new XContentBuilderString("node");
-static final XContentBuilderString RELOCATING_NODE = new XContentBuilderString("relocating_node");
+static final String ROUTING = "routing";
+static final String STATE = "state";
+static final String STATE_PATH = "state_path";
+static final String DATA_PATH = "data_path";
+static final String IS_CUSTOM_DATA_PATH = "is_custom_data_path";
+static final String SHARD_PATH = "shard_path";
+static final String PRIMARY = "primary";
+static final String NODE = "node";
+static final String RELOCATING_NODE = "relocating_node";
}
}


@ -47,9 +47,9 @@ import java.util.Map;
import java.util.Set;
import static org.elasticsearch.action.ValidateActions.addValidationError;
-import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
+import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
/**
* A request to create an index template.
@ -71,7 +71,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
private Map<String, String> mappings = new HashMap<>();
private final Set<Alias> aliases = new HashSet<>();
private Map<String, IndexMetaData.Custom> customs = new HashMap<>();
public PutIndexTemplateRequest() {
@ -162,7 +162,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
* The settings to create the index template with (either json/yaml/properties format).
*/
public PutIndexTemplateRequest settings(String source) {
-this.settings = Settings.settingsBuilder().loadFromSource(source).build();
+this.settings = Settings.builder().loadFromSource(source).build();
return this;
}
@ -356,7 +356,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
public Map<String, IndexMetaData.Custom> customs() {
return this.customs;
}
public Set<Alias> aliases() {
return this.aliases;
}
@ -393,8 +393,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
* Sets the aliases that will be associated with the index when it gets created
*/
public PutIndexTemplateRequest aliases(BytesReference source) {
-try {
-XContentParser parser = XContentHelper.createParser(source);
+try (XContentParser parser = XContentHelper.createParser(source)) {
//move to the first alias
parser.nextToken();
while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {


@ -73,7 +73,7 @@ public class TransportPutIndexTemplateAction extends TransportMasterNodeAction<P
if (cause.length() == 0) {
cause = "api";
}
-final Settings.Builder templateSettingsBuilder = Settings.settingsBuilder();
+final Settings.Builder templateSettingsBuilder = Settings.builder();
templateSettingsBuilder.put(request.settings()).normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX);
indexScopedSettings.validate(templateSettingsBuilder);
indexTemplateService.putTemplate(new MetaDataIndexTemplateService.PutRequest(cause, request.name())


@ -25,7 +25,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentBuilderString;
import java.io.IOException;
import java.util.ArrayList;
@ -126,7 +125,7 @@ public class UpgradeStatusResponse extends BroadcastResponse implements ToXConte
if (outputIndices) {
builder.startObject(Fields.INDICES);
for (IndexUpgradeStatus indexUpgradeStatus : getIndices().values()) {
-builder.startObject(indexUpgradeStatus.getIndex(), XContentBuilder.FieldCaseConversion.NONE);
+builder.startObject(indexUpgradeStatus.getIndex());
builder.byteSizeField(Fields.SIZE_IN_BYTES, Fields.SIZE, indexUpgradeStatus.getTotalBytes());
builder.byteSizeField(Fields.SIZE_TO_UPGRADE_IN_BYTES, Fields.SIZE_TO_UPGRADE, indexUpgradeStatus.getToUpgradeBytes());
@ -165,18 +164,18 @@ public class UpgradeStatusResponse extends BroadcastResponse implements ToXConte
}
static final class Fields {
-static final XContentBuilderString INDICES = new XContentBuilderString("indices");
-static final XContentBuilderString SHARDS = new XContentBuilderString("shards");
-static final XContentBuilderString ROUTING = new XContentBuilderString("routing");
-static final XContentBuilderString STATE = new XContentBuilderString("state");
-static final XContentBuilderString PRIMARY = new XContentBuilderString("primary");
-static final XContentBuilderString NODE = new XContentBuilderString("node");
-static final XContentBuilderString RELOCATING_NODE = new XContentBuilderString("relocating_node");
-static final XContentBuilderString SIZE = new XContentBuilderString("size");
-static final XContentBuilderString SIZE_IN_BYTES = new XContentBuilderString("size_in_bytes");
-static final XContentBuilderString SIZE_TO_UPGRADE = new XContentBuilderString("size_to_upgrade");
-static final XContentBuilderString SIZE_TO_UPGRADE_ANCIENT = new XContentBuilderString("size_to_upgrade_ancient");
-static final XContentBuilderString SIZE_TO_UPGRADE_IN_BYTES = new XContentBuilderString("size_to_upgrade_in_bytes");
-static final XContentBuilderString SIZE_TO_UPGRADE_ANCIENT_IN_BYTES = new XContentBuilderString("size_to_upgrade_ancient_in_bytes");
+static final String INDICES = "indices";
+static final String SHARDS = "shards";
+static final String ROUTING = "routing";
+static final String STATE = "state";
+static final String PRIMARY = "primary";
+static final String NODE = "node";
+static final String RELOCATING_NODE = "relocating_node";
+static final String SIZE = "size";
+static final String SIZE_IN_BYTES = "size_in_bytes";
+static final String SIZE_TO_UPGRADE = "size_to_upgrade";
+static final String SIZE_TO_UPGRADE_ANCIENT = "size_to_upgrade_ancient";
+static final String SIZE_TO_UPGRADE_IN_BYTES = "size_to_upgrade_in_bytes";
+static final String SIZE_TO_UPGRADE_ANCIENT_IN_BYTES = "size_to_upgrade_ancient_in_bytes";
}
}

Some files were not shown because too many files have changed in this diff.