Merge branch 'master' into feature/rank-eval

Conflicts:
	core/src/main/java/org/elasticsearch/script/Script.java
	docs/reference/search.asciidoc
Christoph Büscher 2017-02-03 17:27:20 +01:00
commit 4cb8d9d08c
2990 changed files with 70024 additions and 35411 deletions

.gitignore

@ -38,11 +38,14 @@ dependency-reduced-pom.xml
# osx stuff
.DS_Store
# default folders in which create_bwc_index.py expects to find old es versions
/backwards
/dev-tools/backwards
# needed in case docs build is run...maybe we can configure doc build to generate files under build?
html_docs
# random old stuff that we should look at the necessity of...
/tmp/
backwards/
eclipse-build


@ -25,12 +25,6 @@ run it using Gradle:
gradle run
-------------------------------------
or to attach a remote debugger, run it as:
-------------------------------------
gradle run --debug-jvm
-------------------------------------
=== Test case filtering.
- `tests.class` is a class-filtering shell-like glob pattern,
@ -430,15 +424,33 @@ cd $BATS_ARCHIVES
sudo -E bats $BATS_TESTS/*.bats
-------------------------------------------------
Note: Starting a vagrant VM outside of the elasticsearch folder requires you to
indicate the folder that contains the Vagrantfile using the VAGRANT_CWD
environment variable:
You can also use Gradle to prepare the test environment and then start a single VM:
-------------------------------------------------
gradle vagrantSetUp
VAGRANT_CWD=/path/to/elasticsearch vagrant up centos-7 --provider virtualbox
gradle vagrantFedora24#up
-------------------------------------------------
Or any of vagrantCentos6#up, vagrantDebian8#up, vagrantFedora24#up, vagrantOel6#up,
vagrantOel7#up, vagrantOpensuse13#up, vagrantSles12#up, vagrantUbuntu1204#up,
vagrantUbuntu1604#up.
Once up, you can then connect to the VM using SSH from the elasticsearch directory:
-------------------------------------------------
vagrant ssh fedora-24
-------------------------------------------------
Or from another directory:
-------------------------------------------------
VAGRANT_CWD=/path/to/elasticsearch vagrant ssh fedora-24
-------------------------------------------------
Note: Starting a vagrant VM outside of the elasticsearch folder requires you to
indicate the folder that contains the Vagrantfile using the VAGRANT_CWD
environment variable.
== Coverage analysis
Tests can be run instrumented with jacoco to produce a coverage report in
@ -462,7 +474,7 @@ Combined (Unit+Integration) coverage:
mvn -Dtests.coverage verify jacoco:report
---------------------------------------------------------------------------
== Debugging from an IDE
== Launching and debugging from an IDE
If you want to run elasticsearch from your IDE, the `gradle run` task
supports a remote debugging option:
@ -471,6 +483,17 @@ supports a remote debugging option:
gradle run --debug-jvm
---------------------------------------------------------------------------
== Debugging remotely from an IDE
If you want to run Elasticsearch and be able to remotely attach the process
for debugging purposes from your IDE, you can start Elasticsearch using `ES_JAVA_OPTS`:
---------------------------------------------------------------------------
ES_JAVA_OPTS="-Xdebug -Xrunjdwp:server=y,transport=dt_socket,address=4000,suspend=y" ./bin/elasticsearch
---------------------------------------------------------------------------
Read your IDE documentation for how to attach a debugger to a JVM process.
== Building with extra plugins
Additional plugins may be built alongside elasticsearch, where their
dependency on elasticsearch will be substituted with the local elasticsearch


@ -55,7 +55,7 @@ dependencies {
runtime 'org.apache.commons:commons-math3:3.2'
}
compileJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked"
compileJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked,-processing"
// enable the JMH's BenchmarkProcessor to generate the final benchmark classes
// needs to be added separately otherwise Gradle will quote it and javac will fail
compileJava.options.compilerArgs.addAll(["-processor", "org.openjdk.jmh.generators.BenchmarkProcessor"])


@ -17,6 +17,7 @@
* under the License.
*/
import java.nio.file.Path
import org.eclipse.jgit.lib.Repository
import org.eclipse.jgit.lib.RepositoryBuilder
import org.gradle.plugins.ide.eclipse.model.SourceFolder
@ -29,8 +30,9 @@ subprojects {
description = "Elasticsearch subproject ${project.path}"
}
Path rootPath = rootDir.toPath()
// setup pom license info, but only for artifacts that are part of elasticsearch
configure(subprojects.findAll { it.path.startsWith(':x-plugins') == false }) {
configure(subprojects.findAll { it.projectDir.toPath().startsWith(rootPath) }) {
// we only use maven publish to add tasks for pom generation
plugins.withType(MavenPublishPlugin).whenPluginAdded {
@ -204,6 +206,14 @@ allprojects {
}
}
}
task cleanIdeaBuildDir(type: Delete) {
delete 'build-idea'
}
cleanIdeaBuildDir.setGroup("ide")
cleanIdeaBuildDir.setDescription("Deletes the IDEA build directory.")
tasks.cleanIdea.dependsOn(cleanIdeaBuildDir)
}
idea {


@ -23,14 +23,12 @@ apply plugin: 'groovy'
group = 'org.elasticsearch.gradle'
// TODO: remove this when upgrading to a version that supports ProgressLogger
// gradle 2.14 made internal apis unavailable to plugins, and gradle considered
// ProgressLogger to be an internal api. Until this is made available again,
// we can't upgrade without losing our nice progress logging
// NOTE that this check duplicates that in BuildPlugin, but we need to check
// early here before trying to compile the broken classes in buildSrc
if (GradleVersion.current() != GradleVersion.version('2.13')) {
throw new GradleException('Gradle 2.13 is required to build elasticsearch')
if (GradleVersion.current() < GradleVersion.version('2.13')) {
throw new GradleException('Gradle 2.13+ is required to build elasticsearch')
}
if (JavaVersion.current() < JavaVersion.VERSION_1_8) {
throw new GradleException('Java 1.8 is required to build elasticsearch gradle tools')
}
if (project == rootProject) {
@ -96,9 +94,26 @@ dependencies {
compile 'com.perforce:p4java:2012.3.551082' // THIS IS SUPPOSED TO BE OPTIONAL IN THE FUTURE....
compile 'de.thetaphi:forbiddenapis:2.2'
compile 'org.apache.rat:apache-rat:0.11'
compile 'ru.vyarus:gradle-animalsniffer-plugin:1.0.1'
}
// Gradle version-specific options (allows build to run with Gradle 2.13 as well as 2.14+/3.+)
if (GradleVersion.current() == GradleVersion.version("2.13")) {
// ProgressLogger(-Factory) classes are part of the public Gradle API
sourceSets.main.groovy.srcDir 'src/main/gradle-2.13-groovy'
dependencies {
compile 'ru.vyarus:gradle-animalsniffer-plugin:1.0.1' // last version compatible with Gradle 2.13
}
} else {
// Gradle 2.14+ removed ProgressLogger(-Factory) classes from the public APIs
// Use logging dependency instead
sourceSets.main.groovy.srcDir 'src/main/gradle-2.14-groovy'
dependencies {
compileOnly "org.gradle:gradle-logging:${GradleVersion.current().getVersion()}"
compile 'ru.vyarus:gradle-animalsniffer-plugin:1.2.0' // Gradle 2.14 requires a version > 1.0.1
}
}
/*****************************************************************************
* Bootstrap repositories *


@ -16,16 +16,16 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.routing.allocation.command;
import org.elasticsearch.common.xcontent.ParseFieldRegistry;
package org.elasticsearch.gradle
/**
* Registry of allocation commands. This is its own class just to make Guice happy.
* Wraps a ProgressLogger so that code in src/main/groovy does not need to
* import the Gradle 2.13/2.14+ ProgressLogger classes directly
*/
public class AllocationCommandRegistry extends ParseFieldRegistry<AllocationCommand.Parser<?>> {
public AllocationCommandRegistry() {
super("allocation_command");
class ProgressLogger {
@Delegate org.gradle.logging.ProgressLogger progressLogger
ProgressLogger(org.gradle.logging.ProgressLogger progressLogger) {
this.progressLogger = progressLogger
}
}


@ -0,0 +1,35 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle
import org.gradle.logging.ProgressLoggerFactory
import javax.inject.Inject
/**
* Allows injecting a ProgressLoggerFactory into tasks in src/main/groovy
* without requiring the corresponding import of ProgressLoggerFactory,
* making it compatible with both Gradle 2.13 and 2.14+.
*/
trait ProgressLoggerFactoryInjection {
@Inject
ProgressLoggerFactory getProgressLoggerFactory() {
throw new UnsupportedOperationException()
}
}
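
Gradle's task decoration supplies the real factory at runtime by overriding the
@Inject getter above. A minimal sketch of a task consuming the trait (the task
name and messages are hypothetical, not part of this commit):
---------------------------------------------------------------------------
import org.elasticsearch.gradle.ProgressLoggerFactoryInjection
import org.gradle.api.DefaultTask
import org.gradle.api.tasks.TaskAction

// Hypothetical task: getProgressLoggerFactory() is injected by Gradle, so the
// same source compiles against Gradle 2.13 and 2.14+.
class ExampleProgressTask extends DefaultTask implements ProgressLoggerFactoryInjection {
    @TaskAction
    void showProgress() {
        def progress = getProgressLoggerFactory().newOperation(ExampleProgressTask)
        progress.setDescription('Example operation')
        progress.started()
        progress.progress('working...')
        progress.completed()
    }
}
---------------------------------------------------------------------------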


@ -16,18 +16,16 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.rest.yaml.parser;
package org.elasticsearch.gradle
/**
* Exception thrown whenever there is a problem parsing any of the REST test suite fragment
* Wraps a ProgressLogger so that code in src/main/groovy does not need to
* import the Gradle 2.13/2.14+ ProgressLogger classes directly
*/
public class ClientYamlTestParseException extends Exception {
class ProgressLogger {
@Delegate org.gradle.internal.logging.progress.ProgressLogger progressLogger
ClientYamlTestParseException(String message) {
super(message);
}
ClientYamlTestParseException(String message, Throwable cause) {
super(message, cause);
ProgressLogger(org.gradle.internal.logging.progress.ProgressLogger progressLogger) {
this.progressLogger = progressLogger
}
}


@ -0,0 +1,35 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle
import org.gradle.internal.logging.progress.ProgressLoggerFactory
import javax.inject.Inject
/**
* Allows injecting a ProgressLoggerFactory into tasks in src/main/groovy
* without requiring the corresponding import of ProgressLoggerFactory,
* making it compatible with both Gradle 2.13 and 2.14+.
*/
trait ProgressLoggerFactoryInjection {
@Inject
ProgressLoggerFactory getProgressLoggerFactory() {
throw new UnsupportedOperationException()
}
}


@ -8,6 +8,7 @@ import org.apache.tools.ant.BuildException
import org.apache.tools.ant.DefaultLogger
import org.apache.tools.ant.RuntimeConfigurable
import org.apache.tools.ant.UnknownElement
import org.elasticsearch.gradle.ProgressLoggerFactoryInjection
import org.gradle.api.DefaultTask
import org.gradle.api.file.FileCollection
import org.gradle.api.file.FileTreeElement
@ -19,12 +20,9 @@ import org.gradle.api.tasks.Optional
import org.gradle.api.tasks.TaskAction
import org.gradle.api.tasks.util.PatternFilterable
import org.gradle.api.tasks.util.PatternSet
import org.gradle.logging.ProgressLoggerFactory
import org.gradle.util.ConfigureUtil
import javax.inject.Inject
class RandomizedTestingTask extends DefaultTask {
class RandomizedTestingTask extends DefaultTask implements ProgressLoggerFactoryInjection {
// TODO: change to "executable" to match gradle test params?
@Optional
@ -81,6 +79,7 @@ class RandomizedTestingTask extends DefaultTask {
String argLine = null
Map<String, Object> systemProperties = new HashMap<>()
Map<String, Object> environmentVariables = new HashMap<>()
PatternFilterable patternSet = new PatternSet()
RandomizedTestingTask() {
@ -89,11 +88,6 @@ class RandomizedTestingTask extends DefaultTask {
listenersConfig.listeners.add(new TestReportLogger(logger: logger, config: testLoggingConfig))
}
@Inject
ProgressLoggerFactory getProgressLoggerFactory() {
throw new UnsupportedOperationException();
}
void jvmArgs(Iterable<String> arguments) {
jvmArgs.addAll(arguments)
}
@ -106,6 +100,10 @@ class RandomizedTestingTask extends DefaultTask {
systemProperties.put(property, value)
}
void environment(String key, Object value) {
environmentVariables.put(key, value)
}
void include(String... includes) {
this.patternSet.include(includes);
}
@ -194,7 +192,8 @@ class RandomizedTestingTask extends DefaultTask {
haltOnFailure: true, // we want to capture when a build failed, but will decide whether to rethrow later
shuffleOnSlave: shuffleOnSlave,
leaveTemporary: leaveTemporary,
ifNoTests: ifNoTests
ifNoTests: ifNoTests,
newenvironment: true
]
DefaultLogger listener = null
@ -250,6 +249,9 @@ class RandomizedTestingTask extends DefaultTask {
for (Map.Entry<String, Object> prop : systemProperties) {
sysproperty key: prop.getKey(), value: prop.getValue().toString()
}
for (Map.Entry<String, Object> envvar : environmentVariables) {
env key: envvar.getKey(), value: envvar.getValue().toString()
}
makeListeners()
}
} catch (BuildException e) {
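
The new environment(String, Object) method and the env loop above forward
environment variables to the forked JUnit4 JVMs. A minimal usage sketch from a
build script, assuming the 'test' task is a RandomizedTestingTask as configured
by BuildPlugin (variable name and value are made up):
---------------------------------------------------------------------------
test {
    // passed to each forked test JVM via the ant junit4 task's env entries
    environment 'EXAMPLE_ENV_VAR', 'example-value'
}
---------------------------------------------------------------------------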


@ -25,8 +25,7 @@ import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedStartEvent
import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedSuiteResultEvent
import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedTestResultEvent
import com.carrotsearch.ant.tasks.junit4.listeners.AggregatedEventListener
import org.gradle.logging.ProgressLogger
import org.gradle.logging.ProgressLoggerFactory
import org.elasticsearch.gradle.ProgressLogger
import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.formatDurationInSeconds
import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.ERROR
@ -52,8 +51,6 @@ import static java.lang.Math.max
* quick.
*/
class TestProgressLogger implements AggregatedEventListener {
/** Factory to build a progress logger when testing starts */
ProgressLoggerFactory factory
ProgressLogger progressLogger
int totalSuites
int totalSlaves
@ -79,12 +76,15 @@ class TestProgressLogger implements AggregatedEventListener {
/* Note that we probably overuse volatile here but it isn't hurting us and
lets us move things around without worrying about breaking things. */
TestProgressLogger(Map args) {
progressLogger = new ProgressLogger(args.factory.newOperation(TestProgressLogger))
progressLogger.setDescription('Randomized test runner')
}
@Subscribe
void onStart(AggregatedStartEvent e) throws IOException {
totalSuites = e.suiteCount
totalSlaves = e.slaveCount
progressLogger = factory.newOperation(TestProgressLogger)
progressLogger.setDescription('Randomized test runner')
progressLogger.started()
progressLogger.progress(
"Starting JUnit4 for ${totalSuites} suites on ${totalSlaves} jvms")


@ -18,9 +18,11 @@
*/
package org.elasticsearch.gradle
import com.carrotsearch.gradle.junit4.RandomizedTestingTask
import nebula.plugin.extraconfigurations.ProvidedBasePlugin
import org.elasticsearch.gradle.precommit.PrecommitTasks
import org.gradle.api.GradleException
import org.gradle.api.InvalidUserDataException
import org.gradle.api.JavaVersion
import org.gradle.api.Plugin
import org.gradle.api.Project
@ -54,6 +56,11 @@ class BuildPlugin implements Plugin<Project> {
@Override
void apply(Project project) {
if (project.pluginManager.hasPlugin('elasticsearch.standalone-rest-test')) {
throw new InvalidUserDataException('elasticsearch.standalone-test, '
+ 'elasticsearch.standalone-rest-test, and elasticsearch.build '
+ 'are mutually exclusive')
}
project.pluginManager.apply('java')
project.pluginManager.apply('carrotsearch.randomized-testing')
// these plugins add lots of info to our jars
@ -195,18 +202,13 @@ class BuildPlugin implements Plugin<Project> {
/** Runs the given javascript using jjs from the jdk, and returns the output */
private static String runJavascript(Project project, String javaHome, String script) {
File tmpScript = File.createTempFile('es-gradle-tmp', '.js')
tmpScript.setText(script, 'UTF-8')
ByteArrayOutputStream output = new ByteArrayOutputStream()
ExecResult result = project.exec {
executable = new File(javaHome, 'bin/jjs')
args tmpScript.toString()
project.exec {
executable = new File(javaHome, 'bin/jrunscript')
args '-e', script
standardOutput = output
errorOutput = new ByteArrayOutputStream()
ignoreExitValue = true // we do not fail so we can first cleanup the tmp file
}
java.nio.file.Files.delete(tmpScript.toPath())
result.assertNormalExitValue()
return output.toString('UTF-8').trim()
}
@ -412,8 +414,10 @@ class BuildPlugin implements Plugin<Project> {
// hack until gradle supports java 9's new "--release" arg
assert minimumJava == JavaVersion.VERSION_1_8
options.compilerArgs << '--release' << '8'
project.sourceCompatibility = null
project.targetCompatibility = null
doFirst{
sourceCompatibility = null
targetCompatibility = null
}
}
}
}
@ -509,11 +513,9 @@ class BuildPlugin implements Plugin<Project> {
}
}
// System assertions (-esa) are disabled for now because of what looks like a
// JDK bug triggered by Groovy on JDK7. We should look at re-enabling system
// assertions when we upgrade to a new version of Groovy (currently 2.4.4) or
// require JDK8. See https://issues.apache.org/jira/browse/GROOVY-7528.
enableSystemAssertions false
boolean assertionsEnabled = Boolean.parseBoolean(System.getProperty('tests.asserts', 'true'))
enableSystemAssertions assertionsEnabled
enableAssertions assertionsEnabled
testLogging {
showNumFailuresAtEnd 25
@ -554,11 +556,22 @@ class BuildPlugin implements Plugin<Project> {
/** Configures the test task */
static Task configureTest(Project project) {
Task test = project.tasks.getByName('test')
RandomizedTestingTask test = project.tasks.getByName('test')
test.configure(commonTestConfig(project))
test.configure {
include '**/*Tests.class'
}
// Add a method to create additional unit tests for a project, which will share the same
// randomized testing setup, but by default run no tests.
project.extensions.add('additionalTest', { String name, Closure config ->
RandomizedTestingTask additionalTest = project.tasks.create(name, RandomizedTestingTask.class)
additionalTest.classpath = test.classpath
additionalTest.testClassesDir = test.testClassesDir
additionalTest.configure(commonTestConfig(project))
additionalTest.configure(config)
test.dependsOn(additionalTest)
});
return test
}
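
The additionalTest extension registered above creates an extra RandomizedTestingTask
that reuses the main test classpath and the common test configuration, but runs no
classes until some are included. A minimal usage sketch from a module's build.gradle
(task name and include pattern are hypothetical):
---------------------------------------------------------------------------
additionalTest('exampleExtraTests') {
    include '**/*ExtraTests.class'
}
---------------------------------------------------------------------------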


@ -30,6 +30,7 @@ public class DocsTestPlugin extends RestTestPlugin {
@Override
public void apply(Project project) {
project.pluginManager.apply('elasticsearch.standalone-rest-test')
super.apply(project)
Map<String, String> defaultSubstitutions = [
/* These match up with the asciidoc syntax for substitutions but


@ -139,6 +139,7 @@ class PrecommitTasks {
configProperties = [
suppressions: checkstyleSuppressions
]
toolVersion = 7.5
}
for (String taskName : ['checkstyleMain', 'checkstyleTest']) {
Task task = project.tasks.findByName(taskName)


@ -73,8 +73,8 @@ class ClusterConfiguration {
@Input
String jvmArgs = "-Xms" + System.getProperty('tests.heap.size', '512m') +
" " + "-Xmx" + System.getProperty('tests.heap.size', '512m') +
" " + System.getProperty('tests.jvm.argline', '')
" " + "-Xmx" + System.getProperty('tests.heap.size', '512m') +
" " + System.getProperty('tests.jvm.argline', '')
/**
* A closure to call which returns the unicast host to connect to for cluster formation.


@ -39,6 +39,7 @@ import org.gradle.api.tasks.Delete
import org.gradle.api.tasks.Exec
import java.nio.file.Paths
import java.util.concurrent.TimeUnit
/**
* A helper for creating tasks to build a cluster that is used by a task, and tear down the cluster when the task is finished.
@ -91,6 +92,8 @@ class ClusterFormationTasks {
configureBwcPluginDependency("${task.name}_elasticsearchBwcPlugins", project, entry.getValue(),
project.configurations.elasticsearchBwcPlugins, config.bwcVersion)
}
project.configurations.elasticsearchBwcDistro.resolutionStrategy.cacheChangingModulesFor(0, TimeUnit.SECONDS)
project.configurations.elasticsearchBwcPlugins.resolutionStrategy.cacheChangingModulesFor(0, TimeUnit.SECONDS)
}
for (int i = 0; i < config.numNodes; i++) {
// we start N nodes and out of these N nodes there might be M bwc nodes.
@ -268,12 +271,12 @@ class ClusterFormationTasks {
static Task configureWriteConfigTask(String name, Project project, Task setup, NodeInfo node, NodeInfo seedNode) {
Map esConfig = [
'cluster.name' : node.clusterName,
'node.name' : "node-" + node.nodeNum,
'pidfile' : node.pidFile,
'path.repo' : "${node.sharedDir}/repo",
'path.shared_data' : "${node.sharedDir}/",
// Define a node attribute so we can test that it exists
'node.attr.testattr' : 'test',
'repositories.url.allowed_urls': 'http://snapshot.test*'
'node.attr.testattr' : 'test'
]
// we set min master nodes to the total number of nodes in the cluster and
// basically skip initial state recovery to allow the cluster to form using a realistic master election


@ -48,7 +48,7 @@ class MessyTestPlugin extends StandaloneTestPlugin {
}
private static addPluginResources(Project project, Project pluginProject) {
String outputDir = "generated-resources/${pluginProject.name}"
String outputDir = "${project.buildDir}/generated-resources/${pluginProject.name}"
String taskName = ClusterFormationTasks.pluginTaskName("copy", pluginProject.name, "Metadata")
Copy copyPluginMetadata = project.tasks.create(taskName, Copy.class)
copyPluginMetadata.into(outputDir)
@ -57,7 +57,7 @@ class MessyTestPlugin extends StandaloneTestPlugin {
project.sourceSets.test.output.dir(outputDir, builtBy: taskName)
// add each generated dir to the test classpath in IDEs
//project.eclipse.classpath.sourceSets = [project.sourceSets.test]
project.idea.module.singleEntryLibraries= ['TEST': [project.file(outputDir)]]
// Eclipse doesn't need this because it gets the entire module as a dependency
}
}


@ -151,6 +151,9 @@ class NodeInfo {
args.addAll("-E", "node.portsfile=true")
String collectedSystemProperties = config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ")
String esJavaOpts = config.jvmArgs.isEmpty() ? collectedSystemProperties : collectedSystemProperties + " " + config.jvmArgs
if (Boolean.parseBoolean(System.getProperty('tests.asserts', 'true'))) {
esJavaOpts += " -ea -esa"
}
env.put('ES_JAVA_OPTS', esJavaOpts)
for (Map.Entry<String, String> property : System.properties.entrySet()) {
if (property.key.startsWith('tests.es.')) {


@ -18,15 +18,29 @@
*/
package org.elasticsearch.gradle.test
import org.elasticsearch.gradle.BuildPlugin
import org.gradle.api.InvalidUserDataException
import org.gradle.api.Plugin
import org.gradle.api.Project
/** A plugin to add rest integration tests. Used for qa projects. */
/**
* Adds support for starting an Elasticsearch cluster before running integration
* tests. Used in conjunction with {@link StandaloneRestTestPlugin} for qa
* projects and in conjunction with {@link BuildPlugin} for testing the rest
* client.
*/
public class RestTestPlugin implements Plugin<Project> {
List REQUIRED_PLUGINS = [
'elasticsearch.build',
'elasticsearch.standalone-rest-test']
@Override
public void apply(Project project) {
project.pluginManager.apply(StandaloneTestBasePlugin)
if (false == REQUIRED_PLUGINS.any {project.pluginManager.hasPlugin(it)}) {
throw new InvalidUserDataException('elasticsearch.rest-test '
+ 'requires either elasticsearch.build or '
+ 'elasticsearch.standalone-rest-test')
}
RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class)
integTest.cluster.distribution = 'zip' // rest tests should run with the real zip
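
With the check above, elasticsearch.rest-test refuses to apply unless either
elasticsearch.build or elasticsearch.standalone-rest-test is already present. A
minimal sketch of a standalone qa project's build.gradle under that constraint
(the cluster setting shown is an illustrative assumption):
---------------------------------------------------------------------------
apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'

integTest {
    // the plugin already defaults the cluster distribution to 'zip'
    cluster.numNodes = 1
}
---------------------------------------------------------------------------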


@ -24,15 +24,26 @@ import com.carrotsearch.gradle.junit4.RandomizedTestingPlugin
import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.precommit.PrecommitTasks
import org.gradle.api.InvalidUserDataException
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.plugins.JavaBasePlugin
/** Configures the build to have a rest integration test. */
public class StandaloneTestBasePlugin implements Plugin<Project> {
/**
* Configures the build to compile tests against Elasticsearch's test framework
* and run REST tests. Use BuildPlugin if you want to build main code as well
* as tests.
*/
public class StandaloneRestTestPlugin implements Plugin<Project> {
@Override
public void apply(Project project) {
if (project.pluginManager.hasPlugin('elasticsearch.build')) {
throw new InvalidUserDataException('elasticsearch.standalone-test, '
+ 'elasticsearch.standalone-rest-test, and elasticsearch.build '
+ 'are mutually exclusive')
}
project.pluginManager.apply(JavaBasePlugin)
project.pluginManager.apply(RandomizedTestingPlugin)


@ -25,12 +25,15 @@ import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.plugins.JavaBasePlugin
/** A plugin to add tests only. Used for QA tests that run arbitrary unit tests. */
/**
* Configures the build to compile against Elasticsearch's test framework and
* run integration and unit tests. Use BuildPlugin if you want to build main
* code as well as tests. */
public class StandaloneTestPlugin implements Plugin<Project> {
@Override
public void apply(Project project) {
project.pluginManager.apply(StandaloneTestBasePlugin)
project.pluginManager.apply(StandaloneRestTestPlugin)
Map testOptions = [
name: 'test',


@ -0,0 +1,66 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gradle.test
import org.elasticsearch.gradle.plugin.PluginBuildPlugin
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.artifacts.Dependency
import org.gradle.api.artifacts.ProjectDependency
import org.gradle.api.tasks.Copy
/**
* A plugin to run tests that depend on other plugins or modules.
*
* This plugin will add the plugin-metadata and properties files for each
* dependency to the test source set.
*/
class TestWithDependenciesPlugin implements Plugin<Project> {
@Override
void apply(Project project) {
if (project.isEclipse) {
/* The changes this plugin makes both break and aren't needed by
* Eclipse. This is because Eclipse flattens main and test
* dependencies into a single dependency. Because Eclipse is
* "special".... */
return
}
project.configurations.testCompile.dependencies.all { Dependency dep ->
// this closure is run every time a compile dependency is added
if (dep instanceof ProjectDependency && dep.dependencyProject.plugins.hasPlugin(PluginBuildPlugin)) {
project.gradle.projectsEvaluated {
addPluginResources(project, dep.dependencyProject)
}
}
}
}
private static addPluginResources(Project project, Project pluginProject) {
String outputDir = "${project.buildDir}/generated-resources/${pluginProject.name}"
String taskName = ClusterFormationTasks.pluginTaskName("copy", pluginProject.name, "Metadata")
Copy copyPluginMetadata = project.tasks.create(taskName, Copy.class)
copyPluginMetadata.into(outputDir)
copyPluginMetadata.from(pluginProject.tasks.pluginProperties)
copyPluginMetadata.from(pluginProject.file('src/main/plugin-metadata'))
project.sourceSets.test.output.dir(outputDir, builtBy: taskName)
}
}
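
A sketch of how a qa project might consume this plugin: it declares a testCompile
project dependency on another Elasticsearch plugin, and the Copy task wired up above
puts that plugin's metadata and properties on the test classpath. The plugin id and
project path below are assumptions, not shown in this diff:
---------------------------------------------------------------------------
apply plugin: 'elasticsearch.test-with-dependencies'  // assumed plugin id

dependencies {
    // hypothetical path to an Elasticsearch plugin project; its plugin-metadata
    // ends up under build/generated-resources/<plugin-name> for the test source set
    testCompile project(path: ':plugins:example-plugin', configuration: 'runtime')
}
---------------------------------------------------------------------------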


@ -20,10 +20,9 @@ package org.elasticsearch.gradle.vagrant
import com.carrotsearch.gradle.junit4.LoggingOutputStream
import groovy.transform.PackageScope
import org.elasticsearch.gradle.ProgressLogger
import org.gradle.api.GradleScriptException
import org.gradle.api.logging.Logger
import org.gradle.logging.ProgressLogger
import org.gradle.logging.ProgressLoggerFactory
import java.util.regex.Matcher
@ -49,7 +48,7 @@ public class TapLoggerOutputStream extends LoggingOutputStream {
TapLoggerOutputStream(Map args) {
logger = args.logger
progressLogger = args.factory.newOperation(VagrantLoggerOutputStream)
progressLogger = new ProgressLogger(args.factory.newOperation(VagrantLoggerOutputStream))
progressLogger.setDescription("TAP output for `${args.command}`")
}


@ -19,17 +19,15 @@
package org.elasticsearch.gradle.vagrant
import org.apache.commons.io.output.TeeOutputStream
import org.elasticsearch.gradle.ProgressLoggerFactoryInjection
import org.elasticsearch.gradle.LoggedExec
import org.gradle.api.tasks.Input
import org.gradle.logging.ProgressLoggerFactory
import javax.inject.Inject
/**
* Runs a vagrant command. Pretty much like Exec task but with a nicer output
* formatter and defaults to `vagrant` as first part of commandLine.
*/
public class VagrantCommandTask extends LoggedExec {
public class VagrantCommandTask extends LoggedExec implements ProgressLoggerFactoryInjection {
@Input
String boxName
@ -57,9 +55,4 @@ public class VagrantCommandTask extends LoggedExec {
stuff starts with ==> $box */
squashedPrefix: "==> $boxName: ")
}
@Inject
ProgressLoggerFactory getProgressLoggerFactory() {
throw new UnsupportedOperationException();
}
}


@ -19,9 +19,7 @@
package org.elasticsearch.gradle.vagrant
import com.carrotsearch.gradle.junit4.LoggingOutputStream
import org.gradle.api.logging.Logger
import org.gradle.logging.ProgressLogger
import org.gradle.logging.ProgressLoggerFactory
import org.elasticsearch.gradle.ProgressLogger
/**
* Adapts an OutputStream being written to by vagrant into a ProgressLogger. It
@ -55,7 +53,7 @@ public class VagrantLoggerOutputStream extends LoggingOutputStream {
private String heading = ''
VagrantLoggerOutputStream(Map args) {
progressLogger = args.factory.newOperation(VagrantLoggerOutputStream)
progressLogger = new ProgressLogger(args.factory.newOperation(VagrantLoggerOutputStream))
progressLogger.setDescription("Vagrant output for `$args.command`")
squashedPrefix = args.squashedPrefix
}


@ -404,10 +404,6 @@ class VagrantTestPlugin implements Plugin<Project> {
args 'halt', box
}
stop.dependsOn(halt)
if (project.extensions.esvagrant.boxes.contains(box) == false) {
// we only need a halt task if this box was not specified
continue;
}
Task update = project.tasks.create("vagrant${boxTask}#update", VagrantCommandTask) {
boxName box
@ -435,6 +431,11 @@ class VagrantTestPlugin implements Plugin<Project> {
dependsOn update
}
if (project.extensions.esvagrant.boxes.contains(box) == false) {
// we don't need test tasks if this box was not specified
continue;
}
Task smoke = project.tasks.create("vagrant${boxTask}#smoketest", Exec) {
environment vagrantEnvVars
dependsOn up


@ -0,0 +1,20 @@
#
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
implementation-class=org.elasticsearch.gradle.test.StandaloneRestTestPlugin


@ -0,0 +1,20 @@
#
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
implementation-class=org.elasticsearch.gradle.test.TestWithDependenciesPlugin


@ -10,9 +10,6 @@
<suppress files="org[/\\]elasticsearch[/\\]painless[/\\]antlr[/\\]PainlessLexer\.java" checks="." />
<suppress files="org[/\\]elasticsearch[/\\]painless[/\\]antlr[/\\]PainlessParser(|BaseVisitor|Visitor)\.java" checks="." />
<!-- ThrowableProxy is a forked copy from Log4j to hack around a bug; this can be removed when the hack is removed -->
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]apache[/\\]logging[/\\]log4j[/\\]core[/\\]impl[/\\]ThrowableProxy.java" checks="RegexpSinglelineJava" />
<!-- Hopefully temporary suppression of LineLength on files that don't pass it. We should remove these when the
files start to pass. -->
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]apache[/\\]lucene[/\\]queries[/\\]BlendedTermQuery.java" checks="LineLength" />
@ -29,7 +26,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]delete[/\\]TransportDeleteRepositoryAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]get[/\\]GetRepositoriesRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]get[/\\]TransportGetRepositoriesAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]put[/\\]PutRepositoryRequest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]put[/\\]PutRepositoryRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]put[/\\]TransportPutRepositoryAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]repositories[/\\]verify[/\\]TransportVerifyRepositoryAction.java" checks="LineLength" />
@ -56,7 +52,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]stats[/\\]TransportClusterStatsAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]tasks[/\\]PendingClusterTasksAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]tasks[/\\]PendingClusterTasksRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]tasks[/\\]PendingClusterTasksResponse.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]tasks[/\\]TransportPendingClusterTasksAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]validate[/\\]template[/\\]RenderSearchTemplateAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]admin[/\\]cluster[/\\]validate[/\\]template[/\\]RenderSearchTemplateRequestBuilder.java" checks="LineLength" />
@ -135,15 +130,12 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]TransportBulkAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]bulk[/\\]TransportShardBulkAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]delete[/\\]DeleteRequest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]delete[/\\]TransportDeleteAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]explain[/\\]TransportExplainAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]get[/\\]GetRequest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]get[/\\]MultiGetRequest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]get[/\\]TransportGetAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]get[/\\]TransportShardMultiGetAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]index[/\\]IndexRequest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]index[/\\]IndexRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]index[/\\]TransportIndexAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]indexedscripts[/\\]delete[/\\]DeleteIndexedScriptAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]indexedscripts[/\\]delete[/\\]DeleteIndexedScriptRequest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]indexedscripts[/\\]delete[/\\]DeleteIndexedScriptRequestBuilder.java" checks="LineLength" />
@ -165,19 +157,16 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ingest[/\\]SimulatePipelineRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ingest[/\\]SimulatePipelineTransportAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]MultiSearchRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]SearchPhaseExecutionException.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]SearchResponse.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]ShardSearchFailure.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]TransportClearScrollAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]TransportMultiSearchAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]suggest[/\\]SuggestResponse.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]ActionFilter.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]AutoCreateIndex.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]DelegatingActionListener.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]IndicesOptions.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]ToXContentToBytes.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]broadcast[/\\]BroadcastOperationRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]broadcast[/\\]BroadcastRequest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]broadcast[/\\]BroadcastResponse.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]broadcast[/\\]TransportBroadcastAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]broadcast[/\\]node[/\\]TransportBroadcastByNodeAction.java" checks="LineLength" />
@ -195,7 +184,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]single[/\\]instance[/\\]InstanceShardOperationRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]single[/\\]instance[/\\]TransportInstanceSingleOperationAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]single[/\\]shard[/\\]SingleShardOperationRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]single[/\\]shard[/\\]SingleShardRequest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]single[/\\]shard[/\\]TransportSingleShardAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]MultiTermVectorsRequest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]MultiTermVectorsRequestBuilder.java" checks="LineLength" />
@ -212,7 +200,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]update[/\\]UpdateRequest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]update[/\\]UpdateRequestBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]Bootstrap.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]JNAKernel32Library.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]JNANatives.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]JVMCheck.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]JarHell.java" checks="LineLength" />
@ -234,7 +221,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]metadata[/\\]AutoExpandReplicas.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]metadata[/\\]IndexMetaData.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]metadata[/\\]IndexNameExpressionResolver.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]metadata[/\\]MappingMetaData.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]metadata[/\\]MetaData.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]metadata[/\\]MetaDataCreateIndexService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]metadata[/\\]MetaDataIndexStateService.java" checks="LineLength" />
@ -260,10 +246,8 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]decider[/\\]AllocationDeciders.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]service[/\\]InternalClusterService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]Base64.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]Booleans.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]Numbers.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]blobstore[/\\]fs[/\\]FsBlobStore.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]blobstore[/\\]url[/\\]URLBlobStore.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]bytes[/\\]BytesArray.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]bytes[/\\]PagedBytesReference.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]cache[/\\]Cache.java" checks="LineLength" />
@ -315,7 +299,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]AlreadyExpiredException.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]CompositeIndexEventListener.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexSettings.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexingSlowLog.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]MergePolicyConfig.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]SearchSlowLog.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]AnalysisRegistry.java" checks="LineLength" />
@ -338,7 +321,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]fieldcomparator[/\\]FloatValuesComparatorSource.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]fieldcomparator[/\\]LongValuesComparatorSource.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]ordinals[/\\]GlobalOrdinalsBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]ordinals[/\\]InternalGlobalOrdinalsIndexFieldData.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]ordinals[/\\]MultiOrdinals.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]ordinals[/\\]OrdinalsBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]ordinals[/\\]SinglePackedOrdinals.java" checks="LineLength" />
@ -349,16 +331,13 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]plain[/\\]ParentChildIndexFieldData.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]plain[/\\]SortedNumericDVIndexFieldData.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]fielddata[/\\]plain[/\\]SortedSetDVOrdinalsIndexFieldData.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]get[/\\]GetResult.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]get[/\\]ShardGetService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]DocumentFieldMappers.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]DocumentMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]DocumentMapperParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]DocumentParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]FieldMapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]FieldTypeLookup.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]MappedFieldType.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]Mapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]MapperService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]Mapping.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]MetadataFieldMapper.java" checks="LineLength" />
@ -392,7 +371,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]QueryBuilders.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]QueryValidationException.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]support[/\\]InnerHitsQueryParserHelper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]support[/\\]QueryParsers.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]MatchQuery.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]MultiMatchQuery.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]geo[/\\]IndexedGeoBoundingBoxQuery.java" checks="LineLength" />
@ -406,8 +384,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]StoreRecovery.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]TranslogRecoveryPerformer.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]snapshots[/\\]blobstore[/\\]BlobStoreIndexShardRepository.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]snapshots[/\\]blobstore[/\\]BlobStoreIndexShardSnapshots.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]IndexStore.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]IndexStoreConfig.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]LegacyVerification.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]store[/\\]Store.java" checks="LineLength" />
@ -432,14 +408,12 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]recovery[/\\]RecoverySettings.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]recovery[/\\]PeerRecoverySourceService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]recovery[/\\]RecoveryState.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]recovery[/\\]StartRecoveryRequest.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]store[/\\]IndicesStore.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]store[/\\]TransportNodesListShardStoreMetaData.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]ttl[/\\]IndicesTTLService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]monitor[/\\]jvm[/\\]GcNames.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]monitor[/\\]jvm[/\\]HotThreads.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]node[/\\]Node.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]node[/\\]internal[/\\]InternalSettingsPreparer.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugins[/\\]PluginsService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]RepositoriesService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]Repository.java" checks="LineLength" />
@ -450,10 +424,7 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]blobstore[/\\]ChecksumBlobStoreFormat.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]fs[/\\]FsRepository.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]uri[/\\]URLIndexShardRepository.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]uri[/\\]URLRepository.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]BytesRestResponse.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]RestController.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestCountAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestIndicesAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestShardsAction.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestThreadPoolAction.java" checks="LineLength" />
@ -475,18 +446,12 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]filters[/\\]FiltersParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]filters[/\\]InternalFilters.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]geogrid[/\\]GeoHashGridAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]global[/\\]GlobalAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]global[/\\]InternalGlobal.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]histogram[/\\]HistogramAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]missing[/\\]InternalMissing.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]missing[/\\]MissingAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]nested[/\\]InternalReverseNested.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]nested[/\\]ReverseNestedAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]range[/\\]RangeAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]sampler[/\\]DiversifiedBytesHashSamplerAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]sampler[/\\]DiversifiedMapSamplerAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]sampler[/\\]DiversifiedNumericSamplerAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]sampler[/\\]DiversifiedOrdinalsSamplerAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]sampler[/\\]InternalSampler.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]sampler[/\\]SamplerAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]significant[/\\]GlobalOrdinalsSignificantTermsAggregator.java" checks="LineLength" />
@ -498,7 +463,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]significant[/\\]heuristics[/\\]ScriptHeuristic.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]significant[/\\]heuristics[/\\]SignificanceHeuristic.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]AbstractTermsParametersParser.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]GlobalOrdinalsStringTermsAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]InternalOrder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]LongTermsAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]StringTermsAggregator.java" checks="LineLength" />
@ -515,7 +479,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]percentiles[/\\]tdigest[/\\]TDigestPercentilesAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]scripted[/\\]ScriptedMetricAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]stats[/\\]extended[/\\]ExtendedStatsAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]tophits[/\\]TopHitsAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]pipeline[/\\]bucketscript[/\\]BucketScriptPipelineAggregator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]AggregationPath.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]ValuesSourceParser.java" checks="LineLength" />
@ -548,7 +511,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]threadpool[/\\]ThreadPool.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]apache[/\\]lucene[/\\]queries[/\\]BlendedTermQueryTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]apache[/\\]lucene[/\\]search[/\\]postingshighlight[/\\]CustomPostingsHighlighterTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]ESExceptionTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]NamingConventionTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]VersionTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]RejectionActionIT.java" checks="LineLength" />
@ -576,8 +538,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ingest[/\\]SimulatePipelineResponseTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]ingest[/\\]WriteableIngestDocumentTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]search[/\\]SearchRequestBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]IndicesOptionsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]IndicesOptionsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]TransportActionFilterChainTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]WaitActiveShardCountIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]broadcast[/\\]node[/\\]TransportBroadcastByNodeActionTests.java" checks="LineLength" />
@ -591,9 +551,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]termvectors[/\\]TermVectorsUnitTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]aliases[/\\]IndexAliasesIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]JarHellTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bwcompat[/\\]BasicAnalysisBackwardCompatibilityIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bwcompat[/\\]BasicBackwardsCompatibilityIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bwcompat[/\\]GetIndexBackwardsCompatibilityIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]bwcompat[/\\]RestoreBackwardsCompatIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]AbstractClientHeadersTestCase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterHealthIT.java" checks="LineLength" />
@ -669,7 +626,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]settings[/\\]ClusterSettingsIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]shards[/\\]ClusterSearchShardsIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]structure[/\\]RoutingIteratorTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]BooleansTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]blobstore[/\\]FsBlobStoreContainerTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]blobstore[/\\]FsBlobStoreTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]breaker[/\\]MemoryCircuitBreakerTests.java" checks="LineLength" />
@ -704,8 +660,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]PrimaryShardAllocatorTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]PriorityComparatorTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]QuorumGatewayIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]RecoverAfterNodesIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]RecoveryBackwardsCompatibilityIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]RecoveryFromGatewayIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]ReplicaShardAllocatorTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]ReusePeerRecoverySharedTest.java" checks="LineLength" />
@ -789,7 +743,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]HasChildQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]MoreLikeThisQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]MultiMatchQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]RandomQueryBuilder.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]SpanMultiTermQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]SpanNotQueryBuilderTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]support[/\\]QueryInnerHitsTests.java" checks="LineLength" />
@ -801,7 +754,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]nested[/\\]LongNestedSortingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]search[/\\]nested[/\\]NestedSortingTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]IndexShardTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]IndexingOperationListenerTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]ShardPathTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]similarity[/\\]SimilarityTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]snapshots[/\\]blobstore[/\\]FileInfoTests.java" checks="LineLength" />
@ -819,7 +771,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]IndicesOptionsIntegrationIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]analyze[/\\]AnalyzeActionIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]analyze[/\\]HunspellServiceIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]exists[/\\]indices[/\\]IndicesExistsIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]exists[/\\]types[/\\]TypesExistsIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]flush[/\\]FlushIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indices[/\\]flush[/\\]SyncedFlushSingleNodeTests.java" checks="LineLength" />
@ -924,7 +875,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]DedicatedClusterSnapshotRestoreIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]RepositoriesIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SharedClusterSnapshotRestoreIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SnapshotBackwardsCompatibilityIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SnapshotUtilsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]ESBlobStoreRepositoryIntegTestCase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]geo[/\\]RandomShapeGenerator.java" checks="LineLength" />
@ -935,9 +885,8 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]validate[/\\]SimpleValidateQueryIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]versioning[/\\]SimpleVersioningIT.java" checks="LineLength" />
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]ExpressionScriptEngineService.java" checks="LineLength" />
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]ExpressionSearchScript.java" checks="LineLength" />
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]ExpressionTests.java" checks="LineLength" />
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]IndexedExpressionTests.java" checks="LineLength" />
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]StoredExpressionTests.java" checks="LineLength" />
<suppress files="modules[/\\]lang-expression[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]expression[/\\]MoreExpressionTests.java" checks="LineLength" />
<suppress files="modules[/\\]lang-groovy[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]groovy[/\\]GroovyScriptEngineService.java" checks="LineLength" />
<suppress files="modules[/\\]lang-groovy[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]groovy[/\\]GroovySecurityTests.java" checks="LineLength" />
@ -994,13 +943,8 @@
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]util[/\\]MockBigArrays.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]script[/\\]NativeSignificanceScoreScriptWithParams.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]BackgroundIndexer.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]CompositeTestCluster.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]CorruptionUtils.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]ESBackcompatTestCase.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]ESIntegTestCase.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]ESSingleNodeTestCase.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]ESTestCase.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]ExternalNode.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]IndexSettingsModule.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]InternalTestCluster.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]MockIndexEventListener.java" checks="LineLength" />
@ -1012,7 +956,6 @@
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]engine[/\\]AssertingSearcher.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]engine[/\\]MockEngineSupport.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]hamcrest[/\\]ElasticsearchAssertions.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]junit[/\\]listeners[/\\]LoggingListener.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]store[/\\]MockFSDirectoryService.java" checks="LineLength" />
<suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]store[/\\]MockFSIndexStore.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]cli[/\\]CliTool.java" checks="LineLength" />

View File

@ -1,10 +1,11 @@
# When updating elasticsearch, please update 'rest' version in core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
elasticsearch = 6.0.0-alpha1
lucene = 6.4.0-snapshot-ec38570
lucene = 6.4.0
# optional dependencies
spatial4j = 0.6
jts = 1.13
jackson = 2.8.1
jackson = 2.8.6
snakeyaml = 1.15
# When updating log4j, please update also docs/java-api/index.asciidoc
log4j = 2.7
@ -15,10 +16,16 @@ jna = 4.2.2
randomizedrunner = 2.4.0
junit = 4.11
httpclient = 4.5.2
# When updating httpcore, please also update core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
httpcore = 4.4.5
# When updating httpasyncclient, please also update core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
httpasyncclient = 4.1.2
commonslogging = 1.1.3
commonscodec = 1.10
hamcrest = 1.3
securemock = 1.2
# When updating mocksocket, please also update core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
mocksocket = 1.1
# benchmark dependencies
jmh = 1.17.3

View File

@ -95,7 +95,7 @@ public class BulkBenchmarkTask implements BenchmarkTask {
private final BlockingQueue<List<String>> bulkQueue;
private final int bulkSize;
public LoadGenerator(Path bulkDataFile, BlockingQueue<List<String>> bulkQueue, int bulkSize) {
LoadGenerator(Path bulkDataFile, BlockingQueue<List<String>> bulkQueue, int bulkSize) {
this.bulkDataFile = bulkDataFile;
this.bulkQueue = bulkQueue;
this.bulkSize = bulkSize;
@ -143,7 +143,7 @@ public class BulkBenchmarkTask implements BenchmarkTask {
private final BulkRequestExecutor bulkRequestExecutor;
private final SampleRecorder sampleRecorder;
public BulkIndexer(BlockingQueue<List<String>> bulkData, int warmupIterations, int measurementIterations,
BulkIndexer(BlockingQueue<List<String>> bulkData, int warmupIterations, int measurementIterations,
SampleRecorder sampleRecorder, BulkRequestExecutor bulkRequestExecutor) {
this.bulkData = bulkData;
this.warmupIterations = warmupIterations;

View File

@ -73,7 +73,7 @@ public final class RestClientBenchmark extends AbstractBenchmark<RestClient> {
private final RestClient client;
private final String actionMetaData;
public RestBulkRequestExecutor(RestClient client, String index, String type) {
RestBulkRequestExecutor(RestClient client, String index, String type) {
this.client = client;
this.actionMetaData = String.format(Locale.ROOT, "{ \"index\" : { \"_index\" : \"%s\", \"_type\" : \"%s\" } }%n", index, type);
}

View File

@ -28,6 +28,7 @@ import org.elasticsearch.client.benchmark.ops.search.SearchRequestExecutor;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugin.noop.NoopPlugin;
import org.elasticsearch.plugin.noop.action.bulk.NoopBulkAction;
@ -70,7 +71,7 @@ public final class TransportClientBenchmark extends AbstractBenchmark<TransportC
private final String indexName;
private final String typeName;
public TransportBulkRequestExecutor(TransportClient client, String indexName, String typeName) {
TransportBulkRequestExecutor(TransportClient client, String indexName, String typeName) {
this.client = client;
this.indexName = indexName;
this.typeName = typeName;
@ -80,7 +81,7 @@ public final class TransportClientBenchmark extends AbstractBenchmark<TransportC
public boolean bulkIndex(List<String> bulkData) {
NoopBulkRequestBuilder builder = NoopBulkAction.INSTANCE.newRequestBuilder(client);
for (String bulkItem : bulkData) {
builder.add(new IndexRequest(indexName, typeName).source(bulkItem.getBytes(StandardCharsets.UTF_8)));
builder.add(new IndexRequest(indexName, typeName).source(bulkItem.getBytes(StandardCharsets.UTF_8), XContentType.JSON));
}
BulkResponse bulkResponse;
try {

View File

@ -23,15 +23,23 @@ import org.elasticsearch.plugin.noop.action.bulk.RestNoopBulkAction;
import org.elasticsearch.plugin.noop.action.bulk.TransportNoopBulkAction;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.IndexScopedSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.plugin.noop.action.search.NoopSearchAction;
import org.elasticsearch.plugin.noop.action.search.RestNoopSearchAction;
import org.elasticsearch.plugin.noop.action.search.TransportNoopSearchAction;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestHandler;
import java.util.Arrays;
import java.util.List;
import java.util.function.Supplier;
public class NoopPlugin extends Plugin implements ActionPlugin {
@Override
@ -43,7 +51,11 @@ public class NoopPlugin extends Plugin implements ActionPlugin {
}
@Override
public List<Class<? extends RestHandler>> getRestHandlers() {
return Arrays.asList(RestNoopBulkAction.class, RestNoopSearchAction.class);
public List<RestHandler> getRestHandlers(Settings settings, RestController restController, ClusterSettings clusterSettings,
IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, IndexNameExpressionResolver indexNameExpressionResolver,
Supplier<DiscoveryNodes> nodesInCluster) {
return Arrays.asList(
new RestNoopBulkAction(settings, restController),
new RestNoopSearchAction(settings, restController));
}
}

View File

@ -33,6 +33,7 @@ import org.elasticsearch.action.update.UpdateRequestBuilder;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
public class NoopBulkRequestBuilder extends ActionRequestBuilder<BulkRequest, BulkResponse, NoopBulkRequestBuilder>
implements WriteRequestBuilder<NoopBulkRequestBuilder> {
@ -95,17 +96,17 @@ public class NoopBulkRequestBuilder extends ActionRequestBuilder<BulkRequest, Bu
/**
* Adds framed data in binary format
*/
public NoopBulkRequestBuilder add(byte[] data, int from, int length) throws Exception {
request.add(data, from, length, null, null);
public NoopBulkRequestBuilder add(byte[] data, int from, int length, XContentType xContentType) throws Exception {
request.add(data, from, length, null, null, xContentType);
return this;
}
/**
* Adds framed data in binary format
*/
public NoopBulkRequestBuilder add(byte[] data, int from, int length, @Nullable String defaultIndex, @Nullable String defaultType)
throws Exception {
request.add(data, from, length, defaultIndex, defaultType);
public NoopBulkRequestBuilder add(byte[] data, int from, int length, @Nullable String defaultIndex, @Nullable String defaultType,
XContentType xContentType) throws Exception {
request.add(data, from, length, defaultIndex, defaultType, xContentType);
return this;
}
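
Both this builder change and the TransportClientBenchmark change above make callers state the content type of raw source bytes explicitly instead of relying on detection. A minimal sketch of the new calling convention, assuming the usual IndexRequest(index, type) constructor; the index name, type and JSON body are placeholders, not taken from this commit:

---------------------------------------------------------------------------
import java.nio.charset.StandardCharsets;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.common.xcontent.XContentType;

public class ExplicitXContentTypeExample {
    // "index", "type" and the JSON body are placeholders used only for illustration
    static IndexRequest jsonIndexRequest() {
        byte[] source = "{\"field\":\"value\"}".getBytes(StandardCharsets.UTF_8);
        // the caller now states explicitly that the raw bytes are JSON
        return new IndexRequest("index", "type").source(source, XContentType.JSON);
    }
}
---------------------------------------------------------------------------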

View File

@ -18,8 +18,8 @@
*/
package org.elasticsearch.plugin.noop.action.bulk;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkShardRequest;
@ -28,7 +28,6 @@ import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.Requests;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.shard.ShardId;
@ -47,7 +46,6 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT;
import static org.elasticsearch.rest.RestStatus.OK;
public class RestNoopBulkAction extends BaseRestHandler {
@Inject
public RestNoopBulkAction(Settings settings, RestController controller) {
super(settings);
@ -75,7 +73,8 @@ public class RestNoopBulkAction extends BaseRestHandler {
}
bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT));
bulkRequest.setRefreshPolicy(request.param("refresh"));
bulkRequest.add(request.content(), defaultIndex, defaultType, defaultRouting, defaultFields, null, defaultPipeline, null, true);
bulkRequest.add(request.content(), defaultIndex, defaultType, defaultRouting, defaultFields, null, defaultPipeline, null, true,
request.getXContentType());
// short circuit the call to the transport layer
return channel -> {
@ -91,7 +90,7 @@ public class RestNoopBulkAction extends BaseRestHandler {
private final RestRequest request;
public BulkRestBuilderListener(RestChannel channel, RestRequest request) {
BulkRestBuilderListener(RestChannel channel, RestRequest request) {
super(channel);
this.request = request;
}
@ -103,9 +102,7 @@ public class RestNoopBulkAction extends BaseRestHandler {
builder.field(Fields.ERRORS, false);
builder.startArray(Fields.ITEMS);
for (int idx = 0; idx < bulkRequest.numberOfActions(); idx++) {
builder.startObject();
ITEM_RESPONSE.toXContent(builder, request);
builder.endObject();
}
builder.endArray();
builder.endObject();

View File

@ -20,7 +20,6 @@ package org.elasticsearch.plugin.noop.action.search;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@ -33,8 +32,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET;
import static org.elasticsearch.rest.RestRequest.Method.POST;
public class RestNoopSearchAction extends BaseRestHandler {
@Inject
public RestNoopSearchAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(GET, "/_noop_search", this);

View File

@ -0,0 +1,41 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
apply plugin: 'elasticsearch.build'
apply plugin: 'elasticsearch.rest-test'
group = 'org.elasticsearch.client'
dependencies {
compile "org.elasticsearch:elasticsearch:${version}"
compile "org.elasticsearch.client:rest:${version}"
testCompile "org.elasticsearch.client:test:${version}"
testCompile "org.elasticsearch.test:framework:${version}"
testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
testCompile "junit:junit:${versions.junit}"
testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}"
}
dependencyLicenses {
// Don't check licenses for dependencies that are part of the elasticsearch project
// But any other dependency should have its license/notice/sha1
dependencies = project.configurations.runtime.fileCollection {
it.group.startsWith('org.elasticsearch') == false
}
}

View File

@ -17,33 +17,37 @@
* under the License.
*/
package org.elasticsearch.painless.antlr;
package org.elasticsearch.client;
import org.antlr.v4.runtime.Token;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.Header;
import java.io.IOException;
import java.util.Objects;
/**
* Utility to figure out if a {@code /} is division or the start of a regex literal.
* High level REST client that wraps an instance of the low level {@link RestClient} and allows building requests and reading responses.
* The provided {@link RestClient} is externally built and closed.
*/
public class SlashStrategy {
public static boolean slashIsRegex(PainlessLexer lexer) {
EnhancedPainlessLexer realLexer = (EnhancedPainlessLexer) lexer;
Token lastToken = realLexer.getPreviousToken();
if (lastToken == null) {
public final class RestHighLevelClient {
private static final Log logger = LogFactory.getLog(RestHighLevelClient.class);
private final RestClient client;
public RestHighLevelClient(RestClient client) {
this.client = Objects.requireNonNull(client);
}
public boolean ping(Header... headers) {
try {
client.performRequest("HEAD", "/", headers);
return true;
}
switch (lastToken.getType()) {
case PainlessLexer.RBRACE:
case PainlessLexer.RP:
case PainlessLexer.OCTAL:
case PainlessLexer.HEX:
case PainlessLexer.INTEGER:
case PainlessLexer.DECIMAL:
case PainlessLexer.ID:
case PainlessLexer.DOTINTEGER:
case PainlessLexer.DOTID:
} catch(IOException exception) {
return false;
default:
return true;
}
}
}
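
As the class javadoc above notes, the high level client only wraps a low level RestClient that is built and closed by the caller. A minimal usage sketch, not part of this commit; the host and port are placeholders:

---------------------------------------------------------------------------
import org.apache.http.HttpHost;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;

public class RestHighLevelClientPingExample {
    public static void main(String[] args) throws Exception {
        // the low level client is built and closed by the caller; the high level client only wraps it
        RestClient lowLevelClient = RestClient.builder(new HttpHost("localhost", 9200, "http")).build();
        RestHighLevelClient client = new RestHighLevelClient(lowLevelClient);
        // ping() issues HEAD / and returns false only when that request throws an IOException
        System.out.println("cluster reachable: " + client.ping());
        lowLevelClient.close();
    }
}
---------------------------------------------------------------------------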

View File

@ -16,23 +16,33 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.rest.yaml.parser;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.test.rest.yaml.section.MatchAssertion;
package org.elasticsearch.client;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.junit.AfterClass;
import org.junit.Before;
import java.io.IOException;
/**
* Parser for match assert sections
*/
public class MatchParser implements ClientYamlTestFragmentParser<MatchAssertion> {
public abstract class ESRestHighLevelClientTestCase extends ESRestTestCase {
@Override
public MatchAssertion parse(ClientYamlTestSuiteParseContext parseContext) throws IOException, ClientYamlTestParseException {
XContentLocation location = parseContext.parser().getTokenLocation();
Tuple<String,Object> stringObjectTuple = parseContext.parseTuple();
return new MatchAssertion(location, stringObjectTuple.v1(), stringObjectTuple.v2());
private static RestHighLevelClient restHighLevelClient;
@Before
public void initHighLevelClient() throws IOException {
super.initClient();
if (restHighLevelClient == null) {
restHighLevelClient = new RestHighLevelClient(client());
}
}
@AfterClass
public static void cleanupClient() throws IOException {
restHighLevelClient = null;
}
protected static RestHighLevelClient highLevelClient() {
return restHighLevelClient;
}
}

View File

@ -17,14 +17,11 @@
* under the License.
*/
package org.elasticsearch.http;
package org.elasticsearch.client;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestRequest;
public interface HttpServerAdapter {
void dispatchRequest(RestRequest request, RestChannel channel, ThreadContext context);
public class MainActionIT extends ESRestHighLevelClientTestCase {
public void testPing() {
assertTrue(highLevelClient().ping());
}
}

View File

@ -0,0 +1,87 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.apache.http.Header;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
import org.mockito.ArgumentMatcher;
import org.mockito.internal.matchers.ArrayEquals;
import org.mockito.internal.matchers.VarargMatcher;
import java.io.IOException;
import java.net.SocketTimeoutException;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.argThat;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class RestHighLevelClientTests extends ESTestCase {
private RestClient restClient;
private RestHighLevelClient restHighLevelClient;
@Before
public void initClient() throws IOException {
restClient = mock(RestClient.class);
restHighLevelClient = new RestHighLevelClient(restClient);
}
public void testPing() throws IOException {
assertTrue(restHighLevelClient.ping());
verify(restClient).performRequest(eq("HEAD"), eq("/"), argThat(new HeadersVarargMatcher()));
}
public void testPingFailure() throws IOException {
when(restClient.performRequest(any(), any())).thenThrow(new IllegalStateException());
expectThrows(IllegalStateException.class, () -> restHighLevelClient.ping());
}
public void testPingFailed() throws IOException {
when(restClient.performRequest(any(), any())).thenThrow(new SocketTimeoutException());
assertFalse(restHighLevelClient.ping());
}
public void testPingWithHeaders() throws IOException {
Header[] headers = RestClientTestUtil.randomHeaders(random(), "Header");
assertTrue(restHighLevelClient.ping(headers));
verify(restClient).performRequest(eq("HEAD"), eq("/"), argThat(new HeadersVarargMatcher(headers)));
}
private class HeadersVarargMatcher extends ArgumentMatcher<Header[]> implements VarargMatcher {
private Header[] expectedHeaders;
HeadersVarargMatcher(Header... expectedHeaders) {
this.expectedHeaders = expectedHeaders;
}
@Override
public boolean matches(Object varargArgument) {
if (varargArgument instanceof Header[]) {
Header[] actualHeaders = (Header[]) varargArgument;
return new ArrayEquals(expectedHeaders).matches(actualHeaders);
}
return false;
}
}
}

View File

@ -33,7 +33,7 @@ group = 'org.elasticsearch.client'
dependencies {
compile "org.apache.httpcomponents:httpclient:${versions.httpclient}"
compile "org.apache.httpcomponents:httpcore:${versions.httpcore}"
compile "org.apache.httpcomponents:httpasyncclient:4.1.2"
compile "org.apache.httpcomponents:httpasyncclient:${versions.httpasyncclient}"
compile "org.apache.httpcomponents:httpcore-nio:${versions.httpcore}"
compile "commons-codec:commons-codec:${versions.commonscodec}"
compile "commons-logging:commons-logging:${versions.commonslogging}"
@ -43,6 +43,7 @@ dependencies {
testCompile "junit:junit:${versions.junit}"
testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}"
testCompile "org.elasticsearch:securemock:${versions.securemock}"
testCompile "org.elasticsearch:mocksocket:${versions.mocksocket}"
testCompile "org.codehaus.mojo:animal-sniffer-annotations:1.15"
signature "org.codehaus.mojo.signature:java17:1.0@signature"
}

View File

@ -53,7 +53,7 @@ interface HttpAsyncResponseConsumerFactory {
private final int bufferLimit;
public HeapBufferedResponseConsumerFactory(int bufferLimitBytes) {
HeapBufferedResponseConsumerFactory(int bufferLimitBytes) {
this.bufferLimit = bufferLimitBytes;
}

View File

@ -25,6 +25,7 @@ import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.HttpRequest;
import org.apache.http.HttpResponse;
import org.apache.http.client.AuthCache;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
import org.apache.http.client.methods.HttpHead;
@ -34,8 +35,11 @@ import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.methods.HttpTrace;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.concurrent.FutureCallback;
import org.apache.http.impl.auth.BasicScheme;
import org.apache.http.impl.client.BasicAuthCache;
import org.apache.http.impl.nio.client.CloseableHttpAsyncClient;
import org.apache.http.nio.client.methods.HttpAsyncMethods;
import org.apache.http.nio.protocol.HttpAsyncRequestProducer;
@ -49,6 +53,7 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
@ -91,7 +96,7 @@ public class RestClient implements Closeable {
private final long maxRetryTimeoutMillis;
private final String pathPrefix;
private final AtomicInteger lastHostIndex = new AtomicInteger(0);
private volatile Set<HttpHost> hosts;
private volatile HostTuple<Set<HttpHost>> hostTuple;
private final ConcurrentMap<HttpHost, DeadHostState> blacklist = new ConcurrentHashMap<>();
private final FailureListener failureListener;
@ -121,11 +126,13 @@ public class RestClient implements Closeable {
throw new IllegalArgumentException("hosts must not be null nor empty");
}
Set<HttpHost> httpHosts = new HashSet<>();
AuthCache authCache = new BasicAuthCache();
for (HttpHost host : hosts) {
Objects.requireNonNull(host, "host cannot be null");
httpHosts.add(host);
authCache.put(host, new BasicScheme());
}
this.hosts = Collections.unmodifiableSet(httpHosts);
this.hostTuple = new HostTuple<>(Collections.unmodifiableSet(httpHosts), authCache);
this.blacklist.clear();
}
@ -282,29 +289,61 @@ public class RestClient implements Closeable {
public void performRequestAsync(String method, String endpoint, Map<String, String> params,
HttpEntity entity, HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory,
ResponseListener responseListener, Header... headers) {
URI uri = buildUri(pathPrefix, endpoint, params);
Objects.requireNonNull(params, "params must not be null");
Map<String, String> requestParams = new HashMap<>(params);
//ignore is a special parameter supported by the clients, shouldn't be sent to es
String ignoreString = requestParams.remove("ignore");
Set<Integer> ignoreErrorCodes;
if (ignoreString == null) {
if (HttpHead.METHOD_NAME.equals(method)) {
//404 never causes error if returned for a HEAD request
ignoreErrorCodes = Collections.singleton(404);
} else {
ignoreErrorCodes = Collections.emptySet();
}
} else {
String[] ignoresArray = ignoreString.split(",");
ignoreErrorCodes = new HashSet<>();
if (HttpHead.METHOD_NAME.equals(method)) {
//404 never causes error if returned for a HEAD request
ignoreErrorCodes.add(404);
}
for (String ignoreCode : ignoresArray) {
try {
ignoreErrorCodes.add(Integer.valueOf(ignoreCode));
} catch (NumberFormatException e) {
throw new IllegalArgumentException("ignore value should be a number, found [" + ignoreString + "] instead", e);
}
}
}
URI uri = buildUri(pathPrefix, endpoint, requestParams);
HttpRequestBase request = createHttpRequest(method, uri, entity);
setHeaders(request, headers);
FailureTrackingResponseListener failureTrackingResponseListener = new FailureTrackingResponseListener(responseListener);
long startTime = System.nanoTime();
performRequestAsync(startTime, nextHost().iterator(), request, httpAsyncResponseConsumerFactory, failureTrackingResponseListener);
performRequestAsync(startTime, nextHost(), request, ignoreErrorCodes, httpAsyncResponseConsumerFactory,
failureTrackingResponseListener);
}
private void performRequestAsync(final long startTime, final Iterator<HttpHost> hosts, final HttpRequestBase request,
private void performRequestAsync(final long startTime, final HostTuple<Iterator<HttpHost>> hostTuple, final HttpRequestBase request,
final Set<Integer> ignoreErrorCodes,
final HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory,
final FailureTrackingResponseListener listener) {
final HttpHost host = hosts.next();
final HttpHost host = hostTuple.hosts.next();
//we stream the request body if the entity allows for it
HttpAsyncRequestProducer requestProducer = HttpAsyncMethods.create(host, request);
HttpAsyncResponseConsumer<HttpResponse> asyncResponseConsumer = httpAsyncResponseConsumerFactory.createHttpAsyncResponseConsumer();
client.execute(requestProducer, asyncResponseConsumer, new FutureCallback<HttpResponse>() {
final HttpAsyncRequestProducer requestProducer = HttpAsyncMethods.create(host, request);
final HttpAsyncResponseConsumer<HttpResponse> asyncResponseConsumer =
httpAsyncResponseConsumerFactory.createHttpAsyncResponseConsumer();
final HttpClientContext context = HttpClientContext.create();
context.setAuthCache(hostTuple.authCache);
client.execute(requestProducer, asyncResponseConsumer, context, new FutureCallback<HttpResponse>() {
@Override
public void completed(HttpResponse httpResponse) {
try {
RequestLogger.logResponse(logger, request, host, httpResponse);
int statusCode = httpResponse.getStatusLine().getStatusCode();
Response response = new Response(request.getRequestLine(), host, httpResponse);
if (isSuccessfulResponse(request.getMethod(), statusCode)) {
if (isSuccessfulResponse(statusCode) || ignoreErrorCodes.contains(response.getStatusLine().getStatusCode())) {
onResponse(host);
listener.onSuccess(response);
} else {
@ -312,7 +351,7 @@ public class RestClient implements Closeable {
if (isRetryStatus(statusCode)) {
//mark host dead and retry against next one
onFailure(host);
retryIfPossible(responseException, hosts, request);
retryIfPossible(responseException);
} else {
//mark host alive and don't retry, as the error should be a request problem
onResponse(host);
@ -329,14 +368,14 @@ public class RestClient implements Closeable {
try {
RequestLogger.logFailedRequest(logger, request, host, failure);
onFailure(host);
retryIfPossible(failure, hosts, request);
retryIfPossible(failure);
} catch(Exception e) {
listener.onDefinitiveFailure(e);
}
}
private void retryIfPossible(Exception exception, Iterator<HttpHost> hosts, HttpRequestBase request) {
if (hosts.hasNext()) {
private void retryIfPossible(Exception exception) {
if (hostTuple.hosts.hasNext()) {
//in case we are retrying, check whether maxRetryTimeout has been reached
long timeElapsedMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime);
long timeout = maxRetryTimeoutMillis - timeElapsedMillis;
@ -347,7 +386,7 @@ public class RestClient implements Closeable {
} else {
listener.trackFailure(exception);
request.reset();
performRequestAsync(startTime, hosts, request, httpAsyncResponseConsumerFactory, listener);
performRequestAsync(startTime, hostTuple, request, ignoreErrorCodes, httpAsyncResponseConsumerFactory, listener);
}
} else {
listener.onDefinitiveFailure(exception);
@ -385,17 +424,18 @@ public class RestClient implements Closeable {
* The iterator returned will never be empty. In case there are no healthy hosts available, or dead ones to be retried,
* one dead host gets returned so that it can be retried.
*/
private Iterable<HttpHost> nextHost() {
private HostTuple<Iterator<HttpHost>> nextHost() {
final HostTuple<Set<HttpHost>> hostTuple = this.hostTuple;
Collection<HttpHost> nextHosts = Collections.emptySet();
do {
Set<HttpHost> filteredHosts = new HashSet<>(hosts);
Set<HttpHost> filteredHosts = new HashSet<>(hostTuple.hosts);
for (Map.Entry<HttpHost, DeadHostState> entry : blacklist.entrySet()) {
if (System.nanoTime() - entry.getValue().getDeadUntilNanos() < 0) {
filteredHosts.remove(entry.getKey());
}
}
if (filteredHosts.isEmpty()) {
//last resort: if there are no good hosts to use, return a single dead one, the one that's closest to being retried
//last resort: if there are no good host to use, return a single dead one, the one that's closest to being retried
List<Map.Entry<HttpHost, DeadHostState>> sortedHosts = new ArrayList<>(blacklist.entrySet());
if (sortedHosts.size() > 0) {
Collections.sort(sortedHosts, new Comparator<Map.Entry<HttpHost, DeadHostState>>() {
@ -414,7 +454,7 @@ public class RestClient implements Closeable {
nextHosts = rotatedHosts;
}
} while(nextHosts.isEmpty());
return nextHosts;
return new HostTuple<>(nextHosts.iterator(), hostTuple.authCache);
}
/**
@ -452,8 +492,8 @@ public class RestClient implements Closeable {
client.close();
}
private static boolean isSuccessfulResponse(String method, int statusCode) {
return statusCode < 300 || (HttpHead.METHOD_NAME.equals(method) && statusCode == 404);
private static boolean isSuccessfulResponse(int statusCode) {
return statusCode < 300;
}
private static boolean isRetryStatus(int statusCode) {
@ -510,7 +550,6 @@ public class RestClient implements Closeable {
}
private static URI buildUri(String pathPrefix, String path, Map<String, String> params) {
Objects.requireNonNull(params, "params must not be null");
Objects.requireNonNull(path, "path must not be null");
try {
String fullPath;
@ -657,4 +696,18 @@ public class RestClient implements Closeable {
}
}
/**
* {@code HostTuple} enables the {@linkplain HttpHost}s and {@linkplain AuthCache} to be set together in a thread
* safe, volatile way.
*/
private static class HostTuple<T> {
public final T hosts;
public final AuthCache authCache;
HostTuple(final T hosts, final AuthCache authCache) {
this.hosts = hosts;
this.authCache = authCache;
}
}
}
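
The comments in performRequestAsync above describe the client-side "ignore" parameter: a comma-separated list of response codes that should come back as regular responses rather than ResponseExceptions, with 404 always tolerated for HEAD requests. A usage sketch, with a placeholder endpoint that is not part of this commit:

---------------------------------------------------------------------------
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class IgnoreParameterExample {
    // "/index/doc/1" is a placeholder endpoint used only for illustration
    static Response getTolerating404(RestClient restClient) throws IOException {
        Map<String, String> params = Collections.singletonMap("ignore", "404");
        // with ignore=404 a missing document comes back as a plain Response instead of a ResponseException
        return restClient.performRequest("GET", "/index/doc/1", params);
    }
}
---------------------------------------------------------------------------

The HostTuple holder added at the end of the file is the design choice that lets the host set and its matching AuthCache be replaced together through a single volatile field, so preemptive authentication always sees a consistent pair.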

View File

@ -28,6 +28,8 @@ import org.apache.http.impl.nio.client.CloseableHttpAsyncClient;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.apache.http.nio.conn.SchemeIOSessionStrategy;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Objects;
/**
@ -177,7 +179,12 @@ public final class RestClientBuilder {
if (failureListener == null) {
failureListener = new RestClient.FailureListener();
}
CloseableHttpAsyncClient httpClient = createHttpClient();
CloseableHttpAsyncClient httpClient = AccessController.doPrivileged(new PrivilegedAction<CloseableHttpAsyncClient>() {
@Override
public CloseableHttpAsyncClient run() {
return createHttpClient();
}
});
RestClient restClient = new RestClient(httpClient, maxRetryTimeout, defaultHeaders, hosts, pathPrefix, failureListener);
httpClient.start();
return restClient;

View File

@ -24,6 +24,7 @@ import com.sun.net.httpserver.HttpHandler;
import com.sun.net.httpserver.HttpServer;
import org.apache.http.HttpHost;
import org.codehaus.mojo.animal_sniffer.IgnoreJRERequirement;
import org.elasticsearch.mocksocket.MockHttpServer;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
@ -80,7 +81,7 @@ public class RestClientMultipleHostsIntegTests extends RestClientTestCase {
}
private static HttpServer createHttpServer() throws Exception {
HttpServer httpServer = HttpServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0);
HttpServer httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0);
httpServer.start();
//returns a different status code depending on the path
for (int statusCode : getAllStatusCodes()) {

View File

@ -26,8 +26,10 @@ import org.apache.http.HttpResponse;
import org.apache.http.ProtocolVersion;
import org.apache.http.StatusLine;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.concurrent.FutureCallback;
import org.apache.http.conn.ConnectTimeoutException;
import org.apache.http.impl.auth.BasicScheme;
import org.apache.http.impl.nio.client.CloseableHttpAsyncClient;
import org.apache.http.message.BasicHttpResponse;
import org.apache.http.message.BasicStatusLine;
@ -73,13 +75,15 @@ public class RestClientMultipleHostsTests extends RestClientTestCase {
public void createRestClient() throws IOException {
CloseableHttpAsyncClient httpClient = mock(CloseableHttpAsyncClient.class);
when(httpClient.<HttpResponse>execute(any(HttpAsyncRequestProducer.class), any(HttpAsyncResponseConsumer.class),
any(FutureCallback.class))).thenAnswer(new Answer<Future<HttpResponse>>() {
any(HttpClientContext.class), any(FutureCallback.class))).thenAnswer(new Answer<Future<HttpResponse>>() {
@Override
public Future<HttpResponse> answer(InvocationOnMock invocationOnMock) throws Throwable {
HttpAsyncRequestProducer requestProducer = (HttpAsyncRequestProducer) invocationOnMock.getArguments()[0];
HttpUriRequest request = (HttpUriRequest)requestProducer.generateRequest();
HttpHost httpHost = requestProducer.getTarget();
FutureCallback<HttpResponse> futureCallback = (FutureCallback<HttpResponse>) invocationOnMock.getArguments()[2];
HttpClientContext context = (HttpClientContext) invocationOnMock.getArguments()[2];
assertThat(context.getAuthCache().get(httpHost), instanceOf(BasicScheme.class));
FutureCallback<HttpResponse> futureCallback = (FutureCallback<HttpResponse>) invocationOnMock.getArguments()[3];
//return the desired status code or exception depending on the path
if (request.getURI().getPath().equals("/soe")) {
futureCallback.failed(new SocketTimeoutException(httpHost.toString()));

View File

@ -26,9 +26,14 @@ import com.sun.net.httpserver.HttpServer;
import org.apache.http.Consts;
import org.apache.http.Header;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.apache.http.util.EntityUtils;
import org.codehaus.mojo.animal_sniffer.IgnoreJRERequirement;
import org.elasticsearch.mocksocket.MockHttpServer;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@ -39,7 +44,6 @@ import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@ -48,8 +52,10 @@ import java.util.Set;
import static org.elasticsearch.client.RestClientTestUtil.getAllStatusCodes;
import static org.elasticsearch.client.RestClientTestUtil.getHttpMethods;
import static org.elasticsearch.client.RestClientTestUtil.randomStatusCode;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
/**
@ -67,27 +73,14 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
@BeforeClass
public static void startHttpServer() throws Exception {
String pathPrefixWithoutLeadingSlash;
if (randomBoolean()) {
pathPrefixWithoutLeadingSlash = "testPathPrefix/" + randomAsciiOfLengthBetween(1, 5);
pathPrefix = "/" + pathPrefixWithoutLeadingSlash;
} else {
pathPrefix = pathPrefixWithoutLeadingSlash = "";
}
pathPrefix = randomBoolean() ? "/testPathPrefix/" + randomAsciiOfLengthBetween(1, 5) : "";
httpServer = createHttpServer();
int numHeaders = randomIntBetween(0, 5);
defaultHeaders = generateHeaders("Header-default", "Header-array", numHeaders);
RestClientBuilder restClientBuilder = RestClient.builder(
new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort())).setDefaultHeaders(defaultHeaders);
if (pathPrefix.length() > 0) {
restClientBuilder.setPathPrefix((randomBoolean() ? "/" : "") + pathPrefixWithoutLeadingSlash);
}
restClient = restClientBuilder.build();
defaultHeaders = RestClientTestUtil.randomHeaders(getRandom(), "Header-default");
restClient = createRestClient(false, true);
}
private static HttpServer createHttpServer() throws Exception {
HttpServer httpServer = HttpServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0);
HttpServer httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0);
httpServer.start();
//returns a different status code depending on the path
for (int statusCode : getAllStatusCodes()) {
@ -131,6 +124,35 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
}
}
private static RestClient createRestClient(final boolean useAuth, final boolean usePreemptiveAuth) {
// provide the username/password for every request
final BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials("user", "pass"));
final RestClientBuilder restClientBuilder = RestClient.builder(
new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort())).setDefaultHeaders(defaultHeaders);
if (pathPrefix.length() > 0) {
// sometimes cut off the leading slash
restClientBuilder.setPathPrefix(randomBoolean() ? pathPrefix.substring(1) : pathPrefix);
}
if (useAuth) {
restClientBuilder.setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() {
@Override
public HttpAsyncClientBuilder customizeHttpClient(final HttpAsyncClientBuilder httpClientBuilder) {
if (usePreemptiveAuth == false) {
// disable preemptive auth by ignoring any authcache
httpClientBuilder.disableAuthCaching();
}
return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
}
});
}
return restClientBuilder.build();
}
@AfterClass
public static void stopHttpServers() throws IOException {
restClient.close();
@ -150,42 +172,25 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
if (method.equals("HEAD") == false) {
standardHeaders.add("Content-length");
}
final int numHeaders = randomIntBetween(1, 5);
final Header[] headers = generateHeaders("Header", "Header-array", numHeaders);
final Map<String, List<String>> expectedHeaders = new HashMap<>();
addHeaders(expectedHeaders, defaultHeaders, headers);
final Header[] requestHeaders = RestClientTestUtil.randomHeaders(getRandom(), "Header");
final int statusCode = randomStatusCode(getRandom());
Response esResponse;
try {
esResponse = restClient.performRequest(method, "/" + statusCode, Collections.<String, String>emptyMap(), headers);
esResponse = restClient.performRequest(method, "/" + statusCode, Collections.<String, String>emptyMap(), requestHeaders);
} catch(ResponseException e) {
esResponse = e.getResponse();
}
assertEquals(method, esResponse.getRequestLine().getMethod());
assertEquals(statusCode, esResponse.getStatusLine().getStatusCode());
assertEquals((pathPrefix.length() > 0 ? pathPrefix : "") + "/" + statusCode, esResponse.getRequestLine().getUri());
assertEquals(pathPrefix + "/" + statusCode, esResponse.getRequestLine().getUri());
assertHeaders(defaultHeaders, requestHeaders, esResponse.getHeaders(), standardHeaders);
for (final Header responseHeader : esResponse.getHeaders()) {
final String name = responseHeader.getName();
final String value = responseHeader.getValue();
if (name.startsWith("Header")) {
final List<String> values = expectedHeaders.get(name);
assertNotNull("found response header [" + name + "] that wasn't originally sent: " + value, values);
assertTrue("found incorrect response header [" + name + "]: " + value, values.remove(value));
// we've collected them all
if (values.isEmpty()) {
expectedHeaders.remove(name);
}
} else {
String name = responseHeader.getName();
if (name.startsWith("Header") == false) {
assertTrue("unknown header was returned " + name, standardHeaders.remove(name));
}
}
assertTrue("some headers that were sent weren't returned: " + expectedHeaders, expectedHeaders.isEmpty());
assertTrue("some expected standard headers weren't returned: " + standardHeaders, standardHeaders.isEmpty());
}
}
@ -208,7 +213,41 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
bodyTest("GET");
}
private void bodyTest(String method) throws IOException {
/**
* Verify that credentials are sent on the first request with preemptive auth enabled (default when provided with credentials).
*/
public void testPreemptiveAuthEnabled() throws IOException {
final String[] methods = { "POST", "PUT", "GET", "DELETE" };
try (RestClient restClient = createRestClient(true, true)) {
for (final String method : methods) {
final Response response = bodyTest(restClient, method);
assertThat(response.getHeader("Authorization"), startsWith("Basic"));
}
}
}
/**
* Verify that credentials are <em>not</em> sent on the first request with preemptive auth disabled.
*/
public void testPreemptiveAuthDisabled() throws IOException {
final String[] methods = { "POST", "PUT", "GET", "DELETE" };
try (RestClient restClient = createRestClient(true, false)) {
for (final String method : methods) {
final Response response = bodyTest(restClient, method);
assertThat(response.getHeader("Authorization"), nullValue());
}
}
}
private Response bodyTest(final String method) throws IOException {
return bodyTest(restClient, method);
}
private Response bodyTest(final RestClient restClient, final String method) throws IOException {
String requestBody = "{ \"field\": \"value\" }";
StringEntity entity = new StringEntity(requestBody);
int statusCode = randomStatusCode(getRandom());
@ -220,7 +259,9 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
}
assertEquals(method, esResponse.getRequestLine().getMethod());
assertEquals(statusCode, esResponse.getStatusLine().getStatusCode());
assertEquals((pathPrefix.length() > 0 ? pathPrefix : "") + "/" + statusCode, esResponse.getRequestLine().getUri());
assertEquals(pathPrefix + "/" + statusCode, esResponse.getRequestLine().getUri());
assertEquals(requestBody, EntityUtils.toString(esResponse.getEntity()));
return esResponse;
}
}
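
The `createRestClient` helper above wires credentials through the `HttpClientConfigCallback`. As a
minimal sketch of the same setup outside the test harness, assuming a placeholder host, port and
credentials, a client with (default) preemptive basic auth could be built like this:

-------------------------------------
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;

// credentials provider holding the username/password used for every request
final BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials("user", "pass"));

RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200))
        .setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() {
            @Override
            public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) {
                // preemptive auth stays enabled by default; call disableAuthCaching() to turn it off
                return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
            }
        })
        .build();
// remember to close the client when done: restClient.close()
-------------------------------------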


@ -34,10 +34,12 @@ import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpTrace;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.concurrent.FutureCallback;
import org.apache.http.conn.ConnectTimeoutException;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.auth.BasicScheme;
import org.apache.http.impl.nio.client.CloseableHttpAsyncClient;
import org.apache.http.message.BasicHttpResponse;
import org.apache.http.message.BasicStatusLine;
@ -56,7 +58,6 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Future;
@ -70,7 +71,6 @@ import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@ -98,11 +98,13 @@ public class RestClientSingleHostTests extends RestClientTestCase {
public void createRestClient() throws IOException {
httpClient = mock(CloseableHttpAsyncClient.class);
when(httpClient.<HttpResponse>execute(any(HttpAsyncRequestProducer.class), any(HttpAsyncResponseConsumer.class),
any(FutureCallback.class))).thenAnswer(new Answer<Future<HttpResponse>>() {
any(HttpClientContext.class), any(FutureCallback.class))).thenAnswer(new Answer<Future<HttpResponse>>() {
@Override
public Future<HttpResponse> answer(InvocationOnMock invocationOnMock) throws Throwable {
HttpAsyncRequestProducer requestProducer = (HttpAsyncRequestProducer) invocationOnMock.getArguments()[0];
FutureCallback<HttpResponse> futureCallback = (FutureCallback<HttpResponse>) invocationOnMock.getArguments()[2];
HttpClientContext context = (HttpClientContext) invocationOnMock.getArguments()[2];
assertThat(context.getAuthCache().get(httpHost), instanceOf(BasicScheme.class));
FutureCallback<HttpResponse> futureCallback = (FutureCallback<HttpResponse>) invocationOnMock.getArguments()[3];
HttpUriRequest request = (HttpUriRequest)requestProducer.generateRequest();
//return the desired status code or exception depending on the path
if (request.getURI().getPath().equals("/soe")) {
@ -131,9 +133,7 @@ public class RestClientSingleHostTests extends RestClientTestCase {
}
});
int numHeaders = randomIntBetween(0, 3);
defaultHeaders = generateHeaders("Header-default", "Header-array", numHeaders);
defaultHeaders = RestClientTestUtil.randomHeaders(getRandom(), "Header-default");
httpHost = new HttpHost("localhost", 9200);
failureListener = new HostsTrackingFailureListener();
restClient = new RestClient(httpClient, 10000, defaultHeaders, new HttpHost[]{httpHost}, null, failureListener);
@ -160,7 +160,7 @@ public class RestClientSingleHostTests extends RestClientTestCase {
for (String httpMethod : getHttpMethods()) {
HttpUriRequest expectedRequest = performRandomRequest(httpMethod);
verify(httpClient, times(++times)).<HttpResponse>execute(requestArgumentCaptor.capture(),
any(HttpAsyncResponseConsumer.class), any(FutureCallback.class));
any(HttpAsyncResponseConsumer.class), any(HttpClientContext.class), any(FutureCallback.class));
HttpUriRequest actualRequest = (HttpUriRequest)requestArgumentCaptor.getValue().generateRequest();
assertEquals(expectedRequest.getURI(), actualRequest.getURI());
assertEquals(expectedRequest.getClass(), actualRequest.getClass());
@ -220,23 +220,45 @@ public class RestClientSingleHostTests extends RestClientTestCase {
*/
public void testErrorStatusCodes() throws IOException {
for (String method : getHttpMethods()) {
Set<Integer> expectedIgnores = new HashSet<>();
String ignoreParam = "";
if (HttpHead.METHOD_NAME.equals(method)) {
expectedIgnores.add(404);
}
if (randomBoolean()) {
int numIgnores = randomIntBetween(1, 3);
for (int i = 0; i < numIgnores; i++) {
Integer code = randomFrom(getAllErrorStatusCodes());
expectedIgnores.add(code);
ignoreParam += code;
if (i < numIgnores - 1) {
ignoreParam += ",";
}
}
}
//error status codes should cause an exception to be thrown
for (int errorStatusCode : getAllErrorStatusCodes()) {
try {
Response response = performRequest(method, "/" + errorStatusCode);
if (method.equals("HEAD") && errorStatusCode == 404) {
//no exception gets thrown although we got a 404
assertThat(response.getStatusLine().getStatusCode(), equalTo(errorStatusCode));
Map<String, String> params;
if (ignoreParam.isEmpty()) {
params = Collections.emptyMap();
} else {
params = Collections.singletonMap("ignore", ignoreParam);
}
Response response = performRequest(method, "/" + errorStatusCode, params);
if (expectedIgnores.contains(errorStatusCode)) {
//no exception gets thrown although we got an error status code, as it was configured to be ignored
assertEquals(errorStatusCode, response.getStatusLine().getStatusCode());
} else {
fail("request should have failed");
}
} catch(ResponseException e) {
if (method.equals("HEAD") && errorStatusCode == 404) {
if (expectedIgnores.contains(errorStatusCode)) {
throw e;
}
assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(errorStatusCode));
assertEquals(errorStatusCode, e.getResponse().getStatusLine().getStatusCode());
}
if (errorStatusCode <= 500) {
if (errorStatusCode <= 500 || expectedIgnores.contains(errorStatusCode)) {
failureListener.assertNotCalled();
} else {
failureListener.assertCalled(httpHost);
@ -339,44 +361,26 @@ public class RestClientSingleHostTests extends RestClientTestCase {
*/
public void testHeaders() throws IOException {
for (String method : getHttpMethods()) {
final int numHeaders = randomIntBetween(1, 5);
final Header[] headers = generateHeaders("Header", null, numHeaders);
final Map<String, List<String>> expectedHeaders = new HashMap<>();
addHeaders(expectedHeaders, defaultHeaders, headers);
final Header[] requestHeaders = RestClientTestUtil.randomHeaders(getRandom(), "Header");
final int statusCode = randomStatusCode(getRandom());
Response esResponse;
try {
esResponse = restClient.performRequest(method, "/" + statusCode, headers);
esResponse = restClient.performRequest(method, "/" + statusCode, requestHeaders);
} catch(ResponseException e) {
esResponse = e.getResponse();
}
assertThat(esResponse.getStatusLine().getStatusCode(), equalTo(statusCode));
for (Header responseHeader : esResponse.getHeaders()) {
final String name = responseHeader.getName();
final String value = responseHeader.getValue();
final List<String> values = expectedHeaders.get(name);
assertNotNull("found response header [" + name + "] that wasn't originally sent: " + value, values);
assertTrue("found incorrect response header [" + name + "]: " + value, values.remove(value));
// we've collected them all
if (values.isEmpty()) {
expectedHeaders.remove(name);
}
}
assertTrue("some headers that were sent weren't returned " + expectedHeaders, expectedHeaders.isEmpty());
assertHeaders(defaultHeaders, requestHeaders, esResponse.getHeaders(), Collections.<String>emptySet());
}
}
private HttpUriRequest performRandomRequest(String method) throws Exception {
String uriAsString = "/" + randomStatusCode(getRandom());
URIBuilder uriBuilder = new URIBuilder(uriAsString);
Map<String, String> params = Collections.emptyMap();
final Map<String, String> params = new HashMap<>();
boolean hasParams = randomBoolean();
if (hasParams) {
int numParams = randomIntBetween(1, 3);
params = new HashMap<>(numParams);
for (int i = 0; i < numParams; i++) {
String paramKey = "param-" + i;
String paramValue = randomAsciiOfLengthBetween(3, 10);
@ -384,6 +388,14 @@ public class RestClientSingleHostTests extends RestClientTestCase {
uriBuilder.addParameter(paramKey, paramValue);
}
}
if (randomBoolean()) {
//randomly add some ignore parameter, which doesn't get sent as part of the request
String ignore = Integer.toString(randomFrom(RestClientTestUtil.getAllErrorStatusCodes()));
if (randomBoolean()) {
ignore += "," + Integer.toString(randomFrom(RestClientTestUtil.getAllErrorStatusCodes()));
}
params.put("ignore", ignore);
}
URI uri = uriBuilder.build();
HttpUriRequest request;
@ -424,10 +436,9 @@ public class RestClientSingleHostTests extends RestClientTestCase {
}
Header[] headers = new Header[0];
final int numHeaders = randomIntBetween(1, 5);
final Set<String> uniqueNames = new HashSet<>(numHeaders);
final Set<String> uniqueNames = new HashSet<>();
if (randomBoolean()) {
headers = generateHeaders("Header", "Header-array", numHeaders);
headers = RestClientTestUtil.randomHeaders(getRandom(), "Header");
for (Header header : headers) {
request.addHeader(header);
uniqueNames.add(header.getName());
@ -455,16 +466,25 @@ public class RestClientSingleHostTests extends RestClientTestCase {
}
private Response performRequest(String method, String endpoint, Header... headers) throws IOException {
switch(randomIntBetween(0, 2)) {
return performRequest(method, endpoint, Collections.<String, String>emptyMap(), headers);
}
private Response performRequest(String method, String endpoint, Map<String, String> params, Header... headers) throws IOException {
int methodSelector;
if (params.isEmpty()) {
methodSelector = randomIntBetween(0, 2);
} else {
methodSelector = randomIntBetween(1, 2);
}
switch(methodSelector) {
case 0:
return restClient.performRequest(method, endpoint, headers);
case 1:
return restClient.performRequest(method, endpoint, Collections.<String, String>emptyMap(), headers);
return restClient.performRequest(method, endpoint, params, headers);
case 2:
return restClient.performRequest(method, endpoint, Collections.<String, String>emptyMap(), (HttpEntity)null, headers);
return restClient.performRequest(method, endpoint, params, (HttpEntity)null, headers);
default:
throw new UnsupportedOperationException();
}
}
}
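
The `testErrorStatusCodes` change above exercises the `ignore` request parameter: status codes listed
there are returned as a regular `Response` instead of being thrown as a `ResponseException`. A short
usage sketch, assuming a placeholder host and endpoint:

-------------------------------------
import java.util.Collections;
import java.util.Map;
import org.apache.http.HttpHost;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)).build();
// do not treat 404 as an error; the Response is returned so the caller can inspect it
Map<String, String> params = Collections.singletonMap("ignore", "404");
Response response = restClient.performRequest("GET", "/index/type/1", params);
int status = response.getStatusLine().getStatusCode();
-------------------------------------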


@ -43,6 +43,7 @@ dependencies {
testCompile "junit:junit:${versions.junit}"
testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}"
testCompile "org.elasticsearch:securemock:${versions.securemock}"
testCompile "org.elasticsearch:mocksocket:${versions.mocksocket}"
testCompile "org.codehaus.mojo:animal-sniffer-annotations:1.15"
signature "org.codehaus.mojo.signature:java17:1.0@signature"
}


@ -1 +0,0 @@
fd13b1c033741d48291315c6370f7d475a42dccf


@ -0,0 +1 @@
2ef7b1cc34de149600f5e75bc2d5bf40de894e60


@ -35,6 +35,7 @@ import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientTestCase;
import org.elasticsearch.mocksocket.MockHttpServer;
import org.junit.After;
import org.junit.Before;
@ -141,7 +142,7 @@ public class ElasticsearchHostsSnifferTests extends RestClientTestCase {
}
private static HttpServer createHttpServer(final SniffResponse sniffResponse, final int sniffTimeoutMillis) throws IOException {
HttpServer httpServer = HttpServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0);
HttpServer httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0);
httpServer.createContext("/_nodes/http", new ResponseHandler(sniffTimeoutMillis, sniffResponse));
return httpServer;
}


@ -30,16 +30,19 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
import org.apache.http.Header;
import org.apache.http.message.BasicHeader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
@TestMethodProviders({
JUnit3MethodProvider.class
})
@ -53,70 +56,56 @@ import java.util.Set;
public abstract class RestClientTestCase extends RandomizedTest {
/**
* Create the specified number of {@link Header}s.
* <p>
* Generated header names will be the {@code baseName} plus its index or, rarely, the {@code arrayName} if it's supplied.
* Assert that the actual headers are the expected ones given the original default and request headers. Some headers can be ignored,
* for instance in case the http client is adding its own automatically.
*
* @param baseName The base name to use for all headers.
* @param arrayName The optional ({@code null}able) array name to use randomly.
* @param headers The number of headers to create.
* @return Never {@code null}.
* @param defaultHeaders the default headers set to the REST client instance
* @param requestHeaders the request headers sent with a particular request
* @param actualHeaders the actual headers as a result of the provided default and request headers
* @param ignoreHeaders header keys to be ignored, as they are part of neither the default nor the request headers, yet they
* will be part of the actual ones
*/
protected static Header[] generateHeaders(final String baseName, final String arrayName, final int headers) {
final Header[] generated = new Header[headers];
for (int i = 0; i < headers; i++) {
String headerName = baseName + i;
if (arrayName != null && rarely()) {
headerName = arrayName;
}
generated[i] = new BasicHeader(headerName, randomAsciiOfLengthBetween(3, 10));
protected static void assertHeaders(final Header[] defaultHeaders, final Header[] requestHeaders,
final Header[] actualHeaders, final Set<String> ignoreHeaders) {
final Map<String, List<String>> expectedHeaders = new HashMap<>();
final Set<String> requestHeaderKeys = new HashSet<>();
for (final Header header : requestHeaders) {
final String name = header.getName();
addValueToListEntry(expectedHeaders, name, header.getValue());
requestHeaderKeys.add(name);
}
return generated;
for (final Header defaultHeader : defaultHeaders) {
final String name = defaultHeader.getName();
if (requestHeaderKeys.contains(name) == false) {
addValueToListEntry(expectedHeaders, name, defaultHeader.getValue());
}
}
Set<String> actualIgnoredHeaders = new HashSet<>();
for (Header responseHeader : actualHeaders) {
final String name = responseHeader.getName();
if (ignoreHeaders.contains(name)) {
expectedHeaders.remove(name);
actualIgnoredHeaders.add(name);
continue;
}
final String value = responseHeader.getValue();
final List<String> values = expectedHeaders.get(name);
assertNotNull("found response header [" + name + "] that wasn't originally sent: " + value, values);
assertTrue("found incorrect response header [" + name + "]: " + value, values.remove(value));
if (values.isEmpty()) {
expectedHeaders.remove(name);
}
}
assertEquals("some headers meant to be ignored were not part of the actual headers", ignoreHeaders, actualIgnoredHeaders);
assertTrue("some headers that were sent weren't returned " + expectedHeaders, expectedHeaders.isEmpty());
}
/**
* Create a new {@link List} within the {@code map} if none exists for {@code name} or append to the existing list.
*
* @param map The map to manipulate.
* @param name The name to create/append the list for.
* @param value The value to add.
*/
private static void createOrAppendList(final Map<String, List<String>> map, final String name, final String value) {
private static void addValueToListEntry(final Map<String, List<String>> map, final String name, final String value) {
List<String> values = map.get(name);
if (values == null) {
values = new ArrayList<>();
map.put(name, values);
}
values.add(value);
}
/**
* Add the {@code headers} to the {@code map} so that related tests can more easily assert that they exist.
* <p>
* If both the {@code defaultHeaders} and {@code headers} contain the same {@link Header}, based on its
* {@linkplain Header#getName() name}, then this will only use the {@code Header}(s) from {@code headers}.
*
* @param map The map to build with name/value(s) pairs.
* @param defaultHeaders The headers to add to the map representing default headers.
* @param headers The headers to add to the map representing request-level headers.
* @see #createOrAppendList(Map, String, String)
*/
protected static void addHeaders(final Map<String, List<String>> map, final Header[] defaultHeaders, final Header[] headers) {
final Set<String> uniqueHeaders = new HashSet<>();
for (final Header header : headers) {
final String name = header.getName();
createOrAppendList(map, name, header.getValue());
uniqueHeaders.add(name);
}
for (final Header defaultHeader : defaultHeaders) {
final String name = defaultHeader.getName();
if (uniqueHeaders.contains(name) == false) {
createOrAppendList(map, name, defaultHeader.getValue());
}
}
}
}
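
A small sketch of what the new `assertHeaders` contract means, callable from a test that extends
`RestClientTestCase` (the header names and values here are made up for illustration): a request header
with the same name as a default header replaces it, so only the request value is expected back.

-------------------------------------
import java.util.Collections;
import org.apache.http.Header;
import org.apache.http.message.BasicHeader;

Header[] defaultHeaders = new Header[] { new BasicHeader("Header-default", "a"), new BasicHeader("Shared", "default") };
Header[] requestHeaders = new Header[] { new BasicHeader("Shared", "request") };
// what the server is expected to echo back: the untouched default plus the overriding request header
Header[] actualHeaders  = new Header[] { new BasicHeader("Header-default", "a"), new BasicHeader("Shared", "request") };
assertHeaders(defaultHeaders, requestHeaders, actualHeaders, Collections.<String>emptySet());
-------------------------------------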


@ -19,7 +19,11 @@
package org.elasticsearch.client;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import org.apache.http.Header;
import org.apache.http.message.BasicHeader;
import java.util.ArrayList;
import java.util.Arrays;
@ -55,7 +59,7 @@ final class RestClientTestUtil {
}
static int randomStatusCode(Random random) {
return RandomPicks.randomFrom(random, ALL_ERROR_STATUS_CODES);
return RandomPicks.randomFrom(random, ALL_STATUS_CODES);
}
static int randomOkStatusCode(Random random) {
@ -81,4 +85,23 @@ final class RestClientTestUtil {
static List<Integer> getAllStatusCodes() {
return ALL_STATUS_CODES;
}
/**
* Create a random number of {@link Header}s.
* Generated header names will either be the {@code baseName} plus its index, or exactly the provided {@code baseName}, so that
* we also test support for multiple headers with the same key and different values.
*/
static Header[] randomHeaders(Random random, final String baseName) {
int numHeaders = RandomNumbers.randomIntBetween(random, 0, 5);
final Header[] headers = new Header[numHeaders];
for (int i = 0; i < numHeaders; i++) {
String headerName = baseName;
//randomly exercise the code path that supports multiple headers with same key
if (random.nextBoolean()) {
headerName = headerName + i;
}
headers[i] = new BasicHeader(headerName, RandomStrings.randomAsciiOfLengthBetween(random, 3, 10));
}
return headers;
}
}


@ -22,6 +22,7 @@ package org.elasticsearch.transport.client;
import io.netty.util.ThreadDeathWatcher;
import io.netty.util.concurrent.GlobalEventExecutor;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.reindex.ReindexPlugin;
@ -36,41 +37,70 @@ import java.util.Collections;
import java.util.concurrent.TimeUnit;
/**
* A builder to create an instance of {@link TransportClient}
* This class pre-installs the
* A builder to create an instance of {@link TransportClient}. This class pre-installs the
* {@link Netty4Plugin},
* {@link ReindexPlugin},
* {@link PercolatorPlugin},
* and {@link MustachePlugin}
* for the client. These plugins are all elasticsearch core modules required.
* plugins for the client. These plugins are all the required modules for Elasticsearch.
*/
@SuppressWarnings({"unchecked","varargs"})
public class PreBuiltTransportClient extends TransportClient {
private static final Collection<Class<? extends Plugin>> PRE_INSTALLED_PLUGINS =
Collections.unmodifiableList(
Arrays.asList(
Netty4Plugin.class,
ReindexPlugin.class,
PercolatorPlugin.class,
MustachePlugin.class));
static {
// initialize Netty system properties before triggering any Netty class loads
initializeNetty();
}
/**
* Netty wants to do some unsafe things like use unsafe and replace a private field. This method disables these things by default, but
* can be overridden by setting the corresponding system properties.
*/
@SuppressForbidden(reason = "set system properties to configure Netty")
private static void initializeNetty() {
final String noUnsafeKey = "io.netty.noUnsafe";
final String noUnsafe = System.getProperty(noUnsafeKey);
if (noUnsafe == null) {
// disable Netty from using unsafe
// while permissions are needed to set this, if a security exception is thrown the permission needed can either be granted or
// the system property can be set directly before starting the JVM; therefore, we do not catch a security exception here
System.setProperty(noUnsafeKey, Boolean.toString(true));
}
final String noKeySetOptimizationKey = "io.netty.noKeySetOptimization";
final String noKeySetOptimization = System.getProperty(noKeySetOptimizationKey);
if (noKeySetOptimization == null) {
// disable Netty from replacing the selector key set
// while permissions are needed to set this, if a security exception is thrown the permission needed can either be granted or
// the system property can be set directly before starting the JVM; therefore, we do not catch a security exception here
System.setProperty(noKeySetOptimizationKey, Boolean.toString(true));
}
}
private static final Collection<Class<? extends Plugin>> PRE_INSTALLED_PLUGINS =
Collections.unmodifiableList(
Arrays.asList(
Netty4Plugin.class,
ReindexPlugin.class,
PercolatorPlugin.class,
MustachePlugin.class));
/**
* Creates a new transport client with pre-installed plugins.
*
* @param settings the settings passed to this transport client
* @param plugins an optional array of additional plugins to run with this client
* @param plugins an optional array of additional plugins to run with this client
*/
@SafeVarargs
public PreBuiltTransportClient(Settings settings, Class<? extends Plugin>... plugins) {
this(settings, Arrays.asList(plugins));
}
/**
* Creates a new transport client with pre-installed plugins.
*
* @param settings the settings passed to this transport client
* @param plugins a collection of additional plugins to run with this client
* @param plugins a collection of additional plugins to run with this client
*/
public PreBuiltTransportClient(Settings settings, Collection<Class<? extends Plugin>> plugins) {
this(settings, plugins, null);
@ -78,12 +108,15 @@ public class PreBuiltTransportClient extends TransportClient {
/**
* Creates a new transport client with pre-installed plugins.
* @param settings the settings passed to this transport client
* @param plugins a collection of additional plugins to run with this client
* @param hostFailureListener a failure listener that is invoked if a node is disconnected. This can be <code>null</code>
*
* @param settings the settings passed to this transport client
* @param plugins a collection of additional plugins to run with this client
* @param hostFailureListener a failure listener that is invoked if a node is disconnected; this can be <code>null</code>
*/
public PreBuiltTransportClient(Settings settings, Collection<Class<? extends Plugin>> plugins,
HostFailureListener hostFailureListener) {
public PreBuiltTransportClient(
Settings settings,
Collection<Class<? extends Plugin>> plugins,
HostFailureListener hostFailureListener) {
super(settings, Settings.EMPTY, addPlugins(plugins, PRE_INSTALLED_PLUGINS), hostFailureListener);
}
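
For context, a minimal usage sketch of the builder documented above. The cluster name, host and
port are placeholders, and `InetSocketTransportAddress` is assumed to be the 5.x transport address
class (it is not part of this change):

-------------------------------------
import java.net.InetAddress;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.transport.client.PreBuiltTransportClient;

Settings settings = Settings.builder().put("cluster.name", "my-cluster").build();
// the Netty4, reindex, percolator and mustache plugins listed above are pre-installed automatically
try (TransportClient client = new PreBuiltTransportClient(settings)) {
    client.addTransportAddress(new InetSocketTransportAddress(InetAddress.getLoopbackAddress(), 9300));
    // ... use the client ...
}
-------------------------------------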


@ -41,8 +41,6 @@ public class PreBuiltTransportClientTests extends RandomizedTest {
@Test
public void testPluginInstalled() {
// TODO: remove when Netty 4.1.5 is upgraded to Netty 4.1.6 including https://github.com/netty/netty/pull/5778
assumeFalse(Constants.JRE_IS_MINIMUM_JAVA9);
try (TransportClient client = new PreBuiltTransportClient(Settings.EMPTY)) {
Settings settings = client.settings();
assertEquals(Netty4Plugin.NETTY_TRANSPORT_NAME, NetworkModule.HTTP_DEFAULT_TYPE_SETTING.get(settings));


@ -94,6 +94,8 @@ dependencies {
exclude group: 'org.elasticsearch', module: 'elasticsearch'
}
}
testCompile 'com.google.jimfs:jimfs:1.1'
testCompile 'com.google.guava:guava:18.0'
}
if (isEclipse) {


@ -1 +0,0 @@
fd13b1c033741d48291315c6370f7d475a42dccf


@ -0,0 +1 @@
2ef7b1cc34de149600f5e75bc2d5bf40de894e60


@ -1 +0,0 @@
3a6fb7e75c9972559a78cf5cfc5a48a41a13ea40


@ -0,0 +1 @@
b88721371cfa2d7242bb5e52fe70861aa061c050


@ -1 +0,0 @@
005b73867bc12224946fc67fc8d49d9f5e698d7f


@ -0,0 +1 @@
71590ad45cee21249774e2f93e5eca66e446cef3


@ -1 +0,0 @@
eb63166c723b0b4b9fb5298fca232a2f6612ec34


@ -0,0 +1 @@
8bd44d50f9a6cdff9c7578ea39d524eb519e35ab


@ -1 +0,0 @@
770114e0188dd8b4f30e5878b4f6c8677cecf1be


@ -0,0 +1 @@
e0feb9281a7da7a7df62398ab0fc655d51f68fed


@ -1 +0,0 @@
f4eb0257e8419beaa9f84da6a51375fda4e491f2


@ -0,0 +1 @@
14698ecbca1437615ee31d412d0edd3440b4fccf


@ -1 +0,0 @@
c80ad16cd36c41012abb8a8bb1c7328c6d680b4a


@ -0,0 +1 @@
09dd516b847dcaf8da4e9096bf3189b0b3607aef


@ -1 +0,0 @@
070d4e370f4fe0b8a04b2bce5b4381201b0c783f


@ -0,0 +1 @@
68a8f986a0076ad784cbb20813b9465b94e4c846


@ -1 +0,0 @@
131d9a86f5943675493a85def0e692842f396458


@ -0,0 +1 @@
6d921c1242b608a4dcd0784e32bcd94097ad93cd


@ -1 +0,0 @@
385b2202036b50a764e4d2b032e21496b74a1c8e


@ -0,0 +1 @@
74d3cdf1bc863e3836b06f1865c970127cc15f26


@ -1 +0,0 @@
e8742a44ef4849a17d5e59ef36e9a52a8f2370c2


@ -0,0 +1 @@
dd13729c0b401e3df11bce0c343d1e00f07b9a19


@ -1 +0,0 @@
7ce2e4948fb66393a34f4200a6131cfde43e47bd


@ -0,0 +1 @@
ce27abe3490bb8ccbebd2eefcb68f42a609ca986


@ -1 +0,0 @@
6c1c385a597ce797b0049d9b2281b09593e1488a


@ -0,0 +1 @@
bd1978e3fdac2fadf1068828b0b1b534a56873c3


@ -1 +0,0 @@
fafaa22906c067e6894f9f2b18ad03ded98e2f38


@ -0,0 +1 @@
fb8fe41948fccf13b5dbb5d50441cac974544ade


@ -1 +0,0 @@
19c64a84617f42bb4c11b1e266df4009cd37fdd0


@ -0,0 +1 @@
e7f7d1ad298c4af264199d9199f34f2e4d9ca2b5


@ -1 +0,0 @@
bc8613fb61c0ae95dd3680b0f65e3380c3fd0d6c


@ -0,0 +1 @@
5d4b3ce4df83d0509e0b5f7eecda72af458ba225


@ -1 +0,0 @@
0fa2c3e722294e863f3c70a15e97a18397391fb4


@ -0,0 +1 @@
3b486b51d3aede074ab6de890b427379d40c0438


@ -1 +0,0 @@
db74c6313965ffdd10d9b19be2eed4ae2c76d2e3


@ -0,0 +1 @@
344097014aeaaa0f94a217f3697e14ceee06581f


@ -1 +0,0 @@
b85ae1121b5fd56df985615a3cdd7b3879e9b92d


@ -0,0 +1 @@
6c4706b86718f2653120e0dbfd24e03248dd2ea7


@ -1,291 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.analysis.synonym;
import static org.apache.lucene.util.automaton.Operations.DEFAULT_MAX_DETERMINIZED_STATES;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.BytesTermAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IntsRef;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.FiniteStringsIterator;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.Transition;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Creates a list of {@link TokenStream} where each stream is the tokens that make up a finite string in graph token stream. To do this,
* the graph token stream is converted to an {@link Automaton} and from there we use a {@link FiniteStringsIterator} to collect the various
* token streams for each finite string.
*/
public class GraphTokenStreamFiniteStrings {
private final Automaton.Builder builder;
Automaton det;
private final Map<BytesRef, Integer> termToID = new HashMap<>();
private final Map<Integer, BytesRef> idToTerm = new HashMap<>();
private int anyTermID = -1;
public GraphTokenStreamFiniteStrings() {
this.builder = new Automaton.Builder();
}
private static class BytesRefArrayTokenStream extends TokenStream {
private final BytesTermAttribute termAtt = addAttribute(BytesTermAttribute.class);
private final BytesRef[] terms;
private int offset;
BytesRefArrayTokenStream(BytesRef[] terms) {
this.terms = terms;
offset = 0;
}
@Override
public boolean incrementToken() throws IOException {
if (offset < terms.length) {
clearAttributes();
termAtt.setBytesRef(terms[offset]);
offset = offset + 1;
return true;
}
return false;
}
}
/**
* Gets
*/
public List<TokenStream> getTokenStreams(final TokenStream in) throws IOException {
// build automaton
build(in);
List<TokenStream> tokenStreams = new ArrayList<>();
final FiniteStringsIterator finiteStrings = new FiniteStringsIterator(det);
for (IntsRef string; (string = finiteStrings.next()) != null; ) {
final BytesRef[] tokens = new BytesRef[string.length];
for (int idx = string.offset, len = string.offset + string.length; idx < len; idx++) {
tokens[idx - string.offset] = idToTerm.get(string.ints[idx]);
}
tokenStreams.add(new BytesRefArrayTokenStream(tokens));
}
return tokenStreams;
}
private void build(final TokenStream in) throws IOException {
if (det != null) {
throw new IllegalStateException("Automation already built");
}
final TermToBytesRefAttribute termBytesAtt = in.addAttribute(TermToBytesRefAttribute.class);
final PositionIncrementAttribute posIncAtt = in.addAttribute(PositionIncrementAttribute.class);
final PositionLengthAttribute posLengthAtt = in.addAttribute(PositionLengthAttribute.class);
final OffsetAttribute offsetAtt = in.addAttribute(OffsetAttribute.class);
in.reset();
int pos = -1;
int lastPos = 0;
int maxOffset = 0;
int maxPos = -1;
int state = -1;
while (in.incrementToken()) {
int posInc = posIncAtt.getPositionIncrement();
assert pos > -1 || posInc > 0;
if (posInc > 1) {
throw new IllegalArgumentException("cannot handle holes; to accept any term, use '*' term");
}
if (posInc > 0) {
// New node:
pos += posInc;
}
int endPos = pos + posLengthAtt.getPositionLength();
while (state < endPos) {
state = createState();
}
BytesRef term = termBytesAtt.getBytesRef();
//System.out.println(pos + "-" + endPos + ": " + term.utf8ToString() + ": posInc=" + posInc);
if (term.length == 1 && term.bytes[term.offset] == (byte) '*') {
addAnyTransition(pos, endPos);
} else {
addTransition(pos, endPos, term);
}
maxOffset = Math.max(maxOffset, offsetAtt.endOffset());
maxPos = Math.max(maxPos, endPos);
}
in.end();
// TODO: look at endOffset? ts2a did...
// TODO: this (setting "last" state as the only accept state) may be too simplistic?
setAccept(state, true);
finish();
}
/**
* Returns a new state; state 0 is always the initial state.
*/
private int createState() {
return builder.createState();
}
/**
* Marks the specified state as accept or not.
*/
private void setAccept(int state, boolean accept) {
builder.setAccept(state, accept);
}
/**
* Adds a transition to the automaton.
*/
private void addTransition(int source, int dest, String term) {
addTransition(source, dest, new BytesRef(term));
}
/**
* Adds a transition to the automaton.
*/
private void addTransition(int source, int dest, BytesRef term) {
if (term == null) {
throw new NullPointerException("term should not be null");
}
builder.addTransition(source, dest, getTermID(term));
}
/**
* Adds a transition matching any term.
*/
private void addAnyTransition(int source, int dest) {
builder.addTransition(source, dest, getTermID(null));
}
/**
* Call this once you are done adding states/transitions.
*/
private void finish() {
finish(DEFAULT_MAX_DETERMINIZED_STATES);
}
/**
* Call this once you are done adding states/transitions.
*
* @param maxDeterminizedStates Maximum number of states created when determinizing the automaton. Higher numbers allow this operation
* to consume more memory but allow more complex automatons.
*/
private void finish(int maxDeterminizedStates) {
Automaton automaton = builder.finish();
// System.out.println("before det:\n" + automaton.toDot());
Transition t = new Transition();
// TODO: should we add "eps back to initial node" for all states,
// and det that? then we don't need to revisit initial node at
// every position? but automaton could blow up? And, this makes it
// harder to skip useless positions at search time?
if (anyTermID != -1) {
// Make sure there are no leading or trailing ANY:
int count = automaton.initTransition(0, t);
for (int i = 0; i < count; i++) {
automaton.getNextTransition(t);
if (anyTermID >= t.min && anyTermID <= t.max) {
throw new IllegalStateException("automaton cannot lead with an ANY transition");
}
}
int numStates = automaton.getNumStates();
for (int i = 0; i < numStates; i++) {
count = automaton.initTransition(i, t);
for (int j = 0; j < count; j++) {
automaton.getNextTransition(t);
if (automaton.isAccept(t.dest) && anyTermID >= t.min && anyTermID <= t.max) {
throw new IllegalStateException("automaton cannot end with an ANY transition");
}
}
}
int termCount = termToID.size();
// We have to carefully translate these transitions so automaton
// realizes they also match all other terms:
Automaton newAutomaton = new Automaton();
for (int i = 0; i < numStates; i++) {
newAutomaton.createState();
newAutomaton.setAccept(i, automaton.isAccept(i));
}
for (int i = 0; i < numStates; i++) {
count = automaton.initTransition(i, t);
for (int j = 0; j < count; j++) {
automaton.getNextTransition(t);
int min, max;
if (t.min <= anyTermID && anyTermID <= t.max) {
// Match any term
min = 0;
max = termCount - 1;
} else {
min = t.min;
max = t.max;
}
newAutomaton.addTransition(t.source, t.dest, min, max);
}
}
newAutomaton.finishState();
automaton = newAutomaton;
}
det = Operations.removeDeadStates(Operations.determinize(automaton, maxDeterminizedStates));
}
private int getTermID(BytesRef term) {
Integer id = termToID.get(term);
if (id == null) {
id = termToID.size();
if (term != null) {
term = BytesRef.deepCopyOf(term);
}
termToID.put(term, id);
idToTerm.put(id, term);
if (term == null) {
anyTermID = id;
}
}
return id;
}
}


@ -1,588 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.analysis.synonym;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.FlagsAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.RollingBuffer;
import org.apache.lucene.util.fst.FST;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
// TODO: maybe we should resolve token -> wordID then run
// FST on wordIDs, for better perf?
// TODO: a more efficient approach would be Aho/Corasick's
// algorithm
// http://en.wikipedia.org/wiki/Aho%E2%80%93Corasick_string_matching_algorithm
// It improves over the current approach here
// because it does not fully re-start matching at every
// token. For example if one pattern is "a b c x"
// and another is "b c d" and the input is "a b c d", on
// trying to parse "a b c x" but failing when you got to x,
// rather than starting over again your really should
// immediately recognize that "b c d" matches at the next
// input. I suspect this won't matter that much in
// practice, but it's possible on some set of synonyms it
// will. We'd have to modify Aho/Corasick to enforce our
// conflict resolving (eg greedy matching) because that algo
// finds all matches. This really amounts to adding a .*
// closure to the FST and then determinizing it.
//
// Another possible solution is described at http://www.cis.uni-muenchen.de/people/Schulz/Pub/dictle5.ps
/**
* Applies single- or multi-token synonyms from a {@link SynonymMap}
* to an incoming {@link TokenStream}, producing a fully correct graph
* output. This is a replacement for {@link SynonymFilter}, which produces
* incorrect graphs for multi-token synonyms.
*
* <b>NOTE</b>: this cannot consume an incoming graph; results will
* be undefined.
*/
public final class SynonymGraphFilter extends TokenFilter {
public static final String TYPE_SYNONYM = "SYNONYM";
public static final int GRAPH_FLAG = 8;
private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
private final PositionIncrementAttribute posIncrAtt = addAttribute(PositionIncrementAttribute.class);
private final PositionLengthAttribute posLenAtt = addAttribute(PositionLengthAttribute.class);
private final FlagsAttribute flagsAtt = addAttribute(FlagsAttribute.class);
private final TypeAttribute typeAtt = addAttribute(TypeAttribute.class);
private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
private final SynonymMap synonyms;
private final boolean ignoreCase;
private final FST<BytesRef> fst;
private final FST.BytesReader fstReader;
private final FST.Arc<BytesRef> scratchArc;
private final ByteArrayDataInput bytesReader = new ByteArrayDataInput();
private final BytesRef scratchBytes = new BytesRef();
private final CharsRefBuilder scratchChars = new CharsRefBuilder();
private final LinkedList<BufferedOutputToken> outputBuffer = new LinkedList<>();
private int nextNodeOut;
private int lastNodeOut;
private int maxLookaheadUsed;
// For testing:
private int captureCount;
private boolean liveToken;
// Start/end offset of the current match:
private int matchStartOffset;
private int matchEndOffset;
// True once the input TokenStream is exhausted:
private boolean finished;
private int lookaheadNextRead;
private int lookaheadNextWrite;
private RollingBuffer<BufferedInputToken> lookahead = new RollingBuffer<BufferedInputToken>() {
@Override
protected BufferedInputToken newInstance() {
return new BufferedInputToken();
}
};
static class BufferedInputToken implements RollingBuffer.Resettable {
final CharsRefBuilder term = new CharsRefBuilder();
AttributeSource.State state;
int startOffset = -1;
int endOffset = -1;
@Override
public void reset() {
state = null;
term.clear();
// Intentionally invalid to ferret out bugs:
startOffset = -1;
endOffset = -1;
}
}
static class BufferedOutputToken {
final String term;
// Non-null if this was an incoming token:
final State state;
final int startNode;
final int endNode;
public BufferedOutputToken(State state, String term, int startNode, int endNode) {
this.state = state;
this.term = term;
this.startNode = startNode;
this.endNode = endNode;
}
}
public SynonymGraphFilter(TokenStream input, SynonymMap synonyms, boolean ignoreCase) {
super(input);
this.synonyms = synonyms;
this.fst = synonyms.fst;
if (fst == null) {
throw new IllegalArgumentException("fst must be non-null");
}
this.fstReader = fst.getBytesReader();
scratchArc = new FST.Arc<>();
this.ignoreCase = ignoreCase;
}
@Override
public boolean incrementToken() throws IOException {
//System.out.println("\nS: incrToken lastNodeOut=" + lastNodeOut + " nextNodeOut=" + nextNodeOut);
assert lastNodeOut <= nextNodeOut;
if (outputBuffer.isEmpty() == false) {
// We still have pending outputs from a prior synonym match:
releaseBufferedToken();
//System.out.println(" syn: ret buffered=" + this);
assert liveToken == false;
return true;
}
// Try to parse a new synonym match at the current token:
if (parse()) {
// A new match was found:
releaseBufferedToken();
//System.out.println(" syn: after parse, ret buffered=" + this);
assert liveToken == false;
return true;
}
if (lookaheadNextRead == lookaheadNextWrite) {
// Fast path: parse pulled one token, but it didn't match
// the start for any synonym, so we now return it "live" w/o having
// cloned all of its atts:
if (finished) {
//System.out.println(" syn: ret END");
return false;
}
assert liveToken;
liveToken = false;
// NOTE: no need to change posInc since it's relative, i.e. whatever
// node our output is upto will just increase by the incoming posInc.
// We also don't need to change posLen, but only because we cannot
// consume a graph, so the incoming token can never span a future
// synonym match.
} else {
// We still have buffered lookahead tokens from a previous
// parse attempt that required lookahead; just replay them now:
//System.out.println(" restore buffer");
assert lookaheadNextRead < lookaheadNextWrite : "read=" + lookaheadNextRead + " write=" + lookaheadNextWrite;
BufferedInputToken token = lookahead.get(lookaheadNextRead);
lookaheadNextRead++;
restoreState(token.state);
lookahead.freeBefore(lookaheadNextRead);
//System.out.println(" after restore offset=" + offsetAtt.startOffset() + "-" + offsetAtt.endOffset());
assert liveToken == false;
}
lastNodeOut += posIncrAtt.getPositionIncrement();
nextNodeOut = lastNodeOut + posLenAtt.getPositionLength();
//System.out.println(" syn: ret lookahead=" + this);
return true;
}
private void releaseBufferedToken() throws IOException {
//System.out.println(" releaseBufferedToken");
BufferedOutputToken token = outputBuffer.pollFirst();
if (token.state != null) {
// This is an original input token (keepOrig=true case):
//System.out.println(" hasState");
restoreState(token.state);
//System.out.println(" startOffset=" + offsetAtt.startOffset() + " endOffset=" + offsetAtt.endOffset());
} else {
clearAttributes();
//System.out.println(" no state");
termAtt.append(token.term);
// We better have a match already:
assert matchStartOffset != -1;
offsetAtt.setOffset(matchStartOffset, matchEndOffset);
//System.out.println(" startOffset=" + matchStartOffset + " endOffset=" + matchEndOffset);
typeAtt.setType(TYPE_SYNONYM);
}
//System.out.println(" lastNodeOut=" + lastNodeOut);
//System.out.println(" term=" + termAtt);
posIncrAtt.setPositionIncrement(token.startNode - lastNodeOut);
lastNodeOut = token.startNode;
posLenAtt.setPositionLength(token.endNode - token.startNode);
flagsAtt.setFlags(flagsAtt.getFlags() | GRAPH_FLAG); // set the graph flag
}
/**
* Scans the next input token(s) to see if a synonym matches. Returns true
* if a match was found.
*/
private boolean parse() throws IOException {
// System.out.println(Thread.currentThread().getName() + ": S: parse: " + System.identityHashCode(this));
// Holds the longest match we've seen so far:
BytesRef matchOutput = null;
int matchInputLength = 0;
BytesRef pendingOutput = fst.outputs.getNoOutput();
fst.getFirstArc(scratchArc);
assert scratchArc.output == fst.outputs.getNoOutput();
// How many tokens in the current match
int matchLength = 0;
boolean doFinalCapture = false;
int lookaheadUpto = lookaheadNextRead;
matchStartOffset = -1;
byToken:
while (true) {
//System.out.println(" cycle lookaheadUpto=" + lookaheadUpto + " maxPos=" + lookahead.getMaxPos());
// Pull next token's chars:
final char[] buffer;
final int bufferLen;
final int inputEndOffset;
if (lookaheadUpto <= lookahead.getMaxPos()) {
// Still in our lookahead buffer
BufferedInputToken token = lookahead.get(lookaheadUpto);
lookaheadUpto++;
buffer = token.term.chars();
bufferLen = token.term.length();
inputEndOffset = token.endOffset;
//System.out.println(" use buffer now max=" + lookahead.getMaxPos());
if (matchStartOffset == -1) {
matchStartOffset = token.startOffset;
}
} else {
// We used up our lookahead buffer of input tokens
// -- pull next real input token:
assert finished || liveToken == false;
if (finished) {
//System.out.println(" break: finished");
break;
} else if (input.incrementToken()) {
//System.out.println(" input.incrToken");
liveToken = true;
buffer = termAtt.buffer();
bufferLen = termAtt.length();
if (matchStartOffset == -1) {
matchStartOffset = offsetAtt.startOffset();
}
inputEndOffset = offsetAtt.endOffset();
lookaheadUpto++;
} else {
// No more input tokens
finished = true;
//System.out.println(" break: now set finished");
break;
}
}
matchLength++;
//System.out.println(" cycle term=" + new String(buffer, 0, bufferLen));
// Run each char in this token through the FST:
int bufUpto = 0;
while (bufUpto < bufferLen) {
final int codePoint = Character.codePointAt(buffer, bufUpto, bufferLen);
if (fst.findTargetArc(ignoreCase ? Character.toLowerCase(codePoint) : codePoint, scratchArc, scratchArc, fstReader) ==
null) {
break byToken;
}
// Accum the output
pendingOutput = fst.outputs.add(pendingOutput, scratchArc.output);
bufUpto += Character.charCount(codePoint);
}
assert bufUpto == bufferLen;
// OK, entire token matched; now see if this is a final
// state in the FST (a match):
if (scratchArc.isFinal()) {
matchOutput = fst.outputs.add(pendingOutput, scratchArc.nextFinalOutput);
matchInputLength = matchLength;
matchEndOffset = inputEndOffset;
//System.out.println(" ** match");
}
// See if the FST can continue matching (ie, needs to
// see the next input token):
if (fst.findTargetArc(SynonymMap.WORD_SEPARATOR, scratchArc, scratchArc, fstReader) == null) {
// No further rules can match here; we're done
// searching for matching rules starting at the
// current input position.
break;
} else {
// More matching is possible -- accum the output (if
// any) of the WORD_SEP arc:
pendingOutput = fst.outputs.add(pendingOutput, scratchArc.output);
doFinalCapture = true;
if (liveToken) {
capture();
}
}
}
if (doFinalCapture && liveToken && finished == false) {
// Must capture the final token if we captured any prior tokens:
capture();
}
if (matchOutput != null) {
if (liveToken) {
// Single input token synonym; we must buffer it now:
capture();
}
// There is a match!
bufferOutputTokens(matchOutput, matchInputLength);
lookaheadNextRead += matchInputLength;
//System.out.println(" precmatch; set lookaheadNextRead=" + lookaheadNextRead + " now max=" + lookahead.getMaxPos());
lookahead.freeBefore(lookaheadNextRead);
//System.out.println(" match; set lookaheadNextRead=" + lookaheadNextRead + " now max=" + lookahead.getMaxPos());
return true;
} else {
//System.out.println(" no match; lookaheadNextRead=" + lookaheadNextRead);
return false;
}
//System.out.println(" parse done inputSkipCount=" + inputSkipCount + " nextRead=" + nextRead + " nextWrite=" + nextWrite);
}
/**
* Expands the output graph into the necessary tokens, adding
* synonyms as side paths parallel to the input tokens, and
* buffers them in the output token buffer.
*/
private void bufferOutputTokens(BytesRef bytes, int matchInputLength) {
bytesReader.reset(bytes.bytes, bytes.offset, bytes.length);
final int code = bytesReader.readVInt();
final boolean keepOrig = (code & 0x1) == 0;
//System.out.println(" buffer: keepOrig=" + keepOrig + " matchInputLength=" + matchInputLength);
// How many nodes along all paths; we need this to assign the
// node ID for the final end node where all paths merge back:
int totalPathNodes;
if (keepOrig) {
assert matchInputLength > 0;
totalPathNodes = matchInputLength - 1;
} else {
totalPathNodes = 0;
}
// How many synonyms we will insert over this match:
final int count = code >>> 1;
// TODO: we could encode this instead into the FST:
// 1st pass: count how many new nodes we need
List<List<String>> paths = new ArrayList<>();
for (int outputIDX = 0; outputIDX < count; outputIDX++) {
int wordID = bytesReader.readVInt();
synonyms.words.get(wordID, scratchBytes);
scratchChars.copyUTF8Bytes(scratchBytes);
int lastStart = 0;
List<String> path = new ArrayList<>();
paths.add(path);
int chEnd = scratchChars.length();
for (int chUpto = 0; chUpto <= chEnd; chUpto++) {
if (chUpto == chEnd || scratchChars.charAt(chUpto) == SynonymMap.WORD_SEPARATOR) {
path.add(new String(scratchChars.chars(), lastStart, chUpto - lastStart));
lastStart = 1 + chUpto;
}
}
assert path.size() > 0;
totalPathNodes += path.size() - 1;
}
//System.out.println(" totalPathNodes=" + totalPathNodes);
// 2nd pass: buffer tokens for the graph fragment
// NOTE: totalPathNodes will be 0 in the case where the matched
// input is a single token and all outputs are also a single token
// We "spawn" a side-path for each of the outputs for this matched
// synonym, all ending back at this end node:
int startNode = nextNodeOut;
int endNode = startNode + totalPathNodes + 1;
//System.out.println(" " + paths.size() + " new side-paths");
// First, fanout all tokens departing start node for these new side paths:
int newNodeCount = 0;
for (List<String> path : paths) {
int pathEndNode;
//System.out.println(" path size=" + path.size());
if (path.size() == 1) {
// Single token output, so there are no intermediate nodes:
pathEndNode = endNode;
} else {
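                // Multi-token output: reserve intermediate node IDs for the rest of this
                // path; its remaining tokens are buffered in the second pass below.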
pathEndNode = nextNodeOut + newNodeCount + 1;
newNodeCount += path.size() - 1;
}
outputBuffer.add(new BufferedOutputToken(null, path.get(0), startNode, pathEndNode));
}
// We must do the original tokens last, else the offsets "go backwards":
if (keepOrig) {
BufferedInputToken token = lookahead.get(lookaheadNextRead);
int inputEndNode;
if (matchInputLength == 1) {
// Single token matched input, so there are no intermediate nodes:
inputEndNode = endNode;
} else {
inputEndNode = nextNodeOut + newNodeCount + 1;
}
//System.out.println(" keepOrig first token: " + token.term);
outputBuffer.add(new BufferedOutputToken(token.state, token.term.toString(), startNode, inputEndNode));
}
nextNodeOut = endNode;
// Do full side-path for each syn output:
for (int pathID = 0; pathID < paths.size(); pathID++) {
List<String> path = paths.get(pathID);
if (path.size() > 1) {
int lastNode = outputBuffer.get(pathID).endNode;
for (int i = 1; i < path.size() - 1; i++) {
outputBuffer.add(new BufferedOutputToken(null, path.get(i), lastNode, lastNode + 1));
lastNode++;
}
outputBuffer.add(new BufferedOutputToken(null, path.get(path.size() - 1), lastNode, endNode));
}
}
if (keepOrig && matchInputLength > 1) {
// Do full "side path" with the original tokens:
int lastNode = outputBuffer.get(paths.size()).endNode;
for (int i = 1; i < matchInputLength - 1; i++) {
BufferedInputToken token = lookahead.get(lookaheadNextRead + i);
outputBuffer.add(new BufferedOutputToken(token.state, token.term.toString(), lastNode, lastNode + 1));
lastNode++;
}
BufferedInputToken token = lookahead.get(lookaheadNextRead + matchInputLength - 1);
outputBuffer.add(new BufferedOutputToken(token.state, token.term.toString(), lastNode, endNode));
}
/*
System.out.println(" after buffer: " + outputBuffer.size() + " tokens:");
for(BufferedOutputToken token : outputBuffer) {
System.out.println(" tok: " + token.term + " startNode=" + token.startNode + " endNode=" + token.endNode);
}
*/
}
/**
     * Buffers the current input token into the lookahead buffer.
*/
private void capture() {
assert liveToken;
liveToken = false;
BufferedInputToken token = lookahead.get(lookaheadNextWrite);
lookaheadNextWrite++;
token.state = captureState();
token.startOffset = offsetAtt.startOffset();
token.endOffset = offsetAtt.endOffset();
assert token.term.length() == 0;
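        // Copy the term text now; the shared attribute will be overwritten when the
        // next input token is read.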
token.term.append(termAtt);
captureCount++;
maxLookaheadUsed = Math.max(maxLookaheadUsed, lookahead.getBufferSize());
//System.out.println(" maxLookaheadUsed=" + maxLookaheadUsed);
}
@Override
public void reset() throws IOException {
super.reset();
lookahead.reset();
lookaheadNextWrite = 0;
lookaheadNextRead = 0;
captureCount = 0;
lastNodeOut = -1;
nextNodeOut = 0;
matchStartOffset = -1;
matchEndOffset = -1;
finished = false;
liveToken = false;
outputBuffer.clear();
maxLookaheadUsed = 0;
//System.out.println("S: reset");
}
// for testing
int getCaptureCount() {
return captureCount;
}
// for testing
int getMaxLookaheadUsed() {
return maxLookaheadUsed;
}
}

View File

@ -19,6 +19,9 @@
package org.apache.lucene.queryparser.classic;
import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfNeeded;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
@ -30,6 +33,7 @@ import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.GraphQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.MultiPhraseQuery;
import org.apache.lucene.search.PhraseQuery;
@ -55,9 +59,6 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
* A query parser that uses the {@link MapperService} in order to build smarter
* queries based on the mapping information.
@ -739,27 +740,48 @@ public class MapperQueryParser extends AnalyzingQueryParser {
private Query applySlop(Query q, int slop) {
if (q instanceof PhraseQuery) {
            //make sure that the boost hasn't been set beforehand, otherwise we'd lose it
            assert q instanceof BoostQuery == false;
            return addSlopToPhrase((PhraseQuery) q, slop);
} else if (q instanceof MultiPhraseQuery) {
MultiPhraseQuery.Builder builder = new MultiPhraseQuery.Builder((MultiPhraseQuery) q);
builder.setSlop(slop);
return builder.build();
} else if (q instanceof GraphQuery && ((GraphQuery) q).hasPhrase()) {
// we have a graph query that has at least one phrase sub-query
// re-build and set slop on all phrase queries
List<Query> oldQueries = ((GraphQuery) q).getQueries();
Query[] queries = new Query[oldQueries.size()];
for (int i = 0; i < queries.length; i++) {
Query oldQuery = oldQueries.get(i);
if (oldQuery instanceof PhraseQuery) {
queries[i] = addSlopToPhrase((PhraseQuery) oldQuery, slop);
} else {
queries[i] = oldQuery;
}
}
return new GraphQuery(queries);
} else {
return q;
}
}
/**
* Rebuild a phrase query with a slop value
*/
private PhraseQuery addSlopToPhrase(PhraseQuery query, int slop) {
PhraseQuery.Builder builder = new PhraseQuery.Builder();
builder.setSlop(slop);
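        // Copy the original terms and positions into the rebuilt query: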
final Term[] terms = query.getTerms();
final int[] positions = query.getPositions();
for (int i = 0; i < terms.length; ++i) {
builder.add(terms[i], positions[i]);
}
return builder.build();
}
private Collection<String> extractMultiFields(String field) {
Collection<String> fields;
if (field != null) {

Some files were not shown because too many files have changed in this diff.