Merge branch 'master' into feature/seq_no
commit ba80752e34

@@ -461,3 +461,15 @@ supports a remote debugging option:
---------------------------------------------------------------------------
gradle run --debug-jvm
---------------------------------------------------------------------------

== Building with extra plugins

Additional plugins may be built alongside elasticsearch, where their
dependency on elasticsearch will be substituted with the local elasticsearch
build. To add your plugin, check it out into the extra-plugins directory.
The build will automatically pick it up. You can verify the plugin is
included as part of the build by checking the projects of the build.

---------------------------------------------------------------------------
gradle projects
---------------------------------------------------------------------------

build.gradle (85 changed lines)
@@ -72,12 +72,6 @@ subprojects {
}
}

if (hasProperty('projectsPrefix') == false) {
allprojects {
project.ext['projectsPrefix'] = ''
}
}

allprojects {
// injecting groovy property variables into all projects
project.ext {

@@ -107,15 +101,57 @@ subprojects {
}
}

configurations {
all {
resolutionStrategy {
dependencySubstitution {
substitute module("org.elasticsearch:rest-api-spec:${version}") with project("${projectsPrefix}:rest-api-spec")
substitute module("org.elasticsearch:elasticsearch:${version}") with project("${projectsPrefix}:core")
substitute module("org.elasticsearch:test-framework:${version}") with project("${projectsPrefix}:test-framework")
substitute module("org.elasticsearch.distribution.zip:elasticsearch:${version}") with project("${projectsPrefix}:distribution:zip")
substitute module("org.elasticsearch.distribution.tar:elasticsearch:${version}") with project("${projectsPrefix}:distribution:tar")
ext.projectSubstitutions = [
"org.elasticsearch:rest-api-spec:${version}": ':rest-api-spec',
"org.elasticsearch:elasticsearch:${version}": ':core',
"org.elasticsearch:test-framework:${version}": ':test-framework',
"org.elasticsearch.distribution.zip:elasticsearch:${version}": ':distribution:zip',
"org.elasticsearch.distribution.tar:elasticsearch:${version}": ':distribution:tar'
]
configurations.all {
resolutionStrategy.dependencySubstitution { DependencySubstitutions subs ->
projectSubstitutions.each { k,v ->
subs.substitute(subs.module(k)).with(subs.project(v))
}
}
}
}

// Ensure similar tasks in dependent projects run first. The projectsEvaluated here is
// important because, while dependencies.all will pick up future dependencies,
// it is not necessarily true that the task exists in both projects at the time
// the dependency is added.
gradle.projectsEvaluated {
allprojects {
if (project.path == ':test-framework') {
// :test-framework:test cannot run before and after :core:test
return
}
configurations.all {
dependencies.all { Dependency dep ->
Project upstreamProject = null
if (dep instanceof ProjectDependency) {
upstreamProject = dep.dependencyProject
} else {
// gradle doesn't apply substitutions until resolve time, so they won't
// show up as a ProjectDependency above
String substitution = projectSubstitutions.get("${dep.group}:${dep.name}:${dep.version}")
if (substitution != null) {
upstreamProject = findProject(substitution)
}
}
if (upstreamProject != null) {
if (project.path == upstreamProject.path) {
// TODO: distribution integ tests depend on themselves (!), fix that
return
}
for (String taskName : ['test', 'integTest']) {
Task task = project.tasks.findByName(taskName)
Task upstreamTask = upstreamProject.tasks.findByName(taskName)
if (task != null && upstreamTask != null) {
task.mustRunAfter(upstreamTask)
}
}
}
}
}
}

@@ -127,14 +163,21 @@ allprojects {
apply plugin: 'idea'
}

if (projectsPrefix.isEmpty()) {
idea {
project {
languageLevel = org.elasticsearch.gradle.BuildPlugin.minimumJava
vcs = 'Git'
}
idea {
project {
languageLevel = org.elasticsearch.gradle.BuildPlugin.minimumJava.toString()
vcs = 'Git'
}
}
// Make sure gradle idea was run before running anything in intellij (including import).
File ideaMarker = new File(projectDir, '.local-idea-is-configured')
tasks.idea.doLast {
ideaMarker.setText('', 'UTF-8')
}
if (System.getProperty('idea.active') != null && ideaMarker.exists() == false) {
throw new GradleException('You must run gradle idea from the root of elasticsearch before importing into IntelliJ')
}


// eclipse configuration
allprojects {
@@ -18,6 +18,9 @@
*/
package org.elasticsearch.gradle.precommit

import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis
import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApisExtension
import de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin
import org.gradle.api.GradleException
import org.gradle.api.Project
import org.gradle.api.Task

@@ -69,14 +72,20 @@ class PrecommitTasks {
signaturesURLs = [getClass().getResource('/forbidden/all-signatures.txt')]
suppressAnnotations = ['**.SuppressForbidden']
}
project.tasks.findByName('forbiddenApisMain').configure {
bundledSignatures += ['jdk-system-out']
signaturesURLs += [
getClass().getResource('/forbidden/core-signatures.txt'),
getClass().getResource('/forbidden/third-party-signatures.txt')]
Task mainForbidden = project.tasks.findByName('forbiddenApisMain')
if (mainForbidden != null) {
mainForbidden.configure {
bundledSignatures += ['jdk-system-out']
signaturesURLs += [
getClass().getResource('/forbidden/core-signatures.txt'),
getClass().getResource('/forbidden/third-party-signatures.txt')]
}
}
project.tasks.findByName('forbiddenApisTest').configure {
signaturesURLs += [getClass().getResource('/forbidden/test-signatures.txt')]
Task testForbidden = project.tasks.findByName('forbiddenApisTest')
if (testForbidden != null) {
testForbidden.configure {
signaturesURLs += [getClass().getResource('/forbidden/test-signatures.txt')]
}
}
Task forbiddenApis = project.tasks.findByName('forbiddenApis')
forbiddenApis.group = "" // clear group, so this does not show up under verification tasks
@@ -34,6 +34,87 @@ import java.nio.file.Paths
*/
class ClusterFormationTasks {

static class NodeInfo {
/** common configuration for all nodes, including this one */
ClusterConfiguration config
/** node number within the cluster, for creating unique names and paths */
int nodeNum
/** name of the cluster this node is part of */
String clusterName
/** root directory all node files and operations happen under */
File baseDir
/** the pid file the node will use */
File pidFile
/** elasticsearch home dir */
File homeDir
/** working directory for the node process */
File cwd
/** file that if it exists, indicates the node failed to start */
File failedMarker
/** stdout/stderr log of the elasticsearch process for this node */
File startLog
/** directory to install plugins from */
File pluginsTmpDir
/** environment variables to start the node with */
Map<String, String> env
/** arguments to start the node with */
List<String> args
/** Path to the elasticsearch start script */
String esScript
/** buffer for ant output when starting this node */
ByteArrayOutputStream buffer = new ByteArrayOutputStream()

/** Creates a node to run as part of a cluster for the given task */
NodeInfo(ClusterConfiguration config, int nodeNum, Project project, Task task) {
this.config = config
this.nodeNum = nodeNum
clusterName = "${task.path.replace(':', '_').substring(1)}"
baseDir = new File(project.buildDir, "cluster/${task.name} node${nodeNum}")
pidFile = new File(baseDir, 'es.pid')
homeDir = homeDir(baseDir, config.distribution)
cwd = new File(baseDir, "cwd")
failedMarker = new File(cwd, 'run.failed')
startLog = new File(cwd, 'run.log')
pluginsTmpDir = new File(baseDir, "plugins tmp")

env = [
'JAVA_HOME' : project.javaHome,
'ES_GC_OPTS': config.jvmArgs // we pass these with the undocumented gc opts so the argline can set gc, etc
]
args = config.systemProperties.collect { key, value -> "-D${key}=${value}" }
for (Map.Entry<String, String> property : System.properties.entrySet()) {
if (property.getKey().startsWith('es.')) {
args.add("-D${property.getKey()}=${property.getValue()}")
}
}
// running with cmd on windows will look for this with the .bat extension
esScript = new File(homeDir, 'bin/elasticsearch').toString()
}

/** Returns debug string for the command that started this node. */
String getCommandString() {
String esCommandString = "Elasticsearch node ${nodeNum} command: ${esScript} "
esCommandString += args.join(' ')
esCommandString += '\nenvironment:'
env.each { k, v -> esCommandString += "\n    ${k}: ${v}" }
return esCommandString
}

/** Returns the directory elasticsearch home is contained in for the given distribution */
static File homeDir(File baseDir, String distro) {
String path
switch (distro) {
case 'zip':
case 'tar':
path = "elasticsearch-${VersionProperties.elasticsearch}"
break;
default:
throw new InvalidUserDataException("Unknown distribution: ${distro}")
}
return new File(baseDir, path)
}
}

/**
* Adds dependent tasks to the given task to start and stop a cluster with the given configuration.
*/

@@ -43,10 +124,16 @@ class ClusterFormationTasks {
return
}
configureDistributionDependency(project, config.distribution)
List<Task> startTasks = []
List<NodeInfo> nodes = []
for (int i = 0; i < config.numNodes; ++i) {
File nodeDir = new File(project.buildDir, "cluster/${task.name} node${i}")
configureTasks(project, task, config, nodeDir)
NodeInfo node = new NodeInfo(config, i, project, task)
nodes.add(node)
startTasks.add(configureNode(project, task, node))
}

Task wait = configureWaitTask("${task.name}#wait", project, nodes, startTasks)
task.dependsOn(wait)
}

/** Adds a dependency on the given distribution */

@@ -75,63 +162,60 @@ class ClusterFormationTasks {
* <li>Run additional setup commands</li>
* <li>Start elasticsearch</li>
* </ol>
*
* @return a task which starts the node.
*/
static void configureTasks(Project project, Task task, ClusterConfiguration config, File baseDir) {
String clusterName = "${task.path.replace(':', '_').substring(1)}"
File pidFile = pidFile(baseDir)
File home = homeDir(baseDir, config.distribution)
File cwd = new File(baseDir, "cwd")
File pluginsTmpDir = new File(baseDir, "plugins tmp")
static Task configureNode(Project project, Task task, NodeInfo node) {

// tasks are chained so their execution order is maintained
Task setup = project.tasks.create(name: "${task.name}#clean", type: Delete, dependsOn: task.dependsOn.collect()) {
delete home
delete cwd
Task setup = project.tasks.create(name: taskName(task, node, 'clean'), type: Delete, dependsOn: task.dependsOn.collect()) {
delete node.homeDir
delete node.cwd
doLast {
cwd.mkdirs()
node.cwd.mkdirs()
}
}
setup = configureCheckPreviousTask("${task.name}#checkPrevious", project, setup, pidFile)
setup = configureStopTask("${task.name}#stopPrevious", project, setup, pidFile)
setup = configureExtractTask("${task.name}#extract", project, setup, baseDir, config.distribution)
setup = configureWriteConfigTask("${task.name}#configure", project, setup, home, config, clusterName, pidFile)
setup = configureCopyPluginsTask("${task.name}#copyPlugins", project, setup, pluginsTmpDir, config)
setup = configureCheckPreviousTask(taskName(task, node, 'checkPrevious'), project, setup, node)
setup = configureStopTask(taskName(task, node, 'stopPrevious'), project, setup, node)
setup = configureExtractTask(taskName(task, node, 'extract'), project, setup, node)
setup = configureWriteConfigTask(taskName(task, node, 'configure'), project, setup, node)
setup = configureCopyPluginsTask(taskName(task, node, 'copyPlugins'), project, setup, node)

// install plugins
for (Map.Entry<String, FileCollection> plugin : config.plugins.entrySet()) {
for (Map.Entry<String, FileCollection> plugin : node.config.plugins.entrySet()) {
// replace every dash followed by a character with just the uppercase character
String camelName = plugin.getKey().replaceAll(/-(\w)/) { _, c -> c.toUpperCase(Locale.ROOT) }
String taskName = "${task.name}#install${camelName[0].toUpperCase(Locale.ROOT) + camelName.substring(1)}Plugin"
String actionName = "install${camelName[0].toUpperCase(Locale.ROOT) + camelName.substring(1)}Plugin"
// delay reading the file location until execution time by wrapping in a closure within a GString
String file = "${-> new File(pluginsTmpDir, plugin.getValue().singleFile.getName()).toURI().toURL().toString()}"
Object[] args = [new File(home, 'bin/plugin'), 'install', file]
setup = configureExecTask(taskName, project, setup, cwd, args)
String file = "${-> new File(node.pluginsTmpDir, plugin.getValue().singleFile.getName()).toURI().toURL().toString()}"
Object[] args = [new File(node.homeDir, 'bin/plugin'), 'install', file]
setup = configureExecTask(taskName(task, node, actionName), project, setup, node, args)
}

// extra setup commands
for (Map.Entry<String, Object[]> command : config.setupCommands.entrySet()) {
setup = configureExecTask("${task.name}#${command.getKey()}", project, setup, cwd, command.getValue())
for (Map.Entry<String, Object[]> command : node.config.setupCommands.entrySet()) {
setup = configureExecTask(taskName(task, node, command.getKey()), project, setup, node, command.getValue())
}

Task start = configureStartTask("${task.name}#start", project, setup, cwd, config, clusterName, pidFile, home)
task.dependsOn(start)
Task start = configureStartTask(taskName(task, node, 'start'), project, setup, node)

if (config.daemonize) {
if (node.config.daemonize) {
// if we are running in the background, make sure to stop the server when the task completes
Task stop = configureStopTask("${task.name}#stop", project, [], pidFile)
Task stop = configureStopTask(taskName(task, node, 'stop'), project, [], node)
task.finalizedBy(stop)
}
return start
}

/** Adds a task to extract the elasticsearch distribution */
static Task configureExtractTask(String name, Project project, Task setup, File baseDir, String distro) {
static Task configureExtractTask(String name, Project project, Task setup, NodeInfo node) {
List extractDependsOn = [project.configurations.elasticsearchDistro, setup]
Task extract
switch (distro) {
switch (node.config.distribution) {
case 'zip':
extract = project.tasks.create(name: name, type: Copy, dependsOn: extractDependsOn) {
from { project.zipTree(project.configurations.elasticsearchDistro.singleFile) }
into baseDir
into node.baseDir
}
break;
case 'tar':

@@ -139,54 +223,53 @@ class ClusterFormationTasks {
from {
project.tarTree(project.resources.gzip(project.configurations.elasticsearchDistro.singleFile))
}
into baseDir
into node.baseDir
}
break;
default:
throw new InvalidUserDataException("Unknown distribution: ${distro}")
throw new InvalidUserDataException("Unknown distribution: ${node.config.distribution}")
}
return extract
}

/** Adds a task to write elasticsearch.yml for the given node configuration */
static Task configureWriteConfigTask(String name, Project project, Task setup, File home, ClusterConfiguration config, String clusterName, File pidFile) {
static Task configureWriteConfigTask(String name, Project project, Task setup, NodeInfo node) {
Map esConfig = [
'cluster.name' : clusterName,
'http.port' : config.httpPort,
'transport.tcp.port' : config.transportPort,
'pidfile' : pidFile,
// TODO: make this work for multi node!
'discovery.zen.ping.unicast.hosts': "localhost:${config.transportPort}",
'path.repo' : "${home}/repo",
'path.shared_data' : "${home}/../",
'cluster.name' : node.clusterName,
'http.port' : node.config.httpPort + node.nodeNum,
'transport.tcp.port' : node.config.transportPort + node.nodeNum,
'pidfile' : node.pidFile,
'discovery.zen.ping.unicast.hosts': (0..<node.config.numNodes).collect{"127.0.0.1:${node.config.transportPort + it}"}.join(','),
'path.repo' : "${node.homeDir}/repo",
'path.shared_data' : "${node.homeDir}/../",
// Define a node attribute so we can test that it exists
'node.testattr' : 'test',
'repositories.url.allowed_urls' : 'http://snapshot.test*'
]

return project.tasks.create(name: name, type: DefaultTask, dependsOn: setup) << {
File configFile = new File(home, 'config/elasticsearch.yml')
File configFile = new File(node.homeDir, 'config/elasticsearch.yml')
logger.info("Configuring ${configFile}")
configFile.setText(esConfig.collect { key, value -> "${key}: ${value}" }.join('\n'), 'UTF-8')
}
}

/** Adds a task to copy plugins to a temp dir, which they will later be installed from. */
static Task configureCopyPluginsTask(String name, Project project, Task setup, File pluginsTmpDir, ClusterConfiguration config) {
if (config.plugins.isEmpty()) {
static Task configureCopyPluginsTask(String name, Project project, Task setup, NodeInfo node) {
if (node.config.plugins.isEmpty()) {
return setup
}

return project.tasks.create(name: name, type: Copy, dependsOn: setup) {
into pluginsTmpDir
from(config.plugins.values())
into node.pluginsTmpDir
from(node.config.plugins.values())
}
}

/** Adds a task to execute a command to help setup the cluster */
static Task configureExecTask(String name, Project project, Task setup, File cwd, Object[] execArgs) {
static Task configureExecTask(String name, Project project, Task setup, NodeInfo node, Object[] execArgs) {
return project.tasks.create(name: name, type: Exec, dependsOn: setup) {
workingDir cwd
workingDir node.cwd
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
executable 'cmd'
args '/C', 'call'

@@ -210,21 +293,8 @@ class ClusterFormationTasks {
}

/** Adds a task to start an elasticsearch node with the given configuration */
static Task configureStartTask(String name, Project project, Task setup, File cwd, ClusterConfiguration config, String clusterName, File pidFile, File home) {
Map esEnv = [
'JAVA_HOME' : project.javaHome,
'ES_GC_OPTS': config.jvmArgs // we pass these with the undocumented gc opts so the argline can set gc, etc
]
List<String> esProps = config.systemProperties.collect { key, value -> "-D${key}=${value}" }
for (Map.Entry<String, String> property : System.properties.entrySet()) {
if (property.getKey().startsWith('es.')) {
esProps.add("-D${property.getKey()}=${property.getValue()}")
}
}

static Task configureStartTask(String name, Project project, Task setup, NodeInfo node) {
String executable
// running with cmd on windows will look for this with the .bat extension
String esScript = new File(home, 'bin/elasticsearch').toString()
List<String> esArgs = []
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
executable = 'cmd'

@@ -234,15 +304,13 @@ class ClusterFormationTasks {
executable = 'sh'
}

File failedMarker = new File(cwd, 'run.failed')

// this closure is converted into ant nodes by groovy's AntBuilder
Closure antRunner = {
// we must add debug options inside the closure so the config is read at execution time, as
// gradle task options are not processed until the end of the configuration phase
if (config.debug) {
if (node.config.debug) {
println 'Running elasticsearch in debug mode, suspending until connected on port 8000'
esEnv['JAVA_OPTS'] = '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000'
node.env['JAVA_OPTS'] = '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000'
}

// Due to how ant exec works with the spawn option, we lose all stdout/stderr from the

@@ -251,75 +319,41 @@ class ClusterFormationTasks {
// of the real elasticsearch script. This allows ant to keep the streams open with the
// dummy process, but us to have the output available if there is an error in the
// elasticsearch start script
if (config.daemonize) {
String script = node.esScript
if (node.config.daemonize) {
String scriptName = 'run'
String argsPasser = '"$@"'
String exitMarker = '; if [ $? != 0 ]; then touch run.failed; fi'
String exitMarker = "; if [ \$? != 0 ]; then touch run.failed; fi"
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
scriptName += '.bat'
argsPasser = '%*'
exitMarker = '\r\n if "%errorlevel%" neq "0" ( type nul >> run.failed )'
exitMarker = "\r\n if \"%errorlevel%\" neq \"0\" ( type nul >> run.failed )"
}
File wrapperScript = new File(cwd, scriptName)
wrapperScript.setText("\"${esScript}\" ${argsPasser} > run.log 2>&1 ${exitMarker}", 'UTF-8')
esScript = wrapperScript.toString()
File wrapperScript = new File(node.cwd, scriptName)
wrapperScript.setText("\"${script}\" ${argsPasser} > run.log 2>&1 ${exitMarker}", 'UTF-8')
script = wrapperScript.toString()
}

exec(executable: executable, spawn: config.daemonize, dir: cwd, taskname: 'elasticsearch') {
esEnv.each { key, value -> env(key: key, value: value) }
arg(value: esScript)
esProps.each { arg(value: it) }
}
waitfor(maxwait: '30', maxwaitunit: 'second', checkevery: '500', checkeveryunit: 'millisecond', timeoutproperty: "failed${name}") {
or {
resourceexists {
file(file: failedMarker.toString())
}
and {
resourceexists {
file(file: pidFile.toString())
}
http(url: "http://localhost:${config.httpPort}")
}
}
exec(executable: executable, spawn: node.config.daemonize, dir: node.cwd, taskname: 'elasticsearch') {
node.env.each { key, value -> env(key: key, value: value) }
arg(value: script)
node.args.each { arg(value: it) }
}

}

// this closure is the actual code to run elasticsearch
Closure elasticsearchRunner = {
// Command as string for logging
String esCommandString = "Elasticsearch command: ${esScript} "
esCommandString += esProps.join(' ')
if (esEnv.isEmpty() == false) {
esCommandString += '\nenvironment:'
esEnv.each { k, v -> esCommandString += "\n    ${k}: ${v}" }
}
logger.info(esCommandString)
node.getCommandString().eachLine { line -> logger.info(line) }

ByteArrayOutputStream buffer = new ByteArrayOutputStream()
if (logger.isInfoEnabled() || config.daemonize == false) {
if (logger.isInfoEnabled() || node.config.daemonize == false) {
// run with piping streams directly out (even stderr to stdout since gradle would capture it)
runAntCommand(project, antRunner, System.out, System.err)
} else {
// buffer the output, we may not need to print it
PrintStream captureStream = new PrintStream(buffer, true, "UTF-8")
PrintStream captureStream = new PrintStream(node.buffer, true, "UTF-8")
runAntCommand(project, antRunner, captureStream, captureStream)
}

if (ant.properties.containsKey("failed${name}".toString()) || failedMarker.exists()) {
if (logger.isInfoEnabled() == false) {
// We already log the command at info level. No need to do it twice.
esCommandString.eachLine { line -> logger.error(line) }
}
// the waitfor failed, so dump any output we got (may be empty if info logging, but that is ok)
buffer.toString('UTF-8').eachLine { line -> logger.error(line) }
// also dump the log file for the startup script (which will include ES logging output to stdout)
File startLog = new File(cwd, 'run.log')
if (startLog.exists()) {
startLog.eachLine { line -> logger.error(line) }
}
throw new GradleException('Failed to start elasticsearch')
}
}

Task start = project.tasks.create(name: name, type: DefaultTask, dependsOn: setup)

@@ -327,12 +361,57 @@ class ClusterFormationTasks {
return start
}

static Task configureWaitTask(String name, Project project, List<NodeInfo> nodes, List<Task> startTasks) {
Task wait = project.tasks.create(name: name, dependsOn: startTasks)
wait.doLast {
ant.waitfor(maxwait: '30', maxwaitunit: 'second', checkevery: '500', checkeveryunit: 'millisecond', timeoutproperty: "failed${name}") {
or {
for (NodeInfo node : nodes) {
resourceexists {
file(file: node.failedMarker.toString())
}
}
and {
for (NodeInfo node : nodes) {
resourceexists {
file(file: node.pidFile.toString())
}
http(url: "http://localhost:${node.config.httpPort + node.nodeNum}")
}
}
}
}
boolean anyNodeFailed = false
for (NodeInfo node : nodes) {
anyNodeFailed |= node.failedMarker.exists()
}
if (ant.properties.containsKey("failed${name}".toString()) || anyNodeFailed) {
for (NodeInfo node : nodes) {
if (logger.isInfoEnabled() == false) {
// We already log the command at info level. No need to do it twice.
node.getCommandString().eachLine { line -> logger.error(line) }
}
// the waitfor failed, so dump any output we got (may be empty if info logging, but that is ok)
logger.error("Node ${node.nodeNum} ant output:")
node.buffer.toString('UTF-8').eachLine { line -> logger.error(line) }
// also dump the log file for the startup script (which will include ES logging output to stdout)
if (node.startLog.exists()) {
logger.error("Node ${node.nodeNum} log:")
node.startLog.eachLine { line -> logger.error(line) }
}
}
throw new GradleException('Failed to start elasticsearch')
}
}
return wait
}

/** Adds a task to check if the process with the given pidfile is actually elasticsearch */
static Task configureCheckPreviousTask(String name, Project project, Object depends, File pidFile) {
static Task configureCheckPreviousTask(String name, Project project, Object depends, NodeInfo node) {
return project.tasks.create(name: name, type: Exec, dependsOn: depends) {
onlyIf { pidFile.exists() }
onlyIf { node.pidFile.exists() }
// the pid file won't actually be read until execution time, since the read is wrapped within an inner closure of the GString
ext.pid = "${ -> pidFile.getText('UTF-8').trim()}"
ext.pid = "${ -> node.pidFile.getText('UTF-8').trim()}"
File jps
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
jps = getJpsExecutableByName(project, "jps.exe")

@@ -365,11 +444,11 @@ class ClusterFormationTasks {
}

/** Adds a task to kill an elasticsearch node with the given pidfile */
static Task configureStopTask(String name, Project project, Object depends, File pidFile) {
static Task configureStopTask(String name, Project project, Object depends, NodeInfo node) {
return project.tasks.create(name: name, type: Exec, dependsOn: depends) {
onlyIf { pidFile.exists() }
onlyIf { node.pidFile.exists() }
// the pid file won't actually be read until execution time, since the read is wrapped within an inner closure of the GString
ext.pid = "${ -> pidFile.getText('UTF-8').trim()}"
ext.pid = "${ -> node.pidFile.getText('UTF-8').trim()}"
doFirst {
logger.info("Shutting down external node with pid ${pid}")
}

@@ -381,27 +460,18 @@ class ClusterFormationTasks {
args '-9', pid
}
doLast {
project.delete(pidFile)
project.delete(node.pidFile)
}
}
}

/** Returns the directory elasticsearch home is contained in for the given distribution */
static File homeDir(File baseDir, String distro) {
String path
switch (distro) {
case 'zip':
case 'tar':
path = "elasticsearch-${VersionProperties.elasticsearch}"
break;
default:
throw new InvalidUserDataException("Unknown distribution: ${distro}")
/** Returns a unique task name for this task and node configuration */
static String taskName(Task parentTask, NodeInfo node, String action) {
if (node.config.numNodes > 1) {
return "${parentTask.name}#node${node.nodeNum}.${action}"
} else {
return "${parentTask.name}#${action}"
}
return new File(baseDir, path)
}

static File pidFile(File dir) {
return new File(dir, 'es.pid')
}

/** Runs an ant command, sending output to the given out and error streams */
@@ -23,6 +23,7 @@ package org.elasticsearch.gradle.test
import com.carrotsearch.gradle.junit4.RandomizedTestingPlugin
import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.precommit.PrecommitTasks
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.plugins.JavaBasePlugin

@@ -55,5 +56,6 @@ class StandaloneTestBasePlugin implements Plugin<Project> {
plusConfigurations = [project.configurations.testRuntime]
}
}
PrecommitTasks.configure(project)
}
}
@@ -104,4 +104,11 @@ java.lang.Thread#getAllStackTraces()
@defaultMessage Please do not terminate the application
java.lang.System#exit(int)
java.lang.Runtime#exit(int)
java.lang.Runtime#halt(int)

@defaultMessage Treat system properties as immutable
java.lang.System#setProperties(java.util.Properties)
java.lang.System#setProperty(java.lang.String,java.lang.String)
java.lang.System#clearProperty(java.lang.String)
java.lang.System#getProperties() @ Use BootstrapInfo.getSystemProperties for a read-only view
@@ -87,7 +87,7 @@ dependencies {

compile "net.java.dev.jna:jna:${versions.jna}", optional

if (isEclipse == false || project.path == "${projectsPrefix}:core-tests") {
if (isEclipse == false || project.path == ":core-tests") {
testCompile("org.elasticsearch:test-framework:${version}") {
// tests use the locally compiled version of core
exclude group: 'org.elasticsearch', module: 'elasticsearch'

@@ -98,7 +98,7 @@ dependencies {
if (isEclipse) {
// in eclipse the project is under a fake root, we need to change around the source sets
sourceSets {
if (project.path == "${projectsPrefix}:core") {
if (project.path == ":core") {
main.java.srcDirs = ['java']
main.resources.srcDirs = ['resources']
} else {

@@ -117,7 +117,7 @@ forbiddenPatterns {
exclude '**/org/elasticsearch/cluster/routing/shard_routes.txt'
}

if (isEclipse == false || project.path == "${projectsPrefix}:core-tests") {
if (isEclipse == false || project.path == ":core-tests") {
task integTest(type: RandomizedTestingTask,
group: JavaBasePlugin.VERIFICATION_GROUP,
description: 'Multi-node tests',
@@ -324,9 +324,11 @@ public class BulkProcessor implements Closeable {
}
} else {
boolean success = false;
boolean acquired = false;
try {
listener.beforeBulk(executionId, bulkRequest);
semaphore.acquire();
acquired = true;
client.bulk(bulkRequest, new ActionListener<BulkResponse>() {
@Override
public void onResponse(BulkResponse response) {

@@ -353,7 +355,7 @@ public class BulkProcessor implements Closeable {
} catch (Throwable t) {
listener.afterBulk(executionId, bulkRequest, t);
} finally {
if (!success) { // if we fail on client.bulk() release the semaphore
if (!success && acquired) { // if we fail on client.bulk() release the semaphore
semaphore.release();
}
}
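Note on the BulkProcessor hunk above: releasing a semaphore permit that was never
acquired corrupts the permit count, so the new `acquired` flag makes the finally
block safe when `beforeBulk(...)` or `acquire()` itself throws. A minimal
self-contained sketch of the pattern (the class and method names here are
illustrative, not the Elasticsearch API):

---------------------------------------------------------------------------
import java.util.concurrent.Semaphore;

final class GuardedPermit {
    private final Semaphore semaphore = new Semaphore(1);

    /** Runs an async action while holding a permit. */
    void execute(Runnable asyncAction) throws InterruptedException {
        boolean success = false;   // the action's callback now owns the permit
        boolean acquired = false;  // set only after acquire() returns normally
        try {
            semaphore.acquire();
            acquired = true;
            asyncAction.run();     // completion callback releases the permit
            success = true;
        } finally {
            // Without the acquired check, an exception thrown before or inside
            // acquire() would release a permit this thread never owned,
            // silently growing the semaphore's capacity.
            if (!success && acquired) {
                semaphore.release();
            }
        }
    }
}
---------------------------------------------------------------------------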
@@ -106,19 +106,37 @@ public abstract class FieldStats<T extends Comparable<T>> implements Streamable,
return sumTotalTermFreq;
}

/**
* @return the lowest value in the field.
*
* Note that documents marked as deleted that haven't yet been merged away aren't taken into account.
*/
public T getMinValue() {
return minValue;
}

/**
* @return the highest value in the field.
*
* Note that documents marked as deleted that haven't yet been merged away aren't taken into account.
*/
public T getMaxValue() {
return maxValue;
}

/**
* @return the lowest value in the field represented as a string.
*
* Note that documents marked as deleted that haven't yet been merged away aren't taken into account.
*/
public abstract String getMinValue();
public abstract String getMinValueAsString();

/**
* @return the highest value in the field represented as a string.
*
* Note that documents marked as deleted that haven't yet been merged away aren't taken into account.
*/
public abstract String getMaxValue();
public abstract String getMaxValueAsString();

/**
* @param value The string to be parsed

@@ -192,8 +210,10 @@ public abstract class FieldStats<T extends Comparable<T>> implements Streamable,
}

protected void toInnerXContent(XContentBuilder builder) throws IOException {
builder.field(Fields.MIN_VALUE, minValue);
builder.field(Fields.MAX_VALUE, maxValue);
builder.field(Fields.MIN_VALUE, getMinValue());
builder.field(Fields.MIN_VALUE_AS_STRING, getMinValueAsString());
builder.field(Fields.MAX_VALUE, getMaxValue());
builder.field(Fields.MAX_VALUE_AS_STRING, getMaxValueAsString());
}

@Override

@@ -229,12 +249,12 @@ public abstract class FieldStats<T extends Comparable<T>> implements Streamable,
}

@Override
public String getMinValue() {
public String getMinValueAsString() {
return String.valueOf(minValue.longValue());
}

@Override
public String getMaxValue() {
public String getMaxValueAsString() {
return String.valueOf(maxValue.longValue());
}


@@ -282,12 +302,12 @@ public abstract class FieldStats<T extends Comparable<T>> implements Streamable,
}

@Override
public String getMinValue() {
public String getMinValueAsString() {
return String.valueOf(minValue.floatValue());
}

@Override
public String getMaxValue() {
public String getMaxValueAsString() {
return String.valueOf(maxValue.floatValue());
}


@@ -335,12 +355,12 @@ public abstract class FieldStats<T extends Comparable<T>> implements Streamable,
}

@Override
public String getMinValue() {
public String getMinValueAsString() {
return String.valueOf(minValue.doubleValue());
}

@Override
public String getMaxValue() {
public String getMaxValueAsString() {
return String.valueOf(maxValue.doubleValue());
}


@@ -388,12 +408,12 @@ public abstract class FieldStats<T extends Comparable<T>> implements Streamable,
}

@Override
public String getMinValue() {
public String getMinValueAsString() {
return minValue.utf8ToString();
}

@Override
public String getMaxValue() {
public String getMaxValueAsString() {
return maxValue.utf8ToString();
}


@@ -419,8 +439,8 @@ public abstract class FieldStats<T extends Comparable<T>> implements Streamable,

@Override
protected void toInnerXContent(XContentBuilder builder) throws IOException {
builder.field(Fields.MIN_VALUE, getMinValue());
builder.field(Fields.MAX_VALUE, getMaxValue());
builder.field(Fields.MIN_VALUE, getMinValueAsString());
builder.field(Fields.MAX_VALUE, getMaxValueAsString());
}

@Override

@@ -452,12 +472,12 @@ public abstract class FieldStats<T extends Comparable<T>> implements Streamable,
}

@Override
public String getMinValue() {
public String getMinValueAsString() {
return dateFormatter.printer().print(minValue);
}

@Override
public String getMaxValue() {
public String getMaxValueAsString() {
return dateFormatter.printer().print(maxValue);
}


@@ -470,12 +490,6 @@ public abstract class FieldStats<T extends Comparable<T>> implements Streamable,
return dateFormatter.parser().parseMillis(value);
}

@Override
protected void toInnerXContent(XContentBuilder builder) throws IOException {
builder.field(Fields.MIN_VALUE, getMinValue());
builder.field(Fields.MAX_VALUE, getMaxValue());
}

@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);

@@ -525,7 +539,9 @@ public abstract class FieldStats<T extends Comparable<T>> implements Streamable,
final static XContentBuilderString SUM_DOC_FREQ = new XContentBuilderString("sum_doc_freq");
final static XContentBuilderString SUM_TOTAL_TERM_FREQ = new XContentBuilderString("sum_total_term_freq");
final static XContentBuilderString MIN_VALUE = new XContentBuilderString("min_value");
final static XContentBuilderString MIN_VALUE_AS_STRING = new XContentBuilderString("min_value_as_string");
final static XContentBuilderString MAX_VALUE = new XContentBuilderString("max_value");
final static XContentBuilderString MAX_VALUE_AS_STRING = new XContentBuilderString("max_value_as_string");

}
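Note on the FieldStats hunks above: the rename separates the typed accessor from
its display form, so `getMinValue()`/`getMaxValue()` return the raw `T` while
`get{Min,Max}ValueAsString()` render it, and both forms are now written to
XContent (`min_value` plus `min_value_as_string`, etc.). A minimal sketch of the
resulting contract (simplified; not the actual class hierarchy):

---------------------------------------------------------------------------
abstract class Stats<T extends Comparable<T>> {
    protected T minValue;

    /** the raw, typed minimum, usable for comparisons */
    T getMinValue() {
        return minValue;
    }

    /** the minimum rendered for display, e.g. a formatted date */
    abstract String getMinValueAsString();
}

class LongStats extends Stats<Long> {
    @Override
    String getMinValueAsString() {
        return String.valueOf(minValue.longValue());
    }
}
---------------------------------------------------------------------------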
@@ -119,18 +119,14 @@ public class TransportFieldStatsTransportAction extends TransportBroadcastAction
while (iterator.hasNext()) {
Map.Entry<String, Map<String, FieldStats>> entry = iterator.next();
FieldStats indexConstraintFieldStats = entry.getValue().get(indexConstraint.getField());
if (indexConstraintFieldStats == null) {
continue;
}

if (indexConstraintFieldStats.match(indexConstraint)) {
if (indexConstraintFieldStats != null && indexConstraintFieldStats.match(indexConstraint)) {
// If the field stats didn't occur in the list of fields in the original request we need to remove the
// field stats, because it was never requested and was only needed to validate the index constraint
if (fieldStatFields.contains(indexConstraint.getField()) == false) {
entry.getValue().remove(indexConstraint.getField());
}
} else {
// The index constraint didn't match, so we remove all the field stats of the index we're checking
// The index constraint didn't match or was empty, so we remove all the field stats of the index we're checking
iterator.remove();
}
}
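Note on the TransportFieldStatsTransportAction hunk above: a missing stats entry
previously hit `continue` and the index was kept; it now falls into the else
branch and the index is dropped. A minimal sketch of the resulting filter loop
(simplified types; `matches` is an illustrative stand-in for `FieldStats#match`):

---------------------------------------------------------------------------
import java.util.Iterator;
import java.util.Map;
import java.util.function.Predicate;

final class IndexFilter {
    /** Removes every index whose stats for the field are missing or fail the constraint. */
    static void retainMatching(Map<String, Map<String, Long>> statsByIndex,
                               String field, Predicate<Long> matches) {
        Iterator<Map.Entry<String, Map<String, Long>>> it = statsByIndex.entrySet().iterator();
        while (it.hasNext()) {
            Long stat = it.next().getValue().get(field);
            // null now falls through to removal: an index with no stats for the
            // constrained field can never satisfy the constraint
            if (stat == null || !matches.test(stat)) {
                it.remove(); // safe structural removal while iterating
            }
        }
    }
}
---------------------------------------------------------------------------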
@@ -138,6 +138,7 @@ final class Bootstrap {
// Force probes to be loaded
ProcessProbe.getInstance();
OsProbe.getInstance();
JvmInfo.jvmInfo();
}

private void setup(boolean addShutdownHook, Settings settings, Environment environment) throws Exception {

@@ -230,13 +231,20 @@ }
}
}

/** Set the system property before anything has a chance to trigger its use */
// TODO: why? is it just a bad default somewhere? or is it some BS around 'but the client' garbage <-- my guess
@SuppressForbidden(reason = "sets logger prefix on initialization")
static void initLoggerPrefix() {
System.setProperty("es.logger.prefix", "");
}

/**
* This method is invoked by {@link Elasticsearch#main(String[])}
* to start up elasticsearch.
*/
static void init(String[] args) throws Throwable {
// Set the system property before anything has a chance to trigger its use
System.setProperty("es.logger.prefix", "");
initLoggerPrefix();

BootstrapCLIParser bootstrapCLIParser = new BootstrapCLIParser();
CliTool.ExitStatus status = bootstrapCLIParser.execute(args);
@@ -23,6 +23,7 @@ import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.elasticsearch.Build;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.cli.CliTool;
import org.elasticsearch.common.cli.CliToolConfig;
import org.elasticsearch.common.cli.Terminal;

@@ -100,6 +101,8 @@ final class BootstrapCLIParser extends CliTool {
.stopAtNonOption(true) // needed to parse the --foo.bar options, so this parser must be lenient
.build();

// TODO: don't use system properties as a way to do this, it's horrible...
@SuppressForbidden(reason = "Sets system properties passed as CLI parameters")
public static Command parse(Terminal terminal, CommandLine cli) {
if (cli.hasOption("V")) {
return Version.parse(terminal, cli);
@@ -19,9 +19,15 @@

package org.elasticsearch.bootstrap;

import org.elasticsearch.common.SuppressForbidden;

import java.util.Dictionary;
import java.util.Enumeration;

/**
* Exposes system startup information
*/
@SuppressForbidden(reason = "exposes read-only view of system properties")
public final class BootstrapInfo {

/** no instantiation */

@@ -57,4 +63,61 @@ public final class BootstrapInfo {
* This is not a full URL, just a path.
*/
public static final String UNTRUSTED_CODEBASE = "/untrusted";

// create a view of sysprops map that does not allow modifications
// this must be done this way (e.g. versus an actual typed map), because
// some test methods still change properties, so whitelisted changes must
// be reflected in this view.
private static final Dictionary<Object,Object> SYSTEM_PROPERTIES;
static {
final Dictionary<Object,Object> sysprops = System.getProperties();
SYSTEM_PROPERTIES = new Dictionary<Object,Object>() {

@Override
public int size() {
return sysprops.size();
}

@Override
public boolean isEmpty() {
return sysprops.isEmpty();
}

@Override
public Enumeration<Object> keys() {
return sysprops.keys();
}

@Override
public Enumeration<Object> elements() {
return sysprops.elements();
}

@Override
public Object get(Object key) {
return sysprops.get(key);
}

@Override
public Object put(Object key, Object value) {
throw new UnsupportedOperationException("treat system properties as immutable");
}

@Override
public Object remove(Object key) {
throw new UnsupportedOperationException("treat system properties as immutable");
}
};
}

/**
* Returns a read-only view of all system properties
*/
public static Dictionary<Object,Object> getSystemProperties() {
SecurityManager sm = System.getSecurityManager();
if (sm != null) {
sm.checkPropertyAccess("*");
}
return SYSTEM_PROPERTIES;
}
}
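Note on the BootstrapInfo hunk above: the returned Dictionary is a live facade
over the real system properties, not a snapshot, so whitelisted changes made
elsewhere remain visible while writes through the view are rejected. A short
usage sketch (the property key and the demo class are illustrative):

---------------------------------------------------------------------------
import java.util.Dictionary;

class ReadOnlyPropsDemo {
    public static void main(String[] args) {
        Dictionary<Object, Object> props = BootstrapInfo.getSystemProperties();

        // reads pass straight through to the live System properties
        System.out.println(props.get("java.home"));

        // writes are rejected by the view
        try {
            props.put("es.test", "nope");
        } catch (UnsupportedOperationException expected) {
            // "treat system properties as immutable"
        }
    }
}
---------------------------------------------------------------------------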
@@ -96,6 +96,10 @@ import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksAction;
import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequest;
import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequestBuilder;
import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse;
import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateAction;
import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateRequest;
import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateRequestBuilder;
import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateResponse;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder;

@@ -177,9 +181,9 @@ import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequestBuilder;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsResponse;
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoreRequestBuilder;
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse;
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresAction;
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresRequest;
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder;

@@ -208,10 +212,6 @@ import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequestBuilder;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse;
import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateAction;
import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateRequest;
import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateRequestBuilder;
import org.elasticsearch.action.admin.cluster.validate.template.RenderSearchTemplateResponse;
import org.elasticsearch.action.admin.indices.warmer.delete.DeleteWarmerAction;
import org.elasticsearch.action.admin.indices.warmer.delete.DeleteWarmerRequest;
import org.elasticsearch.action.admin.indices.warmer.delete.DeleteWarmerRequestBuilder;

@@ -263,8 +263,6 @@ import org.elasticsearch.action.suggest.SuggestAction;
import org.elasticsearch.action.suggest.SuggestRequest;
import org.elasticsearch.action.suggest.SuggestRequestBuilder;
import org.elasticsearch.action.suggest.SuggestResponse;
import org.elasticsearch.action.support.AdapterActionFuture;
import org.elasticsearch.action.support.DelegatingActionListener;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.action.support.ThreadedActionListener;
import org.elasticsearch.action.termvectors.*;
@@ -1,123 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.common.geo.builders;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;

import org.elasticsearch.common.xcontent.XContentBuilder;

import com.spatial4j.core.shape.Shape;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LineString;

public abstract class BaseLineStringBuilder<E extends BaseLineStringBuilder<E>> extends PointCollection<E> {

public BaseLineStringBuilder(ArrayList<Coordinate> points) {
super(points);
}

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return coordinatesToXcontent(builder, false);
}

@Override
public Shape build() {
Coordinate[] coordinates = points.toArray(new Coordinate[points.size()]);
Geometry geometry;
if(wrapdateline) {
ArrayList<LineString> strings = decompose(FACTORY, coordinates, new ArrayList<LineString>());

if(strings.size() == 1) {
geometry = strings.get(0);
} else {
LineString[] linestrings = strings.toArray(new LineString[strings.size()]);
geometry = FACTORY.createMultiLineString(linestrings);
}

} else {
geometry = FACTORY.createLineString(coordinates);
}
return jtsGeometry(geometry);
}

protected static ArrayList<LineString> decompose(GeometryFactory factory, Coordinate[] coordinates, ArrayList<LineString> strings) {
for(Coordinate[] part : decompose(+DATELINE, coordinates)) {
for(Coordinate[] line : decompose(-DATELINE, part)) {
strings.add(factory.createLineString(line));
}
}
return strings;
}

/**
* Decompose a linestring given as array of coordinates at a vertical line.
*
* @param dateline x-axis intercept of the vertical line
* @param coordinates coordinates forming the linestring
* @return array of linestrings given as coordinate arrays
*/
protected static Coordinate[][] decompose(double dateline, Coordinate[] coordinates) {
int offset = 0;
ArrayList<Coordinate[]> parts = new ArrayList<>();

double shift = coordinates[0].x > DATELINE ? DATELINE : (coordinates[0].x < -DATELINE ? -DATELINE : 0);

for (int i = 1; i < coordinates.length; i++) {
double t = intersection(coordinates[i-1], coordinates[i], dateline);
if(!Double.isNaN(t)) {
Coordinate[] part;
if(t<1) {
part = Arrays.copyOfRange(coordinates, offset, i+1);
part[part.length-1] = Edge.position(coordinates[i-1], coordinates[i], t);
coordinates[offset+i-1] = Edge.position(coordinates[i-1], coordinates[i], t);
shift(shift, part);
offset = i-1;
shift = coordinates[i].x > DATELINE ? DATELINE : (coordinates[i].x < -DATELINE ? -DATELINE : 0);
} else {
part = shift(shift, Arrays.copyOfRange(coordinates, offset, i+1));
offset = i;
}
parts.add(part);
}
}

if(offset == 0) {
parts.add(shift(shift, coordinates));
} else if(offset < coordinates.length-1) {
Coordinate[] part = Arrays.copyOfRange(coordinates, offset, coordinates.length);
parts.add(shift(shift, part));
}
return parts.toArray(new Coordinate[parts.size()][]);
}

private static Coordinate[] shift(double shift, Coordinate...coordinates) {
if(shift != 0) {
for (int j = 0; j < coordinates.length; j++) {
coordinates[j] = new Coordinate(coordinates[j].x - 2 * shift, coordinates[j].y);
}
}
return coordinates;
}
}
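Note on the deleted decompose() above: its javadoc describes splitting a
linestring at a vertical line x = dateline. For reference, the parametric
intersection it relies on is plain linear interpolation; a sketch under that
assumption (helper names are illustrative, not the deleted Edge API):

---------------------------------------------------------------------------
final class DatelineMath {
    /** Parameter t in [0,1] where the segment x1->x2 crosses x = dateline, or NaN. */
    static double intersection(double x1, double x2, double dateline) {
        if (x1 == x2) {
            return Double.NaN; // segment parallel to the vertical line
        }
        double t = (dateline - x1) / (x2 - x1);
        return (t >= 0 && t <= 1) ? t : Double.NaN;
    }

    /** Point at parameter t on the segment, by linear interpolation. */
    static double[] position(double x1, double y1, double x2, double y2, double t) {
        return new double[] { x1 + t * (x2 - x1), y1 + t * (y2 - y1) };
    }
}
---------------------------------------------------------------------------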
@@ -1,520 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.geo.builders;

import com.spatial4j.core.exception.InvalidShapeException;
import com.spatial4j.core.shape.Shape;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LinearRing;
import com.vividsolutions.jts.geom.MultiPolygon;
import com.vividsolutions.jts.geom.Polygon;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;

/**
 * The {@link BasePolygonBuilder} implements the groundwork to create polygons. It contains
 * methods to wrap polygons at the dateline and to build shapes from the data held by the
 * builder.
 * Since this builder can be embedded in other builders (e.g. {@link MultiPolygonBuilder})
 * the class of the embedding builder is given by the generic argument <code>E</code>.
 *
 * @param <E> type of the embedding class
 */
public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extends ShapeBuilder {

    public static final GeoShapeType TYPE = GeoShapeType.POLYGON;

    // line string defining the shell of the polygon
    protected LineStringBuilder shell;

    // List of line strings defining the holes of the polygon
    protected final ArrayList<LineStringBuilder> holes = new ArrayList<>();

    public BasePolygonBuilder(Orientation orientation) {
        super(orientation);
    }

    @SuppressWarnings("unchecked")
    private E thisRef() {
        return (E)this;
    }

    public E point(double longitude, double latitude) {
        shell.point(longitude, latitude);
        return thisRef();
    }

    /**
     * Add a point to the shell of the polygon
     * @param coordinate coordinate of the new point
     * @return this
     */
    public E point(Coordinate coordinate) {
        shell.point(coordinate);
        return thisRef();
    }

    /**
     * Add an array of points to the shell of the polygon
     * @param coordinates coordinates of the new points to add
     * @return this
     */
    public E points(Coordinate...coordinates) {
        shell.points(coordinates);
        return thisRef();
    }

    /**
     * Add a new hole to the polygon
     * @param hole linear ring defining the hole
     * @return this
     */
    public E hole(LineStringBuilder hole) {
        holes.add(hole);
        return thisRef();
    }

    /**
     * Close the shell of the polygon
     */
    public BasePolygonBuilder close() {
        shell.close();
        return this;
    }

    /**
     * Validates only 1 vertex is tangential (shared) between the interior and exterior of a polygon
     */
    protected void validateHole(BaseLineStringBuilder shell, BaseLineStringBuilder hole) {
        HashSet exterior = Sets.newHashSet(shell.points);
        HashSet interior = Sets.newHashSet(hole.points);
        exterior.retainAll(interior);
        if (exterior.size() >= 2) {
            throw new InvalidShapeException("Invalid polygon, interior cannot share more than one point with the exterior");
        }
    }

    /**
     * The coordinates set up by the builder will be assembled into a polygon. The result will consist of
     * a set of polygons. Each of these components holds a list of linestrings defining the polygon: the
     * first set of coordinates will be used as the shell of the polygon. The others define holes
     * within the polygon.
     * This method also wraps the polygons at the dateline. As a consequence the result may
     * contain more polygons and fewer holes than defined in the builder itself.
     *
     * @return coordinates of the polygon
     */
    public Coordinate[][][] coordinates() {
        int numEdges = shell.points.size()-1; // Last point is repeated
        for (int i = 0; i < holes.size(); i++) {
            numEdges += holes.get(i).points.size()-1;
            validateHole(shell, this.holes.get(i));
        }

        Edge[] edges = new Edge[numEdges];
        Edge[] holeComponents = new Edge[holes.size()];
        int offset = createEdges(0, orientation, shell, null, edges, 0);
        for (int i = 0; i < holes.size(); i++) {
            int length = createEdges(i+1, orientation, shell, this.holes.get(i), edges, offset);
            holeComponents[i] = edges[offset];
            offset += length;
        }

        int numHoles = holeComponents.length;

        numHoles = merge(edges, 0, intersections(+DATELINE, edges), holeComponents, numHoles);
        numHoles = merge(edges, 0, intersections(-DATELINE, edges), holeComponents, numHoles);

        return compose(edges, holeComponents, numHoles);
    }

    @Override
    public Shape build() {
        return jtsGeometry(buildGeometry(FACTORY, wrapdateline));
    }

    protected XContentBuilder coordinatesArray(XContentBuilder builder, Params params) throws IOException {
        shell.coordinatesToXcontent(builder, true);
        for(BaseLineStringBuilder hole : holes) {
            hole.coordinatesToXcontent(builder, true);
        }
        return builder;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(FIELD_TYPE, TYPE.shapename);
        builder.startArray(FIELD_COORDINATES);
        coordinatesArray(builder, params);
        builder.endArray();
        builder.endObject();
        return builder;
    }

    public Geometry buildGeometry(GeometryFactory factory, boolean fixDateline) {
        if(fixDateline) {
            Coordinate[][][] polygons = coordinates();
            return polygons.length == 1
                    ? polygon(factory, polygons[0])
                    : multipolygon(factory, polygons);
        } else {
            return toPolygon(factory);
        }
    }

    public Polygon toPolygon() {
        return toPolygon(FACTORY);
    }

    protected Polygon toPolygon(GeometryFactory factory) {
        final LinearRing shell = linearRing(factory, this.shell.points);
        final LinearRing[] holes = new LinearRing[this.holes.size()];
        Iterator<LineStringBuilder> iterator = this.holes.iterator();
        for (int i = 0; iterator.hasNext(); i++) {
            holes[i] = linearRing(factory, iterator.next().points);
        }
        return factory.createPolygon(shell, holes);
    }

    protected static LinearRing linearRing(GeometryFactory factory, ArrayList<Coordinate> coordinates) {
        return factory.createLinearRing(coordinates.toArray(new Coordinate[coordinates.size()]));
    }

    @Override
    public GeoShapeType type() {
        return TYPE;
    }

    protected static Polygon polygon(GeometryFactory factory, Coordinate[][] polygon) {
        LinearRing shell = factory.createLinearRing(polygon[0]);
        LinearRing[] holes;

        if(polygon.length > 1) {
            holes = new LinearRing[polygon.length-1];
            for (int i = 0; i < holes.length; i++) {
                holes[i] = factory.createLinearRing(polygon[i+1]);
            }
        } else {
            holes = null;
        }
        return factory.createPolygon(shell, holes);
    }

    /**
     * Create a Multipolygon from a set of coordinates. Each primary array contains a polygon which
     * in turn contains an array of linestrings. These linestrings are represented as an array of
     * coordinates. The first linestring will be the shell of the polygon; the others define holes
     * within the polygon.
     *
     * @param factory {@link GeometryFactory} to use
     * @param polygons definition of polygons
     * @return a new Multipolygon
     */
    protected static MultiPolygon multipolygon(GeometryFactory factory, Coordinate[][][] polygons) {
        Polygon[] polygonSet = new Polygon[polygons.length];
        for (int i = 0; i < polygonSet.length; i++) {
            polygonSet[i] = polygon(factory, polygons[i]);
        }
        return factory.createMultiPolygon(polygonSet);
    }

    /**
     * This method sets the component id of all edges in a ring to a given id and shifts the
     * coordinates of this component according to the dateline
     *
     * @param edge An arbitrary edge of the component
     * @param id id to apply to the component
     * @param edges a list of edges to which all edges of the component will be added (could be <code>null</code>)
     * @return number of edges that belong to this component
     */
    private static int component(final Edge edge, final int id, final ArrayList<Edge> edges) {
        // find a coordinate that is not part of the dateline
        Edge any = edge;
        while(any.coordinate.x == +DATELINE || any.coordinate.x == -DATELINE) {
            if((any = any.next) == edge) {
                break;
            }
        }

        double shiftOffset = any.coordinate.x > DATELINE ? DATELINE : (any.coordinate.x < -DATELINE ? -DATELINE : 0);
        if (debugEnabled()) {
            LOGGER.debug("shift: {[]}", shiftOffset);
        }

        // run along the border of the component, collect the
        // edges, shift them according to the dateline and
        // update the component id
        int length = 0, connectedComponents = 0;
        // if there are two connected components, splitIndex keeps track of where to split the edge array
        // start at 1 since the source coordinate is shared
        int splitIndex = 1;
        Edge current = edge;
        Edge prev = edge;
        // bookkeep the source and sink of each visited coordinate
        HashMap<Coordinate, Tuple<Edge, Edge>> visitedEdge = new HashMap<>();
        do {
            current.coordinate = shift(current.coordinate, shiftOffset);
            current.component = id;

            if (edges != null) {
                // found a closed loop - we have two connected components so we need to slice into two distinct components
                if (visitedEdge.containsKey(current.coordinate)) {
                    if (connectedComponents > 0 && current.next != edge) {
                        throw new InvalidShapeException("Shape contains more than one shared point");
                    }

                    // a negative id flags the edge as visited for the edges(...) method.
                    // since we're splitting connected components, we want the edges method to visit
                    // the newly separated component
                    final int visitID = -id;
                    Edge firstAppearance = visitedEdge.get(current.coordinate).v2();
                    // correct the graph pointers by correcting the 'next' pointer for both the
                    // first appearance and this appearance of the edge
                    Edge temp = firstAppearance.next;
                    firstAppearance.next = current.next;
                    current.next = temp;
                    current.component = visitID;
                    // backtrack until we get back to this coordinate, setting the visit id to
                    // a non-visited value (anything positive)
                    do {
                        prev.component = visitID;
                        prev = visitedEdge.get(prev.coordinate).v1();
                        ++splitIndex;
                    } while (!current.coordinate.equals(prev.coordinate));
                    ++connectedComponents;
                } else {
                    visitedEdge.put(current.coordinate, new Tuple<Edge, Edge>(prev, current));
                }
                edges.add(current);
                prev = current;
            }
            length++;
        } while(connectedComponents == 0 && (current = current.next) != edge);

        return (splitIndex != 1) ? length-splitIndex: length;
    }

    /**
     * Compute all coordinates of a component
     * @param component an arbitrary edge of the component
     * @param coordinates Array of coordinates to write the result to
     * @return the coordinates parameter
     */
    private static Coordinate[] coordinates(Edge component, Coordinate[] coordinates) {
        for (int i = 0; i < coordinates.length; i++) {
            coordinates[i] = (component = component.next).coordinate;
        }
        return coordinates;
    }

    private static Coordinate[][][] buildCoordinates(ArrayList<ArrayList<Coordinate[]>> components) {
        Coordinate[][][] result = new Coordinate[components.size()][][];
        for (int i = 0; i < result.length; i++) {
            ArrayList<Coordinate[]> component = components.get(i);
            result[i] = component.toArray(new Coordinate[component.size()][]);
        }

        if(debugEnabled()) {
            for (int i = 0; i < result.length; i++) {
                LOGGER.debug("Component {[]}:", i);
                for (int j = 0; j < result[i].length; j++) {
                    LOGGER.debug("\t" + Arrays.toString(result[i][j]));
                }
            }
        }

        return result;
    }

    private static final Coordinate[][] EMPTY = new Coordinate[0][];

    private static Coordinate[][] holes(Edge[] holes, int numHoles) {
        if (numHoles == 0) {
            return EMPTY;
        }
        final Coordinate[][] points = new Coordinate[numHoles][];

        for (int i = 0; i < numHoles; i++) {
            int length = component(holes[i], -(i+1), null); // mark as visited by inverting the sign
            points[i] = coordinates(holes[i], new Coordinate[length+1]);
        }

        return points;
    }

    private static Edge[] edges(Edge[] edges, int numHoles, ArrayList<ArrayList<Coordinate[]>> components) {
        ArrayList<Edge> mainEdges = new ArrayList<>(edges.length);

        for (int i = 0; i < edges.length; i++) {
            if (edges[i].component >= 0) {
                int length = component(edges[i], -(components.size()+numHoles+1), mainEdges);
                ArrayList<Coordinate[]> component = new ArrayList<>();
                component.add(coordinates(edges[i], new Coordinate[length+1]));
                components.add(component);
            }
        }

        return mainEdges.toArray(new Edge[mainEdges.size()]);
    }

    private static Coordinate[][][] compose(Edge[] edges, Edge[] holes, int numHoles) {
        final ArrayList<ArrayList<Coordinate[]>> components = new ArrayList<>();
        assign(holes, holes(holes, numHoles), numHoles, edges(edges, numHoles, components), components);
        return buildCoordinates(components);
    }

    private static void assign(Edge[] holes, Coordinate[][] points, int numHoles, Edge[] edges, ArrayList<ArrayList<Coordinate[]>> components) {
        // Assign holes to their related components.
        // To find the component a hole belongs to, all intersections of the
        // polygon edges with a vertical line are calculated. This vertical line
        // runs through an arbitrary point of the hole. The polygon edge next to this point
        // is part of the polygon the hole belongs to.
        if (debugEnabled()) {
            LOGGER.debug("Holes: " + Arrays.toString(holes));
        }
        for (int i = 0; i < numHoles; i++) {
            final Edge current = new Edge(holes[i].coordinate, holes[i].next);
            // the edge intersects with itself at its own coordinate. We need intersect to be set this way so the binary search
            // will get the correct position in the edge list and therefore the correct component to add the hole
            current.intersect = current.coordinate;
            final int intersections = intersections(current.coordinate.x, edges);
            // if no intersection is found then the hole is not within the polygon, so
            // don't waste time calling a binary search
            final int pos;
            boolean sharedVertex = false;
            if (intersections == 0 || ((pos = Arrays.binarySearch(edges, 0, intersections, current, INTERSECTION_ORDER)) >= 0)
                    && !(sharedVertex = (edges[pos].intersect.compareTo(current.coordinate) == 0)) ) {
                throw new InvalidShapeException("Invalid shape: Hole is not within polygon");
            }
            final int index = -((sharedVertex) ? 0 : pos+2);
            final int component = -edges[index].component - numHoles - 1;

            if(debugEnabled()) {
                LOGGER.debug("\tposition ("+index+") of edge "+current+": " + edges[index]);
                LOGGER.debug("\tComponent: " + component);
                LOGGER.debug("\tHole intersections ("+current.coordinate.x+"): " + Arrays.toString(edges));
            }

            components.get(component).add(points[i]);
        }
    }

    private static int merge(Edge[] intersections, int offset, int length, Edge[] holes, int numHoles) {
        // Intersections appear pairwise. On the first edge the interior
        // of the polygon is entered. On the second edge the outer face
        // is entered. Other kinds of intersections are discarded by the
        // intersection function

        for (int i = 0; i < length; i += 2) {
            Edge e1 = intersections[offset + i + 0];
            Edge e2 = intersections[offset + i + 1];

            // If two segments are connected maybe a hole must be deleted
            // Since Edges of components appear pairwise we need to check
            // the second edge only (the first edge is either polygon or
            // already handled)
            if (e2.component > 0) {
                //TODO: Check if we could save the set null step
                numHoles--;
                holes[e2.component-1] = holes[numHoles];
                holes[numHoles] = null;
            }
            // only connect edges if intersections are pairwise
            // 1. per the comment above, the edge array is sorted by y-value of the intersection
            // with the dateline. Two edges have the same y intercept when they cross the
            // dateline thus they appear sequentially (pairwise) in the edge array. Two edges
            // do not have the same y intercept when we're forming a multi-poly from a poly
            // that wraps the dateline (but there are 2 ordered intercepts).
            // The connect method creates a new edge for these paired edges in the linked list.
            // For boundary conditions (e.g., intersect but not crossing) there is no sibling edge
            // to connect. Thus the first logic check enforces the pairwise rule
            // 2. the second logic check ensures the two candidate edges aren't already connected by an
            // existing edge along the dateline - this is necessary due to a logic change in
            // ShapeBuilder.intersection that computes dateline edges as valid intersect points
            // in support of OGC standards
            if (e1.intersect != Edge.MAX_COORDINATE && e2.intersect != Edge.MAX_COORDINATE
                    && !(e1.next.next.coordinate.equals3D(e2.coordinate) && Math.abs(e1.next.coordinate.x) == DATELINE
                    && Math.abs(e2.coordinate.x) == DATELINE) ) {
                connect(e1, e2);
            }
        }
        return numHoles;
    }

    private static void connect(Edge in, Edge out) {
        assert in != null && out != null;
        assert in != out;
        // Connect two edges by inserting the point at the dateline intersection
        // and adding two edges between these points, one per direction
        if(in.intersect != in.next.coordinate) {
            // NOTE: the order of the object creation is crucial here! Don't change it!
            // first edge has no point on dateline
            Edge e1 = new Edge(in.intersect, in.next);

            if(out.intersect != out.next.coordinate) {
                // second edge has no point on dateline
                Edge e2 = new Edge(out.intersect, out.next);
                in.next = new Edge(in.intersect, e2, in.intersect);
            } else {
                // second edge intersects with dateline
                in.next = new Edge(in.intersect, out.next, in.intersect);
            }
            out.next = new Edge(out.intersect, e1, out.intersect);
        } else if (in.next != out && in.coordinate != out.intersect) {
            // first edge intersects with dateline
            Edge e2 = new Edge(out.intersect, in.next, out.intersect);

            if(out.intersect != out.next.coordinate) {
                // second edge has no point on dateline
                Edge e1 = new Edge(out.intersect, out.next);
                in.next = new Edge(in.intersect, e1, in.intersect);

            } else {
                // second edge intersects with dateline
                in.next = new Edge(in.intersect, out.next, in.intersect);
            }
            out.next = e2;
        }
    }

    private static int createEdges(int component, Orientation orientation, BaseLineStringBuilder shell,
                                   BaseLineStringBuilder hole,
                                   Edge[] edges, int offset) {
        // inner rings (holes) have an opposite direction than the outer rings
        // XOR will invert the orientation for outer ring cases (truth table: T/T = F, T/F = T, F/T = T, F/F = F)
        boolean direction = (component == 0 ^ orientation == Orientation.RIGHT);
        // set the points array accordingly (shell or hole)
        Coordinate[] points = (hole != null) ? hole.coordinates(false) : shell.coordinates(false);
        Edge.ring(component, direction, orientation == Orientation.LEFT, shell, points, 0, edges, offset, points.length-1);
        return points.length-1;
    }
}
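The validateHole check in the deleted class boils down to a set intersection between shell and hole vertices: a hole may touch the shell at no more than one point. A plain-Java sketch of that rule, with coordinates encoded as strings purely for brevity:

---------------------------------------------------------------------------
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class SharedVertexCheck {
    public static void main(String[] args) {
        Set<String> exterior = new HashSet<>(List.of("0,0", "0,10", "10,10", "10,0"));
        Set<String> interior = new HashSet<>(List.of("0,0", "0,10", "2,2"));
        exterior.retainAll(interior);  // in-place intersection, as in validateHole
        // two shared vertices: the polygon would be rejected as invalid
        System.out.println(exterior.size() >= 2 ? "invalid polygon" : "ok");
    }
}
---------------------------------------------------------------------------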
@@ -57,7 +57,7 @@ public class GeometryCollectionBuilder extends ShapeBuilder {
        return this;
    }

    public GeometryCollectionBuilder line(BaseLineStringBuilder line) {
    public GeometryCollectionBuilder line(LineStringBuilder line) {
        this.shapes.add(line);
        return this;
    }

@@ -67,7 +67,7 @@ public class GeometryCollectionBuilder extends ShapeBuilder {
        return this;
    }

    public GeometryCollectionBuilder polygon(BasePolygonBuilder<?> polygon) {
    public GeometryCollectionBuilder polygon(PolygonBuilder polygon) {
        this.shapes.add(polygon);
        return this;
    }
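With both setters now taking the concrete builder types, composing a collection could look as follows. This is a sketch only: the no-argument GeometryCollectionBuilder constructor and the build() call are assumed from the wider API rather than shown in these hunks.

---------------------------------------------------------------------------
GeometryCollectionBuilder collection = new GeometryCollectionBuilder();
collection.line(new LineStringBuilder().point(0, 0).point(10, 10));
collection.polygon(new PolygonBuilder()
        .point(0, 0).point(0, 5).point(5, 5).point(5, 0).close());
Shape shape = collection.build();
---------------------------------------------------------------------------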
@@ -19,25 +19,23 @@

package org.elasticsearch.common.geo.builders;

import com.vividsolutions.jts.geom.Coordinate;

import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;

public class LineStringBuilder extends BaseLineStringBuilder<LineStringBuilder> {
import org.elasticsearch.common.xcontent.XContentBuilder;
import com.spatial4j.core.shape.Shape;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LineString;

    public LineStringBuilder() {
        this(new ArrayList<Coordinate>());
    }

    public LineStringBuilder(ArrayList<Coordinate> points) {
        super(points);
    }
public class LineStringBuilder extends PointCollection<LineStringBuilder> {

    public static final GeoShapeType TYPE = GeoShapeType.LINESTRING;

    protected boolean translated = false;

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();

@@ -48,11 +46,6 @@ public class LineStringBuilder extends BaseLineStringBuilder<LineStringBuilder>
        return builder;
    }

    @Override
    public GeoShapeType type() {
        return TYPE;
    }

    /**
     * Closes the current lineString by adding the starting point as the end point
     */

@@ -65,4 +58,87 @@ public class LineStringBuilder extends BaseLineStringBuilder<LineStringBuilder>
        return this;
    }

    @Override
    public GeoShapeType type() {
        return TYPE;
    }

    @Override
    public Shape build() {
        Coordinate[] coordinates = points.toArray(new Coordinate[points.size()]);
        Geometry geometry;
        if(wrapdateline) {
            ArrayList<LineString> strings = decompose(FACTORY, coordinates, new ArrayList<LineString>());

            if(strings.size() == 1) {
                geometry = strings.get(0);
            } else {
                LineString[] linestrings = strings.toArray(new LineString[strings.size()]);
                geometry = FACTORY.createMultiLineString(linestrings);
            }

        } else {
            geometry = FACTORY.createLineString(coordinates);
        }
        return jtsGeometry(geometry);
    }

    static ArrayList<LineString> decompose(GeometryFactory factory, Coordinate[] coordinates, ArrayList<LineString> strings) {
        for(Coordinate[] part : decompose(+DATELINE, coordinates)) {
            for(Coordinate[] line : decompose(-DATELINE, part)) {
                strings.add(factory.createLineString(line));
            }
        }
        return strings;
    }

    /**
     * Decompose a linestring given as array of coordinates at a vertical line.
     *
     * @param dateline x-axis intercept of the vertical line
     * @param coordinates coordinates forming the linestring
     * @return array of linestrings given as coordinate arrays
     */
    private static Coordinate[][] decompose(double dateline, Coordinate[] coordinates) {
        int offset = 0;
        ArrayList<Coordinate[]> parts = new ArrayList<>();

        double shift = coordinates[0].x > DATELINE ? DATELINE : (coordinates[0].x < -DATELINE ? -DATELINE : 0);

        for (int i = 1; i < coordinates.length; i++) {
            double t = intersection(coordinates[i-1], coordinates[i], dateline);
            if(!Double.isNaN(t)) {
                Coordinate[] part;
                if(t<1) {
                    part = Arrays.copyOfRange(coordinates, offset, i+1);
                    part[part.length-1] = Edge.position(coordinates[i-1], coordinates[i], t);
                    coordinates[offset+i-1] = Edge.position(coordinates[i-1], coordinates[i], t);
                    shift(shift, part);
                    offset = i-1;
                    shift = coordinates[i].x > DATELINE ? DATELINE : (coordinates[i].x < -DATELINE ? -DATELINE : 0);
                } else {
                    part = shift(shift, Arrays.copyOfRange(coordinates, offset, i+1));
                    offset = i;
                }
                parts.add(part);
            }
        }

        if(offset == 0) {
            parts.add(shift(shift, coordinates));
        } else if(offset < coordinates.length-1) {
            Coordinate[] part = Arrays.copyOfRange(coordinates, offset, coordinates.length);
            parts.add(shift(shift, part));
        }
        return parts.toArray(new Coordinate[parts.size()][]);
    }

    private static Coordinate[] shift(double shift, Coordinate...coordinates) {
        if(shift != 0) {
            for (int j = 0; j < coordinates.length; j++) {
                coordinates[j] = new Coordinate(coordinates[j].x - 2 * shift, coordinates[j].y);
            }
        }
        return coordinates;
    }
}
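Given the decompose logic above, a linestring whose longitudes run past +180 is split at the dateline and the overshooting tail is wrapped into the western hemisphere. A hypothetical usage sketch, using only methods from this file:

---------------------------------------------------------------------------
LineStringBuilder line = new LineStringBuilder().point(170, 0).point(190, 5);
Shape shape = line.build();
// with wrapdateline set, build() yields a MultiLineString: one part ending at
// x = 180 and a second part running from x = -180 to x = -170
---------------------------------------------------------------------------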
@@ -60,7 +60,7 @@ public class MultiLineStringBuilder extends ShapeBuilder {
        builder.field(FIELD_TYPE, TYPE.shapename);
        builder.field(FIELD_COORDINATES);
        builder.startArray();
        for(BaseLineStringBuilder line : lines) {
        for(LineStringBuilder line : lines) {
            line.coordinatesToXcontent(builder, false);
        }
        builder.endArray();

@@ -73,8 +73,8 @@ public class MultiLineStringBuilder extends ShapeBuilder {
        final Geometry geometry;
        if(wrapdateline) {
            ArrayList<LineString> parts = new ArrayList<>();
            for (BaseLineStringBuilder line : lines) {
                BaseLineStringBuilder.decompose(FACTORY, line.coordinates(false), parts);
            for (LineStringBuilder line : lines) {
                LineStringBuilder.decompose(FACTORY, line.coordinates(false), parts);
            }
            if(parts.size() == 1) {
                geometry = parts.get(0);
@@ -31,6 +31,7 @@ import java.util.List;

public class MultiPointBuilder extends PointCollection<MultiPointBuilder> {


    public static final GeoShapeType TYPE = GeoShapeType.MULTIPOINT;

    @Override
@@ -33,7 +33,7 @@ public class MultiPolygonBuilder extends ShapeBuilder {

    public static final GeoShapeType TYPE = GeoShapeType.MULTIPOLYGON;

    protected final ArrayList<BasePolygonBuilder<?>> polygons = new ArrayList<>();
    protected final ArrayList<PolygonBuilder> polygons = new ArrayList<>();

    public MultiPolygonBuilder() {
        this(Orientation.RIGHT);

@@ -43,7 +43,7 @@ public class MultiPolygonBuilder extends ShapeBuilder {
        super(orientation);
    }

    public MultiPolygonBuilder polygon(BasePolygonBuilder<?> polygon) {
    public MultiPolygonBuilder polygon(PolygonBuilder polygon) {
        this.polygons.add(polygon);
        return this;
    }

@@ -53,7 +53,7 @@ public class MultiPolygonBuilder extends ShapeBuilder {
        builder.startObject();
        builder.field(FIELD_TYPE, TYPE.shapename);
        builder.startArray(FIELD_COORDINATES);
        for(BasePolygonBuilder<?> polygon : polygons) {
        for(PolygonBuilder polygon : polygons) {
            builder.startArray();
            polygon.coordinatesArray(builder, params);
            builder.endArray();

@@ -73,13 +73,13 @@ public class MultiPolygonBuilder extends ShapeBuilder {
        List<Shape> shapes = new ArrayList<>(this.polygons.size());

        if(wrapdateline) {
            for (BasePolygonBuilder<?> polygon : this.polygons) {
            for (PolygonBuilder polygon : this.polygons) {
                for(Coordinate[][] part : polygon.coordinates()) {
                    shapes.add(jtsGeometry(PolygonBuilder.polygon(FACTORY, part)));
                }
            }
        } else {
            for (BasePolygonBuilder<?> polygon : this.polygons) {
            for (PolygonBuilder polygon : this.polygons) {
                shapes.add(jtsGeometry(polygon.toPolygon(FACTORY)));
            }
        }
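Taken together with the PolygonBuilder API, the new signature allows a fluent multi-polygon setup. A sketch using only methods visible in these hunks; build() is inherited from ShapeBuilder and assumed:

---------------------------------------------------------------------------
MultiPolygonBuilder mp = new MultiPolygonBuilder()
        .polygon(new PolygonBuilder()
                .point(0, 0).point(0, 5).point(5, 5).point(5, 0).close())
        .polygon(new PolygonBuilder()
                .point(10, 10).point(10, 15).point(15, 15).point(15, 10).close());
Shape shape = mp.build();
---------------------------------------------------------------------------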
@@ -34,7 +34,6 @@ import com.vividsolutions.jts.geom.Coordinate;
public abstract class PointCollection<E extends PointCollection<E>> extends ShapeBuilder {

    protected final ArrayList<Coordinate> points;
    protected boolean translated = false;

    protected PointCollection() {
        this(new ArrayList<Coordinate>());
@@ -19,11 +19,40 @@

package org.elasticsearch.common.geo.builders;

import java.util.ArrayList;

import com.spatial4j.core.exception.InvalidShapeException;
import com.spatial4j.core.shape.Shape;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.LinearRing;
import com.vividsolutions.jts.geom.MultiPolygon;
import com.vividsolutions.jts.geom.Polygon;

public class PolygonBuilder extends BasePolygonBuilder<PolygonBuilder> {
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;

/**
 * The {@link PolygonBuilder} implements the groundwork to create polygons. It contains
 * methods to wrap polygons at the dateline and to build shapes from the data held by the
 * builder.
 */
public class PolygonBuilder extends ShapeBuilder {

    public static final GeoShapeType TYPE = GeoShapeType.POLYGON;

    // line string defining the shell of the polygon
    private LineStringBuilder shell;

    // List of line strings defining the holes of the polygon
    private final ArrayList<LineStringBuilder> holes = new ArrayList<>();

    public PolygonBuilder() {
        this(new ArrayList<Coordinate>(), Orientation.RIGHT);
@@ -33,14 +62,460 @@ public class PolygonBuilder extends BasePolygonBuilder<PolygonBuilder> {
        this(new ArrayList<Coordinate>(), orientation);
    }

    protected PolygonBuilder(ArrayList<Coordinate> points, Orientation orientation) {
    public PolygonBuilder(ArrayList<Coordinate> points, Orientation orientation) {
        super(orientation);
        this.shell = new LineStringBuilder(points);
        this.shell = new LineStringBuilder().points(points);
    }

    public PolygonBuilder point(double longitude, double latitude) {
        shell.point(longitude, latitude);
        return this;
    }

    /**
     * Add a point to the shell of the polygon
     * @param coordinate coordinate of the new point
     * @return this
     */
    public PolygonBuilder point(Coordinate coordinate) {
        shell.point(coordinate);
        return this;
    }

    /**
     * Add an array of points to the shell of the polygon
     * @param coordinates coordinates of the new points to add
     * @return this
     */
    public PolygonBuilder points(Coordinate...coordinates) {
        shell.points(coordinates);
        return this;
    }

    /**
     * Add a new hole to the polygon
     * @param hole linear ring defining the hole
     * @return this
     */
    public PolygonBuilder hole(LineStringBuilder hole) {
        holes.add(hole);
        return this;
    }

    /**
     * Close the shell of the polygon
     */
    public PolygonBuilder close() {
        shell.close();
        return this;
    }

    /**
     * Validates only 1 vertex is tangential (shared) between the interior and exterior of a polygon
     */
    protected void validateHole(LineStringBuilder shell, LineStringBuilder hole) {
        HashSet<Coordinate> exterior = Sets.newHashSet(shell.points);
        HashSet<Coordinate> interior = Sets.newHashSet(hole.points);
        exterior.retainAll(interior);
        if (exterior.size() >= 2) {
            throw new InvalidShapeException("Invalid polygon, interior cannot share more than one point with the exterior");
        }
    }

    /**
     * The coordinates set up by the builder will be assembled into a polygon. The result will consist of
     * a set of polygons. Each of these components holds a list of linestrings defining the polygon: the
     * first set of coordinates will be used as the shell of the polygon. The others define holes
     * within the polygon.
     * This method also wraps the polygons at the dateline. As a consequence the result may
     * contain more polygons and fewer holes than defined in the builder itself.
     *
     * @return coordinates of the polygon
     */
    public Coordinate[][][] coordinates() {
        int numEdges = shell.points.size()-1; // Last point is repeated
        for (int i = 0; i < holes.size(); i++) {
            numEdges += holes.get(i).points.size()-1;
            validateHole(shell, this.holes.get(i));
        }

        Edge[] edges = new Edge[numEdges];
        Edge[] holeComponents = new Edge[holes.size()];
        int offset = createEdges(0, orientation, shell, null, edges, 0);
        for (int i = 0; i < holes.size(); i++) {
            int length = createEdges(i+1, orientation, shell, this.holes.get(i), edges, offset);
            holeComponents[i] = edges[offset];
            offset += length;
        }

        int numHoles = holeComponents.length;

        numHoles = merge(edges, 0, intersections(+DATELINE, edges), holeComponents, numHoles);
        numHoles = merge(edges, 0, intersections(-DATELINE, edges), holeComponents, numHoles);

        return compose(edges, holeComponents, numHoles);
    }

    @Override
    public PolygonBuilder close() {
        super.close();
        return this;
    public Shape build() {
        return jtsGeometry(buildGeometry(FACTORY, wrapdateline));
    }

    protected XContentBuilder coordinatesArray(XContentBuilder builder, Params params) throws IOException {
        shell.coordinatesToXcontent(builder, true);
        for(LineStringBuilder hole : holes) {
            hole.coordinatesToXcontent(builder, true);
        }
        return builder;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(FIELD_TYPE, TYPE.shapename);
        builder.startArray(FIELD_COORDINATES);
        coordinatesArray(builder, params);
        builder.endArray();
        builder.endObject();
        return builder;
    }

    public Geometry buildGeometry(GeometryFactory factory, boolean fixDateline) {
        if(fixDateline) {
            Coordinate[][][] polygons = coordinates();
            return polygons.length == 1
                    ? polygon(factory, polygons[0])
                    : multipolygon(factory, polygons);
        } else {
            return toPolygon(factory);
        }
    }

    public Polygon toPolygon() {
        return toPolygon(FACTORY);
    }

    protected Polygon toPolygon(GeometryFactory factory) {
        final LinearRing shell = linearRing(factory, this.shell.points);
        final LinearRing[] holes = new LinearRing[this.holes.size()];
        Iterator<LineStringBuilder> iterator = this.holes.iterator();
        for (int i = 0; iterator.hasNext(); i++) {
            holes[i] = linearRing(factory, iterator.next().points);
        }
        return factory.createPolygon(shell, holes);
    }

    protected static LinearRing linearRing(GeometryFactory factory, ArrayList<Coordinate> coordinates) {
        return factory.createLinearRing(coordinates.toArray(new Coordinate[coordinates.size()]));
    }

    @Override
    public GeoShapeType type() {
        return TYPE;
    }

    protected static Polygon polygon(GeometryFactory factory, Coordinate[][] polygon) {
        LinearRing shell = factory.createLinearRing(polygon[0]);
        LinearRing[] holes;

        if(polygon.length > 1) {
            holes = new LinearRing[polygon.length-1];
            for (int i = 0; i < holes.length; i++) {
                holes[i] = factory.createLinearRing(polygon[i+1]);
            }
        } else {
            holes = null;
        }
        return factory.createPolygon(shell, holes);
    }

    /**
     * Create a Multipolygon from a set of coordinates. Each primary array contains a polygon which
     * in turn contains an array of linestrings. These linestrings are represented as an array of
     * coordinates. The first linestring will be the shell of the polygon; the others define holes
     * within the polygon.
     *
     * @param factory {@link GeometryFactory} to use
     * @param polygons definition of polygons
     * @return a new Multipolygon
     */
    protected static MultiPolygon multipolygon(GeometryFactory factory, Coordinate[][][] polygons) {
        Polygon[] polygonSet = new Polygon[polygons.length];
        for (int i = 0; i < polygonSet.length; i++) {
            polygonSet[i] = polygon(factory, polygons[i]);
        }
        return factory.createMultiPolygon(polygonSet);
    }

    /**
     * This method sets the component id of all edges in a ring to a given id and shifts the
     * coordinates of this component according to the dateline
     *
     * @param edge An arbitrary edge of the component
     * @param id id to apply to the component
     * @param edges a list of edges to which all edges of the component will be added (could be <code>null</code>)
     * @return number of edges that belong to this component
     */
    private static int component(final Edge edge, final int id, final ArrayList<Edge> edges) {
        // find a coordinate that is not part of the dateline
        Edge any = edge;
        while(any.coordinate.x == +DATELINE || any.coordinate.x == -DATELINE) {
            if((any = any.next) == edge) {
                break;
            }
        }

        double shiftOffset = any.coordinate.x > DATELINE ? DATELINE : (any.coordinate.x < -DATELINE ? -DATELINE : 0);
        if (debugEnabled()) {
            LOGGER.debug("shift: {[]}", shiftOffset);
        }

        // run along the border of the component, collect the
        // edges, shift them according to the dateline and
        // update the component id
        int length = 0, connectedComponents = 0;
        // if there are two connected components, splitIndex keeps track of where to split the edge array
        // start at 1 since the source coordinate is shared
        int splitIndex = 1;
        Edge current = edge;
        Edge prev = edge;
        // bookkeep the source and sink of each visited coordinate
        HashMap<Coordinate, Tuple<Edge, Edge>> visitedEdge = new HashMap<>();
        do {
            current.coordinate = shift(current.coordinate, shiftOffset);
            current.component = id;

            if (edges != null) {
                // found a closed loop - we have two connected components so we need to slice into two distinct components
                if (visitedEdge.containsKey(current.coordinate)) {
                    if (connectedComponents > 0 && current.next != edge) {
                        throw new InvalidShapeException("Shape contains more than one shared point");
                    }

                    // a negative id flags the edge as visited for the edges(...) method.
                    // since we're splitting connected components, we want the edges method to visit
                    // the newly separated component
                    final int visitID = -id;
                    Edge firstAppearance = visitedEdge.get(current.coordinate).v2();
                    // correct the graph pointers by correcting the 'next' pointer for both the
                    // first appearance and this appearance of the edge
                    Edge temp = firstAppearance.next;
                    firstAppearance.next = current.next;
                    current.next = temp;
                    current.component = visitID;
                    // backtrack until we get back to this coordinate, setting the visit id to
                    // a non-visited value (anything positive)
                    do {
                        prev.component = visitID;
                        prev = visitedEdge.get(prev.coordinate).v1();
                        ++splitIndex;
                    } while (!current.coordinate.equals(prev.coordinate));
                    ++connectedComponents;
                } else {
                    visitedEdge.put(current.coordinate, new Tuple<Edge, Edge>(prev, current));
                }
                edges.add(current);
                prev = current;
            }
            length++;
        } while(connectedComponents == 0 && (current = current.next) != edge);

        return (splitIndex != 1) ? length-splitIndex: length;
    }

    /**
     * Compute all coordinates of a component
     * @param component an arbitrary edge of the component
     * @param coordinates Array of coordinates to write the result to
     * @return the coordinates parameter
     */
    private static Coordinate[] coordinates(Edge component, Coordinate[] coordinates) {
        for (int i = 0; i < coordinates.length; i++) {
            coordinates[i] = (component = component.next).coordinate;
        }
        return coordinates;
    }

    private static Coordinate[][][] buildCoordinates(ArrayList<ArrayList<Coordinate[]>> components) {
        Coordinate[][][] result = new Coordinate[components.size()][][];
        for (int i = 0; i < result.length; i++) {
            ArrayList<Coordinate[]> component = components.get(i);
            result[i] = component.toArray(new Coordinate[component.size()][]);
        }

        if(debugEnabled()) {
            for (int i = 0; i < result.length; i++) {
                LOGGER.debug("Component {[]}:", i);
                for (int j = 0; j < result[i].length; j++) {
                    LOGGER.debug("\t" + Arrays.toString(result[i][j]));
                }
            }
        }

        return result;
    }

    private static final Coordinate[][] EMPTY = new Coordinate[0][];

    private static Coordinate[][] holes(Edge[] holes, int numHoles) {
        if (numHoles == 0) {
            return EMPTY;
        }
        final Coordinate[][] points = new Coordinate[numHoles][];

        for (int i = 0; i < numHoles; i++) {
            int length = component(holes[i], -(i+1), null); // mark as visited by inverting the sign
            points[i] = coordinates(holes[i], new Coordinate[length+1]);
        }

        return points;
    }

    private static Edge[] edges(Edge[] edges, int numHoles, ArrayList<ArrayList<Coordinate[]>> components) {
        ArrayList<Edge> mainEdges = new ArrayList<>(edges.length);

        for (int i = 0; i < edges.length; i++) {
            if (edges[i].component >= 0) {
                int length = component(edges[i], -(components.size()+numHoles+1), mainEdges);
                ArrayList<Coordinate[]> component = new ArrayList<>();
                component.add(coordinates(edges[i], new Coordinate[length+1]));
                components.add(component);
            }
        }

        return mainEdges.toArray(new Edge[mainEdges.size()]);
    }

    private static Coordinate[][][] compose(Edge[] edges, Edge[] holes, int numHoles) {
        final ArrayList<ArrayList<Coordinate[]>> components = new ArrayList<>();
        assign(holes, holes(holes, numHoles), numHoles, edges(edges, numHoles, components), components);
        return buildCoordinates(components);
    }

    private static void assign(Edge[] holes, Coordinate[][] points, int numHoles, Edge[] edges, ArrayList<ArrayList<Coordinate[]>> components) {
        // Assign holes to their related components.
        // To find the component a hole belongs to, all intersections of the
        // polygon edges with a vertical line are calculated. This vertical line
        // runs through an arbitrary point of the hole. The polygon edge next to this point
        // is part of the polygon the hole belongs to.
        if (debugEnabled()) {
            LOGGER.debug("Holes: " + Arrays.toString(holes));
        }
        for (int i = 0; i < numHoles; i++) {
            final Edge current = new Edge(holes[i].coordinate, holes[i].next);
            // the edge intersects with itself at its own coordinate. We need intersect to be set this way so the binary search
            // will get the correct position in the edge list and therefore the correct component to add the hole
            current.intersect = current.coordinate;
            final int intersections = intersections(current.coordinate.x, edges);
            // if no intersection is found then the hole is not within the polygon, so
            // don't waste time calling a binary search
            final int pos;
            boolean sharedVertex = false;
            if (intersections == 0 || ((pos = Arrays.binarySearch(edges, 0, intersections, current, INTERSECTION_ORDER)) >= 0)
                    && !(sharedVertex = (edges[pos].intersect.compareTo(current.coordinate) == 0)) ) {
                throw new InvalidShapeException("Invalid shape: Hole is not within polygon");
            }
            final int index = -((sharedVertex) ? 0 : pos+2);
            final int component = -edges[index].component - numHoles - 1;

            if(debugEnabled()) {
                LOGGER.debug("\tposition ("+index+") of edge "+current+": " + edges[index]);
                LOGGER.debug("\tComponent: " + component);
                LOGGER.debug("\tHole intersections ("+current.coordinate.x+"): " + Arrays.toString(edges));
            }

            components.get(component).add(points[i]);
        }
    }

    private static int merge(Edge[] intersections, int offset, int length, Edge[] holes, int numHoles) {
        // Intersections appear pairwise. On the first edge the interior
        // of the polygon is entered. On the second edge the outer face
        // is entered. Other kinds of intersections are discarded by the
        // intersection function

        for (int i = 0; i < length; i += 2) {
            Edge e1 = intersections[offset + i + 0];
            Edge e2 = intersections[offset + i + 1];

            // If two segments are connected maybe a hole must be deleted
            // Since Edges of components appear pairwise we need to check
            // the second edge only (the first edge is either polygon or
            // already handled)
            if (e2.component > 0) {
                //TODO: Check if we could save the set null step
                numHoles--;
                holes[e2.component-1] = holes[numHoles];
                holes[numHoles] = null;
            }
            // only connect edges if intersections are pairwise
            // 1. per the comment above, the edge array is sorted by y-value of the intersection
            // with the dateline. Two edges have the same y intercept when they cross the
            // dateline thus they appear sequentially (pairwise) in the edge array. Two edges
            // do not have the same y intercept when we're forming a multi-poly from a poly
            // that wraps the dateline (but there are 2 ordered intercepts).
            // The connect method creates a new edge for these paired edges in the linked list.
            // For boundary conditions (e.g., intersect but not crossing) there is no sibling edge
            // to connect. Thus the first logic check enforces the pairwise rule
            // 2. the second logic check ensures the two candidate edges aren't already connected by an
            // existing edge along the dateline - this is necessary due to a logic change in
            // ShapeBuilder.intersection that computes dateline edges as valid intersect points
            // in support of OGC standards
            if (e1.intersect != Edge.MAX_COORDINATE && e2.intersect != Edge.MAX_COORDINATE
                    && !(e1.next.next.coordinate.equals3D(e2.coordinate) && Math.abs(e1.next.coordinate.x) == DATELINE
                    && Math.abs(e2.coordinate.x) == DATELINE) ) {
                connect(e1, e2);
            }
        }
        return numHoles;
    }

    private static void connect(Edge in, Edge out) {
        assert in != null && out != null;
        assert in != out;
        // Connect two edges by inserting the point at the dateline intersection
        // and adding two edges between these points, one per direction
        if(in.intersect != in.next.coordinate) {
            // NOTE: the order of the object creation is crucial here! Don't change it!
            // first edge has no point on dateline
            Edge e1 = new Edge(in.intersect, in.next);

            if(out.intersect != out.next.coordinate) {
                // second edge has no point on dateline
                Edge e2 = new Edge(out.intersect, out.next);
                in.next = new Edge(in.intersect, e2, in.intersect);
            } else {
                // second edge intersects with dateline
                in.next = new Edge(in.intersect, out.next, in.intersect);
            }
            out.next = new Edge(out.intersect, e1, out.intersect);
        } else if (in.next != out && in.coordinate != out.intersect) {
            // first edge intersects with dateline
            Edge e2 = new Edge(out.intersect, in.next, out.intersect);

            if(out.intersect != out.next.coordinate) {
                // second edge has no point on dateline
                Edge e1 = new Edge(out.intersect, out.next);
                in.next = new Edge(in.intersect, e1, in.intersect);

            } else {
                // second edge intersects with dateline
                in.next = new Edge(in.intersect, out.next, in.intersect);
            }
            out.next = e2;
        }
    }

    private static int createEdges(int component, Orientation orientation, LineStringBuilder shell,
                                   LineStringBuilder hole,
                                   Edge[] edges, int offset) {
        // inner rings (holes) have an opposite direction than the outer rings
        // XOR will invert the orientation for outer ring cases (truth table: T/T = F, T/F = T, F/T = T, F/F = F)
        boolean direction = (component == 0 ^ orientation == Orientation.RIGHT);
        // set the points array accordingly (shell or hole)
        Coordinate[] points = (hole != null) ? hole.coordinates(false) : shell.coordinates(false);
        Edge.ring(component, direction, orientation == Orientation.LEFT, shell, points, 0, edges, offset, points.length-1);
        return points.length-1;
    }
}
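End to end, the consolidated PolygonBuilder is driven the same way the old generic builder was. A sketch of a square shell with a smaller square hole, using only methods shown above:

---------------------------------------------------------------------------
PolygonBuilder polygon = new PolygonBuilder()
        .point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
        .hole(new LineStringBuilder()
                .point(-5, -5).point(-5, 5).point(5, 5).point(5, -5).close())
        .close();
Shape shape = polygon.build();  // wraps at the dateline when wrapdateline is set
---------------------------------------------------------------------------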
@@ -444,7 +444,7 @@ public abstract class ShapeBuilder extends ToXContentToBytes {
     * number of points
     * @return Array of edges
     */
    protected static Edge[] ring(int component, boolean direction, boolean handedness, BaseLineStringBuilder shell,
    protected static Edge[] ring(int component, boolean direction, boolean handedness, LineStringBuilder shell,
                                 Coordinate[] points, int offset, Edge[] edges, int toffset, int length) {
        // calculate the direction of the points:
        // find the point at the top of the set and check its
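The ring direction used by createEdges(...) earlier is the XOR of "is this the shell?" with the requested orientation, matching the truth table quoted in its comment. A runnable restatement in plain Java:

---------------------------------------------------------------------------
public class RingDirection {
    // component 0 is the shell; holes are wound opposite to it
    static boolean direction(int component, boolean orientationRight) {
        return (component == 0) ^ orientationRight;
    }

    public static void main(String[] args) {
        System.out.println(direction(0, true));   // false: shell, RIGHT orientation
        System.out.println(direction(0, false));  // true:  shell, LEFT orientation
        System.out.println(direction(1, true));   // true:  hole, inverted
        System.out.println(direction(1, false));  // false
    }
}
---------------------------------------------------------------------------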
@@ -50,7 +50,7 @@ public class JdkESLogger extends AbstractESLogger {
        } else if ("debug".equalsIgnoreCase(level)) {
            logger.setLevel(Level.FINE);
        } else if ("trace".equalsIgnoreCase(level)) {
            logger.setLevel(Level.FINE);
            logger.setLevel(Level.FINEST);
        }
    }

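The hunk above corrects the trace mapping: previously both debug and trace mapped to FINE, so trace-level output was indistinguishable from debug. A standalone sketch of the corrected java.util.logging mapping; the fallback branch is an assumption, not part of the diff:

---------------------------------------------------------------------------
import java.util.logging.Level;

public class LevelMapping {
    static Level julLevel(String level) {
        if ("debug".equalsIgnoreCase(level)) {
            return Level.FINE;
        } else if ("trace".equalsIgnoreCase(level)) {
            return Level.FINEST;  // was FINE before this fix
        }
        return Level.INFO;        // assumed fallback
    }

    public static void main(String[] args) {
        System.out.println(julLevel("trace"));  // FINEST
    }
}
---------------------------------------------------------------------------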
@@ -21,6 +21,7 @@ package org.elasticsearch.common.logging.log4j;

import org.apache.log4j.PropertyConfigurator;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.bootstrap.BootstrapInfo;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.env.Environment;

@@ -106,8 +107,8 @@ public class LogConfigurator {
            resolveConfig(environment, settingsBuilder);
        }
        settingsBuilder
                .putProperties("elasticsearch.", System.getProperties())
                .putProperties("es.", System.getProperties());
                .putProperties("elasticsearch.", BootstrapInfo.getSystemProperties())
                .putProperties("es.", BootstrapInfo.getSystemProperties());
        // add custom settings after config was added so that they are not overwritten by config
        settingsBuilder.put(settings);
        settingsBuilder.replacePropertyPlaceholders();
@@ -46,13 +46,14 @@ import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Dictionary;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Objects;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;

@@ -1028,9 +1029,9 @@ public final class Settings implements ToXContent {
    /**
     * Sets all the provided settings.
     */
    public Builder put(Properties properties) {
        for (Map.Entry entry : properties.entrySet()) {
            map.put((String) entry.getKey(), (String) entry.getValue());
    public Builder put(Dictionary<Object,Object> properties) {
        for (Object key : Collections.list(properties.keys())) {
            map.put(Objects.toString(key), Objects.toString(properties.get(key)));
        }
        return this;
    }

@@ -1096,10 +1097,10 @@ public final class Settings implements ToXContent {
     * @param properties The properties to put
     * @return The builder
     */
    public Builder putProperties(String prefix, Properties properties) {
        for (Object key1 : properties.keySet()) {
            String key = (String) key1;
            String value = properties.getProperty(key);
    public Builder putProperties(String prefix, Dictionary<Object,Object> properties) {
        for (Object key1 : Collections.list(properties.keys())) {
            String key = Objects.toString(key1);
            String value = Objects.toString(properties.get(key));
            if (key.startsWith(prefix)) {
                map.put(key.substring(prefix.length()), value);
            }

@@ -1114,10 +1115,10 @@ public final class Settings implements ToXContent {
     * @param properties The properties to put
     * @return The builder
     */
    public Builder putProperties(String prefix, Properties properties, String[] ignorePrefixes) {
        for (Object key1 : properties.keySet()) {
            String key = (String) key1;
            String value = properties.getProperty(key);
    public Builder putProperties(String prefix, Dictionary<Object,Object> properties, String[] ignorePrefixes) {
        for (Object key1 : Collections.list(properties.keys())) {
            String key = Objects.toString(key1);
            String value = Objects.toString(properties.get(key));
            if (key.startsWith(prefix)) {
                boolean ignore = false;
                for (String ignorePrefix : ignorePrefixes) {
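java.util.Properties extends Dictionary<Object,Object>, so existing callers can keep passing system properties to the new signatures; keys are enumerated via Collections.list and stringified with Objects.toString. A standalone sketch:

---------------------------------------------------------------------------
import java.util.Collections;
import java.util.Objects;
import java.util.Properties;

public class DictionaryIteration {
    public static void main(String[] args) {
        Properties props = new Properties();  // a Dictionary<Object,Object>
        props.setProperty("es.node.name", "node-1");
        for (Object key : Collections.list(props.keys())) {  // Enumeration -> List
            System.out.println(Objects.toString(key) + " = "
                    + Objects.toString(props.get(key)));
        }
    }
}
---------------------------------------------------------------------------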
@@ -20,6 +20,7 @@
package org.elasticsearch.http.netty;

import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.netty.NettyUtils;

@@ -138,6 +139,8 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer
    protected volatile HttpServerAdapter httpServerAdapter;

    @Inject
    @SuppressForbidden(reason = "sets org.jboss.netty.epollBugWorkaround based on netty.epollBugWorkaround")
    // TODO: why be confusing like this? just let the user do it with the netty parameter instead!
    public NettyHttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays) {
        super(settings);
        this.networkService = networkService;
@@ -216,7 +216,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
         }
     }

-    public synchronized IndexShard createShard(int sShardId, ShardRouting routing) throws IOException {
+    public synchronized IndexShard createShard(ShardRouting routing) throws IOException {
         final boolean primary = routing.primary();
         /*
          * TODO: we execute this in parallel but it's a synced method. Yet, we might

@@ -224,10 +224,10 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
          * keep it synced.
          */
         if (closed.get()) {
-            throw new IllegalStateException("Can't create shard [" + index().name() + "][" + sShardId + "], closed");
+            throw new IllegalStateException("Can't create shard " + routing.shardId() + ", closed");
         }
         final Settings indexSettings = this.indexSettings.getSettings();
-        final ShardId shardId = new ShardId(index(), sShardId);
+        final ShardId shardId = routing.shardId();
         boolean success = false;
         Store store = null;
         IndexShard indexShard = null;

@@ -285,6 +285,7 @@ public final class IndexService extends AbstractIndexComponent implements IndexC

         eventListener.indexShardStateChanged(indexShard, null, indexShard.state(), "shard created");
         eventListener.afterIndexShardCreated(indexShard);
+        indexShard.updateRoutingEntry(routing, true);
         shards = newMapBuilder(shards).put(shardId.id(), indexShard).immutableMap();
         success = true;
         return indexShard;
@@ -34,6 +34,7 @@ import org.elasticsearch.common.unit.Fuzziness;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.fielddata.FieldDataType;
+import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.query.QueryShardException;
 import org.elasticsearch.index.similarity.SimilarityProvider;

 import java.io.IOException;

@@ -481,6 +482,10 @@ public abstract class MappedFieldType extends FieldType {
     }

+    public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) {
+        if (numericType() != null) {
+            throw new QueryShardException(context, "Cannot use regular expression to filter numeric field [" + names.fullName + "]");
+        }

         RegexpQuery query = new RegexpQuery(createTerm(value), flags, maxDeterminizedStates);
         if (method != null) {
             query.setRewriteMethod(method);
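The new overload refuses regular expressions on numeric fields and otherwise builds a plain Lucene RegexpQuery. A hedged sketch of the Lucene 5.x-era construction it performs, with a made-up field name, pattern, and a literal determinized-states budget:

---------------------------------------------------------------------------
import org.apache.lucene.index.Term;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.RegexpQuery;
import org.apache.lucene.util.automaton.RegExp;

class RegexpQuerySketch {
    static Query build() {
        // "message" and "err.*" are illustrative; 10000 caps automaton size
        RegexpQuery query = new RegexpQuery(new Term("message", "err.*"),
                RegExp.ALL, 10000);
        // optional rewrite, as the patched method does when one is supplied
        query.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_REWRITE);
        return query;
    }
}
---------------------------------------------------------------------------
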
@@ -625,8 +625,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
             if (logger.isDebugEnabled()) {
                 logger.debug("[{}][{}] creating shard", shardRouting.index(), shardId);
             }
-            IndexShard indexShard = indexService.createShard(shardId, shardRouting);
-            indexShard.updateRoutingEntry(shardRouting, state.blocks().disableStatePersistence() == false);
+            IndexShard indexShard = indexService.createShard(shardRouting);
             indexShard.addShardFailureCallback(failedShardHandler);
         } catch (IndexShardAlreadyExistsException e) {
             // ignore this, the method call can happen several times
@@ -30,6 +30,7 @@ import org.elasticsearch.common.xcontent.XContentBuilderString;

 import java.io.IOException;
 import java.lang.management.*;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

@@ -57,7 +58,7 @@ public class JvmInfo implements Streamable, ToXContent {
         JvmInfo info = new JvmInfo();
         info.pid = pid;
         info.startTime = runtimeMXBean.getStartTime();
-        info.version = runtimeMXBean.getSystemProperties().get("java.version");
+        info.version = System.getProperty("java.version");
         info.vmName = runtimeMXBean.getVmName();
         info.vmVendor = runtimeMXBean.getVmVendor();
         info.vmVersion = runtimeMXBean.getVmVersion();

@@ -84,7 +85,7 @@ public class JvmInfo implements Streamable, ToXContent {
             }
         }
         info.classPath = runtimeMXBean.getClassPath();
-        info.systemProperties = runtimeMXBean.getSystemProperties();
+        info.systemProperties = Collections.unmodifiableMap(runtimeMXBean.getSystemProperties());

         List<GarbageCollectorMXBean> gcMxBeans = ManagementFactory.getGarbageCollectorMXBeans();
         info.gcCollectors = new String[gcMxBeans.size()];

@@ -104,6 +105,11 @@ public class JvmInfo implements Streamable, ToXContent {
     }

     public static JvmInfo jvmInfo() {
+        SecurityManager sm = System.getSecurityManager();
+        if (sm != null) {
+            sm.checkPermission(new ManagementPermission("monitor"));
+            sm.checkPropertyAccess("*");
+        }
         return INSTANCE;
     }

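With this change, reading the cached JvmInfo behind a security manager requires both the "monitor" ManagementPermission and read access to all system properties. A minimal standalone sketch of that gate; the INSTANCE stand-in is hypothetical:

---------------------------------------------------------------------------
import java.lang.management.ManagementPermission;

class MonitorGuard {
    private static final String INSTANCE = "jvm-info"; // stand-in for the cached JvmInfo

    static String guardedAccess() {
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            // both checks throw SecurityException unless the caller's
            // protection domain holds the corresponding permission
            sm.checkPermission(new ManagementPermission("monitor"));
            sm.checkPropertyAccess("*");
        }
        return INSTANCE;
    }
}
---------------------------------------------------------------------------
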
@@ -20,6 +20,8 @@
 package org.elasticsearch.node.internal;

+import java.nio.charset.StandardCharsets;

+import org.elasticsearch.bootstrap.BootstrapInfo;
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.common.Booleans;
 import org.elasticsearch.common.Strings;

@@ -125,11 +127,11 @@ public class InternalSettingsPreparer {
         if (useSystemProperties(input)) {
             if (loadDefaults) {
                 for (String prefix : PROPERTY_DEFAULTS_PREFIXES) {
-                    output.putProperties(prefix, System.getProperties());
+                    output.putProperties(prefix, BootstrapInfo.getSystemProperties());
                 }
             }
             for (String prefix : PROPERTY_PREFIXES) {
-                output.putProperties(prefix, System.getProperties(), PROPERTY_DEFAULTS_PREFIXES);
+                output.putProperties(prefix, BootstrapInfo.getSystemProperties(), PROPERTY_DEFAULTS_PREFIXES);
             }
         }
         output.replacePropertyPlaceholders();
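BootstrapInfo.getSystemProperties() hands the preparer a read-only Dictionary view, and putProperties then selects keys by prefix while the plain pass skips anything the defaults pass already consumed. A self-contained sketch of those semantics, with hypothetical es.* property names:

---------------------------------------------------------------------------
import java.util.Collections;
import java.util.Dictionary;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Map;
import java.util.Objects;

class PrefixedProperties {
    // Mirrors putProperties(prefix, dict, ignorePrefixes): copy entries under
    // `prefix`, strip the prefix, and skip anything under an ignored prefix.
    static Map<String, String> extract(String prefix, Dictionary<Object, Object> props,
                                       String... ignorePrefixes) {
        Map<String, String> out = new HashMap<>();
        for (Object k : Collections.list(props.keys())) {
            String key = Objects.toString(k);
            if (!key.startsWith(prefix)) {
                continue;
            }
            boolean ignore = false;
            for (String ignorePrefix : ignorePrefixes) {
                if (key.startsWith(ignorePrefix)) {
                    ignore = true;
                    break;
                }
            }
            if (!ignore) {
                out.put(key.substring(prefix.length()), Objects.toString(props.get(key)));
            }
        }
        return out;
    }

    public static void main(String[] args) {
        Dictionary<Object, Object> sys = new Hashtable<>();
        sys.put("es.default.path.home", "/tmp/es");   // hypothetical values
        sys.put("es.cluster.name", "dev-cluster");
        // the "es." pass ignores keys the "es.default." pass already handled
        System.out.println(extract("es.", sys, "es.default.")); // cluster.name=dev-cluster
        System.out.println(extract("es.default.", sys));        // path.home=/tmp/es
    }
}
---------------------------------------------------------------------------
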
@@ -24,6 +24,7 @@ import org.elasticsearch.Version;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.Booleans;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.bytes.ReleasablePagedBytesReference;
 import org.elasticsearch.common.component.AbstractLifecycleComponent;
 import org.elasticsearch.common.compress.CompressorFactory;

@@ -205,6 +206,8 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
     final ScheduledPing scheduledPing;

     @Inject
+    @SuppressForbidden(reason = "sets org.jboss.netty.epollBugWorkaround based on netty.epollBugWorkaround")
+    // TODO: why be confusing like this? just let the user do it with the netty parameter instead!
     public NettyTransport(Settings settings, ThreadPool threadPool, NetworkService networkService, BigArrays bigArrays, Version version, NamedWriteableRegistry namedWriteableRegistry) {
         super(settings);
         this.threadPool = threadPool;
@@ -48,8 +48,37 @@ grant {
   // Allow connecting to the internet anywhere
   permission java.net.SocketPermission "*", "accept,connect,resolve";

-  // Allow read/write to all system properties
-  permission java.util.PropertyPermission "*", "read,write";
+  // Allow read access to all system properties
+  permission java.util.PropertyPermission "*", "read";
+
+  // TODO: clean all these property writes up, and don't allow any more in. these are all bogus!
+
+  // LuceneTestCase randomization (locale/timezone/cpus/ssd)
+  // TODO: put these in doPrivileged and move these to test-framework.policy
+  permission java.util.PropertyPermission "user.language", "write";
+  permission java.util.PropertyPermission "user.timezone", "write";
+  permission java.util.PropertyPermission "lucene.cms.override_core_count", "write";
+  permission java.util.PropertyPermission "lucene.cms.override_spins", "write";
+  // messiness in LuceneTestCase: do the above, or clean this up, or simply allow to fail if its denied
+  permission java.util.PropertyPermission "solr.solr.home", "write";
+  permission java.util.PropertyPermission "solr.data.dir", "write";
+  permission java.util.PropertyPermission "solr.directoryFactory", "write";
+
+  // set by ESTestCase to improve test reproducibility
+  // TODO: set this with gradle or some other way that repros with seed?
+  permission java.util.PropertyPermission "es.processors.override", "write";
+  // set by CLIToolTestCase
+  // TODO: do this differently? or test commandline tools differently?
+  permission java.util.PropertyPermission "es.default.path.home", "write";
+
+  // TODO: these simply trigger a noisy warning if its unable to clear the properties
+  // fix that in randomizedtesting
+  permission java.util.PropertyPermission "junit4.childvm.count", "write";
+  permission java.util.PropertyPermission "junit4.childvm.id", "write";
+
+  // set by NettyTransport/NettyHttpServerTransport based on another parameter
+  // TODO: look into this and decide if users should simply set the actual sysprop?!
+  permission java.util.PropertyPermission "org.jboss.netty.epollBugWorkaround", "write";

   // needed by lucene SPI currently
   permission java.lang.RuntimePermission "getClassLoader";
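The policy now grants blanket read access but enumerates every property that may still be written. Per the TODO above, such a write would typically be wrapped in doPrivileged so that only the writing frame, not the whole call stack, needs the grant; a hedged sketch with one of the listed keys:

---------------------------------------------------------------------------
import java.security.AccessController;
import java.security.PrivilegedAction;

class PropertyWriteSketch {
    // Hypothetical helper: under the policy above, "write" is granted only
    // for the named keys, and doPrivileged limits the required grant to this
    // code's own protection domain.
    static void setEpollWorkaround(final boolean value) {
        AccessController.doPrivileged(new PrivilegedAction<Void>() {
            @Override
            public Void run() {
                System.setProperty("org.jboss.netty.epollBugWorkaround", Boolean.toString(value));
                return null;
            }
        });
    }
}
---------------------------------------------------------------------------
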
@@ -3,5 +3,5 @@
 apply from: '../../build.gradle'

 dependencies {
-  testCompile project("${projectsPrefix}:core")
+  testCompile project(':core')
 }
@@ -23,6 +23,7 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.bootstrap.BootstrapForTesting;
 import org.elasticsearch.client.Client;
+import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -36,6 +37,7 @@ import static org.elasticsearch.node.NodeBuilder.nodeBuilder;

 /**
  */
+@SuppressForbidden(reason = "not really source code or a test")
 public class ManyMappingsBenchmark {

     private static final String MAPPING = "{\n" +
@@ -19,10 +19,12 @@

 package org.elasticsearch.benchmark.monitor.os;

+import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.ESLoggerFactory;
 import org.elasticsearch.monitor.os.OsProbe;

+@SuppressForbidden(reason = "not really source code or a test")
 public class OsProbeBenchmark {

     private static final int ITERATIONS = 100_000;
@@ -23,6 +23,7 @@ import org.elasticsearch.bootstrap.BootstrapForTesting;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider;
+import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.ESLoggerFactory;
 import org.elasticsearch.common.settings.Settings;

@@ -46,6 +47,7 @@ import static org.elasticsearch.node.NodeBuilder.nodeBuilder;
 /**
  *
  */
+@SuppressForbidden(reason = "not really source code or a test")
 public class ReplicaRecoveryBenchmark {

     private static final String INDEX_NAME = "index";
@@ -29,6 +29,7 @@ import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.benchmark.search.aggregations.TermsAggregationSearchBenchmark.StatsResult;
 import org.elasticsearch.bootstrap.BootstrapForTesting;
 import org.elasticsearch.client.Client;
+import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.SizeValue;

@@ -51,6 +52,7 @@ import static org.elasticsearch.node.NodeBuilder.nodeBuilder;
 /**
  *
  */
+@SuppressForbidden(reason = "not really source code or a test")
 public class GlobalOrdinalsBenchmark {

     private static final String INDEX_NAME = "index";
@@ -74,18 +74,6 @@ public class JarHellTests extends ESTestCase {
         }
     }

-    public void testBootclasspathLeniency() throws Exception {
-        Path dir = createTempDir();
-        String previousJavaHome = System.getProperty("java.home");
-        System.setProperty("java.home", dir.toString());
-        URL[] jars = {makeJar(dir, "foo.jar", null, "DuplicateClass.class"), makeJar(dir, "bar.jar", null, "DuplicateClass.class")};
-        try {
-            JarHell.checkJarHell(jars);
-        } finally {
-            System.setProperty("java.home", previousJavaHome);
-        }
-    }
-
     public void testDuplicateClasspathLeniency() throws Exception {
         Path dir = createTempDir();
         URL jar = makeJar(dir, "foo.jar", null, "Foo.class");

@@ -179,40 +167,6 @@ public class JarHellTests extends ESTestCase {
         }
     }

-    public void testRequiredJDKVersionIsOK() throws Exception {
-        Path dir = createTempDir();
-        String previousJavaVersion = System.getProperty("java.specification.version");
-        System.setProperty("java.specification.version", "1.7");
-
-        Manifest manifest = new Manifest();
-        Attributes attributes = manifest.getMainAttributes();
-        attributes.put(Attributes.Name.MANIFEST_VERSION, "1.0.0");
-        attributes.put(new Attributes.Name("X-Compile-Target-JDK"), "1.7");
-        URL[] jars = {makeJar(dir, "foo.jar", manifest, "Foo.class")};
-        try {
-            JarHell.checkJarHell(jars);
-        } finally {
-            System.setProperty("java.specification.version", previousJavaVersion);
-        }
-    }
-
-    public void testBadJDKVersionProperty() throws Exception {
-        Path dir = createTempDir();
-        String previousJavaVersion = System.getProperty("java.specification.version");
-        System.setProperty("java.specification.version", "bogus");
-
-        Manifest manifest = new Manifest();
-        Attributes attributes = manifest.getMainAttributes();
-        attributes.put(Attributes.Name.MANIFEST_VERSION, "1.0.0");
-        attributes.put(new Attributes.Name("X-Compile-Target-JDK"), "1.7");
-        URL[] jars = {makeJar(dir, "foo.jar", manifest, "Foo.class")};
-        try {
-            JarHell.checkJarHell(jars);
-        } finally {
-            System.setProperty("java.specification.version", previousJavaVersion);
-        }
-    }
-
     public void testBadJDKVersionInJar() throws Exception {
         Path dir = createTempDir();
         Manifest manifest = new Manifest();
@@ -19,119 +19,14 @@

 package org.elasticsearch.bootstrap;

-import org.apache.lucene.util.Constants;
-import org.elasticsearch.common.io.PathUtils;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.env.Environment;
 import org.elasticsearch.test.ESTestCase;

-import java.io.FilePermission;
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.security.PermissionCollection;
-import java.security.Permissions;
-import java.util.Set;

 public class SecurityTests extends ESTestCase {

-    /** test generated permissions */
-    public void testGeneratedPermissions() throws Exception {
-        Path path = createTempDir();
-        // make a fake ES home and ensure we only grant permissions to that.
-        Path esHome = path.resolve("esHome");
-        Settings.Builder settingsBuilder = Settings.builder();
-        settingsBuilder.put("path.home", esHome.toString());
-        Settings settings = settingsBuilder.build();
-
-        Path fakeTmpDir = createTempDir();
-        String realTmpDir = System.getProperty("java.io.tmpdir");
-        Permissions permissions;
-        try {
-            System.setProperty("java.io.tmpdir", fakeTmpDir.toString());
-            Environment environment = new Environment(settings);
-            permissions = Security.createPermissions(environment);
-        } finally {
-            System.setProperty("java.io.tmpdir", realTmpDir);
-        }
-
-        // the fake es home
-        assertNoPermissions(esHome, permissions);
-        // its parent
-        assertNoPermissions(esHome.getParent(), permissions);
-        // some other sibling
-        assertNoPermissions(esHome.getParent().resolve("other"), permissions);
-        // double check we overwrote java.io.tmpdir correctly for the test
-        assertNoPermissions(PathUtils.get(realTmpDir), permissions);
-    }
-
-    /** test generated permissions for all configured paths */
-    public void testEnvironmentPaths() throws Exception {
-        Path path = createTempDir();
-        // make a fake ES home and ensure we only grant permissions to that.
-        Path esHome = path.resolve("esHome");
-
-        Settings.Builder settingsBuilder = Settings.builder();
-        settingsBuilder.put("path.home", esHome.resolve("home").toString());
-        settingsBuilder.put("path.conf", esHome.resolve("conf").toString());
-        settingsBuilder.put("path.scripts", esHome.resolve("scripts").toString());
-        settingsBuilder.put("path.plugins", esHome.resolve("plugins").toString());
-        settingsBuilder.putArray("path.data", esHome.resolve("data1").toString(), esHome.resolve("data2").toString());
-        settingsBuilder.put("path.shared_data", esHome.resolve("custom").toString());
-        settingsBuilder.put("path.logs", esHome.resolve("logs").toString());
-        settingsBuilder.put("pidfile", esHome.resolve("test.pid").toString());
-        Settings settings = settingsBuilder.build();
-
-        Path fakeTmpDir = createTempDir();
-        String realTmpDir = System.getProperty("java.io.tmpdir");
-        Permissions permissions;
-        Environment environment;
-        try {
-            System.setProperty("java.io.tmpdir", fakeTmpDir.toString());
-            environment = new Environment(settings);
-            permissions = Security.createPermissions(environment);
-        } finally {
-            System.setProperty("java.io.tmpdir", realTmpDir);
-        }
-
-        // the fake es home
-        assertNoPermissions(esHome, permissions);
-        // its parent
-        assertNoPermissions(esHome.getParent(), permissions);
-        // some other sibling
-        assertNoPermissions(esHome.getParent().resolve("other"), permissions);
-        // double check we overwrote java.io.tmpdir correctly for the test
-        assertNoPermissions(PathUtils.get(realTmpDir), permissions);
-
-        // check that all directories got permissions:
-
-        // bin file: ro
-        assertExactPermissions(new FilePermission(environment.binFile().toString(), "read,readlink"), permissions);
-        // lib file: ro
-        assertExactPermissions(new FilePermission(environment.libFile().toString(), "read,readlink"), permissions);
-        // config file: ro
-        assertExactPermissions(new FilePermission(environment.configFile().toString(), "read,readlink"), permissions);
-        // scripts file: ro
-        assertExactPermissions(new FilePermission(environment.scriptsFile().toString(), "read,readlink"), permissions);
-        // plugins: ro
-        assertExactPermissions(new FilePermission(environment.pluginsFile().toString(), "read,readlink"), permissions);
-
-        // data paths: r/w
-        for (Path dataPath : environment.dataFiles()) {
-            assertExactPermissions(new FilePermission(dataPath.toString(), "read,readlink,write,delete"), permissions);
-        }
-        for (Path dataPath : environment.dataWithClusterFiles()) {
-            assertExactPermissions(new FilePermission(dataPath.toString(), "read,readlink,write,delete"), permissions);
-        }
-        assertExactPermissions(new FilePermission(environment.sharedDataFile().toString(), "read,readlink,write,delete"), permissions);
-        // logs: r/w
-        assertExactPermissions(new FilePermission(environment.logsFile().toString(), "read,readlink,write,delete"), permissions);
-        // temp dir: r/w
-        assertExactPermissions(new FilePermission(fakeTmpDir.toString(), "read,readlink,write,delete"), permissions);
-        // PID file: delete only (for the shutdown hook)
-        assertExactPermissions(new FilePermission(environment.pidFile().toString(), "delete"), permissions);
-    }
-
     public void testEnsureExists() throws IOException {
         Path p = createTempDir();

@@ -163,43 +58,6 @@ public class SecurityTests extends ESTestCase {
         } catch (IOException expected) {}
     }

-    public void testEnsureSymlink() throws IOException {
-        Path p = createTempDir();
-
-        Path exists = p.resolve("exists");
-        Files.createDirectory(exists);
-
-        // symlink
-        Path linkExists = p.resolve("linkExists");
-        try {
-            Files.createSymbolicLink(linkExists, exists);
-        } catch (UnsupportedOperationException | IOException e) {
-            assumeNoException("test requires filesystem that supports symbolic links", e);
-        } catch (SecurityException e) {
-            assumeNoException("test cannot create symbolic links with security manager enabled", e);
-        }
-        Security.ensureDirectoryExists(linkExists);
-        Files.createTempFile(linkExists, null, null);
-    }
-
-    public void testEnsureBrokenSymlink() throws IOException {
-        Path p = createTempDir();
-
-        // broken symlink
-        Path brokenLink = p.resolve("brokenLink");
-        try {
-            Files.createSymbolicLink(brokenLink, p.resolve("nonexistent"));
-        } catch (UnsupportedOperationException | IOException e) {
-            assumeNoException("test requires filesystem that supports symbolic links", e);
-        } catch (SecurityException e) {
-            assumeNoException("test cannot create symbolic links with security manager enabled", e);
-        }
-        try {
-            Security.ensureDirectoryExists(brokenLink);
-            fail("didn't get expected exception");
-        } catch (IOException expected) {}
-    }
-
     /** can't execute processes */
     public void testProcessExecution() throws Exception {
         assumeTrue("test requires security manager", System.getSecurityManager() != null);

@@ -208,61 +66,4 @@ public class SecurityTests extends ESTestCase {
             fail("didn't get expected exception");
         } catch (SecurityException expected) {}
     }
-
-    /** When a configured dir is a symlink, test that permissions work on link target */
-    public void testSymlinkPermissions() throws IOException {
-        // see https://github.com/elastic/elasticsearch/issues/12170
-        assumeFalse("windows does not automatically grant permission to the target of symlinks", Constants.WINDOWS);
-        Path dir = createTempDir();
-
-        Path target = dir.resolve("target");
-        Files.createDirectory(target);
-
-        // symlink
-        Path link = dir.resolve("link");
-        try {
-            Files.createSymbolicLink(link, target);
-        } catch (UnsupportedOperationException | IOException e) {
-            assumeNoException("test requires filesystem that supports symbolic links", e);
-        } catch (SecurityException e) {
-            assumeNoException("test cannot create symbolic links with security manager enabled", e);
-        }
-        Permissions permissions = new Permissions();
-        Security.addPath(permissions, "testing", link, "read");
-        assertExactPermissions(new FilePermission(link.toString(), "read"), permissions);
-        assertExactPermissions(new FilePermission(link.resolve("foo").toString(), "read"), permissions);
-        assertExactPermissions(new FilePermission(target.toString(), "read"), permissions);
-        assertExactPermissions(new FilePermission(target.resolve("foo").toString(), "read"), permissions);
-    }
-
-    /**
-     * checks exact file permissions, meaning those and only those for that path.
-     */
-    static void assertExactPermissions(FilePermission expected, PermissionCollection actual) {
-        String target = expected.getName(); // see javadocs
-        Set<String> permissionSet = asSet(expected.getActions().split(","));
-        boolean read = permissionSet.remove("read");
-        boolean readlink = permissionSet.remove("readlink");
-        boolean write = permissionSet.remove("write");
-        boolean delete = permissionSet.remove("delete");
-        boolean execute = permissionSet.remove("execute");
-        assertTrue("unrecognized permission: " + permissionSet, permissionSet.isEmpty());
-        assertEquals(read, actual.implies(new FilePermission(target, "read")));
-        assertEquals(readlink, actual.implies(new FilePermission(target, "readlink")));
-        assertEquals(write, actual.implies(new FilePermission(target, "write")));
-        assertEquals(delete, actual.implies(new FilePermission(target, "delete")));
-        assertEquals(execute, actual.implies(new FilePermission(target, "execute")));
-    }
-
-    /**
-     * checks that this path has no permissions
-     */
-    static void assertNoPermissions(Path path, PermissionCollection actual) {
-        String target = path.toString();
-        assertFalse(actual.implies(new FilePermission(target, "read")));
-        assertFalse(actual.implies(new FilePermission(target, "readlink")));
-        assertFalse(actual.implies(new FilePermission(target, "write")));
-        assertFalse(actual.implies(new FilePermission(target, "delete")));
-        assertFalse(actual.implies(new FilePermission(target, "execute")));
-    }
 }
@@ -413,8 +413,7 @@ public class CacheTests extends ESTestCase {

             Value that = (Value) o;

-            return value == that.value;
-
+            return value.equals(that.value);
         }

         @Override

@@ -633,7 +632,7 @@ public class CacheTests extends ESTestCase {
                 first = random.nextBoolean();
                 second = random.nextBoolean();
             } while (first && second);
-            if (first && !second) {
+            if (first) {
                 try {
                     cache.computeIfAbsent(key, k -> {
                         if (random.nextBoolean()) {

@@ -647,9 +646,9 @@ public class CacheTests extends ESTestCase {
                     assertThat(e.getCause(), instanceOf(Exception.class));
                     assertEquals(e.getCause().getMessage(), "testCachePollution");
                 }
-            } else if (!first && second) {
+            } else if (second) {
                 cache.invalidate(key);
-            } else if (!first && !second) {
+            } else {
                 cache.get(key);
            }
        }
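The equals() fix matters because Value.value is an object reference: == only holds for boxed integers that happen to come out of the small autobox cache. A quick demonstration:

---------------------------------------------------------------------------
class BoxedEqualityDemo {
    public static void main(String[] args) {
        Integer a = 1000;
        Integer b = 1000;
        // Reference comparison: only values in the autobox cache (-128..127)
        // are guaranteed to share instances, so this prints false.
        System.out.println(a == b);        // false
        // Value comparison, as in the fixed equals(): prints true.
        System.out.println(a.equals(b));   // true
    }
}
---------------------------------------------------------------------------
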
@@ -91,6 +91,20 @@ public class JDKESLoggerTests extends ESTestCase {
         assertThat(record.getSourceMethodName(), equalTo("testLocationInfoTest"));
     }

+    public void testSetLogLevelString() {
+        // verify the string based level-setters
+        esTestLogger.setLevel("error");
+        assertThat(esTestLogger.getLevel(), equalTo("SEVERE"));
+        esTestLogger.setLevel("warn");
+        assertThat(esTestLogger.getLevel(), equalTo("WARNING"));
+        esTestLogger.setLevel("info");
+        assertThat(esTestLogger.getLevel(), equalTo("INFO"));
+        esTestLogger.setLevel("debug");
+        assertThat(esTestLogger.getLevel(), equalTo("FINE"));
+        esTestLogger.setLevel("trace");
+        assertThat(esTestLogger.getLevel(), equalTo("FINEST"));
+    }
+
     private static class TestHandler extends Handler {

         private List<LogRecord> records = new ArrayList<>();
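The test pins down the mapping from string level names to java.util.logging levels. A sketch of that mapping; the real setter lives in the JDK-logging ESLogger implementation, so this helper is a made-up stand-in:

---------------------------------------------------------------------------
import java.util.Locale;
import java.util.logging.Level;

class LevelMapping {
    static Level toJul(String level) {
        switch (level.toLowerCase(Locale.ROOT)) {
            case "error": return Level.SEVERE;
            case "warn":  return Level.WARNING;
            case "info":  return Level.INFO;
            case "debug": return Level.FINE;
            case "trace": return Level.FINEST;
            default: throw new IllegalArgumentException("unknown level [" + level + "]");
        }
    }
}
---------------------------------------------------------------------------
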
|
@ -67,25 +67,23 @@ public class SettingsTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testReplacePropertiesPlaceholderSystemProperty() {
|
||||
System.setProperty("sysProp1", "sysVal1");
|
||||
try {
|
||||
Settings settings = settingsBuilder()
|
||||
.put("setting1", "${sysProp1}")
|
||||
.replacePropertyPlaceholders()
|
||||
.build();
|
||||
assertThat(settings.get("setting1"), equalTo("sysVal1"));
|
||||
} finally {
|
||||
System.clearProperty("sysProp1");
|
||||
}
|
||||
|
||||
String value = System.getProperty("java.home");
|
||||
assertFalse(value.isEmpty());
|
||||
Settings settings = settingsBuilder()
|
||||
.put("setting1", "${sysProp1:defaultVal1}")
|
||||
.put("setting1", "${java.home}")
|
||||
.replacePropertyPlaceholders()
|
||||
.build();
|
||||
assertThat(settings.get("setting1"), equalTo(value));
|
||||
|
||||
assertNull(System.getProperty("_test_property_should_not_exist"));
|
||||
settings = settingsBuilder()
|
||||
.put("setting1", "${_test_property_should_not_exist:defaultVal1}")
|
||||
.replacePropertyPlaceholders()
|
||||
.build();
|
||||
assertThat(settings.get("setting1"), equalTo("defaultVal1"));
|
||||
|
||||
settings = settingsBuilder()
|
||||
.put("setting1", "${sysProp1:}")
|
||||
.put("setting1", "${_test_property_should_not_exist:}")
|
||||
.replacePropertyPlaceholders()
|
||||
.build();
|
||||
assertThat(settings.get("setting1"), is(nullValue()));
|
||||
|
|
|
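The rewritten test leans on the ${name:default} placeholder rule: a set property wins, a missing property falls back to the default, and an empty default drops the setting. A simplified standalone model of that rule:

---------------------------------------------------------------------------
class PlaceholderDemo {
    // Simplified resolution of "${name:default}": property value if present,
    // else the default, else null when the default is empty or absent.
    static String resolve(String name, String defaultValue) {
        String value = System.getProperty(name);
        if (value != null) {
            return value;
        }
        if (defaultValue == null || defaultValue.isEmpty()) {
            return null;
        }
        return defaultValue;
    }

    public static void main(String[] args) {
        System.out.println(resolve("java.home", null));   // always set by the JVM
        System.out.println(resolve("_test_property_should_not_exist", "defaultVal1")); // defaultVal1
        System.out.println(resolve("_test_property_should_not_exist", ""));            // null
    }
}
---------------------------------------------------------------------------
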
@@ -19,6 +19,7 @@

 package org.elasticsearch.fieldstats;

+import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.fieldstats.FieldStats;
 import org.elasticsearch.action.fieldstats.FieldStatsResponse;

@@ -47,12 +48,12 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
         ));
         ensureGreen("test");

-        byte minByte = Byte.MAX_VALUE;
-        byte maxByte = Byte.MIN_VALUE;
-        short minShort = Short.MAX_VALUE;
-        short maxShort = Short.MIN_VALUE;
-        int minInt = Integer.MAX_VALUE;
-        int maxInt = Integer.MIN_VALUE;
+        long minByte = Byte.MAX_VALUE;
+        long maxByte = Byte.MIN_VALUE;
+        long minShort = Short.MAX_VALUE;
+        long maxShort = Short.MIN_VALUE;
+        long minInt = Integer.MAX_VALUE;
+        long maxInt = Integer.MIN_VALUE;
         long minLong = Long.MAX_VALUE;
         long maxLong = Long.MIN_VALUE;
         float minFloat = Float.MAX_VALUE;

@@ -66,11 +67,11 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
         List<IndexRequestBuilder> request = new ArrayList<>(numDocs);
         for (int doc = 0; doc < numDocs; doc++) {
             byte b = randomByte();
-            minByte = (byte) Math.min(minByte, b);
-            maxByte = (byte) Math.max(maxByte, b);
+            minByte = Math.min(minByte, b);
+            maxByte = Math.max(maxByte, b);
             short s = randomShort();
-            minShort = (short) Math.min(minShort, s);
-            maxShort = (short) Math.max(maxShort, s);
+            minShort = Math.min(minShort, s);
+            maxShort = Math.max(maxShort, s);
             int i = randomInt();
             minInt = Math.min(minInt, i);
             maxInt = Math.max(maxInt, i);

@@ -106,18 +107,18 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
             assertThat(stats.getDensity(), equalTo(100));
         }

-        assertThat(response.getAllFieldStats().get("byte").getMinValue(), equalTo(Byte.toString(minByte)));
-        assertThat(response.getAllFieldStats().get("byte").getMaxValue(), equalTo(Byte.toString(maxByte)));
-        assertThat(response.getAllFieldStats().get("short").getMinValue(), equalTo(Short.toString(minShort)));
-        assertThat(response.getAllFieldStats().get("short").getMaxValue(), equalTo(Short.toString(maxShort)));
-        assertThat(response.getAllFieldStats().get("integer").getMinValue(), equalTo(Integer.toString(minInt)));
-        assertThat(response.getAllFieldStats().get("integer").getMaxValue(), equalTo(Integer.toString(maxInt)));
-        assertThat(response.getAllFieldStats().get("long").getMinValue(), equalTo(Long.toString(minLong)));
-        assertThat(response.getAllFieldStats().get("long").getMaxValue(), equalTo(Long.toString(maxLong)));
-        assertThat(response.getAllFieldStats().get("float").getMinValue(), equalTo(Float.toString(minFloat)));
-        assertThat(response.getAllFieldStats().get("float").getMaxValue(), equalTo(Float.toString(maxFloat)));
-        assertThat(response.getAllFieldStats().get("double").getMinValue(), equalTo(Double.toString(minDouble)));
-        assertThat(response.getAllFieldStats().get("double").getMaxValue(), equalTo(Double.toString(maxDouble)));
+        assertThat(response.getAllFieldStats().get("byte").getMinValue(), equalTo(minByte));
+        assertThat(response.getAllFieldStats().get("byte").getMaxValue(), equalTo(maxByte));
+        assertThat(response.getAllFieldStats().get("short").getMinValue(), equalTo(minShort));
+        assertThat(response.getAllFieldStats().get("short").getMaxValue(), equalTo(maxShort));
+        assertThat(response.getAllFieldStats().get("integer").getMinValue(), equalTo(minInt));
+        assertThat(response.getAllFieldStats().get("integer").getMaxValue(), equalTo(maxInt));
+        assertThat(response.getAllFieldStats().get("long").getMinValue(), equalTo(minLong));
+        assertThat(response.getAllFieldStats().get("long").getMaxValue(), equalTo(maxLong));
+        assertThat(response.getAllFieldStats().get("float").getMinValue(), equalTo(minFloat));
+        assertThat(response.getAllFieldStats().get("float").getMaxValue(), equalTo(maxFloat));
+        assertThat(response.getAllFieldStats().get("double").getMinValue(), equalTo(minDouble));
+        assertThat(response.getAllFieldStats().get("double").getMaxValue(), equalTo(maxDouble));
     }

     public void testFieldStatsIndexLevel() throws Exception {

@@ -139,32 +140,32 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
         // default:
         FieldStatsResponse response = client().prepareFieldStats().setFields("value").get();
         assertAllSuccessful(response);
-        assertThat(response.getAllFieldStats().get("value").getMinValue(), equalTo(Long.toString(-10)));
-        assertThat(response.getAllFieldStats().get("value").getMaxValue(), equalTo(Long.toString(300)));
+        assertThat(response.getAllFieldStats().get("value").getMinValue(), equalTo(-10l));
+        assertThat(response.getAllFieldStats().get("value").getMaxValue(), equalTo(300l));
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
-        assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMinValue(), equalTo(Long.toString(-10)));
-        assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMaxValue(), equalTo(Long.toString(300)));
+        assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMinValue(), equalTo(-10l));
+        assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMaxValue(), equalTo(300l));

         // Level: cluster
         response = client().prepareFieldStats().setFields("value").setLevel("cluster").get();
         assertAllSuccessful(response);
-        assertThat(response.getAllFieldStats().get("value").getMinValue(), equalTo(Long.toString(-10)));
-        assertThat(response.getAllFieldStats().get("value").getMaxValue(), equalTo(Long.toString(300)));
+        assertThat(response.getAllFieldStats().get("value").getMinValue(), equalTo(-10l));
+        assertThat(response.getAllFieldStats().get("value").getMaxValue(), equalTo(300l));
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
-        assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMinValue(), equalTo(Long.toString(-10)));
-        assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMaxValue(), equalTo(Long.toString(300)));
+        assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMinValue(), equalTo(-10l));
+        assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMaxValue(), equalTo(300l));

         // Level: indices
         response = client().prepareFieldStats().setFields("value").setLevel("indices").get();
         assertAllSuccessful(response);
         assertThat(response.getAllFieldStats(), nullValue());
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(3));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(Long.toString(-10)));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(Long.toString(100)));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(Long.toString(101)));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(Long.toString(200)));
-        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(Long.toString(201)));
-        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(Long.toString(300)));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(-10l));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(100l));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(101l));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(200l));
+        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(201l));
+        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(300l));

         // Illegal level option:
         try {

@@ -200,10 +201,10 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
         FieldStatsResponse response = client().prepareFieldStats().setFields("value").setLevel("indices").get();
         assertAllSuccessful(response);
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(Long.toString(1)));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(Long.toString(2)));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo("a"));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo("b"));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(1l));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(2l));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(new BytesRef("a")));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(new BytesRef("b")));
     }

     public void testFieldStatsFiltering() throws Exception {

@@ -230,8 +231,8 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
         assertAllSuccessful(response);
         assertThat(response.getAllFieldStats(), nullValue());
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
-        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(Long.toString(201)));
-        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(Long.toString(300)));
+        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(201l));
+        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(300l));

         response = client().prepareFieldStats()
                 .setFields("value")

@@ -241,10 +242,10 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
         assertAllSuccessful(response);
         assertThat(response.getAllFieldStats(), nullValue());
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(Long.toString(-10)));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(Long.toString(100)));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(Long.toString(101)));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(Long.toString(200)));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(-10l));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMaxValue(), equalTo(100l));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(101l));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(200l));

         response = client().prepareFieldStats()
                 .setFields("value")

@@ -254,10 +255,10 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
         assertAllSuccessful(response);
         assertThat(response.getAllFieldStats(), nullValue());
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(Long.toString(101)));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(Long.toString(200)));
-        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(Long.toString(201)));
-        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(Long.toString(300)));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(101l));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(200l));
+        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(201l));
+        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(300l));

         response = client().prepareFieldStats()
                 .setFields("value")

@@ -285,8 +286,8 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
         assertAllSuccessful(response);
         assertThat(response.getAllFieldStats(), nullValue());
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(Long.toString(101)));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(Long.toString(200)));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(101l));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(200l));

         response = client().prepareFieldStats()
                 .setFields("value")

@@ -296,8 +297,8 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
         assertAllSuccessful(response);
         assertThat(response.getAllFieldStats(), nullValue());
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
-        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(Long.toString(201)));
-        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(Long.toString(300)));
+        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(201l));
+        assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(300l));
     }

     public void testIncompatibleFilter() throws Exception {
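These assertions track an API change: getMinValue and getMaxValue now return typed values (long, double, BytesRef) rather than strings, with *AsString variants (seen in the next file) reserved for display. A hypothetical mirror of that shape for a long field:

---------------------------------------------------------------------------
class LongFieldStats {
    private final long minValue;
    private final long maxValue;

    LongFieldStats(long minValue, long maxValue) {
        this.minValue = minValue;
        this.maxValue = maxValue;
    }

    // typed accessors compare as numbers...
    long getMinValue() { return minValue; }
    long getMaxValue() { return maxValue; }

    // ...while the *AsString forms exist only for rendering
    String getMinValueAsString() { return Long.toString(minValue); }
    String getMaxValueAsString() { return Long.toString(maxValue); }

    public static void main(String[] args) {
        LongFieldStats stats = new LongFieldStats(-10L, 300L);
        System.out.println(stats.getMaxValue() == 300L);               // true
        System.out.println(stats.getMaxValueAsString().equals("300")); // true
    }
}
---------------------------------------------------------------------------
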
@@ -19,10 +19,12 @@

 package org.elasticsearch.fieldstats;

+import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.action.fieldstats.FieldStats;
 import org.elasticsearch.action.fieldstats.FieldStatsResponse;
 import org.elasticsearch.action.fieldstats.IndexConstraint;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.mapper.core.DateFieldMapper;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;

@@ -75,8 +77,10 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
         assertThat(result.getAllFieldStats().get("field").getMaxDoc(), equalTo(11l));
         assertThat(result.getAllFieldStats().get("field").getDocCount(), equalTo(11l));
         assertThat(result.getAllFieldStats().get("field").getDensity(), equalTo(100));
-        assertThat(result.getAllFieldStats().get("field").getMinValue(), equalTo(String.format(Locale.ENGLISH, "%03d", 0)));
-        assertThat(result.getAllFieldStats().get("field").getMaxValue(), equalTo(String.format(Locale.ENGLISH, "%03d", 10)));
+        assertThat(result.getAllFieldStats().get("field").getMinValue(), equalTo(new BytesRef(String.format(Locale.ENGLISH, "%03d", 0))));
+        assertThat(result.getAllFieldStats().get("field").getMaxValue(), equalTo(new BytesRef(String.format(Locale.ENGLISH, "%03d", 10))));
+        assertThat(result.getAllFieldStats().get("field").getMinValueAsString(), equalTo(String.format(Locale.ENGLISH, "%03d", 0)));
+        assertThat(result.getAllFieldStats().get("field").getMaxValueAsString(), equalTo(String.format(Locale.ENGLISH, "%03d", 10)));
     }

     public void testDouble() {

@@ -91,8 +95,9 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
         assertThat(result.getAllFieldStats().get(fieldName).getMaxDoc(), equalTo(11l));
         assertThat(result.getAllFieldStats().get(fieldName).getDocCount(), equalTo(11l));
         assertThat(result.getAllFieldStats().get(fieldName).getDensity(), equalTo(100));
-        assertThat(result.getAllFieldStats().get(fieldName).getMinValue(), equalTo(Double.toString(-1)));
-        assertThat(result.getAllFieldStats().get(fieldName).getMaxValue(), equalTo(Double.toString(9)));
+        assertThat(result.getAllFieldStats().get(fieldName).getMinValue(), equalTo(-1d));
+        assertThat(result.getAllFieldStats().get(fieldName).getMaxValue(), equalTo(9d));
+        assertThat(result.getAllFieldStats().get(fieldName).getMinValueAsString(), equalTo(Double.toString(-1)));
     }

     public void testFloat() {

@@ -107,8 +112,10 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
         assertThat(result.getAllFieldStats().get(fieldName).getMaxDoc(), equalTo(11l));
         assertThat(result.getAllFieldStats().get(fieldName).getDocCount(), equalTo(11l));
         assertThat(result.getAllFieldStats().get(fieldName).getDensity(), equalTo(100));
-        assertThat(result.getAllFieldStats().get(fieldName).getMinValue(), equalTo(Float.toString(-1)));
-        assertThat(result.getAllFieldStats().get(fieldName).getMaxValue(), equalTo(Float.toString(9)));
+        assertThat(result.getAllFieldStats().get(fieldName).getMinValue(), equalTo(-1.0));
+        assertThat(result.getAllFieldStats().get(fieldName).getMaxValue(), equalTo(9.0));
+        assertThat(result.getAllFieldStats().get(fieldName).getMinValueAsString(), equalTo(Float.toString(-1)));
+        assertThat(result.getAllFieldStats().get(fieldName).getMaxValueAsString(), equalTo(Float.toString(9)));
     }

     private void testNumberRange(String fieldName, String fieldType, long min, long max) {

@@ -123,8 +130,10 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
         assertThat(result.getAllFieldStats().get(fieldName).getMaxDoc(), equalTo(numDocs));
         assertThat(result.getAllFieldStats().get(fieldName).getDocCount(), equalTo(numDocs));
         assertThat(result.getAllFieldStats().get(fieldName).getDensity(), equalTo(100));
-        assertThat(result.getAllFieldStats().get(fieldName).getMinValue(), equalTo(java.lang.Long.toString(min)));
-        assertThat(result.getAllFieldStats().get(fieldName).getMaxValue(), equalTo(java.lang.Long.toString(max)));
+        assertThat(result.getAllFieldStats().get(fieldName).getMinValue(), equalTo(min));
+        assertThat(result.getAllFieldStats().get(fieldName).getMaxValue(), equalTo(max));
+        assertThat(result.getAllFieldStats().get(fieldName).getMinValueAsString(), equalTo(java.lang.Long.toString(min)));
+        assertThat(result.getAllFieldStats().get(fieldName).getMaxValueAsString(), equalTo(java.lang.Long.toString(max)));
         client().admin().indices().prepareDelete("test").get();
     }

@@ -193,15 +202,15 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
         assertThat(result.getTotalShards(), equalTo(2));
         assertThat(result.getSuccessfulShards(), equalTo(1));
         assertThat(result.getShardFailures()[0].reason(), either(containsString("field [field1] doesn't exist")).or(containsString("field [field2] doesn't exist")));
-        assertThat(result.getIndicesMergedFieldStats().get("_all").get("field1").getMinValue(), equalTo("a"));
-        assertThat(result.getIndicesMergedFieldStats().get("_all").get("field1").getMaxValue(), equalTo("b"));
+        assertThat(result.getIndicesMergedFieldStats().get("_all").get("field1").getMinValueAsString(), equalTo("a"));
+        assertThat(result.getIndicesMergedFieldStats().get("_all").get("field1").getMaxValueAsString(), equalTo("b"));
     }

     public void testNumberFiltering() {
         createIndex("test1", Settings.EMPTY, "type", "value", "type=long");
-        client().prepareIndex("test1", "test").setSource("value", 1).get();
+        client().prepareIndex("test1", "test").setSource("value", 1l).get();
         createIndex("test2", Settings.EMPTY, "type", "value", "type=long");
-        client().prepareIndex("test2", "test").setSource("value", 3).get();
+        client().prepareIndex("test2", "test").setSource("value", 3l).get();
         client().admin().indices().prepareRefresh().get();

         FieldStatsResponse response = client().prepareFieldStats()

@@ -209,8 +218,8 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
                 .setLevel("indices")
                 .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo("1"));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo("3"));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(1l));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(3l));

         response = client().prepareFieldStats()
                 .setFields("value")

@@ -232,7 +241,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
                 .setLevel("indices")
                 .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo("1"));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(1l));

         response = client().prepareFieldStats()
                 .setFields("value")

@@ -240,7 +249,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
                 .setLevel("indices")
                 .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo("1"));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(1l));

         response = client().prepareFieldStats()
                 .setFields("value")

@@ -255,7 +264,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
                 .setLevel("indices")
                 .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo("3"));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(3l));

         response = client().prepareFieldStats()
                 .setFields("value")

@@ -263,7 +272,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
                 .setLevel("indices")
                 .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo("3"));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(3l));

         response = client().prepareFieldStats()
                 .setFields("value")

@@ -278,8 +287,8 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
                 .setLevel("indices")
                 .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo("1"));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo("3"));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(1l));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(3l));

         response = client().prepareFieldStats()
                 .setFields("value")

@@ -290,10 +299,15 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
     }

     public void testDateFiltering() {
         DateTime dateTime1 = new DateTime(2014, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC);
+        String dateTime1Str = DateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().print(dateTime1);
         DateTime dateTime2 = new DateTime(2014, 1, 2, 0, 0, 0, 0, DateTimeZone.UTC);
+        String dateTime2Str = DateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().print(dateTime2);

         createIndex("test1", Settings.EMPTY, "type", "value", "type=date");
-        client().prepareIndex("test1", "test").setSource("value", "2014-01-01T00:00:00.000Z").get();
+        client().prepareIndex("test1", "test").setSource("value", dateTime1Str).get();
         createIndex("test2", Settings.EMPTY, "type", "value", "type=date");
-        client().prepareIndex("test2", "test").setSource("value", "2014-01-02T00:00:00.000Z").get();
+        client().prepareIndex("test2", "test").setSource("value", dateTime2Str).get();
         client().admin().indices().prepareRefresh().get();

         FieldStatsResponse response = client().prepareFieldStats()

@@ -301,8 +315,10 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
                 .setLevel("indices")
                 .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo("2014-01-01T00:00:00.000Z"));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo("2014-01-02T00:00:00.000Z"));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(dateTime1.getMillis()));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(dateTime2.getMillis()));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValueAsString(), equalTo(dateTime1Str));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(), equalTo(dateTime2Str));

         response = client().prepareFieldStats()
                 .setFields("value")

@@ -317,7 +333,8 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
                 .setLevel("indices")
                 .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo("2014-01-01T00:00:00.000Z"));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(dateTime1.getMillis()));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValueAsString(), equalTo(dateTime1Str));

         response = client().prepareFieldStats()
                 .setFields("value")

@@ -325,7 +342,8 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
                 .setLevel("indices")
                 .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo("2014-01-02T00:00:00.000Z"));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(dateTime2.getMillis()));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(), equalTo(dateTime2Str));

         response = client().prepareFieldStats()
                 .setFields("value")

@@ -340,7 +358,8 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
                 .setLevel("indices")
                 .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo("2014-01-02T00:00:00.000Z"));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(dateTime2.getMillis()));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(), equalTo(dateTime2Str));

         response = client().prepareFieldStats()
                 .setFields("value")

@@ -348,8 +367,10 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
                 .setLevel("indices")
                 .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo("2014-01-01T00:00:00.000Z"));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo("2014-01-02T00:00:00.000Z"));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(dateTime1.getMillis()));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(dateTime2.getMillis()));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValueAsString(), equalTo(dateTime1Str));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(), equalTo(dateTime2Str));

         response = client().prepareFieldStats()
                 .setFields("value")

@@ -357,8 +378,10 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
                 .setLevel("indices")
                 .get();
         assertThat(response.getIndicesMergedFieldStats().size(), equalTo(2));
-        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo("2014-01-01T00:00:00.000Z"));
-        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo("2014-01-02T00:00:00.000Z"));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValue(), equalTo(dateTime1.getMillis()));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo(dateTime2.getMillis()));
+        assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValueAsString(), equalTo(dateTime1Str));
+        assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(), equalTo(dateTime2Str));
|
||||
}
|
||||
|
||||
public void testDateFiltering_optionalFormat() {
|
||||
|
@ -376,7 +399,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
|
|||
.setLevel("indices")
|
||||
.get();
|
||||
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
|
||||
assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValue(), equalTo("2014-01-02T00:00:00.000Z"));
|
||||
assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(), equalTo("2014-01-02T00:00:00.000Z"));
|
||||
|
||||
try {
|
||||
client().prepareFieldStats()
|
||||
|
@ -404,8 +427,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
|
|||
.setIndexContraints(new IndexConstraint("value", MIN, GTE, "1998-01-01T00:00:00.000Z"))
|
||||
.setLevel("indices")
|
||||
.get();
|
||||
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1));
|
||||
assertThat(response.getIndicesMergedFieldStats().get("test1").size(), equalTo(0));
|
||||
assertThat(response.getIndicesMergedFieldStats().size(), equalTo(0));
|
||||
}
|
||||
|
||||
}
|
|
@ -20,7 +20,10 @@ package org.elasticsearch.index.shard;

import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.*;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;

@ -80,18 +83,13 @@ import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.index.translog.TranslogConfig;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.recovery.RecoveryState;
import org.elasticsearch.test.DummyShardLock;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.FieldMaskingReader;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.test.*;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.BrokenBarrierException;

@ -779,7 +777,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
ShardRouting routing = new ShardRouting(shard.routingEntry());
test.removeShard(0, "b/c simon says so");
ShardRoutingHelper.reinit(routing);
IndexShard newShard = test.createShard(0, routing);
IndexShard newShard = test.createShard(routing);
newShard.updateRoutingEntry(routing, false);
DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, Version.CURRENT);
newShard.markAsRecovering("store", new RecoveryState(newShard.shardId(), routing.primary(), RecoveryState.Type.STORE, localNode, localNode));

@ -811,7 +809,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
Lucene.cleanLuceneIndex(store.directory());
store.decRef();
ShardRoutingHelper.reinit(routing);
IndexShard newShard = test.createShard(0, routing);
IndexShard newShard = test.createShard(routing);
newShard.updateRoutingEntry(routing, false);
newShard.markAsRecovering("store", new RecoveryState(newShard.shardId(), routing.primary(), RecoveryState.Type.STORE, localNode, localNode));
try {

@ -831,7 +829,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
// OK!
}
test.removeShard(0, "I broken it");
newShard = test.createShard(0, routing);
newShard = test.createShard(routing);
newShard.updateRoutingEntry(routing, false);
newShard.markAsRecovering("store", new RecoveryState(newShard.shardId(), routing.primary(), RecoveryState.Type.STORE, localNode, localNode));
assertTrue("recover even if there is nothing to recover", newShard.recoverFromStore(localNode));

@ -869,7 +867,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
ShardRoutingHelper.reinit(routing);
routing = ShardRoutingHelper.newWithRestoreSource(routing, new RestoreSource(new SnapshotId("foo", "bar"), Version.CURRENT, "test"));
test_target.removeShard(0, "just do it man!");
final IndexShard test_target_shard = test_target.createShard(0, routing);
final IndexShard test_target_shard = test_target.createShard(routing);
Store sourceStore = test_shard.store();
Store targetStore = test_target_shard.store();

@ -97,7 +97,7 @@ public class IndicesLifecycleListenerSingleNodeTests extends ESSingleNodeTestCas
String nodeId = newRouting.currentNodeId();
ShardRoutingHelper.moveToUnassigned(newRouting, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "boom"));
ShardRoutingHelper.initialize(newRouting, nodeId);
IndexShard shard = index.createShard(0, newRouting);
IndexShard shard = index.createShard(newRouting);
shard.updateRoutingEntry(newRouting, true);
final DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, Version.CURRENT);
shard.markAsRecovering("store", new RecoveryState(shard.shardId(), newRouting.primary(), RecoveryState.Type.SNAPSHOT, newRouting.restoreSource(), localNode));

@ -22,7 +22,6 @@ package org.elasticsearch.node.internal;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.cli.CliToolTestCase;
import org.elasticsearch.common.cli.Terminal;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.env.Environment;

@ -35,9 +34,7 @@ import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;

import static org.elasticsearch.common.settings.Settings.settingsBuilder;

@ -45,38 +42,8 @@ import static org.hamcrest.Matchers.*;

public class InternalSettingsPreparerTests extends ESTestCase {

Map<String, String> savedProperties = new HashMap<>();
Settings baseEnvSettings;

@Before
public void saveSettingsSystemProperties() {
// clear out any properties the settings preparer may look for
savedProperties.clear();
for (Object propObj : System.getProperties().keySet()) {
String property = (String)propObj;
// NOTE: these prefixes are prefixes of the defaults, so both are handled here
for (String prefix : InternalSettingsPreparer.PROPERTY_PREFIXES) {
if (property.startsWith(prefix)) {
savedProperties.put(property, System.getProperty(property));
}
}
}
String name = System.getProperty("name");
if (name != null) {
savedProperties.put("name", name);
}
for (String property : savedProperties.keySet()) {
System.clearProperty(property);
}
}

@After
public void restoreSettingsSystemProperties() {
for (Map.Entry<String, String> property : savedProperties.entrySet()) {
System.setProperty(property.getKey(), property.getValue());
}
}

@Before
public void createBaseEnvSettings() {
baseEnvSettings = settingsBuilder()

@ -93,13 +60,13 @@ public class InternalSettingsPreparerTests extends ESTestCase {
Settings settings = InternalSettingsPreparer.prepareSettings(Settings.EMPTY);
assertNotNull(settings.get("name")); // a name was set
assertNotNull(settings.get(ClusterName.SETTING)); // a cluster name was set
assertEquals(settings.toString(), 2, settings.names().size());
int size = settings.names().size();

Environment env = InternalSettingsPreparer.prepareEnvironment(baseEnvSettings, null);
settings = env.settings();
assertNotNull(settings.get("name")); // a name was set
assertNotNull(settings.get(ClusterName.SETTING)); // a cluster name was set
assertEquals(settings.toString(), 3 /* path.home is in the base settings */, settings.names().size());
assertEquals(settings.toString(), size + 1 /* path.home is in the base settings */, settings.names().size());
String home = baseEnvSettings.get("path.home");
String configDir = env.configFile().toString();
assertTrue(configDir, configDir.startsWith(home));

@ -112,30 +79,6 @@ public class InternalSettingsPreparerTests extends ESTestCase {
assertEquals(ClusterName.DEFAULT.value(), settings.get(ClusterName.SETTING));
}

public void testIgnoreSystemProperties() {
try {
System.setProperty("es.node.zone", "foo");
Settings settings = settingsBuilder()
.put("node.zone", "bar")
.put(baseEnvSettings)
.build();
Environment env = InternalSettingsPreparer.prepareEnvironment(settings, null);
// Should use setting from the system property
assertThat(env.settings().get("node.zone"), equalTo("foo"));

settings = settingsBuilder()
.put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true)
.put("node.zone", "bar")
.put(baseEnvSettings)
.build();
env = InternalSettingsPreparer.prepareEnvironment(settings, null);
// Should use setting from the system property
assertThat(env.settings().get("node.zone"), equalTo("bar"));
} finally {
System.clearProperty("es.node.zone");
}
}

public void testReplacePromptPlaceholders() {
final List<String> replacedSecretProperties = new ArrayList<>();
final List<String> replacedTextProperties = new ArrayList<>();

@ -205,74 +148,6 @@ public class InternalSettingsPreparerTests extends ESTestCase {
}
}

public void testNameSettingsPreference() {
try {
System.setProperty("name", "sys-prop-name");
// Test system property overrides node.name
Settings settings = settingsBuilder()
.put("node.name", "node-name")
.put(baseEnvSettings)
.build();
Environment env = InternalSettingsPreparer.prepareEnvironment(settings, null);
assertThat(env.settings().get("name"), equalTo("sys-prop-name"));

// test name in settings overrides sys prop and node.name
settings = settingsBuilder()
.put("name", "name-in-settings")
.put("node.name", "node-name")
.put(baseEnvSettings)
.build();
env = InternalSettingsPreparer.prepareEnvironment(settings, null);
assertThat(env.settings().get("name"), equalTo("name-in-settings"));

// test only node.name in settings
System.clearProperty("name");
settings = settingsBuilder()
.put("node.name", "node-name")
.put(baseEnvSettings)
.build();
env = InternalSettingsPreparer.prepareEnvironment(settings, null);
assertThat(env.settings().get("name"), equalTo("node-name"));

// test no name at all results in name being set
env = InternalSettingsPreparer.prepareEnvironment(baseEnvSettings, null);
assertThat(env.settings().get("name"), not("name-in-settings"));
assertThat(env.settings().get("name"), not("sys-prop-name"));
assertThat(env.settings().get("name"), not("node-name"));
assertThat(env.settings().get("name"), notNullValue());
} finally {
System.clearProperty("name");
}
}

public void testPromptForNodeNameOnlyPromptsOnce() {
final AtomicInteger counter = new AtomicInteger();
final Terminal terminal = new CliToolTestCase.MockTerminal() {
@Override
public char[] readSecret(String message, Object... args) {
fail("readSecret should never be called by this test");
return null;
}

@Override
public String readText(String message, Object... args) {
int count = counter.getAndIncrement();
return "prompted name " + count;
}
};

System.clearProperty("name");
Settings settings = Settings.builder()
.put(baseEnvSettings)
.put("node.name", InternalSettingsPreparer.TEXT_PROMPT_VALUE)
.build();
Environment env = InternalSettingsPreparer.prepareEnvironment(settings, terminal);
settings = env.settings();
assertThat(counter.intValue(), is(1));
assertThat(settings.get("name"), is("prompted name 0"));
assertThat(settings.get("node.name"), is("prompted name 0"));
}

public void testGarbageIsNotSwallowed() throws IOException {
try {
InputStream garbage = getClass().getResourceAsStream("/config/garbage/garbage.yml");

@ -46,6 +46,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFail
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;

public class SimpleSearchIT extends ESIntegTestCase {
public void testSearchNullIndex() {

@ -336,6 +337,18 @@ public class SimpleSearchIT extends ESIntegTestCase {
.setFrom(DefaultSearchContext.Defaults.MAX_RESULT_WINDOW * 10).get(), 1);
}

public void testQueryNumericFieldWithRegex() throws Exception {
createIndex("idx");
indexRandom(true, client().prepareIndex("idx", "type").setSource("num", 34));

try {
client().prepareSearch("idx").setQuery(QueryBuilders.regexpQuery("num", "34")).get();
fail("SearchPhaseExecutionException should have been thrown");
} catch (SearchPhaseExecutionException ex) {
assertThat(ex.getCause().getCause().getMessage(), equalTo("Cannot use regular expression to filter numeric field [num]"));
}
}

private void assertWindowFails(SearchRequestBuilder search) {
try {
search.get();

@ -41,7 +41,7 @@ buildscript {
allprojects {
project.ext {
// this is common configuration for distributions, but we also add it here for the license check to use
dependencyFiles = project("${projectsPrefix}:core").configurations.runtime.copyRecursive().exclude(module: 'slf4j-api')
dependencyFiles = project(':core').configurations.runtime.copyRecursive().exclude(module: 'slf4j-api')
}
}

@ -79,7 +79,7 @@ subprojects {
*****************************************************************************/
libFiles = copySpec {
into 'lib'
from project("${projectsPrefix}:core").jar
from project(':core').jar
from dependencyFiles
}

@ -20,6 +20,8 @@ include::migrate_3_0.asciidoc[]

include::migrate_2_1.asciidoc[]

include::migrate_2_2.asciidoc[]

include::migrate_2_0.asciidoc[]

include::migrate_1_6.asciidoc[]

@ -0,0 +1,16 @@
[[breaking-changes-2.2]]
== Breaking changes in 2.2

This section discusses the changes that you need to be aware of when migrating
your application to Elasticsearch 2.2.

* <<breaking_22_index_apis>>

[[breaking_22_index_apis]]
=== Index APIs

==== Field stats API

The field stats response format has changed for number-based and date fields. The `min_value` and
`max_value` elements now return values as numbers, and the new `min_value_as_string` and
`max_value_as_string` elements return the values as strings.

@ -430,6 +430,10 @@ For simplicity, only one way of adding the ids to the existing list (empty by de
error description). This will influence code that uses `IndexRequest.opType()` or `IndexRequest.create()`
to index a document only if it doesn't already exist.
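
A hedged sketch of the affected create-only indexing pattern (the index, type, and id are illustrative; `setOpType` and `DocumentAlreadyExistsException` are assumed from the contemporary Java API):

---------------------------------------------------------------------------
try {
    client().prepareIndex("my-index", "my-type", "1")
            .setSource("field", "value")
            .setOpType(IndexRequest.OpType.CREATE) // index only if absent
            .get();
} catch (DocumentAlreadyExistsException e) {
    // the document already exists; note that the error description changed
}
---------------------------------------------------------------------------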
==== ShapeBuilders

`InternalLineStringBuilder` is removed in favour of `LineStringBuilder`, `InternalPolygonBuilder` in favour of `PolygonBuilder`, and `Ring` has been replaced with `LineStringBuilder`. Also, the abstract base classes `BaseLineStringBuilder` and `BasePolygonBuilder` have been merged with their corresponding implementations.

[[breaking_30_cache_concurrency]]
=== Cache concurrency level settings removed

@ -79,11 +79,21 @@ document and field.

`min_value`::

The lowest value in the field represented in a displayable form.
The lowest value in the field.

`min_value_as_string`::

The lowest value in the field represented in a displayable form. All fields
return this except string fields, since string fields already represent their
values as strings.

`max_value`::

The highest value in the field represented in a displayable form.
The highest value in the field.

`max_value_as_string`::

The highest value in the field represented in a displayable form. All fields
return this except string fields, since string fields already represent their
values as strings.

NOTE: Documents marked as deleted (but not yet removed by the merge process)
still affect all the mentioned statistics.

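For illustration, a fragment of how these elements might appear for a date field (the values correspond to the 2014-01-01/2014-01-02 UTC dates used in this commit's tests; surrounding fields are elided):

---------------------------------------------------------------------------
"value": {
    "min_value": 1388534400000,
    "min_value_as_string": "2014-01-01T00:00:00.000Z",
    "max_value": 1388620800000,
    "max_value_as_string": "2014-01-02T00:00:00.000Z"
}
---------------------------------------------------------------------------
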
@ -0,0 +1 @@
*/

@ -0,0 +1,10 @@

// This file exists solely for the purpose of allowing a nice error message
// if someone tries running a gradle command from within the extra-plugins dir.

println '''
Gradle commands are not supported from within the extra-plugins dir.
Please run your command either at the root of the elasticsearch checkout
or within a specific extra-plugins project.
'''
throw new GradleException('Cannot run commands in extra-plugins dir')

@ -26,11 +26,6 @@ import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.plugin.discovery.ec2.Ec2DiscoveryPlugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ThirdParty;
import org.junit.After;
import org.junit.Before;

import java.util.HashMap;
import java.util.Map;

/**
 * Base class for AWS tests that require credentials.

@ -41,35 +36,6 @@ import java.util.Map;
@ThirdParty
public abstract class AbstractAwsTestCase extends ESIntegTestCase {

/**
 * Those properties are set by the AWS SDK v1.9.4 and if not ignored,
 * lead to tests failure (see AbstractRandomizedTest#IGNORED_INVARIANT_PROPERTIES)
 */
private static final String[] AWS_INVARIANT_PROPERTIES = {
"com.sun.org.apache.xml.internal.dtm.DTMManager",
"javax.xml.parsers.DocumentBuilderFactory"
};

private Map<String, String> properties = new HashMap<>();

@Before
public void saveProperties() {
for (String p : AWS_INVARIANT_PROPERTIES) {
properties.put(p, System.getProperty(p));
}
}

@After
public void restoreProperties() {
for (String p : AWS_INVARIANT_PROPERTIES) {
if (properties.get(p) != null) {
System.setProperty(p, properties.get(p));
} else {
System.clearProperty(p);
}
}
}

@Override
protected Settings nodeSettings(int nodeOrdinal) {
Settings.Builder settings = Settings.builder()

@ -48,9 +48,6 @@ public class PythonScriptEngineTests extends ESTestCase {

@After
public void close() {
// We need to clear some system properties
System.clearProperty("python.cachedir.skip");
System.clearProperty("python.console.encoding");
se.close();
}

@ -24,7 +24,6 @@ import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;

import java.util.HashMap;
import java.util.Map;

@ -39,12 +38,6 @@ import static org.hamcrest.Matchers.equalTo;
 *
 */
public class PythonScriptMultiThreadedTests extends ESTestCase {
@After
public void close() {
// We need to clear some system properties
System.clearProperty("python.cachedir.skip");
System.clearProperty("python.console.encoding");
}

public void testExecutableNoRuntimeParams() throws Exception {
final PythonScriptEngineService se = new PythonScriptEngineService(Settings.Builder.EMPTY_SETTINGS);

@ -45,9 +45,6 @@ public class PythonSecurityTests extends ESTestCase {

@Override
public void tearDown() throws Exception {
// We need to clear some system properties
System.clearProperty("python.cachedir.skip");
System.clearProperty("python.console.encoding");
se.close();
super.tearDown();
}

@ -20,6 +20,7 @@
package org.elasticsearch.mapper.attachments;

import org.apache.commons.cli.CommandLine;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.cli.CliTool;
import org.elasticsearch.common.cli.CliToolConfig;

@ -59,6 +60,7 @@ import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
 * StandaloneRunner -u /tmp/mydoc.pdf
 * StandaloneRunner -u /tmp/mydoc.pdf --size 1000000
 */
@SuppressForbidden(reason = "commandline tool")
public class StandaloneRunner extends CliTool {

private static final CliToolConfig CONFIG = CliToolConfig.config("tika", StandaloneRunner.class)

@ -68,8 +68,6 @@ public class VariousDocTests extends AttachmentUnitTestCase {
 */
public void testEncryptedPDFDocument() throws Exception {
assertException("encrypted.pdf", "is encrypted");
// TODO Remove when this will be fixed in Tika. See https://issues.apache.org/jira/browse/TIKA-1548
System.clearProperty("sun.font.fontmanager");
testMapper("encrypted.pdf", true);
}

@ -26,11 +26,6 @@ import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.plugin.repository.s3.S3RepositoryPlugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ThirdParty;
import org.junit.After;
import org.junit.Before;

import java.util.HashMap;
import java.util.Map;

/**
 * Base class for AWS tests that require credentials.

@ -41,35 +36,6 @@ import java.util.Map;
@ThirdParty
public abstract class AbstractAwsTestCase extends ESIntegTestCase {

/**
 * Those properties are set by the AWS SDK v1.9.4 and if not ignored,
 * lead to tests failure (see AbstractRandomizedTest#IGNORED_INVARIANT_PROPERTIES)
 */
private static final String[] AWS_INVARIANT_PROPERTIES = {
"com.sun.org.apache.xml.internal.dtm.DTMManager",
"javax.xml.parsers.DocumentBuilderFactory"
};

private Map<String, String> properties = new HashMap<>();

@Before
public void saveProperties() {
for (String p : AWS_INVARIANT_PROPERTIES) {
properties.put(p, System.getProperty(p));
}
}

@After
public void restoreProperties() {
for (String p : AWS_INVARIANT_PROPERTIES) {
if (properties.get(p) != null) {
System.setProperty(p, properties.get(p));
} else {
System.clearProperty(p);
}
}
}

@Override
protected Settings nodeSettings(int nodeOrdinal) {
Settings.Builder settings = Settings.builder()

@ -21,6 +21,7 @@ package org.elasticsearch.bootstrap;

import org.elasticsearch.Build;
import org.elasticsearch.Version;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.cli.CliTool.ExitStatus;
import org.elasticsearch.common.cli.CliToolTestCase;
import org.elasticsearch.common.collect.Tuple;

@ -36,6 +37,7 @@ import java.util.Locale;
import static org.elasticsearch.common.cli.CliTool.ExitStatus.*;
import static org.hamcrest.Matchers.*;

@SuppressForbidden(reason = "modifies system properties intentionally")
public class BootstrapCliParserTests extends CliToolTestCase {

private CaptureOutputTerminal terminal = new CaptureOutputTerminal();

@ -0,0 +1,100 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.bootstrap;

import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.jar.Attributes;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

/** Tests for Jarhell that change very important system properties... very evil! */
@SuppressForbidden(reason = "modifies system properties intentionally")
public class EvilJarHellTests extends ESTestCase {

URL makeJar(Path dir, String name, Manifest manifest, String... files) throws IOException {
Path jarpath = dir.resolve(name);
ZipOutputStream out;
if (manifest == null) {
out = new JarOutputStream(Files.newOutputStream(jarpath, StandardOpenOption.CREATE));
} else {
out = new JarOutputStream(Files.newOutputStream(jarpath, StandardOpenOption.CREATE), manifest);
}
for (String file : files) {
out.putNextEntry(new ZipEntry(file));
}
out.close();
return jarpath.toUri().toURL();
}

public void testBootclasspathLeniency() throws Exception {
Path dir = createTempDir();
String previousJavaHome = System.getProperty("java.home");
System.setProperty("java.home", dir.toString());
URL[] jars = {makeJar(dir, "foo.jar", null, "DuplicateClass.class"), makeJar(dir, "bar.jar", null, "DuplicateClass.class")};
try {
JarHell.checkJarHell(jars);
} finally {
System.setProperty("java.home", previousJavaHome);
}
}

public void testRequiredJDKVersionIsOK() throws Exception {
Path dir = createTempDir();
String previousJavaVersion = System.getProperty("java.specification.version");
System.setProperty("java.specification.version", "1.7");

Manifest manifest = new Manifest();
Attributes attributes = manifest.getMainAttributes();
attributes.put(Attributes.Name.MANIFEST_VERSION, "1.0.0");
attributes.put(new Attributes.Name("X-Compile-Target-JDK"), "1.7");
URL[] jars = {makeJar(dir, "foo.jar", manifest, "Foo.class")};
try {
JarHell.checkJarHell(jars);
} finally {
System.setProperty("java.specification.version", previousJavaVersion);
}
}

public void testBadJDKVersionProperty() throws Exception {
Path dir = createTempDir();
String previousJavaVersion = System.getProperty("java.specification.version");
System.setProperty("java.specification.version", "bogus");

Manifest manifest = new Manifest();
Attributes attributes = manifest.getMainAttributes();
attributes.put(Attributes.Name.MANIFEST_VERSION, "1.0.0");
attributes.put(new Attributes.Name("X-Compile-Target-JDK"), "1.7");
URL[] jars = {makeJar(dir, "foo.jar", manifest, "Foo.class")};
try {
JarHell.checkJarHell(jars);
} finally {
System.setProperty("java.specification.version", previousJavaVersion);
}
}
}

@ -0,0 +1,230 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.bootstrap;

import org.apache.lucene.util.Constants;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;

import java.io.FilePermission;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.PermissionCollection;
import java.security.Permissions;
import java.util.Set;

@SuppressForbidden(reason = "modifies system properties and attempts to create symbolic links intentionally")
public class EvilSecurityTests extends ESTestCase {

/** test generated permissions */
public void testGeneratedPermissions() throws Exception {
Path path = createTempDir();
// make a fake ES home and ensure we only grant permissions to that.
Path esHome = path.resolve("esHome");
Settings.Builder settingsBuilder = Settings.builder();
settingsBuilder.put("path.home", esHome.toString());
Settings settings = settingsBuilder.build();

Path fakeTmpDir = createTempDir();
String realTmpDir = System.getProperty("java.io.tmpdir");
Permissions permissions;
try {
System.setProperty("java.io.tmpdir", fakeTmpDir.toString());
Environment environment = new Environment(settings);
permissions = Security.createPermissions(environment);
} finally {
System.setProperty("java.io.tmpdir", realTmpDir);
}

// the fake es home
assertNoPermissions(esHome, permissions);
// its parent
assertNoPermissions(esHome.getParent(), permissions);
// some other sibling
assertNoPermissions(esHome.getParent().resolve("other"), permissions);
// double check we overwrote java.io.tmpdir correctly for the test
assertNoPermissions(PathUtils.get(realTmpDir), permissions);
}

/** test generated permissions for all configured paths */
public void testEnvironmentPaths() throws Exception {
Path path = createTempDir();
// make a fake ES home and ensure we only grant permissions to that.
Path esHome = path.resolve("esHome");

Settings.Builder settingsBuilder = Settings.builder();
settingsBuilder.put("path.home", esHome.resolve("home").toString());
settingsBuilder.put("path.conf", esHome.resolve("conf").toString());
settingsBuilder.put("path.scripts", esHome.resolve("scripts").toString());
settingsBuilder.put("path.plugins", esHome.resolve("plugins").toString());
settingsBuilder.putArray("path.data", esHome.resolve("data1").toString(), esHome.resolve("data2").toString());
settingsBuilder.put("path.shared_data", esHome.resolve("custom").toString());
settingsBuilder.put("path.logs", esHome.resolve("logs").toString());
settingsBuilder.put("pidfile", esHome.resolve("test.pid").toString());
Settings settings = settingsBuilder.build();

Path fakeTmpDir = createTempDir();
String realTmpDir = System.getProperty("java.io.tmpdir");
Permissions permissions;
Environment environment;
try {
System.setProperty("java.io.tmpdir", fakeTmpDir.toString());
environment = new Environment(settings);
permissions = Security.createPermissions(environment);
} finally {
System.setProperty("java.io.tmpdir", realTmpDir);
}

// the fake es home
assertNoPermissions(esHome, permissions);
// its parent
assertNoPermissions(esHome.getParent(), permissions);
// some other sibling
assertNoPermissions(esHome.getParent().resolve("other"), permissions);
// double check we overwrote java.io.tmpdir correctly for the test
assertNoPermissions(PathUtils.get(realTmpDir), permissions);

// check that all directories got permissions:

// bin file: ro
assertExactPermissions(new FilePermission(environment.binFile().toString(), "read,readlink"), permissions);
// lib file: ro
assertExactPermissions(new FilePermission(environment.libFile().toString(), "read,readlink"), permissions);
// config file: ro
assertExactPermissions(new FilePermission(environment.configFile().toString(), "read,readlink"), permissions);
// scripts file: ro
assertExactPermissions(new FilePermission(environment.scriptsFile().toString(), "read,readlink"), permissions);
// plugins: ro
assertExactPermissions(new FilePermission(environment.pluginsFile().toString(), "read,readlink"), permissions);

// data paths: r/w
for (Path dataPath : environment.dataFiles()) {
assertExactPermissions(new FilePermission(dataPath.toString(), "read,readlink,write,delete"), permissions);
}
for (Path dataPath : environment.dataWithClusterFiles()) {
assertExactPermissions(new FilePermission(dataPath.toString(), "read,readlink,write,delete"), permissions);
}
assertExactPermissions(new FilePermission(environment.sharedDataFile().toString(), "read,readlink,write,delete"), permissions);
// logs: r/w
assertExactPermissions(new FilePermission(environment.logsFile().toString(), "read,readlink,write,delete"), permissions);
// temp dir: r/w
assertExactPermissions(new FilePermission(fakeTmpDir.toString(), "read,readlink,write,delete"), permissions);
// PID file: delete only (for the shutdown hook)
assertExactPermissions(new FilePermission(environment.pidFile().toString(), "delete"), permissions);
}

public void testEnsureSymlink() throws IOException {
Path p = createTempDir();

Path exists = p.resolve("exists");
Files.createDirectory(exists);

// symlink
Path linkExists = p.resolve("linkExists");
try {
Files.createSymbolicLink(linkExists, exists);
} catch (UnsupportedOperationException | IOException e) {
assumeNoException("test requires filesystem that supports symbolic links", e);
} catch (SecurityException e) {
assumeNoException("test cannot create symbolic links with security manager enabled", e);
}
Security.ensureDirectoryExists(linkExists);
Files.createTempFile(linkExists, null, null);
}

public void testEnsureBrokenSymlink() throws IOException {
Path p = createTempDir();

// broken symlink
Path brokenLink = p.resolve("brokenLink");
try {
Files.createSymbolicLink(brokenLink, p.resolve("nonexistent"));
} catch (UnsupportedOperationException | IOException e) {
assumeNoException("test requires filesystem that supports symbolic links", e);
} catch (SecurityException e) {
assumeNoException("test cannot create symbolic links with security manager enabled", e);
}
try {
Security.ensureDirectoryExists(brokenLink);
fail("didn't get expected exception");
} catch (IOException expected) {}
}

/** When a configured dir is a symlink, test that permissions work on link target */
public void testSymlinkPermissions() throws IOException {
// see https://github.com/elastic/elasticsearch/issues/12170
assumeFalse("windows does not automatically grant permission to the target of symlinks", Constants.WINDOWS);
Path dir = createTempDir();

Path target = dir.resolve("target");
Files.createDirectory(target);

// symlink
Path link = dir.resolve("link");
try {
Files.createSymbolicLink(link, target);
} catch (UnsupportedOperationException | IOException e) {
assumeNoException("test requires filesystem that supports symbolic links", e);
} catch (SecurityException e) {
assumeNoException("test cannot create symbolic links with security manager enabled", e);
}
Permissions permissions = new Permissions();
Security.addPath(permissions, "testing", link, "read");
assertExactPermissions(new FilePermission(link.toString(), "read"), permissions);
assertExactPermissions(new FilePermission(link.resolve("foo").toString(), "read"), permissions);
assertExactPermissions(new FilePermission(target.toString(), "read"), permissions);
assertExactPermissions(new FilePermission(target.resolve("foo").toString(), "read"), permissions);
}

/**
 * checks exact file permissions, meaning those and only those for that path.
 */
static void assertExactPermissions(FilePermission expected, PermissionCollection actual) {
String target = expected.getName(); // see javadocs
Set<String> permissionSet = asSet(expected.getActions().split(","));
boolean read = permissionSet.remove("read");
boolean readlink = permissionSet.remove("readlink");
boolean write = permissionSet.remove("write");
boolean delete = permissionSet.remove("delete");
boolean execute = permissionSet.remove("execute");
assertTrue("unrecognized permission: " + permissionSet, permissionSet.isEmpty());
assertEquals(read, actual.implies(new FilePermission(target, "read")));
assertEquals(readlink, actual.implies(new FilePermission(target, "readlink")));
assertEquals(write, actual.implies(new FilePermission(target, "write")));
assertEquals(delete, actual.implies(new FilePermission(target, "delete")));
assertEquals(execute, actual.implies(new FilePermission(target, "execute")));
}

/**
 * checks that this path has no permissions
 */
static void assertNoPermissions(Path path, PermissionCollection actual) {
String target = path.toString();
assertFalse(actual.implies(new FilePermission(target, "read")));
assertFalse(actual.implies(new FilePermission(target, "readlink")));
assertFalse(actual.implies(new FilePermission(target, "write")));
assertFalse(actual.implies(new FilePermission(target, "delete")));
assertFalse(actual.implies(new FilePermission(target, "execute")));
}
}

@ -22,6 +22,7 @@ package org.elasticsearch.common.cli;
import org.apache.commons.cli.CommandLine;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer;

@ -45,6 +46,7 @@ import static org.hamcrest.Matchers.is;
/**
 *
 */
@SuppressForbidden(reason = "modifies system properties intentionally")
public class CliToolTests extends CliToolTestCase {
public void testOK() throws Exception {
Terminal terminal = new MockTerminal();

@ -0,0 +1,145 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.node.internal;

import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.Before;

import java.util.HashMap;
import java.util.Map;

import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.hamcrest.Matchers.*;

@SuppressForbidden(reason = "modifies system properties intentionally")
public class EvilInternalSettingsPreparerTests extends ESTestCase {

Map<String, String> savedProperties = new HashMap<>();
Settings baseEnvSettings;

@Before
public void saveSettingsSystemProperties() {
// clear out any properties the settings preparer may look for
savedProperties.clear();
for (Object propObj : System.getProperties().keySet()) {
String property = (String)propObj;
// NOTE: these prefixes are prefixes of the defaults, so both are handled here
for (String prefix : InternalSettingsPreparer.PROPERTY_PREFIXES) {
if (property.startsWith(prefix)) {
savedProperties.put(property, System.getProperty(property));
}
}
}
String name = System.getProperty("name");
if (name != null) {
savedProperties.put("name", name);
}
for (String property : savedProperties.keySet()) {
System.clearProperty(property);
}
}

@After
public void restoreSettingsSystemProperties() {
for (Map.Entry<String, String> property : savedProperties.entrySet()) {
System.setProperty(property.getKey(), property.getValue());
}
}

@Before
public void createBaseEnvSettings() {
baseEnvSettings = settingsBuilder()
.put("path.home", createTempDir())
.build();
}

@After
public void clearBaseEnvSettings() {
baseEnvSettings = null;
}

public void testIgnoreSystemProperties() {
try {
System.setProperty("es.node.zone", "foo");
Settings settings = settingsBuilder()
.put("node.zone", "bar")
.put(baseEnvSettings)
.build();
Environment env = InternalSettingsPreparer.prepareEnvironment(settings, null);
// Should use setting from the system property
assertThat(env.settings().get("node.zone"), equalTo("foo"));

settings = settingsBuilder()
.put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING, true)
.put("node.zone", "bar")
.put(baseEnvSettings)
.build();
env = InternalSettingsPreparer.prepareEnvironment(settings, null);
// Should use setting from the settings object, since system properties are ignored
assertThat(env.settings().get("node.zone"), equalTo("bar"));
} finally {
System.clearProperty("es.node.zone");
}
}

public void testNameSettingsPreference() {
try {
System.setProperty("name", "sys-prop-name");
// Test system property overrides node.name
Settings settings = settingsBuilder()
.put("node.name", "node-name")
.put(baseEnvSettings)
.build();
Environment env = InternalSettingsPreparer.prepareEnvironment(settings, null);
assertThat(env.settings().get("name"), equalTo("sys-prop-name"));

// test name in settings overrides sys prop and node.name
settings = settingsBuilder()
.put("name", "name-in-settings")
.put("node.name", "node-name")
.put(baseEnvSettings)
.build();
env = InternalSettingsPreparer.prepareEnvironment(settings, null);
assertThat(env.settings().get("name"), equalTo("name-in-settings"));

// test only node.name in settings
System.clearProperty("name");
settings = settingsBuilder()
.put("node.name", "node-name")
.put(baseEnvSettings)
.build();
env = InternalSettingsPreparer.prepareEnvironment(settings, null);
assertThat(env.settings().get("name"), equalTo("node-name"));

// test no name at all results in name being set
env = InternalSettingsPreparer.prepareEnvironment(baseEnvSettings, null);
assertThat(env.settings().get("name"), not("name-in-settings"));
assertThat(env.settings().get("name"), not("sys-prop-name"));
assertThat(env.settings().get("name"), not("node-name"));
assertThat(env.settings().get("name"), notNullValue());
} finally {
System.clearProperty("name");
}
}
}

@ -22,6 +22,7 @@ import org.apache.http.impl.client.HttpClients;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.Version;
import org.elasticsearch.common.Base64;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.cli.CliTool;
import org.elasticsearch.common.cli.CliTool.ExitStatus;
import org.elasticsearch.common.cli.CliToolTestCase.CaptureOutputTerminal;

@ -82,6 +83,7 @@ import static org.jboss.netty.handler.codec.http.HttpVersion.HTTP_1_1;
@LuceneTestCase.SuppressFileSystems("*") // TODO: clean up this test to allow extra files
// TODO: jimfs is really broken here (throws wrong exception from detection method).
// if its in your classpath, then do not use plugins!!!!!!
@SuppressForbidden(reason = "modifies system properties intentionally")
public class PluginManagerTests extends ESIntegTestCase {

private Environment environment;

@ -21,6 +21,7 @@ package org.elasticsearch.plugins;

import org.elasticsearch.Build;
import org.elasticsearch.Version;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.http.client.HttpDownloadHelper;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;

@ -42,6 +43,7 @@ import static org.hamcrest.Matchers.is;
/**
 *
 */
@SuppressForbidden(reason = "modifies system properties intentionally")
public class PluginManagerUnitTests extends ESTestCase {
@After
public void cleanSystemProperty() {

@ -22,6 +22,7 @@ package org.elasticsearch.tribe;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.NodeBuilder;

@ -42,6 +43,7 @@ import static org.hamcrest.CoreMatchers.equalTo;
 * all the time, while we need to make the tribe node accept them in this case, so that we can verify that they are not read again as part
 * of the tribe client nodes initialization. Note that the started nodes will obey to the 'node.mode' settings as the internal cluster does.
 */
@SuppressForbidden(reason = "modifies system properties intentionally")
public class TribeUnitTests extends ESTestCase {

private static Node tribe1;

@ -0,0 +1,8 @@

apply plugin: 'elasticsearch.rest-test'

integTest {
cluster {
numNodes = 2
}
}

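Assuming the standard task wiring of the `elasticsearch.rest-test` plugin applied above, the two-node integration test would presumably be run with:

---------------------------------------------------------------------------
gradle integTest
---------------------------------------------------------------------------
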
@ -1,44 +0,0 @@
<?xml version="1.0"?>
<project name="smoke-test-multinode"
xmlns:ac="antlib:net.sf.antcontrib">

<import file="${elasticsearch.integ.antfile.default}"/>
<property name="integ.pidfile.sec" location="${integ.scratch}/es-secondary.pid"/>
<available property="integ.pidfile.sec.exists" file="${integ.pidfile.sec}"/>

<target name="stop-secondary-node" if="integ.pidfile.sec.exists">
<stop-node es.pidfile="${integ.pidfile.sec}"/>
</target>
<target name="stop-primary-node" if="integ.pidfile.exists">
<stop-node es.pidfile="${integ.pidfile}"/>
</target>

<target name="stop-external-multi-node-cluster" depends="stop-primary-node, stop-secondary-node"/>

<target name="start-external-multi-node-no-plugins" depends="stop-secondary-node, setup-workspace" unless="${shouldskip}">
<start-unicast-node es.peer.list="127.0.0.1:9700"/>
<ac:trycatch property="failure.message">
<ac:try>
<start-unicast-node es.http.port="9600" es.transport.port="9700"
es.pidfile="${integ.pidfile.sec}"
es.peer.list="127.0.0.1:${integ.transport.port}"/>
</ac:try>
<ac:catch>
<echo>Failed to start second node with message: ${failure.message}</echo>
<stop-node es.pidfile="${integ.pidfile}"/>
</ac:catch>
</ac:trycatch>
<ac:trycatch>
<ac:try>
<local name="failed.to.form.cluster"/>
<waitfor-two-nodes port="${integ.http.port}"
timeoutproperty="failed.to.form.cluster"/>
<fail message="Instances did not form a cluster" if="failed.to.form.cluster"/>
</ac:try>
<ac:catch>
<stop-node es.pidfile="${integ.pidfile}"/>
<stop-node es.pidfile="${integ.pidfile.sec}"/>
</ac:catch>
</ac:trycatch>
</target>
</project>

@ -1,284 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>

<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <parent>
        <groupId>org.elasticsearch.qa</groupId>
        <artifactId>elasticsearch-qa</artifactId>
        <version>3.0.0-SNAPSHOT</version>
    </parent>

    <!--
      This test unzips elasticsearch, installs each plugin,
      starts 2 elasticsearch nodes, verifies that they form a cluster.
    -->

    <artifactId>smoke-test-multinode</artifactId>
    <name>QA: Smoke Test Multi-Node IT</name>
    <description>Tests that multi node IT tests work</description>

    <properties>
        <skip.unit.tests>true</skip.unit.tests>
        <elasticsearch.integ.antfile>${project.basedir}/integration-tests.xml</elasticsearch.integ.antfile>
        <tests.rest.suite>smoke_test_multinode</tests.rest.suite>
        <tests.rest.load_packaged>false</tests.rest.load_packaged>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.elasticsearch</groupId>
            <artifactId>elasticsearch</artifactId>
            <type>test-jar</type>
            <scope>test</scope>
        </dependency>

        <!-- Provided dependencies by elasticsearch itself -->
        <dependency>
            <groupId>org.elasticsearch</groupId>
            <artifactId>elasticsearch</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.lucene</groupId>
            <artifactId>lucene-core</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.lucene</groupId>
            <artifactId>lucene-backward-codecs</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.lucene</groupId>
            <artifactId>lucene-analyzers-common</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.lucene</groupId>
            <artifactId>lucene-queries</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.lucene</groupId>
            <artifactId>lucene-memory</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.lucene</groupId>
            <artifactId>lucene-highlighter</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.lucene</groupId>
            <artifactId>lucene-queryparser</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.lucene</groupId>
            <artifactId>lucene-suggest</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.lucene</groupId>
            <artifactId>lucene-join</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.lucene</groupId>
            <artifactId>lucene-spatial</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>com.spatial4j</groupId>
            <artifactId>spatial4j</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>com.vividsolutions</groupId>
            <artifactId>jts</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>com.github.spullara.mustache.java</groupId>
            <artifactId>compiler</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>com.carrotsearch</groupId>
            <artifactId>hppc</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>joda-time</groupId>
            <artifactId>joda-time</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.joda</groupId>
            <artifactId>joda-convert</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-core</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.dataformat</groupId>
            <artifactId>jackson-dataformat-smile</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.dataformat</groupId>
            <artifactId>jackson-dataformat-yaml</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.dataformat</groupId>
            <artifactId>jackson-dataformat-cbor</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>io.netty</groupId>
            <artifactId>netty</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>com.ning</groupId>
            <artifactId>compress-lzf</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>com.tdunning</groupId>
            <artifactId>t-digest</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>commons-cli</groupId>
            <artifactId>commons-cli</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>log4j</groupId>
            <artifactId>apache-log4j-extras</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>net.java.dev.jna</groupId>
            <artifactId>jna</artifactId>
            <scope>provided</scope>
        </dependency>

        <!-- Required by the REST test framework -->
        <!-- TODO: remove this dependency when we will have a REST Test module -->
        <dependency>
            <groupId>org.apache.httpcomponents</groupId>
            <artifactId>httpclient</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-dependency-plugin</artifactId>
                <executions>
                    <execution>
                        <id>integ-setup-dependencies</id>
                        <phase>pre-integration-test</phase>
                        <goals>
                            <goal>copy</goal>
                        </goals>
                        <configuration>
                            <skip>${skip.integ.tests}</skip>
                            <useBaseVersion>true</useBaseVersion>
                            <outputDirectory>${integ.deps}/plugins</outputDirectory>

                            <artifactItems>
                                <!-- elasticsearch distribution -->
                                <artifactItem>
                                    <groupId>org.elasticsearch.distribution.zip</groupId>
                                    <artifactId>elasticsearch</artifactId>
                                    <version>${elasticsearch.version}</version>
                                    <type>zip</type>
                                    <overWrite>true</overWrite>
                                    <outputDirectory>${integ.deps}</outputDirectory>
                                </artifactItem>
                            </artifactItems>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
            <!-- integration tests -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-antrun-plugin</artifactId>
                <executions>
                    <!-- start up external cluster -->
                    <execution>
                        <id>integ-setup</id>
                        <phase>pre-integration-test</phase>
                        <goals>
                            <goal>run</goal>
                        </goals>
                        <configuration>
                            <target>
                                <ant antfile="${elasticsearch.integ.antfile}" target="start-external-multi-node-no-plugins">
                                    <property name="tests.jvm.argline" value="${tests.jvm.argline}"/>
                                    <property name="integ.multi.node" value="true"/>
                                </ant>
                            </target>
                            <skip>${skip.integ.tests}</skip>
                        </configuration>
                    </execution>
                    <!-- shut down external cluster -->
                    <execution>
                        <id>integ-teardown</id>
                        <phase>post-integration-test</phase>
                        <goals>
                            <goal>run</goal>
                        </goals>
                        <configuration>
                            <target>
                                <ant antfile="${elasticsearch.integ.antfile}" target="stop-external-multi-node-cluster"/>
                            </target>
                            <skip>${skip.integ.tests}</skip>
                        </configuration>
                    </execution>
                </executions>
                <dependencies>
                    <dependency>
                        <groupId>ant-contrib</groupId>
                        <artifactId>ant-contrib</artifactId>
                        <version>1.0b3</version>
                        <exclusions>
                            <exclusion>
                                <groupId>ant</groupId>
                                <artifactId>ant</artifactId>
                            </exclusion>
                        </exclusions>
                    </dependency>
                    <dependency>
                        <groupId>org.apache.ant</groupId>
                        <artifactId>ant-nodeps</artifactId>
                        <version>1.8.1</version>
                    </dependency>
                </dependencies>
            </plugin>
        </plugins>
    </build>

</project>
@ -42,14 +42,14 @@ repositories {
}

dependencies {
-  test project(path: "${projectsPrefix}:distribution:tar", configuration: 'archives')
-  test project(path: "${projectsPrefix}:distribution:rpm", configuration: 'archives')
-  test project(path: "${projectsPrefix}:distribution:deb", configuration: 'archives')
+  test project(path: ':distribution:tar', configuration: 'archives')
+  test project(path: ':distribution:rpm', configuration: 'archives')
+  test project(path: ':distribution:deb', configuration: 'archives')

  // Collect all the plugins
  for (Project subproj : project.rootProject.subprojects) {
    if (subproj.path.startsWith(':plugins:')) {
-      test project("${projectsPrefix}${subproj.path}")
+      test project("${subproj.path}")
    }
  }
}
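With the `projectsPrefix` indirection gone, sub-projects are addressed by absolute path. Spelled out for a single plugin, the collection loop above now contributes plain project dependencies; a minimal sketch, using `:plugins:store-smb` from the project list below purely as an example:

---------------------------------------------------------------------------
// One unrolled iteration of the plugin loop above (illustrative only).
dependencies {
  test project(':plugins:store-smb')
}
---------------------------------------------------------------------------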
@ -22,7 +22,9 @@
  - match: { indices._all.fields.number.max_doc: 1 }
  - match: { indices._all.fields.number.doc_count: 1 }
  - match: { indices._all.fields.number.min_value: 123 }
  - match: { indices._all.fields.number.min_value_as_string: "123" }
  - match: { indices._all.fields.number.max_value: 123 }
  - match: { indices._all.fields.number.max_value_as_string: "123" }

---
"Basic field stats with level set to indices":

@ -49,7 +51,9 @@
  - match: { indices.test_1.fields.number.max_doc: 1 }
  - match: { indices.test_1.fields.number.doc_count: 1 }
  - match: { indices.test_1.fields.number.min_value: 123 }
  - match: { indices.test_1.fields.number.min_value_as_string: "123" }
  - match: { indices.test_1.fields.number.max_value: 123 }
  - match: { indices.test_1.fields.number.max_value_as_string: "123" }

---
"Field stats with filtering":
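The added `*_as_string` assertions cover the string rendering of each field's minimum and maximum, which the field stats response now reports alongside the raw values; for a plain numeric field the two forms coincide, hence `123` and `"123"`.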
@ -32,6 +32,7 @@ List projects = [
  'plugins:store-smb',
  'qa:evil-tests',
  'qa:smoke-test-client',
  'qa:smoke-test-multinode',
  'qa:smoke-test-plugins',
  'qa:vagrant',
]
@ -52,3 +53,27 @@ if (isEclipse) {
  project(":core-tests").buildFileName = 'eclipse-build.gradle'
}

/**
 * Iterates over sub directories, looking for build.gradle, and adds a project if found
 * for that dir with the given path prefix. Note that this requires each level
 * of the dir hierarchy to have a build.gradle. Otherwise we would have to iterate
 * all files/directories in the source tree to find all projects.
 */
void addSubProjects(String path, File dir) {
  if (dir.isDirectory() == false) return;
  if (new File(dir, 'build.gradle').exists() == false) return;

  String projectName = "${path}:${dir.name}"
  include projectName
  project(projectName).projectDir = dir

  for (File subdir : dir.listFiles()) {
    addSubProjects(projectName, subdir)
  }
}

File extraPlugins = new File(rootProject.projectDir, 'extra-plugins')
// guard against a missing extra-plugins directory: listFiles() returns null then
if (extraPlugins.exists()) {
  for (File extraPluginDir : extraPlugins.listFiles()) {
    addSubProjects('', extraPluginDir)
  }
}
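To make the recursion concrete, here is a hypothetical checkout (the plugin name is illustrative, not part of this change):

---------------------------------------------------------------------------
// Hypothetical: extra-plugins/my-plugin/build.gradle and
// extra-plugins/my-plugin/qa/build.gradle both exist.
addSubProjects('', new File(rootProject.projectDir, 'extra-plugins/my-plugin'))
// Includes the projects ':my-plugin' and ':my-plugin:qa'; a directory
// without a build.gradle stops the recursion at that level.
---------------------------------------------------------------------------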
@ -84,6 +84,9 @@ public class BootstrapForTesting {
        // initialize probes
        Bootstrap.initializeProbes();

        // initialize sysprops
        BootstrapInfo.getSystemProperties();

        // check for jar hell
        try {
            JarHell.checkJarHell();
@ -21,6 +21,7 @@ package org.elasticsearch.common.cli;

import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.StreamsUtils;
import org.junit.After;

@ -40,11 +41,13 @@ import static org.hamcrest.Matchers.hasSize;
public abstract class CliToolTestCase extends ESTestCase {

    @Before
    @SuppressForbidden(reason = "sets es.default.path.home during tests")
    public void setPathHome() {
        System.setProperty("es.default.path.home", createTempDir().toString());
    }

    @After
    @SuppressForbidden(reason = "clears es.default.path.home during tests")
    public void clearPathHome() {
        System.clearProperty("es.default.path.home");
    }
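Setting `es.default.path.home` to a fresh temp directory gives every CLI test a throwaway home to resolve default paths against, and the paired `@After` method keeps the property from leaking into unrelated tests; the `@SuppressForbidden` annotations are needed because mutating system properties is otherwise flagged by the forbidden-APIs checks.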
@ -42,6 +42,7 @@ import org.elasticsearch.bootstrap.BootstrapForTesting;
import org.elasticsearch.cache.recycler.MockPageCacheRecycler;
import org.elasticsearch.client.Requests;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.io.PathUtilsForTesting;
import org.elasticsearch.common.logging.ESLogger;

@ -163,6 +164,7 @@ public abstract class ESTestCase extends LuceneTestCase {
    // randomize and override the number of cpus so tests reproduce regardless of real number of cpus

    @BeforeClass
    @SuppressForbidden(reason = "sets the number of cpus during tests")
    public static void setProcessors() {
        int numCpu = TestUtil.nextInt(random(), 1, 4);
        System.setProperty(EsExecutors.DEFAULT_SYSPROP, Integer.toString(numCpu));

@ -170,6 +172,7 @@ public abstract class ESTestCase extends LuceneTestCase {
    }

    @AfterClass
    @SuppressForbidden(reason = "clears the number of cpus during tests")
    public static void restoreProcessors() {
        System.clearProperty(EsExecutors.DEFAULT_SYSPROP);
    }
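Randomizing the processor count, and routing it through the `EsExecutors` override property, makes thread-pool sizing a function of the test seed rather than of the machine the tests happen to run on, so failures reproduce across hosts with different CPU counts.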