commit 63a7b155d8
Merge branch 'master' into fork_compile
@@ -125,9 +125,6 @@ if (projectsPrefix.isEmpty()) {
      vcs = 'Git'
    }
  }
  tasks.cleanIdea {
    delete '.idea'
  }
}

// eclipse configuration

@@ -166,11 +163,6 @@ task buildSrcEclipse(type: GradleBuild) {
}
tasks.eclipse.dependsOn(buildSrcEclipse)

task clean(type: GradleBuild) {
  buildFile = 'buildSrc/build.gradle'
  tasks = ['clean']
}

// we need to add the same --debug-jvm option as
// the real RunTask has, so we can pass it through
class Run extends DefaultTask {

@@ -188,5 +180,6 @@ task run(type: Run) {
  dependsOn ':distribution:run'
  description = 'Runs elasticsearch in the foreground'
  group = 'Verification'
  impliesSubProjects = true
}
@@ -3,6 +3,9 @@ package com.carrotsearch.gradle.junit4
import com.carrotsearch.ant.tasks.junit4.ListenersList
import com.carrotsearch.ant.tasks.junit4.listeners.AggregatedEventListener
import groovy.xml.NamespaceBuilder
import groovy.xml.NamespaceBuilderSupport
import org.apache.tools.ant.BuildException
import org.apache.tools.ant.DefaultLogger
import org.apache.tools.ant.RuntimeConfigurable
import org.apache.tools.ant.UnknownElement
import org.gradle.api.DefaultTask

@@ -180,39 +183,78 @@ class RandomizedTestingTask extends DefaultTask {
        heartbeat: testLoggingConfig.slowTests.heartbeat,
        dir: workingDir,
        tempdir: new File(workingDir, 'temp'),
        haltOnFailure: haltOnFailure,
        haltOnFailure: true, // we want to capture when a build failed, but will decide whether to rethrow later
        shuffleOnSlave: shuffleOnSlave
    ]

    def junit4 = NamespaceBuilder.newInstance(ant, 'junit4')
    junit4.junit4(attributes) {
        classpath {
            pathElement(path: classpath.asPath)
    DefaultLogger listener = null
    ByteArrayOutputStream antLoggingBuffer = null
    if (logger.isInfoEnabled() == false) {
        // in info logging, ant already outputs info level, so we see everything
        // but on errors or when debugging, we want to see info level messages
        // because junit4 emits jvm output with ant logging
        if (testLoggingConfig.outputMode == TestLoggingConfiguration.OutputMode.ALWAYS) {
            // we want all output, so just stream directly
            listener = new DefaultLogger(
                errorPrintStream: System.err,
                outputPrintStream: System.out,
                messageOutputLevel: org.apache.tools.ant.Project.MSG_INFO)
        } else {
            // we want to buffer the info, and emit it if the test fails
            antLoggingBuffer = new ByteArrayOutputStream()
            PrintStream stream = new PrintStream(antLoggingBuffer, true, "UTF-8")
            listener = new DefaultLogger(
                errorPrintStream: stream,
                outputPrintStream: stream,
                messageOutputLevel: org.apache.tools.ant.Project.MSG_INFO)
        }
        if (enableAssertions) {
            jvmarg(value: '-ea')
        }
        if (enableSystemAssertions) {
            jvmarg(value: '-esa')
        }
        for (String arg : jvmArgs) {
            jvmarg(value: arg)
        }
        if (argLine != null) {
            jvmarg(line: argLine)
        }
        fileset(dir: testClassesDir) {
            for (String includePattern : patternSet.getIncludes()) {
                include(name: includePattern)
        project.ant.project.addBuildListener(listener)
    }

    NamespaceBuilderSupport junit4 = NamespaceBuilder.newInstance(ant, 'junit4')
    try {
        junit4.junit4(attributes) {
            classpath {
                pathElement(path: classpath.asPath)
            }
            for (String excludePattern : patternSet.getExcludes()) {
                exclude(name: excludePattern)
            if (enableAssertions) {
                jvmarg(value: '-ea')
            }
            if (enableSystemAssertions) {
                jvmarg(value: '-esa')
            }
            for (String arg : jvmArgs) {
                jvmarg(value: arg)
            }
            if (argLine != null) {
                jvmarg(line: argLine)
            }
            fileset(dir: testClassesDir) {
                for (String includePattern : patternSet.getIncludes()) {
                    include(name: includePattern)
                }
                for (String excludePattern : patternSet.getExcludes()) {
                    exclude(name: excludePattern)
                }
            }
            for (Map.Entry<String, String> prop : systemProperties) {
                sysproperty key: prop.getKey(), value: prop.getValue()
            }
            makeListeners()
        }
        for (Map.Entry<String, String> prop : systemProperties) {
            sysproperty key: prop.getKey(), value: prop.getValue()
    } catch (BuildException e) {
        if (antLoggingBuffer != null) {
            logger.error('JUnit4 test failed, ant output was:')
            logger.error(antLoggingBuffer.toString('UTF-8'))
        }
        makeListeners()
        if (haltOnFailure) {
            throw e;
        }
    }

    if (listener != null) {
        // remove the listener we added so other ant tasks dont have verbose logging!
        project.ant.project.removeBuildListener(listener)
    }
}
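The RandomizedTestingTask change above forces haltOnFailure on the ant call and wraps it in a try/catch so ant's INFO-level output can be buffered and replayed only when the junit4 call fails. A minimal, self-contained Java sketch of that buffer-and-replay pattern follows; the class name and runTests() are hypothetical stand-ins for the junit4 ant invocation (with RuntimeException standing in for ant's BuildException), but the ByteArrayOutputStream/PrintStream usage mirrors the diff:

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;

public class BufferedListenerSketch {
    public static void main(String[] args) throws UnsupportedEncodingException {
        // Buffer output instead of streaming it; autoflush, UTF-8, as in the diff.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        PrintStream stream = new PrintStream(buffer, true, "UTF-8");

        try {
            stream.println("[junit4] forked JVM output would be captured here");
            runTests(); // stand-in for junit4.junit4(attributes) { ... }
        } catch (RuntimeException e) {
            // Only on failure do we replay what was buffered.
            System.err.println("JUnit4 test failed, ant output was:");
            System.err.println(buffer.toString("UTF-8"));
            throw e; // rethrown only when haltOnFailure is set in the real task
        }
    }

    private static void runTests() {
        throw new RuntimeException("simulated BuildException");
    }
}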
@@ -79,7 +79,7 @@ class TestReportLogger extends TestsSummaryEventListener implements AggregatedEv
        forkedJvmCount = e.getSlaveCount();
        jvmIdFormat = " J%-" + (1 + (int) Math.floor(Math.log10(forkedJvmCount))) + "d";

        outStream = new LoggingOutputStream(logger: logger, level: LogLevel.ERROR, prefix: " 1> ")
        outStream = new LoggingOutputStream(logger: logger, level: LogLevel.LIFECYCLE, prefix: " 1> ")
        errStream = new LoggingOutputStream(logger: logger, level: LogLevel.ERROR, prefix: " 2> ")

        for (String contains : config.stackTraceFilters.contains) {

@@ -152,13 +152,13 @@ class TestReportLogger extends TestsSummaryEventListener implements AggregatedEv
    void onSuiteStart(AggregatedSuiteStartedEvent e) throws IOException {
        if (isPassthrough()) {
            SuiteStartedEvent evt = e.getSuiteStartedEvent();
            emitSuiteStart(LogLevel.INFO, evt.getDescription());
            emitSuiteStart(LogLevel.LIFECYCLE, evt.getDescription());
        }
    }

    @Subscribe
    void onOutput(PartialOutputEvent e) throws IOException {
        if (isPassthrough() && logger.isInfoEnabled()) {
        if (isPassthrough()) {
            // We only allow passthrough output if there is one JVM.
            switch (e.getEvent().getType()) {
                case EventType.APPEND_STDERR:

@@ -187,7 +187,6 @@ class TestReportLogger extends TestsSummaryEventListener implements AggregatedEv

    @Subscribe
    void onSuiteResult(AggregatedSuiteResultEvent e) throws IOException {
        try {
            final int completed = suitesCompleted.incrementAndGet();

            if (e.isSuccessful() && e.getTests().isEmpty()) {

@@ -197,7 +196,8 @@ class TestReportLogger extends TestsSummaryEventListener implements AggregatedEv
                suiteTimes.put(e.getDescription().getDisplayName(), e.getExecutionTime())
            }

            LogLevel level = e.isSuccessful() ? LogLevel.INFO : LogLevel.ERROR
            LogLevel level = e.isSuccessful() && config.outputMode != OutputMode.ALWAYS ? LogLevel.INFO : LogLevel.LIFECYCLE

            // We must emit buffered test and stream events (in case of failures).
            if (!isPassthrough()) {
                emitSuiteStart(level, e.getDescription())

@@ -214,9 +214,6 @@ class TestReportLogger extends TestsSummaryEventListener implements AggregatedEv
            }

            emitSuiteEnd(level, e, completed)
        } catch (Exception exc) {
            logger.lifecycle('EXCEPTION: ', exc)
        }
    }

    /** Suite prologue. */

@@ -348,9 +345,9 @@ class TestReportLogger extends TestsSummaryEventListener implements AggregatedEv
        errStream.flush()
    }

    /** Returns true if output should be logged immediately. Only relevant when running with INFO log level. */
    /** Returns true if output should be logged immediately. */
    boolean isPassthrough() {
        return forkedJvmCount == 1 && config.outputMode == OutputMode.ALWAYS && logger.isInfoEnabled()
        return forkedJvmCount == 1 && config.outputMode == OutputMode.ALWAYS
    }

    @Override
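The jvmIdFormat assignment above sizes the "J<id>" column to the digit count of the largest forked JVM id, so per-JVM prefixes line up. A small self-contained Java sketch of how that format string behaves (the class name is hypothetical; the width arithmetic is taken verbatim from the diff):

public class JvmIdFormatSketch {
    public static void main(String[] args) {
        for (int forkedJvmCount : new int[] {1, 8, 12, 100}) {
            // Width = number of digits in the largest JVM count, left-justified.
            String jvmIdFormat = " J%-" + (1 + (int) Math.floor(Math.log10(forkedJvmCount))) + "d";
            System.out.println(forkedJvmCount + " JVMs -> '" + String.format(jvmIdFormat, 3) + "'");
        }
    }
}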
@@ -373,7 +373,12 @@ class BuildPlugin implements Plugin<Project> {
                regex(/^(\s+at )(org\.apache\.lucene\.util\.TestRule)/)
                regex(/^(\s+at )(org\.apache\.lucene\.util\.AbstractBeforeAfterRule)/)
            }
            outputMode System.getProperty('tests.output', 'onerror')
            if (System.getProperty('tests.class') != null && System.getProperty('tests.output') == null) {
                // if you are debugging, you want to see the output!
                outputMode 'always'
            } else {
                outputMode System.getProperty('tests.output', 'onerror')
            }
        }

        balancers {
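The BuildPlugin hunk above changes the output-mode default: when a single test class is selected (tests.class) and no explicit tests.output is given, output is shown unconditionally. The same selection logic as a standalone Java sketch (property names come from the diff; the class and method are hypothetical):

public class OutputModeSketch {
    static String chooseOutputMode() {
        // Debugging a single class with no explicit mode: show everything.
        if (System.getProperty("tests.class") != null && System.getProperty("tests.output") == null) {
            return "always";
        }
        // Otherwise honor tests.output, falling back to 'onerror'.
        return System.getProperty("tests.output", "onerror");
    }

    public static void main(String[] args) {
        System.out.println(chooseOutputMode());
    }
}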
@@ -86,6 +86,7 @@ class ClusterFormationTasks {
        // tasks are chained so their execution order is maintained
        Task setup = project.tasks.create(name: "${task.name}#clean", type: Delete, dependsOn: task.dependsOn.collect()) {
            delete home
            delete cwd
            doLast {
                cwd.mkdirs()
            }

@@ -212,7 +213,7 @@ class ClusterFormationTasks {
    static Task configureStartTask(String name, Project project, Task setup, File cwd, ClusterConfiguration config, String clusterName, File pidFile, File home) {
        Map esEnv = [
            'JAVA_HOME' : project.javaHome,
            'JAVA_OPTS': config.jvmArgs
            'ES_GC_OPTS': config.jvmArgs // we pass these with the undocumented gc opts so the argline can set gc, etc
        ]
        List<String> esProps = config.systemProperties.collect { key, value -> "-D${key}=${value}" }
        for (Map.Entry<String, String> property : System.properties.entrySet()) {

@@ -222,6 +223,8 @@ class ClusterFormationTasks {
        }

        String executable
        // running with cmd on windows will look for this with the .bat extension
        String esScript = new File(home, 'bin/elasticsearch').toString()
        List<String> esArgs = []
        if (Os.isFamily(Os.FAMILY_WINDOWS)) {
            executable = 'cmd'

@@ -230,8 +233,8 @@ class ClusterFormationTasks {
        } else {
            executable = 'sh'
        }
        // running with cmd on windows will look for this with the .bat extension
        esArgs.add(new File(home, 'bin/elasticsearch').toString())

        File failedMarker = new File(cwd, 'run.failed')

        // this closure is converted into ant nodes by groovy's AntBuilder
        Closure antRunner = {

@@ -239,19 +242,45 @@ class ClusterFormationTasks {
            // gradle task options are not processed until the end of the configuration phase
            if (config.debug) {
                println 'Running elasticsearch in debug mode, suspending until connected on port 8000'
                esEnv['JAVA_OPTS'] += ' -agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000'
                esEnv['JAVA_OPTS'] = '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000'
            }

            // Due to how ant exec works with the spawn option, we lose all stdout/stderr from the
            // process executed. To work around this, when spawning, we wrap the elasticsearch start
            // command inside another shell script, which simply internally redirects the output
            // of the real elasticsearch script. This allows ant to keep the streams open with the
            // dummy process, but us to have the output available if there is an error in the
            // elasticsearch start script
            if (config.daemonize) {
                String scriptName = 'run'
                String argsPasser = '"$@"'
                String exitMarker = '; if [ $? != 0 ]; then touch run.failed; fi'
                if (Os.isFamily(Os.FAMILY_WINDOWS)) {
                    scriptName += '.bat'
                    argsPasser = '%*'
                    exitMarker = '\r\n if "%errorlevel%" neq "0" ( type nul >> run.failed )'
                }
                File wrapperScript = new File(cwd, scriptName)
                wrapperScript.setText("\"${esScript}\" ${argsPasser} > run.log 2>&1 ${exitMarker}", 'UTF-8')
                esScript = wrapperScript.toString()
            }

            exec(executable: executable, spawn: config.daemonize, dir: cwd, taskname: 'elasticsearch') {
                esEnv.each { key, value -> env(key: key, value: value) }
                (esArgs + esProps).each { arg(value: it) }
                arg(value: esScript)
                esProps.each { arg(value: it) }
            }
            waitfor(maxwait: '30', maxwaitunit: 'second', checkevery: '500', checkeveryunit: 'millisecond', timeoutproperty: "failed${name}") {
                and {
                    or {
                        resourceexists {
                            file(file: pidFile.toString())
                        file(file: failedMarker.toString())
                        }
                        and {
                            resourceexists {
                                file(file: pidFile.toString())
                            }
                            http(url: "http://localhost:${config.httpPort}")
                        }
                    http(url: "http://localhost:${config.httpPort}")
                    }
                }
            }

@@ -259,8 +288,8 @@ class ClusterFormationTasks {
        // this closure is the actual code to run elasticsearch
        Closure elasticsearchRunner = {
            // Command as string for logging
            String esCommandString = "Elasticsearch command: ${executable} "
            esCommandString += (esArgs + esProps).join(' ')
            String esCommandString = "Elasticsearch command: ${esScript} "
            esCommandString += esProps.join(' ')
            if (esEnv.isEmpty() == false) {
                esCommandString += '\nenvironment:'
                esEnv.each { k, v -> esCommandString += "\n    ${k}: ${v}" }

@@ -277,19 +306,17 @@ class ClusterFormationTasks {
                runAntCommand(project, antRunner, captureStream, captureStream)
            }

            if (ant.properties.containsKey("failed${name}".toString())) {
                // the waitfor failed, so dump any output we got (may be empty if info logging, but that is ok)
                logger.error(buffer.toString('UTF-8'))
                // also dump the cluster's log file, it may be useful
                File logFile = new File(home, "logs/${clusterName}.log")
                if (logFile.exists()) {
                    logFile.eachLine { line -> logger.error(line) }
                } else {
                    logger.error("Couldn't start elasticsearch and couldn't find ${logFile}")
                }
            if (ant.properties.containsKey("failed${name}".toString()) || failedMarker.exists()) {
                if (logger.isInfoEnabled() == false) {
                    // We already log the command at info level. No need to do it twice.
                    logger.error(esCommandString)
                    esCommandString.eachLine { line -> logger.error(line) }
                }
                // the waitfor failed, so dump any output we got (may be empty if info logging, but that is ok)
                buffer.toString('UTF-8').eachLine { line -> logger.error(line) }
                // also dump the log file for the startup script (which will include ES logging output to stdout)
                File startLog = new File(cwd, 'run.log')
                if (startLog.exists()) {
                    startLog.eachLine { line -> logger.error(line) }
                }
                throw new GradleException('Failed to start elasticsearch')
            }
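The cluster start logic above works around ant's spawn option discarding stdout/stderr by writing a wrapper script that redirects the real start script's output to run.log and touches a run.failed marker on a non-zero exit. A self-contained Java sketch of that wrapper-script trick; the class and method names are hypothetical, but the script body, file names, and Windows/Unix variants mirror the diff:

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

public class WrapperScriptSketch {
    static File writeWrapper(File cwd, String esScript, boolean windows) throws IOException {
        String scriptName = windows ? "run.bat" : "run";
        String argsPasser = windows ? "%*" : "\"$@\"";
        String exitMarker = windows
                ? "\r\n if \"%errorlevel%\" neq \"0\" ( type nul >> run.failed )"
                : "; if [ $? != 0 ]; then touch run.failed; fi";
        // Redirect both streams to run.log and drop a failure marker on bad exit.
        String body = "\"" + esScript + "\" " + argsPasser + " > run.log 2>&1 " + exitMarker;
        File wrapper = new File(cwd, scriptName);
        Files.write(wrapper.toPath(), body.getBytes(StandardCharsets.UTF_8));
        return wrapper;
    }

    public static void main(String[] args) throws IOException {
        File cwd = Files.createTempDirectory("cluster").toFile();
        File wrapper = writeWrapper(cwd, "/path/to/bin/elasticsearch", false);
        System.out.println(wrapper + " -> " + new String(
                Files.readAllBytes(wrapper.toPath()), StandardCharsets.UTF_8));
    }
}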
@@ -20,10 +20,9 @@ package org.elasticsearch.gradle.vagrant

import com.carrotsearch.gradle.junit4.LoggingOutputStream
import org.gradle.api.GradleScriptException
import org.gradle.api.InvalidUserDataException
import org.gradle.api.logging.Logger
import org.gradle.logging.ProgressLogger
import org.gradle.logging.ProgressLoggerFactory

import java.util.regex.Matcher

/**
@@ -20,7 +20,6 @@ package org.elasticsearch.gradle.vagrant

import com.carrotsearch.gradle.junit4.LoggingOutputStream
import org.gradle.logging.ProgressLogger
import org.gradle.logging.ProgressLoggerFactory

/**
 * Adapts an OutputStream being written to by vagrant into a ProcessLogger. It
@@ -273,13 +273,13 @@ public class TransportClusterHealthAction extends TransportMasterNodeReadAction<
        } catch (IndexNotFoundException e) {
            // one of the specified indices is not there - treat it as RED.
            ClusterHealthResponse response = new ClusterHealthResponse(clusterName.value(), Strings.EMPTY_ARRAY, clusterState,
                    numberOfPendingTasks, numberOfInFlightFetch, UnassignedInfo.getNumberOfDelayedUnassigned(System.currentTimeMillis(), settings, clusterState),
                    numberOfPendingTasks, numberOfInFlightFetch, UnassignedInfo.getNumberOfDelayedUnassigned(clusterState),
                    pendingTaskTimeInQueue);
            response.setStatus(ClusterHealthStatus.RED);
            return response;
        }

        return new ClusterHealthResponse(clusterName.value(), concreteIndices, clusterState, numberOfPendingTasks,
                numberOfInFlightFetch, UnassignedInfo.getNumberOfDelayedUnassigned(System.currentTimeMillis(), settings, clusterState), pendingTaskTimeInQueue);
                numberOfInFlightFetch, UnassignedInfo.getNumberOfDelayedUnassigned(clusterState), pendingTaskTimeInQueue);
    }
}
@@ -31,7 +31,6 @@ import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.threadpool.ThreadPool;

import java.util.Locale;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.atomic.AtomicBoolean;

@@ -56,9 +55,8 @@ public class RoutingService extends AbstractLifecycleComponent<RoutingService> i
    private final AllocationService allocationService;

    private AtomicBoolean rerouting = new AtomicBoolean();
    private volatile long registeredNextDelaySetting = Long.MAX_VALUE;
    private volatile long minDelaySettingAtLastScheduling = Long.MAX_VALUE;
    private volatile ScheduledFuture registeredNextDelayFuture;
    private volatile long unassignedShardsAllocatedTimestamp = 0;

    @Inject
    public RoutingService(Settings settings, ThreadPool threadPool, ClusterService clusterService, AllocationService allocationService) {

@@ -89,19 +87,6 @@ public class RoutingService extends AbstractLifecycleComponent<RoutingService> i
        return this.allocationService;
    }

    /**
     * Update the last time the allocator tried to assign unassigned shards
     *
     * This is used so that both the GatewayAllocator and RoutingService use a
     * consistent timestamp for comparing which shards have been delayed to
     * avoid a race condition where GatewayAllocator thinks the shard should
     * be delayed and the RoutingService thinks it has already passed the delay
     * and that the GatewayAllocator has/will handle it.
     */
    public void setUnassignedShardsAllocatedTimestamp(long timeInMillis) {
        this.unassignedShardsAllocatedTimestamp = timeInMillis;
    }

    /**
     * Initiates a reroute.
     */

@@ -112,51 +97,43 @@ public class RoutingService extends AbstractLifecycleComponent<RoutingService> i
    @Override
    public void clusterChanged(ClusterChangedEvent event) {
        if (event.state().nodes().localNodeMaster()) {
            // figure out when the next unassigned allocation need to happen from now. If this is larger or equal
            // then the last time we checked and scheduled, we are guaranteed to have a reroute until then, so no need
            // to schedule again
            long nextDelaySetting = UnassignedInfo.findSmallestDelayedAllocationSetting(settings, event.state());
            if (nextDelaySetting > 0 && nextDelaySetting < registeredNextDelaySetting) {
            // Figure out if an existing scheduled reroute is good enough or whether we need to cancel and reschedule.
            // If the minimum of the currently relevant delay settings is larger than something we scheduled in the past,
            // we are guaranteed that the planned schedule will happen before any of the current shard delays are expired.
            long minDelaySetting = UnassignedInfo.findSmallestDelayedAllocationSetting(settings, event.state());
            if (minDelaySetting <= 0) {
                logger.trace("no need to schedule reroute - no delayed unassigned shards, minDelaySetting [{}], scheduled [{}]", minDelaySetting, minDelaySettingAtLastScheduling);
                minDelaySettingAtLastScheduling = Long.MAX_VALUE;
                FutureUtils.cancel(registeredNextDelayFuture);
                registeredNextDelaySetting = nextDelaySetting;
                // We calculate nextDelay based on System.currentTimeMillis() here because we want the next delay from the "now" perspective
                // rather than the delay from the last time the GatewayAllocator tried to assign/delay the shard.
                // The actual calculation is based on the latter though, to account for shards that should have been allocated
                // between unassignedShardsAllocatedTimestamp and System.currentTimeMillis()
                long nextDelayBasedOnUnassignedShardsAllocatedTimestamp = UnassignedInfo.findNextDelayedAllocationIn(unassignedShardsAllocatedTimestamp, settings, event.state());
                // adjust from unassignedShardsAllocatedTimestamp to now
                long nextDelayMillis = nextDelayBasedOnUnassignedShardsAllocatedTimestamp - (System.currentTimeMillis() - unassignedShardsAllocatedTimestamp);
                if (nextDelayMillis < 0) {
                    nextDelayMillis = 0;
                }
                TimeValue nextDelay = TimeValue.timeValueMillis(nextDelayMillis);
                int unassignedDelayedShards = UnassignedInfo.getNumberOfDelayedUnassigned(unassignedShardsAllocatedTimestamp, settings, event.state());
                if (unassignedDelayedShards > 0) {
                    logger.info("delaying allocation for [{}] unassigned shards, next check in [{}]",
                            unassignedDelayedShards, nextDelay);
                    registeredNextDelayFuture = threadPool.schedule(nextDelay, ThreadPool.Names.SAME, new AbstractRunnable() {
                        @Override
                        protected void doRun() throws Exception {
                            registeredNextDelaySetting = Long.MAX_VALUE;
                            reroute("assign delayed unassigned shards");
                        }
            } else if (minDelaySetting < minDelaySettingAtLastScheduling) {
                FutureUtils.cancel(registeredNextDelayFuture);
                minDelaySettingAtLastScheduling = minDelaySetting;
                TimeValue nextDelay = TimeValue.timeValueNanos(UnassignedInfo.findNextDelayedAllocationIn(event.state()));
                assert nextDelay.nanos() > 0 : "next delay must be non 0 as minDelaySetting is [" + minDelaySetting + "]";
                logger.info("delaying allocation for [{}] unassigned shards, next check in [{}]",
                        UnassignedInfo.getNumberOfDelayedUnassigned(event.state()), nextDelay);
                registeredNextDelayFuture = threadPool.schedule(nextDelay, ThreadPool.Names.SAME, new AbstractRunnable() {
                    @Override
                    protected void doRun() throws Exception {
                        minDelaySettingAtLastScheduling = Long.MAX_VALUE;
                        reroute("assign delayed unassigned shards");
                    }

                    @Override
                    public void onFailure(Throwable t) {
                        logger.warn("failed to schedule/execute reroute post unassigned shard", t);
                        registeredNextDelaySetting = Long.MAX_VALUE;
                    }
                });
            }
                    @Override
                    public void onFailure(Throwable t) {
                        logger.warn("failed to schedule/execute reroute post unassigned shard", t);
                        minDelaySettingAtLastScheduling = Long.MAX_VALUE;
                    }
                });
            } else {
                logger.trace("no need to schedule reroute due to delayed unassigned, next_delay_setting [{}], registered [{}]", nextDelaySetting, registeredNextDelaySetting);
                logger.trace("no need to schedule reroute - current schedule reroute is enough. minDelaySetting [{}], scheduled [{}]", minDelaySetting, minDelaySettingAtLastScheduling);
            }
        }
    }

    // visible for testing
    long getRegisteredNextDelaySetting() {
        return this.registeredNextDelaySetting;
    long getMinDelaySettingAtLastScheduling() {
        return this.minDelaySettingAtLastScheduling;
    }

    // visible for testing
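The rewritten clusterChanged() above keeps at most one scheduled reroute: the schedule is forgotten once no delayed shards remain, and it is cancelled and replaced only when a strictly smaller delay setting appears. A simplified, runnable Java sketch of that rescheduling rule; ScheduledExecutorService stands in for the Elasticsearch ThreadPool, and all names here are hypothetical:

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;

public class DelayedRerouteSketch {
    private final ScheduledExecutorService pool = Executors.newSingleThreadScheduledExecutor();
    private volatile long minDelaySettingAtLastScheduling = Long.MAX_VALUE;
    private volatile ScheduledFuture<?> registeredNextDelayFuture;

    void onClusterChanged(long minDelaySettingMillis, long nextDelayNanos) {
        if (minDelaySettingMillis <= 0) {
            // no delayed unassigned shards left: forget the schedule
            minDelaySettingAtLastScheduling = Long.MAX_VALUE;
            if (registeredNextDelayFuture != null) {
                registeredNextDelayFuture.cancel(false);
            }
        } else if (minDelaySettingMillis < minDelaySettingAtLastScheduling) {
            // a shorter delay appeared: cancel and reschedule earlier
            if (registeredNextDelayFuture != null) {
                registeredNextDelayFuture.cancel(false);
            }
            minDelaySettingAtLastScheduling = minDelaySettingMillis;
            registeredNextDelayFuture = pool.schedule(() -> {
                minDelaySettingAtLastScheduling = Long.MAX_VALUE;
                System.out.println("reroute: assign delayed unassigned shards");
            }, nextDelayNanos, TimeUnit.NANOSECONDS);
        } // else: the existing schedule already fires early enough
    }

    public static void main(String[] args) throws InterruptedException {
        DelayedRerouteSketch svc = new DelayedRerouteSketch();
        svc.onClusterChanged(60_000, TimeUnit.MILLISECONDS.toNanos(100));
        Thread.sleep(200);
        svc.pool.shutdown();
    }
}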
@@ -34,6 +34,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.concurrent.TimeUnit;

/**
 * Holds additional information as to why the shard is in unassigned state.

@@ -103,21 +104,24 @@ public class UnassignedInfo implements ToXContent, Writeable<UnassignedInfo> {
    }

    private final Reason reason;
    private final long timestamp;
    private final long unassignedTimeMillis; // used for display and log messages, in milliseconds
    private final long unassignedTimeNanos; // in nanoseconds, used to calculate delay for delayed shard allocation
    private volatile long lastComputedLeftDelayNanos = 0l; // how long to delay shard allocation, not serialized (always positive, 0 means no delay)
    private final String message;
    private final Throwable failure;

    public UnassignedInfo(Reason reason, String message) {
        this(reason, System.currentTimeMillis(), message, null);
        this(reason, System.currentTimeMillis(), System.nanoTime(), message, null);
    }

    public UnassignedInfo(Reason reason, @Nullable String message, @Nullable Throwable failure) {
        this(reason, System.currentTimeMillis(), message, failure);
        this(reason, System.currentTimeMillis(), System.nanoTime(), message, failure);
    }

    private UnassignedInfo(Reason reason, long timestamp, String message, Throwable failure) {
    private UnassignedInfo(Reason reason, long unassignedTimeMillis, long timestampNanos, String message, Throwable failure) {
        this.reason = reason;
        this.timestamp = timestamp;
        this.unassignedTimeMillis = unassignedTimeMillis;
        this.unassignedTimeNanos = timestampNanos;
        this.message = message;
        this.failure = failure;
        assert !(message == null && failure != null) : "provide a message if a failure exception is provided";

@@ -125,14 +129,18 @@ public class UnassignedInfo implements ToXContent, Writeable<UnassignedInfo> {

    UnassignedInfo(StreamInput in) throws IOException {
        this.reason = Reason.values()[(int) in.readByte()];
        this.timestamp = in.readLong();
        this.unassignedTimeMillis = in.readLong();
        // As System.nanoTime() cannot be compared across different JVMs, reset it to now.
        // This means that in master failover situations, elapsed delay time is forgotten.
        this.unassignedTimeNanos = System.nanoTime();
        this.message = in.readOptionalString();
        this.failure = in.readThrowable();
    }

    public void writeTo(StreamOutput out) throws IOException {
        out.writeByte((byte) reason.ordinal());
        out.writeLong(timestamp);
        out.writeLong(unassignedTimeMillis);
        // Do not serialize unassignedTimeNanos as System.nanoTime() cannot be compared across different JVMs
        out.writeOptionalString(message);
        out.writeThrowable(failure);
    }

@@ -149,13 +157,20 @@ public class UnassignedInfo implements ToXContent, Writeable<UnassignedInfo> {
    }

    /**
     * The timestamp in milliseconds since epoch. Note, we use timestamp here since
     * we want to make sure its preserved across node serializations. Extra care need
     * to be made if its used to calculate diff (handle negative values) in case of
     * time drift.
     * The timestamp in milliseconds when the shard became unassigned, based on System.currentTimeMillis().
     * Note, we use timestamp here since we want to make sure its preserved across node serializations.
     */
    public long getTimestampInMillis() {
        return this.timestamp;
    public long getUnassignedTimeInMillis() {
        return this.unassignedTimeMillis;
    }

    /**
     * The timestamp in nanoseconds when the shard became unassigned, based on System.nanoTime().
     * Used to calculate the delay for delayed shard allocation.
     * ONLY EXPOSED FOR TESTS!
     */
    public long getUnassignedTimeInNanos() {
        return this.unassignedTimeNanos;
    }

    /**

@@ -186,7 +201,7 @@ public class UnassignedInfo implements ToXContent, Writeable<UnassignedInfo> {
    }

    /**
     * The allocation delay value associated with the index (defaulting to node settings if not set).
     * The allocation delay value in milliseconds associated with the index (defaulting to node settings if not set).
     */
    public long getAllocationDelayTimeoutSetting(Settings settings, Settings indexSettings) {
        if (reason != Reason.NODE_LEFT) {

@@ -197,31 +212,40 @@ public class UnassignedInfo implements ToXContent, Writeable<UnassignedInfo> {
    }

    /**
     * The time in millisecond until this unassigned shard can be reassigned.
     * The delay in nanoseconds until this unassigned shard can be reassigned. This value is cached and might be slightly out-of-date.
     * See also the {@link #updateDelay(long, Settings, Settings)} method.
     */
    public long getDelayAllocationExpirationIn(long unassignedShardsAllocatedTimestamp, Settings settings, Settings indexSettings) {
        long delayTimeout = getAllocationDelayTimeoutSetting(settings, indexSettings);
        if (delayTimeout == 0) {
            return 0;
        }
        long delta = unassignedShardsAllocatedTimestamp - timestamp;
        // account for time drift, treat it as no timeout
        if (delta < 0) {
            return 0;
        }
        return delayTimeout - delta;
    public long getLastComputedLeftDelayNanos() {
        return lastComputedLeftDelayNanos;
    }

    /**
     * Updates delay left based on current time (in nanoseconds) and index/node settings.
     * Should only be called from ReplicaShardAllocator.
     * @return updated delay in nanoseconds
     */
    public long updateDelay(long nanoTimeNow, Settings settings, Settings indexSettings) {
        long delayTimeoutMillis = getAllocationDelayTimeoutSetting(settings, indexSettings);
        final long newComputedLeftDelayNanos;
        if (delayTimeoutMillis == 0l) {
            newComputedLeftDelayNanos = 0l;
        } else {
            assert nanoTimeNow >= unassignedTimeNanos;
            long delayTimeoutNanos = TimeUnit.NANOSECONDS.convert(delayTimeoutMillis, TimeUnit.MILLISECONDS);
            newComputedLeftDelayNanos = Math.max(0l, delayTimeoutNanos - (nanoTimeNow - unassignedTimeNanos));
        }
        lastComputedLeftDelayNanos = newComputedLeftDelayNanos;
        return newComputedLeftDelayNanos;
    }

    /**
     * Returns the number of shards that are unassigned and currently being delayed.
     */
    public static int getNumberOfDelayedUnassigned(long unassignedShardsAllocatedTimestamp, Settings settings, ClusterState state) {
    public static int getNumberOfDelayedUnassigned(ClusterState state) {
        int count = 0;
        for (ShardRouting shard : state.routingTable().shardsWithState(ShardRoutingState.UNASSIGNED)) {
            if (shard.primary() == false) {
                IndexMetaData indexMetaData = state.metaData().index(shard.getIndex());
                long delay = shard.unassignedInfo().getDelayAllocationExpirationIn(unassignedShardsAllocatedTimestamp, settings, indexMetaData.getSettings());
                long delay = shard.unassignedInfo().getLastComputedLeftDelayNanos();
                if (delay > 0) {
                    count++;
                }

@@ -231,15 +255,16 @@ public class UnassignedInfo implements ToXContent, Writeable<UnassignedInfo> {
    }

    /**
     * Finds the smallest delay expiration setting of an unassigned shard. Returns 0 if there are none.
     * Finds the smallest delay expiration setting in milliseconds of all unassigned shards that are still delayed. Returns 0 if there are none.
     */
    public static long findSmallestDelayedAllocationSetting(Settings settings, ClusterState state) {
        long nextDelaySetting = Long.MAX_VALUE;
        for (ShardRouting shard : state.routingTable().shardsWithState(ShardRoutingState.UNASSIGNED)) {
            if (shard.primary() == false) {
                IndexMetaData indexMetaData = state.metaData().index(shard.getIndex());
                long leftDelayNanos = shard.unassignedInfo().getLastComputedLeftDelayNanos();
                long delayTimeoutSetting = shard.unassignedInfo().getAllocationDelayTimeoutSetting(settings, indexMetaData.getSettings());
                if (delayTimeoutSetting > 0 && delayTimeoutSetting < nextDelaySetting) {
                if (leftDelayNanos > 0 && delayTimeoutSetting > 0 && delayTimeoutSetting < nextDelaySetting) {
                    nextDelaySetting = delayTimeoutSetting;
                }
            }

@@ -249,14 +274,13 @@ public class UnassignedInfo implements ToXContent, Writeable<UnassignedInfo> {

    /**
     * Finds the next (closest) delay expiration of an unassigned shard. Returns 0 if there are none.
     * Finds the next (closest) delay expiration of an unassigned shard in nanoseconds. Returns 0 if there are none.
     */
    public static long findNextDelayedAllocationIn(long unassignedShardsAllocatedTimestamp, Settings settings, ClusterState state) {
    public static long findNextDelayedAllocationIn(ClusterState state) {
        long nextDelay = Long.MAX_VALUE;
        for (ShardRouting shard : state.routingTable().shardsWithState(ShardRoutingState.UNASSIGNED)) {
            if (shard.primary() == false) {
                IndexMetaData indexMetaData = state.metaData().index(shard.getIndex());
                long nextShardDelay = shard.unassignedInfo().getDelayAllocationExpirationIn(unassignedShardsAllocatedTimestamp, settings, indexMetaData.getSettings());
                long nextShardDelay = shard.unassignedInfo().getLastComputedLeftDelayNanos();
                if (nextShardDelay > 0 && nextShardDelay < nextDelay) {
                    nextDelay = nextShardDelay;
                }

@@ -268,7 +292,7 @@ public class UnassignedInfo implements ToXContent, Writeable<UnassignedInfo> {
    public String shortSummary() {
        StringBuilder sb = new StringBuilder();
        sb.append("[reason=").append(reason).append("]");
        sb.append(", at[").append(DATE_TIME_FORMATTER.printer().print(timestamp)).append("]");
        sb.append(", at[").append(DATE_TIME_FORMATTER.printer().print(unassignedTimeMillis)).append("]");
        String details = getDetails();
        if (details != null) {
            sb.append(", details[").append(details).append("]");

@@ -285,7 +309,7 @@ public class UnassignedInfo implements ToXContent, Writeable<UnassignedInfo> {
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject("unassigned_info");
        builder.field("reason", reason);
        builder.field("at", DATE_TIME_FORMATTER.printer().print(timestamp));
        builder.field("at", DATE_TIME_FORMATTER.printer().print(unassignedTimeMillis));
        String details = getDetails();
        if (details != null) {
            builder.field("details", details);

@@ -301,7 +325,7 @@ public class UnassignedInfo implements ToXContent, Writeable<UnassignedInfo> {

        UnassignedInfo that = (UnassignedInfo) o;

        if (timestamp != that.timestamp) return false;
        if (unassignedTimeMillis != that.unassignedTimeMillis) return false;
        if (reason != that.reason) return false;
        if (message != null ? !message.equals(that.message) : that.message != null) return false;
        return !(failure != null ? !failure.equals(that.failure) : that.failure != null);

@@ -311,7 +335,7 @@ public class UnassignedInfo implements ToXContent, Writeable<UnassignedInfo> {
    @Override
    public int hashCode() {
        int result = reason != null ? reason.hashCode() : 0;
        result = 31 * result + Long.hashCode(timestamp);
        result = 31 * result + Long.hashCode(unassignedTimeMillis);
        result = 31 * result + (message != null ? message.hashCode() : 0);
        result = 31 * result + (failure != null ? failure.hashCode() : 0);
        return result;
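The UnassignedInfo change above moves delay tracking to a monotonic System.nanoTime() base: the remaining delay is recomputed against a caller-supplied "now" and cached, and the nano timestamp is never serialized because nanoTime() values are not comparable across JVMs. A minimal runnable Java sketch of that computation; the class is a hypothetical stand-in, but the arithmetic matches updateDelay above:

import java.util.concurrent.TimeUnit;

public class LeftDelaySketch {
    private final long unassignedTimeNanos = System.nanoTime(); // captured when the shard became unassigned
    private volatile long lastComputedLeftDelayNanos = 0L;      // cached; 0 means no delay

    long updateDelay(long nanoTimeNow, long delayTimeoutMillis) {
        final long left;
        if (delayTimeoutMillis == 0L) {
            left = 0L; // no delayed allocation configured
        } else {
            long delayTimeoutNanos = TimeUnit.NANOSECONDS.convert(delayTimeoutMillis, TimeUnit.MILLISECONDS);
            // monotonic clock: no negative-drift guard needed, just clamp at 0
            left = Math.max(0L, delayTimeoutNanos - (nanoTimeNow - unassignedTimeNanos));
        }
        lastComputedLeftDelayNanos = left;
        return left;
    }

    public static void main(String[] args) throws InterruptedException {
        LeftDelaySketch info = new LeftDelaySketch();
        Thread.sleep(50);
        long left = info.updateDelay(System.nanoTime(), 1000); // e.g. a 1s delay setting
        System.out.printf("delay left: %d ms%n", TimeUnit.NANOSECONDS.toMillis(left));
    }
}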
@@ -23,7 +23,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;

import com.spatial4j.core.shape.ShapeCollection;
import org.elasticsearch.common.xcontent.XContentBuilder;

import com.spatial4j.core.shape.Shape;

@@ -34,11 +33,7 @@ import com.vividsolutions.jts.geom.LineString;

public abstract class BaseLineStringBuilder<E extends BaseLineStringBuilder<E>> extends PointCollection<E> {

    protected BaseLineStringBuilder() {
        this(new ArrayList<Coordinate>());
    }

    protected BaseLineStringBuilder(ArrayList<Coordinate> points) {
    public BaseLineStringBuilder(ArrayList<Coordinate> points) {
        super(points);
    }

@@ -78,15 +73,15 @@ public abstract class BaseLineStringBuilder<E extends BaseLineStringBuilder<E>>

    /**
     * Decompose a linestring given as array of coordinates at a vertical line.
     *
     *
     * @param dateline x-axis intercept of the vertical line
     * @param coordinates coordinates forming the linestring
     * @return array of linestrings given as coordinate arrays
     * @return array of linestrings given as coordinate arrays
     */
    protected static Coordinate[][] decompose(double dateline, Coordinate[] coordinates) {
        int offset = 0;
        ArrayList<Coordinate[]> parts = new ArrayList<>();

        double shift = coordinates[0].x > DATELINE ? DATELINE : (coordinates[0].x < -DATELINE ? -DATELINE : 0);

        for (int i = 1; i < coordinates.length; i++) {
@@ -42,8 +42,8 @@ import java.util.Iterator;
 * The {@link BasePolygonBuilder} implements the groundwork to create polygons. This contains
 * Methods to wrap polygons at the dateline and building shapes from the data held by the
 * builder.
 * Since this Builder can be embedded to other builders (i.e. {@link MultiPolygonBuilder})
 * the class of the embedding builder is given by the generic argument <code>E</code>
 * Since this Builder can be embedded to other builders (i.e. {@link MultiPolygonBuilder})
 * the class of the embedding builder is given by the generic argument <code>E</code>

 * @param <E> type of the embedding class
 */

@@ -51,11 +51,11 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend

    public static final GeoShapeType TYPE = GeoShapeType.POLYGON;

    // Linear ring defining the shell of the polygon
    protected Ring<E> shell;
    // line string defining the shell of the polygon
    protected LineStringBuilder shell;

    // List of linear rings defining the holes of the polygon
    protected final ArrayList<BaseLineStringBuilder<?>> holes = new ArrayList<>();
    // List of line strings defining the holes of the polygon
    protected final ArrayList<LineStringBuilder> holes = new ArrayList<>();

    public BasePolygonBuilder(Orientation orientation) {
        super(orientation);

@@ -65,7 +65,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
    private E thisRef() {
        return (E)this;
    }

    public E point(double longitude, double latitude) {
        shell.point(longitude, latitude);
        return thisRef();

@@ -96,27 +96,17 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
     * @param hole linear ring defining the hole
     * @return this
     */
    public E hole(BaseLineStringBuilder<?> hole) {
    public E hole(LineStringBuilder hole) {
        holes.add(hole);
        return thisRef();
    }

    /**
     * build new hole to the polygon
     * @return this
     */
    public Ring<E> hole() {
        Ring<E> hole = new Ring<>(thisRef());
        this.holes.add(hole);
        return hole;
    }

    /**
     * Close the shell of the polygon
     * @return parent
     */
    public ShapeBuilder close() {
        return shell.close();
    public BasePolygonBuilder close() {
        shell.close();
        return this;
    }

    /**

@@ -138,11 +128,11 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
     * within the polygon.
     * This Method also wraps the polygons at the dateline. In order to this fact the result may
     * contains more polygons and less holes than defined in the builder it self.
     *
     *
     * @return coordinates of the polygon
     */
    public Coordinate[][][] coordinates() {
        int numEdges = shell.points.size()-1; // Last point is repeated
        int numEdges = shell.points.size()-1; // Last point is repeated
        for (int i = 0; i < holes.size(); i++) {
            numEdges += holes.get(i).points.size()-1;
            validateHole(shell, this.holes.get(i));

@@ -172,12 +162,12 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend

    protected XContentBuilder coordinatesArray(XContentBuilder builder, Params params) throws IOException {
        shell.coordinatesToXcontent(builder, true);
        for(BaseLineStringBuilder<?> hole : holes) {
        for(BaseLineStringBuilder hole : holes) {
            hole.coordinatesToXcontent(builder, true);
        }
        return builder;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();

@@ -188,7 +178,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
        builder.endObject();
        return builder;
    }

    public Geometry buildGeometry(GeometryFactory factory, boolean fixDateline) {
        if(fixDateline) {
            Coordinate[][][] polygons = coordinates();

@@ -207,7 +197,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
    protected Polygon toPolygon(GeometryFactory factory) {
        final LinearRing shell = linearRing(factory, this.shell.points);
        final LinearRing[] holes = new LinearRing[this.holes.size()];
        Iterator<BaseLineStringBuilder<?>> iterator = this.holes.iterator();
        Iterator<LineStringBuilder> iterator = this.holes.iterator();
        for (int i = 0; iterator.hasNext(); i++) {
            holes[i] = linearRing(factory, iterator.next().points);
        }

@@ -226,7 +216,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
    protected static Polygon polygon(GeometryFactory factory, Coordinate[][] polygon) {
        LinearRing shell = factory.createLinearRing(polygon[0]);
        LinearRing[] holes;

        if(polygon.length > 1) {
            holes = new LinearRing[polygon.length-1];
            for (int i = 0; i < holes.length; i++) {

@@ -243,7 +233,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
     * in turn contains an array of linestrings. These line Strings are represented as an array of
     * coordinates. The first linestring will be the shell of the polygon the others define holes
     * within the polygon.
     *
     *
     * @param factory {@link GeometryFactory} to use
     * @param polygons definition of polygons
     * @return a new Multipolygon

@@ -258,19 +248,19 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend

    /**
     * This method sets the component id of all edges in a ring to a given id and shifts the
     * coordinates of this component according to the dateline
     *
     * coordinates of this component according to the dateline
     *
     * @param edge An arbitrary edge of the component
     * @param id id to apply to the component
     * @param edges a list of edges to which all edges of the component will be added (could be <code>null</code>)
     * @return number of edges that belong to this component
     */
    private static int component(final Edge edge, final int id, final ArrayList<Edge> edges) {
        // find a coordinate that is not part of the dateline
        // find a coordinate that is not part of the dateline
        Edge any = edge;
        while(any.coordinate.x == +DATELINE || any.coordinate.x == -DATELINE) {
            if((any = any.next) == edge) {
                break;
                break;
            }
        }

@@ -362,7 +352,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
        }

        return result;
    }
    }

    private static final Coordinate[][] EMPTY = new Coordinate[0][];

@@ -378,7 +368,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
        }

        return points;
    }
    }

    private static Edge[] edges(Edge[] edges, int numHoles, ArrayList<ArrayList<Coordinate[]>> components) {
        ArrayList<Edge> mainEdges = new ArrayList<>(edges.length);

@@ -412,7 +402,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
        }
        for (int i = 0; i < numHoles; i++) {
            final Edge current = new Edge(holes[i].coordinate, holes[i].next);
            // the edge intersects with itself at its own coordinate. We need intersect to be set this way so the binary search
            // the edge intersects with itself at its own coordinate. We need intersect to be set this way so the binary search
            // will get the correct position in the edge list and therefore the correct component to add the hole
            current.intersect = current.coordinate;
            final int intersections = intersections(current.coordinate.x, edges);

@@ -457,20 +447,20 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
            holes[e2.component-1] = holes[numHoles];
            holes[numHoles] = null;
        }
        // only connect edges if intersections are pairwise
        // only connect edges if intersections are pairwise
        // 1. per the comment above, the edge array is sorted by y-value of the intersection
        // with the dateline. Two edges have the same y intercept when they cross the
        // with the dateline. Two edges have the same y intercept when they cross the
        // dateline thus they appear sequentially (pairwise) in the edge array. Two edges
        // do not have the same y intercept when we're forming a multi-poly from a poly
        // that wraps the dateline (but there are 2 ordered intercepts).
        // The connect method creates a new edge for these paired edges in the linked list.
        // For boundary conditions (e.g., intersect but not crossing) there is no sibling edge
        // that wraps the dateline (but there are 2 ordered intercepts).
        // The connect method creates a new edge for these paired edges in the linked list.
        // For boundary conditions (e.g., intersect but not crossing) there is no sibling edge
        // to connect. Thus the first logic check enforces the pairwise rule
        // 2. the second logic check ensures the two candidate edges aren't already connected by an
        // existing edge along the dateline - this is necessary due to a logic change in
        // ShapeBuilder.intersection that computes dateline edges as valid intersect points
        // ShapeBuilder.intersection that computes dateline edges as valid intersect points
        // in support of OGC standards
        if (e1.intersect != Edge.MAX_COORDINATE && e2.intersect != Edge.MAX_COORDINATE
        if (e1.intersect != Edge.MAX_COORDINATE && e2.intersect != Edge.MAX_COORDINATE
                && !(e1.next.next.coordinate.equals3D(e2.coordinate) && Math.abs(e1.next.coordinate.x) == DATELINE
                && Math.abs(e2.coordinate.x) == DATELINE) ) {
            connect(e1, e2);

@@ -489,7 +479,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
        // NOTE: the order of the object creation is crucial here! Don't change it!
        // first edge has no point on dateline
        Edge e1 = new Edge(in.intersect, in.next);

        if(out.intersect != out.next.coordinate) {
            // second edge has no point on dateline
            Edge e2 = new Edge(out.intersect, out.next);

@@ -507,7 +497,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
            // second edge has no point on dateline
            Edge e1 = new Edge(out.intersect, out.next);
            in.next = new Edge(in.intersect, e1, in.intersect);

        } else {
            // second edge intersects with dateline
            in.next = new Edge(in.intersect, out.next, in.intersect);

@@ -516,8 +506,8 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
        }
    }

    private static int createEdges(int component, Orientation orientation, BaseLineStringBuilder<?> shell,
                                   BaseLineStringBuilder<?> hole,
    private static int createEdges(int component, Orientation orientation, BaseLineStringBuilder shell,
                                   BaseLineStringBuilder hole,
                                   Edge[] edges, int offset) {
        // inner rings (holes) have an opposite direction than the outer rings
        // XOR will invert the orientation for outer ring cases (Truth Table:, T/T = F, T/F = T, F/T = T, F/F = F)

@@ -527,32 +517,4 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
        Edge.ring(component, direction, orientation == Orientation.LEFT, shell, points, 0, edges, offset, points.length-1);
        return points.length-1;
    }

    public static class Ring<P extends ShapeBuilder> extends BaseLineStringBuilder<Ring<P>> {

        private final P parent;

        protected Ring(P parent) {
            this(parent, new ArrayList<Coordinate>());
        }

        protected Ring(P parent, ArrayList<Coordinate> points) {
            super(points);
            this.parent = parent;
        }

        public P close() {
            Coordinate start = points.get(0);
            Coordinate end = points.get(points.size()-1);
            if(start.x != end.x || start.y != end.y) {
                points.add(start);
            }
            return parent;
        }

        @Override
        public GeoShapeType type() {
            return null;
        }
    }
}
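The refactor above removes the old Ring inner class in favor of LineStringBuilder, whose close() re-appends the starting coordinate unless the ring is already closed. A tiny self-contained Java sketch of that closing rule; double[] pairs stand in for JTS Coordinate and the class name is hypothetical:

import java.util.ArrayList;
import java.util.List;

public class RingCloseSketch {
    static List<double[]> close(List<double[]> points) {
        double[] start = points.get(0);
        double[] end = points.get(points.size() - 1);
        // Only append the starting point if start and end differ.
        if (start[0] != end[0] || start[1] != end[1]) {
            points.add(start);
        }
        return points;
    }

    public static void main(String[] args) {
        List<double[]> ring = new ArrayList<>(List.of(
                new double[] {0, 0}, new double[] {10, 0}, new double[] {10, 10}));
        System.out.println("points after close: " + close(ring).size()); // 4
    }
}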
@@ -29,7 +29,7 @@ import java.util.ArrayList;
import java.util.List;

public class GeometryCollectionBuilder extends ShapeBuilder {

    public static final GeoShapeType TYPE = GeoShapeType.GEOMETRYCOLLECTION;

    protected final ArrayList<ShapeBuilder> shapes = new ArrayList<>();

@@ -46,42 +46,42 @@ public class GeometryCollectionBuilder extends ShapeBuilder {
        this.shapes.add(shape);
        return this;
    }

    public GeometryCollectionBuilder point(PointBuilder point) {
        this.shapes.add(point);
        return this;
    }

    public GeometryCollectionBuilder multiPoint(MultiPointBuilder multiPoint) {
        this.shapes.add(multiPoint);
        return this;
    }

    public GeometryCollectionBuilder line(BaseLineStringBuilder<?> line) {
    public GeometryCollectionBuilder line(BaseLineStringBuilder line) {
        this.shapes.add(line);
        return this;
    }

    public GeometryCollectionBuilder multiLine(MultiLineStringBuilder multiLine) {
        this.shapes.add(multiLine);
        return this;
    }

    public GeometryCollectionBuilder polygon(BasePolygonBuilder<?> polygon) {
        this.shapes.add(polygon);
        return this;
    }

    public GeometryCollectionBuilder multiPolygon(MultiPolygonBuilder multiPolygon) {
        this.shapes.add(multiPolygon);
        return this;
    }

    public GeometryCollectionBuilder envelope(EnvelopeBuilder envelope) {
        this.shapes.add(envelope);
        return this;
    }

    public GeometryCollectionBuilder circle(CircleBuilder circle) {
        this.shapes.add(circle);
        return this;

@@ -120,11 +120,11 @@ public class GeometryCollectionBuilder extends ShapeBuilder {
    public Shape build() {

        List<Shape> shapes = new ArrayList<>(this.shapes.size());

        for (ShapeBuilder shape : this.shapes) {
            shapes.add(shape.build());
        }

        if (shapes.size() == 1)
            return shapes.get(0);
        else
@@ -19,12 +19,23 @@

package org.elasticsearch.common.geo.builders;

import com.vividsolutions.jts.geom.Coordinate;

import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.ArrayList;

public class LineStringBuilder extends BaseLineStringBuilder<LineStringBuilder> {

    public LineStringBuilder() {
        this(new ArrayList<Coordinate>());
    }

    public LineStringBuilder(ArrayList<Coordinate> points) {
        super(points);
    }

    public static final GeoShapeType TYPE = GeoShapeType.LINESTRING;

    @Override

@@ -42,4 +53,16 @@ public class LineStringBuilder extends BaseLineStringBuilder<LineStringBuilder>
        return TYPE;
    }

    /**
     * Closes the current lineString by adding the starting point as the end point
     */
    public LineStringBuilder close() {
        Coordinate start = points.get(0);
        Coordinate end = points.get(points.size()-1);
        if(start.x != end.x || start.y != end.y) {
            points.add(start);
        }
        return this;
    }

}
@@ -22,7 +22,6 @@ package org.elasticsearch.common.geo.builders;
import org.elasticsearch.common.xcontent.XContentBuilder;

import com.spatial4j.core.shape.Shape;
import com.spatial4j.core.shape.jts.JtsGeometry;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.LineString;

@@ -35,15 +34,9 @@ public class MultiLineStringBuilder extends ShapeBuilder {

    public static final GeoShapeType TYPE = GeoShapeType.MULTILINESTRING;

    private final ArrayList<BaseLineStringBuilder<?>> lines = new ArrayList<>();
    private final ArrayList<LineStringBuilder> lines = new ArrayList<>();

    public InternalLineStringBuilder linestring() {
        InternalLineStringBuilder line = new InternalLineStringBuilder(this);
        this.lines.add(line);
        return line;
    }

    public MultiLineStringBuilder linestring(BaseLineStringBuilder<?> line) {
    public MultiLineStringBuilder linestring(LineStringBuilder line) {
        this.lines.add(line);
        return this;
    }

@@ -67,7 +60,7 @@ public class MultiLineStringBuilder extends ShapeBuilder {
        builder.field(FIELD_TYPE, TYPE.shapename);
        builder.field(FIELD_COORDINATES);
        builder.startArray();
        for(BaseLineStringBuilder<?> line : lines) {
        for(BaseLineStringBuilder line : lines) {
            line.coordinatesToXcontent(builder, false);
        }
        builder.endArray();

@@ -80,7 +73,7 @@ public class MultiLineStringBuilder extends ShapeBuilder {
        final Geometry geometry;
        if(wrapdateline) {
            ArrayList<LineString> parts = new ArrayList<>();
            for (BaseLineStringBuilder<?> line : lines) {
            for (BaseLineStringBuilder line : lines) {
                BaseLineStringBuilder.decompose(FACTORY, line.coordinates(false), parts);
            }
            if(parts.size() == 1) {

@@ -91,7 +84,7 @@ public class MultiLineStringBuilder extends ShapeBuilder {
            }
        } else {
            LineString[] lineStrings = new LineString[lines.size()];
            Iterator<BaseLineStringBuilder<?>> iterator = lines.iterator();
            Iterator<LineStringBuilder> iterator = lines.iterator();
            for (int i = 0; iterator.hasNext(); i++) {
                lineStrings[i] = FACTORY.createLineString(iterator.next().coordinates(false));
            }

@@ -99,27 +92,4 @@ public class MultiLineStringBuilder extends ShapeBuilder {
        }
        return jtsGeometry(geometry);
    }

    public static class InternalLineStringBuilder extends BaseLineStringBuilder<InternalLineStringBuilder> {

        private final MultiLineStringBuilder collection;

        public InternalLineStringBuilder(MultiLineStringBuilder collection) {
            super();
            this.collection = collection;
        }

        public MultiLineStringBuilder end() {
            return collection;
        }

        public Coordinate[] coordinates() {
            return super.coordinates(false);
        }

        @Override
        public GeoShapeType type() {
            return null;
        }
    }
}

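With the internal builder removed, a multi-linestring is now assembled from independently constructed LineStringBuilder instances. A usage sketch based on the test changes later in this commit (the coordinates are illustrative; the builder calls are the ones shown in ShapeBuilderTests below):

    import com.spatial4j.core.shape.Shape;
    import org.elasticsearch.common.geo.builders.LineStringBuilder;
    import org.elasticsearch.common.geo.builders.ShapeBuilders;

    class MultiLineStringUsageSketch {
        static Shape twoLines() {
            // each linestring(...) call now takes a fully built LineStringBuilder
            return ShapeBuilders.newMultiLinestring()
                    .linestring(new LineStringBuilder().point(-100.0, 50.0).point(50.0, 50.0))
                    .linestring(new LineStringBuilder().point(-100.0, 20.0).point(50.0, 20.0))
                    .build();
        }
    }
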
@@ -48,16 +48,6 @@ public class MultiPolygonBuilder extends ShapeBuilder {
        return this;
    }

    public InternalPolygonBuilder polygon() {
        return polygon(Orientation.RIGHT);
    }

    public InternalPolygonBuilder polygon(Orientation orientation) {
        InternalPolygonBuilder polygon = new InternalPolygonBuilder(this, orientation);
        this.polygon(polygon);
        return polygon;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();

@@ -81,7 +71,7 @@ public class MultiPolygonBuilder extends ShapeBuilder {
    public Shape build() {

        List<Shape> shapes = new ArrayList<>(this.polygons.size());

        if(wrapdateline) {
            for (BasePolygonBuilder<?> polygon : this.polygons) {
                for(Coordinate[][] part : polygon.coordinates()) {

@@ -100,20 +90,5 @@ public class MultiPolygonBuilder extends ShapeBuilder {
        //note: ShapeCollection is probably faster than a Multi* geom.
    }

    public static class InternalPolygonBuilder extends BasePolygonBuilder<InternalPolygonBuilder> {

        private final MultiPolygonBuilder collection;

        private InternalPolygonBuilder(MultiPolygonBuilder collection, Orientation orientation) {
            super(orientation);
            this.collection = collection;
            this.shell = new Ring<>(this);
        }

        @Override
        public MultiPolygonBuilder close() {
            super.close();
            return collection;
        }
    }
}

@@ -29,7 +29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import com.vividsolutions.jts.geom.Coordinate;

/**
 * The {@link PointCollection} is an abstract base implementation for all GeoShapes. It simply handles a set of points.
 * The {@link PointCollection} is an abstract base implementation for all GeoShapes. It simply handles a set of points.
 */
public abstract class PointCollection<E extends PointCollection<E>> extends ShapeBuilder {

@@ -43,7 +43,7 @@ public abstract class PointCollection<E extends PointCollection<E>> extends Shap
    protected PointCollection(ArrayList<Coordinate> points) {
        this.points = points;
    }

    @SuppressWarnings("unchecked")
    private E thisRef() {
        return (E)this;

@@ -57,7 +57,7 @@ public abstract class PointCollection<E extends PointCollection<E>> extends Shap
     */
    public E point(double longitude, double latitude) {
        return this.point(coordinate(longitude, latitude));
    }
    }

    /**
     * Add a new point to the collection

@@ -71,7 +71,7 @@ public abstract class PointCollection<E extends PointCollection<E>> extends Shap

    /**
     * Add a array of points to the collection
     *
     *
     * @param coordinates array of {@link Coordinate}s to add
     * @return this
     */

@@ -81,7 +81,7 @@ public abstract class PointCollection<E extends PointCollection<E>> extends Shap

    /**
     * Add a collection of points to the collection
     *
     *
     * @param coordinates array of {@link Coordinate}s to add
     * @return this
     */

@@ -92,7 +92,7 @@ public abstract class PointCollection<E extends PointCollection<E>> extends Shap

    /**
     * Copy all points to a new Array
     *
     *
     * @param closed if set to true the first point of the array is repeated as last element
     * @return Array of coordinates
     */

@@ -106,9 +106,9 @@ public abstract class PointCollection<E extends PointCollection<E>> extends Shap

    /**
     * builds an array of coordinates to a {@link XContentBuilder}
     *
     * @param builder builder to use
     * @param closed repeat the first point at the end of the array if it's not already defines as last element of the array
     *
     * @param builder builder to use
     * @param closed repeat the first point at the end of the array if it's not already defines as last element of the array
     * @return the builder
     */
    protected XContentBuilder coordinatesToXcontent(XContentBuilder builder, boolean closed) throws IOException {

@@ -35,7 +35,7 @@ public class PolygonBuilder extends BasePolygonBuilder<PolygonBuilder> {

    protected PolygonBuilder(ArrayList<Coordinate> points, Orientation orientation) {
        super(orientation);
        this.shell = new Ring<>(this, points);
        this.shell = new LineStringBuilder(points);
    }

    @Override

@@ -444,7 +444,7 @@ public abstract class ShapeBuilder extends ToXContentToBytes {
     * number of points
     * @return Array of edges
     */
    protected static Edge[] ring(int component, boolean direction, boolean handedness, BaseLineStringBuilder<?> shell,
    protected static Edge[] ring(int component, boolean direction, boolean handedness, BaseLineStringBuilder shell,
                                 Coordinate[] points, int offset, Edge[] edges, int toffset, int length) {
        // calculate the direction of the points:
        // find the point a the top of the set and check its

@@ -113,10 +113,6 @@ public class GatewayAllocator extends AbstractComponent {
    }

    public boolean allocateUnassigned(final RoutingAllocation allocation) {
        // Take a snapshot of the current time and tell the RoutingService
        // about it, so it will use a consistent timestamp for delays
        long lastAllocateUnassignedRun = System.currentTimeMillis();
        this.routingService.setUnassignedShardsAllocatedTimestamp(lastAllocateUnassignedRun);
        boolean changed = false;

        RoutingNodes.UnassignedShards unassigned = allocation.routingNodes().unassigned();

@@ -124,7 +120,7 @@ public class GatewayAllocator extends AbstractComponent {

        changed |= primaryShardAllocator.allocateUnassigned(allocation);
        changed |= replicaShardAllocator.processExistingRecoveries(allocation);
        changed |= replicaShardAllocator.allocateUnassigned(allocation, lastAllocateUnassignedRun);
        changed |= replicaShardAllocator.allocateUnassigned(allocation);
        return changed;
    }

@@ -111,10 +111,7 @@ public abstract class ReplicaShardAllocator extends AbstractComponent {
    }

    public boolean allocateUnassigned(RoutingAllocation allocation) {
        return allocateUnassigned(allocation, System.currentTimeMillis());
    }

    public boolean allocateUnassigned(RoutingAllocation allocation, long allocateUnassignedTimestapm) {
        long nanoTimeNow = System.nanoTime();
        boolean changed = false;
        final RoutingNodes routingNodes = allocation.routingNodes();
        final RoutingNodes.UnassignedShards.UnassignedIterator unassignedIterator = routingNodes.unassigned().iterator();

@@ -173,27 +170,43 @@ public abstract class ReplicaShardAllocator extends AbstractComponent {
                    unassignedIterator.initialize(nodeWithHighestMatch.nodeId(), shard.version(), allocation.clusterInfo().getShardSize(shard, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE));
                }
            } else if (matchingNodes.hasAnyData() == false) {
                // if we didn't manage to find *any* data (regardless of matching sizes), check if the allocation
                // of the replica shard needs to be delayed, and if so, add it to the ignore unassigned list
                // note: we only care about replica in delayed allocation, since if we have an unassigned primary it
                // will anyhow wait to find an existing copy of the shard to be allocated
                // note: the other side of the equation is scheduling a reroute in a timely manner, which happens in the RoutingService
                IndexMetaData indexMetaData = allocation.metaData().index(shard.getIndex());
                long delay = shard.unassignedInfo().getDelayAllocationExpirationIn(allocateUnassignedTimestapm, settings, indexMetaData.getSettings());
                if (delay > 0) {
                    logger.debug("[{}][{}]: delaying allocation of [{}] for [{}]", shard.index(), shard.id(), shard, TimeValue.timeValueMillis(delay));
                    /**
                     * mark it as changed, since we want to kick a publishing to schedule future allocation,
                     * see {@link org.elasticsearch.cluster.routing.RoutingService#clusterChanged(ClusterChangedEvent)}).
                     */
                    changed = true;
                    unassignedIterator.removeAndIgnore();
                }
                // if we didn't manage to find *any* data (regardless of matching sizes), check if the allocation of the replica shard needs to be delayed
                changed |= ignoreUnassignedIfDelayed(nanoTimeNow, allocation, unassignedIterator, shard);
            }
        }
        return changed;
    }

    /**
     * Check if the allocation of the replica is to be delayed. Compute the delay and if it is delayed, add it to the ignore unassigned list
     * Note: we only care about replica in delayed allocation, since if we have an unassigned primary it
     * will anyhow wait to find an existing copy of the shard to be allocated
     * Note: the other side of the equation is scheduling a reroute in a timely manner, which happens in the RoutingService
     *
     * PUBLIC FOR TESTS!
     *
     * @param timeNowNanos Timestamp in nanoseconds representing "now"
     * @param allocation the routing allocation
     * @param unassignedIterator iterator over unassigned shards
     * @param shard the shard which might be delayed
     * @return true iff allocation is delayed for this shard
     */
    public boolean ignoreUnassignedIfDelayed(long timeNowNanos, RoutingAllocation allocation, RoutingNodes.UnassignedShards.UnassignedIterator unassignedIterator, ShardRouting shard) {
        IndexMetaData indexMetaData = allocation.metaData().index(shard.getIndex());
        // calculate delay and store it in UnassignedInfo to be used by RoutingService
        long delay = shard.unassignedInfo().updateDelay(timeNowNanos, settings, indexMetaData.getSettings());
        if (delay > 0) {
            logger.debug("[{}][{}]: delaying allocation of [{}] for [{}]", shard.index(), shard.id(), shard, TimeValue.timeValueNanos(delay));
            /**
             * mark it as changed, since we want to kick a publishing to schedule future allocation,
             * see {@link org.elasticsearch.cluster.routing.RoutingService#clusterChanged(ClusterChangedEvent)}).
             */
            unassignedIterator.removeAndIgnore();
            return true;
        }
        return false;
    }

    /**
     * Can the shard be allocated on at least one node based on the allocation deciders.
     */

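The replacement of wall-clock timestamps with System.nanoTime() above makes the remaining-delay computation immune to system clock adjustments. A sketch of the arithmetic only, with hypothetical names (the real computation lives in UnassignedInfo.updateDelay):

    import java.util.concurrent.TimeUnit;

    class DelayedAllocationSketch {
        // Remaining delay = configured timeout minus time already spent unassigned,
        // clamped at zero; a result of 0 means "allocate now".
        static long remainingDelayNanos(long nowNanos, long unassignedTimeNanos, long timeoutNanos) {
            long elapsed = nowNanos - unassignedTimeNanos;
            return Math.max(0L, timeoutNanos - elapsed);
        }

        public static void main(String[] args) {
            long unassignedAt = System.nanoTime();
            long timeout = TimeUnit.MINUTES.toNanos(1); // e.g. a 1m delayed_timeout setting
            long remaining = remainingDelayNanos(unassignedAt + TimeUnit.SECONDS.toNanos(10), unassignedAt, timeout);
            System.out.println(TimeUnit.NANOSECONDS.toSeconds(remaining) + "s left"); // prints: 50s left
        }
    }
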
@@ -28,9 +28,10 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Numbers;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -46,10 +47,13 @@ import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.LongFieldMapper.CustomLongNumericField;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.query.QueryShardContext;

import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static org.elasticsearch.index.mapper.MapperBuilders.ipField;

@@ -61,6 +65,7 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField;
public class IpFieldMapper extends NumberFieldMapper {

    public static final String CONTENT_TYPE = "ip";
    public static final long MAX_IP = 4294967296l;

    public static String longToIp(long longIp) {
        int octet3 = (int) ((longIp >> 24) % 256);

@@ -71,6 +76,7 @@ public class IpFieldMapper extends NumberFieldMapper {
    }

    private static final Pattern pattern = Pattern.compile("\\.");
    private static final Pattern MASK_PATTERN = Pattern.compile("(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})/(\\d{1,3})");

    public static long ipToLong(String ip) {
        try {

@@ -91,6 +97,64 @@ public class IpFieldMapper extends NumberFieldMapper {
        }
    }

    /**
     * Computes the min & max ip addresses (represented as long values -
     * same way as stored in index) represented by the given CIDR mask
     * expression. The returned array has the length of 2, where the first entry
     * represents the {@code min} address and the second the {@code max}. A
     * {@code -1} value for either the {@code min} or the {@code max},
     * represents an unbounded end. In other words:
     *
     * <p>
     * {@code min == -1 == "0.0.0.0" }
     * </p>
     *
     * and
     *
     * <p>
     * {@code max == -1 == "255.255.255.255" }
     * </p>
     */
    public static long[] cidrMaskToMinMax(String cidr) {
        Matcher matcher = MASK_PATTERN.matcher(cidr);
        if (!matcher.matches()) {
            return null;
        }
        int addr = ((Integer.parseInt(matcher.group(1)) << 24) & 0xFF000000) | ((Integer.parseInt(matcher.group(2)) << 16) & 0xFF0000)
                | ((Integer.parseInt(matcher.group(3)) << 8) & 0xFF00) | (Integer.parseInt(matcher.group(4)) & 0xFF);

        int mask = (-1) << (32 - Integer.parseInt(matcher.group(5)));

        if (Integer.parseInt(matcher.group(5)) == 0) {
            mask = 0 << 32;
        }

        int from = addr & mask;
        long longFrom = intIpToLongIp(from);
        if (longFrom == 0) {
            longFrom = -1;
        }

        int to = from + (~mask);
        long longTo = intIpToLongIp(to) + 1; // we have to +1 here as the range
                                             // is non-inclusive on the "to"
                                             // side

        if (longTo == MAX_IP) {
            longTo = -1;
        }

        return new long[] { longFrom, longTo };
    }

    private static long intIpToLongIp(int i) {
        long p1 = ((long) ((i >> 24) & 0xFF)) << 24;
        int p2 = ((i >> 16) & 0xFF) << 16;
        int p3 = ((i >> 8) & 0xFF) << 8;
        int p4 = i & 0xFF;
        return p1 + p2 + p3 + p4;
    }

    public static class Defaults extends NumberFieldMapper.Defaults {
        public static final String NULL_VALUE = null;

@@ -205,6 +269,23 @@ public class IpFieldMapper extends NumberFieldMapper {
        return bytesRef.get();
    }

    @Override
    public Query termQuery(Object value, @Nullable QueryShardContext context) {
        if (value != null) {
            long[] fromTo;
            if (value instanceof BytesRef) {
                fromTo = cidrMaskToMinMax(((BytesRef) value).utf8ToString());
            } else {
                fromTo = cidrMaskToMinMax(value.toString());
            }
            if (fromTo != null) {
                return rangeQuery(fromTo[0] < 0 ? null : fromTo[0],
                        fromTo[1] < 0 ? null : fromTo[1], true, false);
            }
        }
        return super.termQuery(value, context);
    }

    @Override
    public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
        return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(),

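To make the CIDR arithmetic above concrete: for 10.0.0.0/8 the mask is 0xFF000000, the lower bound is 10.0.0.0 and the exclusive upper bound is 11.0.0.0. A self-contained sketch of the same computation, leaving out the -1 sentinel handling for unbounded ends (the class and method names here are hypothetical):

    class CidrSketch {
        // Convert a signed 32-bit IPv4 int to its unsigned long value.
        static long unsignedIp(int i) {
            return i & 0xFFFFFFFFL;
        }

        // [min, max) of a CIDR block, in the long form in which IPs are indexed.
        static long[] cidrMinMax(int addr, int prefixLen) {
            int mask = prefixLen == 0 ? 0 : (-1) << (32 - prefixLen);
            int from = addr & mask;
            int to = from + (~mask);
            return new long[] { unsignedIp(from), unsignedIp(to) + 1 }; // +1: exclusive upper bound
        }

        public static void main(String[] args) {
            int addr = 10 << 24; // 10.0.0.0
            long[] range = cidrMinMax(addr, 8);
            System.out.println(range[0] + " .. " + range[1]); // 167772160 .. 184549376, i.e. 10.0.0.0 .. 11.0.0.0
        }
    }
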
@@ -29,14 +29,12 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy;

import java.io.IOException;

@@ -129,11 +129,11 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
    };


    /**
     * Creates a new Translog instance. This method will create a new transaction log unless the given {@link TranslogConfig} has
     * a non-null {@link org.elasticsearch.index.translog.Translog.TranslogGeneration}. If the generation is null this method
     * us destructive and will delete all files in the translog path given.
     *
     * @see TranslogConfig#getTranslogPath()
     */
    public Translog(TranslogConfig config) throws IOException {

@@ -141,7 +141,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
        this.config = config;
        TranslogGeneration translogGeneration = config.getTranslogGeneration();

        if (translogGeneration == null || translogGeneration.translogUUID == null) { // legacy case
        if (translogGeneration == null || translogGeneration.translogUUID == null) { // legacy case
            translogUUID = Strings.randomBase64UUID();
        } else {
            translogUUID = translogGeneration.translogUUID;

@@ -190,6 +190,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
        boolean success = false;
        ArrayList<ImmutableTranslogReader> foundTranslogs = new ArrayList<>();
        final Path tempFile = Files.createTempFile(location, TRANSLOG_FILE_PREFIX, TRANSLOG_FILE_SUFFIX); // a temp file to copy checkpoint to - note it must be in on the same FS otherwise atomic move won't work
        boolean tempFileRenamed = false;
        try (ReleasableLock lock = writeLock.acquire()) {
            logger.debug("open uncommitted translog checkpoint {}", checkpoint);
            final String checkpointTranslogFile = getFilename(checkpoint.generation);

@@ -215,6 +216,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
                Files.copy(location.resolve(CHECKPOINT_FILE_NAME), tempFile, StandardCopyOption.REPLACE_EXISTING);
                IOUtils.fsync(tempFile, false);
                Files.move(tempFile, commitCheckpoint, StandardCopyOption.ATOMIC_MOVE);
                tempFileRenamed = true;
                // we only fsync the directory the tempFile was already fsynced
                IOUtils.fsync(commitCheckpoint.getParent(), true);
            }

@@ -223,10 +225,12 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
            if (success == false) {
                IOUtils.closeWhileHandlingException(foundTranslogs);
            }
            try {
                Files.delete(tempFile);
            } catch (IOException ex) {
                logger.warn("failed to delete temp file {}", ex, tempFile);
            if (tempFileRenamed == false) {
                try {
                    Files.delete(tempFile);
                } catch (IOException ex) {
                    logger.warn("failed to delete temp file {}", ex, tempFile);
                }
            }
        }
        return foundTranslogs;

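The hunk above follows the usual copy, fsync, atomic-rename recipe for durably publishing a checkpoint file, and the new tempFileRenamed flag ensures the temp file is deleted only when the rename never happened. A stripped-down sketch of the pattern using plain java.nio (the real code also fsyncs the parent directory via Lucene's IOUtils, which is omitted here; names are hypothetical):

    import java.io.IOException;
    import java.nio.channels.FileChannel;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.StandardCopyOption;
    import java.nio.file.StandardOpenOption;

    class AtomicPublishSketch {
        static void publish(Path source, Path target) throws IOException {
            // temp file must live on the same filesystem, or ATOMIC_MOVE fails
            Path tempFile = Files.createTempFile(target.getParent(), "publish-", ".tmp");
            boolean tempFileRenamed = false;
            try {
                Files.copy(source, tempFile, StandardCopyOption.REPLACE_EXISTING);
                // fsync the temp file so its bytes are on disk before the rename
                try (FileChannel ch = FileChannel.open(tempFile, StandardOpenOption.WRITE)) {
                    ch.force(true);
                }
                Files.move(tempFile, target, StandardCopyOption.ATOMIC_MOVE);
                tempFileRenamed = true;
            } finally {
                if (tempFileRenamed == false) { // after a successful rename there is nothing to delete
                    Files.deleteIfExists(tempFile);
                }
            }
        }
    }
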
@@ -347,7 +351,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
    }



    TranslogWriter createWriter(long fileGeneration) throws IOException {
        TranslogWriter newFile;
        try {

@@ -508,6 +511,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC

    /**
     * Ensures that the given location has be synced / written to the underlying storage.
     *
     * @return Returns <code>true</code> iff this call caused an actual sync operation otherwise <code>false</code>
     */
    public boolean ensureSynced(Location location) throws IOException {

@@ -749,13 +753,21 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }

            Location location = (Location) o;

            if (generation != location.generation) return false;
            if (translogLocation != location.translogLocation) return false;
            if (generation != location.generation) {
                return false;
            }
            if (translogLocation != location.translogLocation) {
                return false;
            }
            return size == location.size;

        }

@@ -1089,7 +1101,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
        }

        @Override
        public Source getSource(){
        public Source getSource() {
            throw new IllegalStateException("trying to read doc source from delete operation");
        }

@@ -1198,7 +1210,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
        // to prevent this unfortunately.
        in.mark(opSize);

        in.skip(opSize-4);
        in.skip(opSize - 4);
        verifyChecksum(in);
        in.reset();
    }

@@ -1250,7 +1262,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
        out.writeByte(op.opType().id());
        op.writeTo(out);
        long checksum = out.getChecksum();
        out.writeInt((int)checksum);
        out.writeInt((int) checksum);
    }

    /**

@@ -246,7 +246,7 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
                } catch (FlushNotAllowedEngineException e) {
                    // ignore
                } catch (Exception e) {
                    logger.warn("failed to set shard {} index buffer to [{}]", shardId, shardIndexingBufferSize);
                    logger.warn("failed to set shard {} index buffer to [{}]", e, shardId, shardIndexingBufferSize);
                }
            }
        }

@@ -234,8 +234,8 @@ public class RestShardsAction extends AbstractCatAction {

            if (shard.unassignedInfo() != null) {
                table.addCell(shard.unassignedInfo().getReason());
                table.addCell(UnassignedInfo.DATE_TIME_FORMATTER.printer().print(shard.unassignedInfo().getTimestampInMillis()));
                table.addCell(TimeValue.timeValueMillis(System.currentTimeMillis() - shard.unassignedInfo().getTimestampInMillis()));
                table.addCell(UnassignedInfo.DATE_TIME_FORMATTER.printer().print(shard.unassignedInfo().getUnassignedTimeInMillis()));
                table.addCell(TimeValue.timeValueMillis(System.currentTimeMillis() - shard.unassignedInfo().getUnassignedTimeInMillis()));
                table.addCell(shard.unassignedInfo().getDetails());
            } else {
                table.addCell(null);

@@ -22,15 +22,13 @@ package org.elasticsearch.search.aggregations.bucket.range.ipv4;
import org.elasticsearch.search.aggregations.bucket.range.AbstractRangeBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilderException;

import java.util.regex.Pattern;
import static org.elasticsearch.index.mapper.ip.IpFieldMapper.cidrMaskToMinMax;

/**
 * Builder for the {@code IPv4Range} aggregation.
 */
public class IPv4RangeBuilder extends AbstractRangeBuilder<IPv4RangeBuilder> {

    private static final Pattern MASK_PATTERN = Pattern.compile("[\\.|/]");

    /**
     * Sole constructor.
     */

@@ -109,58 +107,4 @@ public class IPv4RangeBuilder extends AbstractRangeBuilder<IPv4RangeBuilder> {
        return addUnboundedFrom(null, from);
    }

    /**
     * Computes the min & max ip addresses (represented as long values - same way as stored in index) represented by the given CIDR mask
     * expression. The returned array has the length of 2, where the first entry represents the {@code min} address and the second the {@code max}.
     * A {@code -1} value for either the {@code min} or the {@code max}, represents an unbounded end. In other words:
     *
     * <p>
     * {@code min == -1 == "0.0.0.0" }
     * </p>
     *
     * and
     *
     * <p>
     * {@code max == -1 == "255.255.255.255" }
     * </p>
     */
    static long[] cidrMaskToMinMax(String cidr) {
        String[] parts = MASK_PATTERN.split(cidr);
        if (parts.length != 5) {
            return null;
        }
        int addr = (( Integer.parseInt(parts[0]) << 24 ) & 0xFF000000)
                 | (( Integer.parseInt(parts[1]) << 16 ) & 0xFF0000)
                 | (( Integer.parseInt(parts[2]) << 8 ) & 0xFF00)
                 | ( Integer.parseInt(parts[3]) & 0xFF);

        int mask = (-1) << (32 - Integer.parseInt(parts[4]));

        if (Integer.parseInt(parts[4]) == 0) {
            mask = 0 << 32;
        }

        int from = addr & mask;
        long longFrom = intIpToLongIp(from);
        if (longFrom == 0) {
            longFrom = -1;
        }

        int to = from + (~mask);
        long longTo = intIpToLongIp(to) + 1; // we have to +1 here as the range is non-inclusive on the "to" side

        if (longTo == InternalIPv4Range.MAX_IP) {
            longTo = -1;
        }

        return new long[] { longFrom, longTo };
    }

    private static long intIpToLongIp(int i) {
        long p1 = ((long) ((i >> 24 ) & 0xFF)) << 24;
        int p2 = ((i >> 16 ) & 0xFF) << 16;
        int p3 = ((i >> 8 ) & 0xFF) << 8;
        int p4 = i & 0xFF;
        return p1 + p2 + p3 + p4;
    }
}

@@ -32,13 +32,13 @@ import java.io.IOException;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.index.mapper.ip.IpFieldMapper.MAX_IP;

/**
 *
 */
public class InternalIPv4Range extends InternalRange<InternalIPv4Range.Bucket, InternalIPv4Range> {

    public static final long MAX_IP = 4294967296l;

    public final static Type TYPE = new Type("ip_range", "iprange");

    private final static AggregationStreams.Stream STREAM = new AggregationStreams.Stream() {

@@ -19,6 +19,7 @@
package org.elasticsearch.search.aggregations.bucket.range.ipv4;

import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.ip.IpFieldMapper;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;

@@ -124,7 +125,7 @@ public class IpRangeParser implements Aggregator.Parser {
    }

    private static void parseMaskRange(String cidr, RangeAggregator.Range range, String aggregationName, SearchContext ctx) {
        long[] fromTo = IPv4RangeBuilder.cidrMaskToMinMax(cidr);
        long[] fromTo = IpFieldMapper.cidrMaskToMinMax(cidr);
        if (fromTo == null) {
            throw new SearchParseException(ctx, "invalid CIDR mask [" + cidr + "] in aggregation [" + aggregationName + "]",
                    null);

@@ -19,6 +19,7 @@

package org.elasticsearch.cluster.routing;

import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;

@@ -36,6 +37,7 @@ import static org.hamcrest.Matchers.equalTo;
/**
 */
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0)
@LuceneTestCase.AwaitsFix(bugUrl = "http://build-us-00.elastic.co/job/es_core_master_windows-2012-r2/2074/testReport/ (boaz on it)")
public class DelayedAllocationIT extends ESIntegTestCase {

    /**

@@ -23,28 +23,18 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.EmptyClusterInfoService;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.routing.allocation.StartedRerouteAllocation;
import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.gateway.GatewayAllocator;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.test.ESAllocationTestCase;
import org.elasticsearch.test.cluster.TestClusterService;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.threadpool.ThreadPool;
import org.junit.After;
import org.junit.Before;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;

@@ -77,7 +67,7 @@ public class RoutingServiceTests extends ESAllocationTestCase {
    }

    public void testNoDelayedUnassigned() throws Exception {
        AllocationService allocation = createAllocationService();
        AllocationService allocation = createAllocationService(Settings.EMPTY, new DelayedShardsMockGatewayAllocator());
        MetaData metaData = MetaData.builder()
                .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "0"))
                        .numberOfShards(1).numberOfReplicas(1))

@@ -98,15 +88,15 @@ public class RoutingServiceTests extends ESAllocationTestCase {
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build();
        ClusterState newState = clusterState;

        assertThat(routingService.getRegisteredNextDelaySetting(), equalTo(Long.MAX_VALUE));
        assertThat(routingService.getMinDelaySettingAtLastScheduling(), equalTo(Long.MAX_VALUE));
        routingService.clusterChanged(new ClusterChangedEvent("test", newState, prevState));
        assertThat(routingService.getRegisteredNextDelaySetting(), equalTo(Long.MAX_VALUE));
        assertThat(routingService.getMinDelaySettingAtLastScheduling(), equalTo(Long.MAX_VALUE));
        assertThat(routingService.hasReroutedAndClear(), equalTo(false));
    }

    @TestLogging("_root:DEBUG")
    public void testDelayedUnassignedScheduleReroute() throws Exception {
        AllocationService allocation = createAllocationService();
        DelayedShardsMockGatewayAllocator mockGatewayAllocator = new DelayedShardsMockGatewayAllocator();
        AllocationService allocation = createAllocationService(Settings.EMPTY, mockGatewayAllocator);
        MetaData metaData = MetaData.builder()
                .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "100ms"))
                        .numberOfShards(1).numberOfReplicas(1))

@@ -131,24 +121,20 @@ public class RoutingServiceTests extends ESAllocationTestCase {
            }
        }
        assertNotNull(nodeId);
        // remove node2 and reroute

        // remove nodeId and reroute
        ClusterState prevState = clusterState;
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).remove(nodeId)).build();
        // make sure the replica is marked as delayed (i.e. not reallocated)
        mockGatewayAllocator.setTimeSource(shard -> shard.unassignedInfo().getUnassignedTimeInNanos() + TimeValue.timeValueMillis(randomIntBetween(0, 99)).nanos());
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build();
        // We need to update the routing service's last attempted run to
        // signal that the GatewayAllocator tried to allocated it but
        // it was delayed
        RoutingNodes.UnassignedShards unassigned = clusterState.getRoutingNodes().unassigned();
        assertEquals(1, unassigned.size());
        ShardRouting next = unassigned.iterator().next();
        routingService.setUnassignedShardsAllocatedTimestamp(next.unassignedInfo().getTimestampInMillis() + randomIntBetween(0, 99));
        assertEquals(1, clusterState.getRoutingNodes().unassigned().size());

        ClusterState newState = clusterState;
        routingService.clusterChanged(new ClusterChangedEvent("test", newState, prevState));
        assertBusy(() -> assertTrue("routing service should have run a reroute", routingService.hasReroutedAndClear()));
        // verify the registration has been reset
        assertThat(routingService.getRegisteredNextDelaySetting(), equalTo(Long.MAX_VALUE));
        assertThat(routingService.getMinDelaySettingAtLastScheduling(), equalTo(Long.MAX_VALUE));
    }

    /**

@@ -159,10 +145,7 @@ public class RoutingServiceTests extends ESAllocationTestCase {

        try {
            DelayedShardsMockGatewayAllocator mockGatewayAllocator = new DelayedShardsMockGatewayAllocator();
            AllocationService allocation = new AllocationService(Settings.Builder.EMPTY_SETTINGS,
                    randomAllocationDeciders(Settings.Builder.EMPTY_SETTINGS, new NodeSettingsService(Settings.Builder.EMPTY_SETTINGS), getRandom()),
                    new ShardsAllocators(Settings.Builder.EMPTY_SETTINGS, mockGatewayAllocator), EmptyClusterInfoService.INSTANCE);

            AllocationService allocation = createAllocationService(Settings.EMPTY, mockGatewayAllocator);
            MetaData metaData = MetaData.builder()
                    .put(IndexMetaData.builder("short_delay").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "100ms"))
                            .numberOfShards(1).numberOfReplicas(1))

@@ -206,7 +189,7 @@ public class RoutingServiceTests extends ESAllocationTestCase {
            ClusterState prevState = clusterState;
            clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).remove(shortDelayReplica.currentNodeId()).remove(longDelayReplica.currentNodeId())).build();
            // make sure both replicas are marked as delayed (i.e. not reallocated)
            mockGatewayAllocator.setShardsToDelay(Arrays.asList(shortDelayReplica, longDelayReplica));
            mockGatewayAllocator.setTimeSource(shard -> shard.unassignedInfo().getUnassignedTimeInNanos() + 1);
            clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build();

            // check that shortDelayReplica and longDelayReplica have been marked unassigned

@@ -232,10 +215,8 @@ public class RoutingServiceTests extends ESAllocationTestCase {
            // create routing service, also registers listener on cluster service
            RoutingService routingService = new RoutingService(Settings.EMPTY, testThreadPool, clusterService, allocation);
            routingService.start(); // just so performReroute does not prematurely return
            // ensure routing service has proper timestamp before triggering
            routingService.setUnassignedShardsAllocatedTimestamp(shortDelayUnassignedReplica.unassignedInfo().getTimestampInMillis() + randomIntBetween(0, 50));
            // next (delayed) reroute should only delay longDelayReplica/longDelayUnassignedReplica
            mockGatewayAllocator.setShardsToDelay(Arrays.asList(longDelayUnassignedReplica));
            // next (delayed) reroute should only delay longDelayReplica/longDelayUnassignedReplica, simulate that we are now 1 second after shards became unassigned
            mockGatewayAllocator.setTimeSource(shard -> shard.unassignedInfo().getUnassignedTimeInNanos() + TimeValue.timeValueSeconds(1).nanos());
            // register listener on cluster state so we know when cluster state has been changed
            CountDownLatch latch = new CountDownLatch(1);
            clusterService.addLast(event -> latch.countDown());

@@ -244,14 +225,15 @@ public class RoutingServiceTests extends ESAllocationTestCase {
            // cluster service should have updated state and called routingService with clusterChanged
            latch.await();
            // verify the registration has been set to the delay of longDelayReplica/longDelayUnassignedReplica
            assertThat(routingService.getRegisteredNextDelaySetting(), equalTo(10000L));
            assertThat(routingService.getMinDelaySettingAtLastScheduling(), equalTo(TimeValue.timeValueSeconds(10).millis()));
        } finally {
            terminate(testThreadPool);
        }
    }

    public void testDelayedUnassignedDoesNotRerouteForNegativeDelays() throws Exception {
        AllocationService allocation = createAllocationService();
        DelayedShardsMockGatewayAllocator mockGatewayAllocator = new DelayedShardsMockGatewayAllocator();
        AllocationService allocation = createAllocationService(Settings.EMPTY, mockGatewayAllocator);
        MetaData metaData = MetaData.builder()
                .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "100ms"))
                        .numberOfShards(1).numberOfReplicas(1))

@@ -271,7 +253,7 @@ public class RoutingServiceTests extends ESAllocationTestCase {
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).remove("node2")).build();
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build();
        // Set it in the future so the delay will be negative
        routingService.setUnassignedShardsAllocatedTimestamp(System.currentTimeMillis() + TimeValue.timeValueMinutes(1).millis());
        mockGatewayAllocator.setTimeSource(shard -> shard.unassignedInfo().getUnassignedTimeInNanos() + TimeValue.timeValueMinutes(1).nanos());

        ClusterState newState = clusterState;

@@ -282,7 +264,7 @@ public class RoutingServiceTests extends ESAllocationTestCase {
                assertThat(routingService.hasReroutedAndClear(), equalTo(false));

                // verify the registration has been updated
                assertThat(routingService.getRegisteredNextDelaySetting(), equalTo(100L));
                assertThat(routingService.getMinDelaySettingAtLastScheduling(), equalTo(100L));
            }
        });
    }

@@ -309,46 +291,4 @@ public class RoutingServiceTests extends ESAllocationTestCase {
            rerouted.set(true);
        }
    }

    /**
     * Mocks behavior in ReplicaShardAllocator to remove delayed shards from list of unassigned shards so they don't get reassigned yet.
     * It does not implement the full logic but shards that are to be delayed need to be explicitly set using the method setShardsToDelay(...).
     */
    private static class DelayedShardsMockGatewayAllocator extends GatewayAllocator {
        volatile List<ShardRouting> delayedShards = Collections.emptyList();

        public DelayedShardsMockGatewayAllocator() {
            super(Settings.EMPTY, null, null);
        }

        @Override
        public void applyStartedShards(StartedRerouteAllocation allocation) {}

        @Override
        public void applyFailedShards(FailedRerouteAllocation allocation) {}

        /**
         * Explicitly set which shards should be delayed in the next allocateUnassigned calls
         */
        public void setShardsToDelay(List<ShardRouting> delayedShards) {
            this.delayedShards = delayedShards;
        }

        @Override
        public boolean allocateUnassigned(RoutingAllocation allocation) {
            final RoutingNodes routingNodes = allocation.routingNodes();
            final RoutingNodes.UnassignedShards.UnassignedIterator unassignedIterator = routingNodes.unassigned().iterator();
            boolean changed = false;
            while (unassignedIterator.hasNext()) {
                ShardRouting shard = unassignedIterator.next();
                for (ShardRouting shardToDelay : delayedShards) {
                    if (shard.isSameShard(shardToDelay)) {
                        changed = true;
                        unassignedIterator.removeAndIgnore();
                    }
                }
            }
            return changed;
        }
    }
}

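The setTimeSource(...) calls above replace the removed setShardsToDelay(...) mechanism: instead of listing shards to keep delayed, the mock injects a deterministic clock, so tests control elapsed time without sleeping. A sketch of the underlying idea (hypothetical class; the real mock passes the shard itself to the time source so each shard can get its own "now"):

    import java.util.function.LongSupplier;

    class FakeClockSketch {
        private LongSupplier timeSource = System::nanoTime; // production default

        // Tests install a fixed or computed time instead of waiting for real delays.
        void setTimeSource(LongSupplier timeSource) {
            this.timeSource = timeSource;
        }

        long remainingDelayNanos(long unassignedTimeNanos, long timeoutNanos) {
            long elapsed = timeSource.getAsLong() - unassignedTimeNanos;
            return Math.max(0L, timeoutNanos - elapsed);
        }
    }
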
@@ -81,7 +81,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase {

        UnassignedInfo read = new UnassignedInfo(StreamInput.wrap(out.bytes()));
        assertThat(read.getReason(), equalTo(meta.getReason()));
        assertThat(read.getTimestampInMillis(), equalTo(meta.getTimestampInMillis()));
        assertThat(read.getUnassignedTimeInMillis(), equalTo(meta.getUnassignedTimeInMillis()));
        assertThat(read.getMessage(), equalTo(meta.getMessage()));
        assertThat(read.getDetails(), equalTo(meta.getDetails()));
    }

@@ -222,7 +222,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase {
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).size(), equalTo(1));
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo(), notNullValue());
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.NODE_LEFT));
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getTimestampInMillis(), greaterThan(0l));
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getUnassignedTimeInMillis(), greaterThan(0l));
    }

    /**

@@ -253,7 +253,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase {
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getReason(), equalTo(UnassignedInfo.Reason.ALLOCATION_FAILED));
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getMessage(), equalTo("test fail"));
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getDetails(), equalTo("test fail"));
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getTimestampInMillis(), greaterThan(0l));
        assertThat(clusterState.getRoutingNodes().shardsWithState(UNASSIGNED).get(0).unassignedInfo().getUnassignedTimeInMillis(), greaterThan(0l));
    }

    /**

@@ -261,17 +261,11 @@ public class UnassignedInfoTests extends ESAllocationTestCase {
     */
    public void testUnassignedDelayedOnlyOnNodeLeft() throws Exception {
        final UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.NODE_LEFT, null);
        long delay = unassignedInfo.getAllocationDelayTimeoutSetting(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), Settings.EMPTY);
        assertThat(delay, equalTo(TimeValue.timeValueHours(10).millis()));
        assertBusy(new Runnable() {
            @Override
            public void run() {
                long delay = unassignedInfo.getDelayAllocationExpirationIn(System.currentTimeMillis(),
                        Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), Settings.EMPTY);
                assertThat(delay, greaterThan(0l));
                assertThat(delay, lessThan(TimeValue.timeValueHours(10).millis()));
            }
        });
        long delay = unassignedInfo.updateDelay(unassignedInfo.getUnassignedTimeInNanos() + 1, // add 1 tick delay
                Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), Settings.EMPTY);
        long cachedDelay = unassignedInfo.getLastComputedLeftDelayNanos();
        assertThat(delay, equalTo(cachedDelay));
        assertThat(delay, equalTo(TimeValue.timeValueHours(10).nanos() - 1));
    }

    /**

@@ -281,15 +275,16 @@ public class UnassignedInfoTests extends ESAllocationTestCase {
        EnumSet<UnassignedInfo.Reason> reasons = EnumSet.allOf(UnassignedInfo.Reason.class);
        reasons.remove(UnassignedInfo.Reason.NODE_LEFT);
        UnassignedInfo unassignedInfo = new UnassignedInfo(RandomPicks.randomFrom(getRandom(), reasons), null);
        long delay = unassignedInfo.getAllocationDelayTimeoutSetting(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), Settings.EMPTY);
        assertThat(delay, equalTo(0l));
        delay = unassignedInfo.getDelayAllocationExpirationIn(System.currentTimeMillis(),
        long delay = unassignedInfo.updateDelay(unassignedInfo.getUnassignedTimeInNanos() + 1, // add 1 tick delay
                Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), Settings.EMPTY);
        assertThat(delay, equalTo(0l));
        delay = unassignedInfo.getLastComputedLeftDelayNanos();
        assertThat(delay, equalTo(0l));
    }

    public void testNumberOfDelayedUnassigned() throws Exception {
        AllocationService allocation = createAllocationService();
        DelayedShardsMockGatewayAllocator mockGatewayAllocator = new DelayedShardsMockGatewayAllocator();
        AllocationService allocation = createAllocationService(Settings.EMPTY, mockGatewayAllocator);
        MetaData metaData = MetaData.builder()
                .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
                .put(IndexMetaData.builder("test2").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))

@@ -299,8 +294,7 @@ public class UnassignedInfoTests extends ESAllocationTestCase {
                .routingTable(RoutingTable.builder().addAsNew(metaData.index("test1")).addAsNew(metaData.index("test2")).build()).build();
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2"))).build();
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build();
        assertThat(UnassignedInfo.getNumberOfDelayedUnassigned(System.currentTimeMillis(),
                Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), clusterState), equalTo(0));
        assertThat(UnassignedInfo.getNumberOfDelayedUnassigned(clusterState), equalTo(0));
        // starting primaries
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING))).build();
        // starting replicas

@@ -308,24 +302,25 @@ public class UnassignedInfoTests extends ESAllocationTestCase {
        assertThat(clusterState.getRoutingNodes().unassigned().size() > 0, equalTo(false));
        // remove node2 and reroute
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).remove("node2")).build();
        // make sure both replicas are marked as delayed (i.e. not reallocated)
        mockGatewayAllocator.setTimeSource(shard -> shard.unassignedInfo().getUnassignedTimeInNanos() + 1);
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build();
        assertThat(clusterState.prettyPrint(), UnassignedInfo.getNumberOfDelayedUnassigned(System.currentTimeMillis(),
                Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), clusterState), equalTo(2));
        assertThat(clusterState.prettyPrint(), UnassignedInfo.getNumberOfDelayedUnassigned(clusterState), equalTo(2));
    }

    public void testFindNextDelayedAllocation() {
        AllocationService allocation = createAllocationService();
        DelayedShardsMockGatewayAllocator mockGatewayAllocator = new DelayedShardsMockGatewayAllocator();
        AllocationService allocation = createAllocationService(Settings.EMPTY, mockGatewayAllocator);
        MetaData metaData = MetaData.builder()
                .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
                .put(IndexMetaData.builder("test2").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1))
                .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h")).numberOfShards(1).numberOfReplicas(1))
                .put(IndexMetaData.builder("test2").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h")).numberOfShards(1).numberOfReplicas(1))
                .build();
        ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT)
                .metaData(metaData)
                .routingTable(RoutingTable.builder().addAsNew(metaData.index("test1")).addAsNew(metaData.index("test2")).build()).build();
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2"))).build();
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build();
        assertThat(UnassignedInfo.getNumberOfDelayedUnassigned(System.currentTimeMillis(),
                Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), clusterState), equalTo(0));
        assertThat(UnassignedInfo.getNumberOfDelayedUnassigned(clusterState), equalTo(0));
        // starting primaries
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING))).build();
        // starting replicas

@@ -333,14 +328,15 @@ public class UnassignedInfoTests extends ESAllocationTestCase {
        assertThat(clusterState.getRoutingNodes().unassigned().size() > 0, equalTo(false));
        // remove node2 and reroute
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).remove("node2")).build();
        // make sure both replicas are marked as delayed (i.e. not reallocated)
        mockGatewayAllocator.setTimeSource(shard -> shard.unassignedInfo().getUnassignedTimeInNanos() + 1);
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build();

        long nextDelaySetting = UnassignedInfo.findSmallestDelayedAllocationSetting(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), clusterState);
        assertThat(nextDelaySetting, equalTo(TimeValue.timeValueHours(10).millis()));

        long nextDelay = UnassignedInfo.findNextDelayedAllocationIn(System.currentTimeMillis(),
                Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), clusterState);
        assertThat(nextDelay, greaterThan(TimeValue.timeValueHours(9).millis()));
        assertThat(nextDelay, lessThanOrEqualTo(TimeValue.timeValueHours(10).millis()));
        long nextDelay = UnassignedInfo.findNextDelayedAllocationIn(clusterState);
        assertThat(nextDelay, greaterThan(TimeValue.timeValueHours(9).nanos()));
        assertThat(nextDelay, lessThanOrEqualTo(TimeValue.timeValueHours(10).nanos()));
    }
}

@ -29,6 +29,7 @@ import com.vividsolutions.jts.geom.Coordinate;
|
|||
import com.vividsolutions.jts.geom.LineString;
|
||||
import com.vividsolutions.jts.geom.Polygon;
|
||||
|
||||
import org.elasticsearch.common.geo.builders.LineStringBuilder;
|
||||
import org.elasticsearch.common.geo.builders.PolygonBuilder;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilders;
|
||||
|
@ -141,35 +142,34 @@ public class ShapeBuilderTests extends ESTestCase {
|
|||
|
||||
    public void testMultiLineString() {
        ShapeBuilders.newMultiLinestring()
            .linestring()
            .linestring(new LineStringBuilder()
                .point(-100.0, 50.0)
                .point(50.0, 50.0)
                .point(50.0, 20.0)
                .point(-100.0, 20.0)
                .end()
            .linestring()
                )
            .linestring(new LineStringBuilder()
                .point(-100.0, 20.0)
                .point(50.0, 20.0)
                .point(50.0, 0.0)
                .point(-100.0, 0.0)
                .end()
                )
            .build();

        // LineString that needs to be wrapped
        ShapeBuilders.newMultiLinestring()
            .linestring()
            .linestring(new LineStringBuilder()
                .point(150.0, 60.0)
                .point(200.0, 60.0)
                .point(200.0, 40.0)
                .point(150.0, 40.0)
                .end()
            .linestring()
                )
            .linestring(new LineStringBuilder()
                .point(150.0, 20.0)
                .point(200.0, 20.0)
                .point(200.0, 0.0)
                .point(150.0, 0.0)
                .end()
                )
            .build();
    }

@@ -251,7 +251,7 @@ public class ShapeBuilderTests extends ESTestCase {
                .point(174,0);

        // 3/4 of an embedded 'c', crossing dateline once
        builder.hole()
        builder.hole(new LineStringBuilder()
                .point(175, 1)
                .point(175, 7)
                .point(-178, 7)

@@ -260,15 +260,15 @@ public class ShapeBuilderTests extends ESTestCase {
                .point(176, 2)
                .point(179, 2)
                .point(179,1)
                .point(175, 1);
                .point(175, 1));

        // embedded hole right of the dateline
        builder.hole()
        builder.hole(new LineStringBuilder()
                .point(-179, 1)
                .point(-179, 2)
                .point(-177, 2)
                .point(-177,1)
                .point(-179,1);
                .point(-179,1));

        Shape shape = builder.close().build();
        assertMultiPolygon(shape);

@@ -292,7 +292,7 @@ public class ShapeBuilderTests extends ESTestCase {
                .point(-186,0);

        // 3/4 of an embedded 'c', crossing dateline once
        builder.hole()
        builder.hole(new LineStringBuilder()
                .point(-185,1)
                .point(-181,1)
                .point(-181,2)

@@ -301,15 +301,15 @@ public class ShapeBuilderTests extends ESTestCase {
                .point(-178,6)
                .point(-178,7)
                .point(-185,7)
                .point(-185,1);
                .point(-185,1));

        // embedded hole right of the dateline
        builder.hole()
        builder.hole(new LineStringBuilder()
                .point(-179,1)
                .point(-177,1)
                .point(-177,2)
                .point(-179,2)
                .point(-179,1);
                .point(-179,1));

        Shape shape = builder.close().build();
        assertMultiPolygon(shape);

@@ -356,7 +356,7 @@ public class ShapeBuilderTests extends ESTestCase {
                .point(-85.0016455,37.1310491)
                .point(-85.0018514,37.1311314);

        builder.hole()
        builder.hole(new LineStringBuilder()
                .point(-85.0000002,37.1317672)
                .point(-85.0001983,37.1317538)
                .point(-85.0003378,37.1317582)

@@ -382,7 +382,7 @@ public class ShapeBuilderTests extends ESTestCase {
                .point(-84.9993527,37.1317788)
                .point(-84.9994931,37.1318061)
                .point(-84.9996815,37.1317979)
                .point(-85.0000002,37.1317672);
                .point(-85.0000002,37.1317672));

        Shape shape = builder.close().build();
        assertPolygon(shape);

@@ -398,12 +398,12 @@ public class ShapeBuilderTests extends ESTestCase {
                .point(-6, 0)
                .point(-4, 2);

        builder.hole()
        builder.hole(new LineStringBuilder()
                .point(4, 1)
                .point(4, -1)
                .point(-4, -1)
                .point(-4, 1)
                .point(4, 1);
                .point(4, 1));

        Shape shape = builder.close().build();
        assertPolygon(shape);

@@ -451,12 +451,12 @@ public class ShapeBuilderTests extends ESTestCase {
                .point(176, -15)
                .point(-177, -10)
                .point(-177, 10);
        builder.hole()
        builder.hole(new LineStringBuilder()
                .point(176, 10)
                .point(180, 5)
                .point(180, -5)
                .point(176, -10)
                .point(176, 10);
                .point(176, 10));
        Shape shape = builder.close().build();
        assertMultiPolygon(shape);

@@ -467,12 +467,12 @@ public class ShapeBuilderTests extends ESTestCase {
                .point(179, -10)
                .point(-176, -15)
                .point(-172, 0);
        builder.hole()
        builder.hole(new LineStringBuilder()
                .point(-176, 10)
                .point(-176, -10)
                .point(-180, -5)
                .point(-180, 5)
                .point(-176, 10);
                .point(-176, 10));
        shape = builder.close().build();
        assertMultiPolygon(shape);
    }

@@ -486,12 +486,12 @@ public class ShapeBuilderTests extends ESTestCase {
                .point(166, -15)
                .point(179, -10)
                .point(179, 10);
        builder.hole()
        builder.hole(new LineStringBuilder()
                .point(-177, 10)
                .point(-178, -10)
                .point(-180, -5)
                .point(-180, 5)
                .point(-177, 10);
                .point(-177, 10));
        Shape shape = builder.close().build();
        assertMultiPolygon(shape);
    }

@@ -505,12 +505,12 @@ public class ShapeBuilderTests extends ESTestCase {
                .point(166, -15)
                .point(179, -10)
                .point(179, 10);
        builder.hole()
        builder.hole(new LineStringBuilder()
                .point(164, 0)
                .point(175, 10)
                .point(175, 5)
                .point(179, -10)
                .point(164, 0);
                .point(164, 0));
        try {
            builder.close().build();
            fail("Expected InvalidShapeException");

@@ -528,17 +528,17 @@ public class ShapeBuilderTests extends ESTestCase {
                .point(176, -15)
                .point(-177, -10)
                .point(-177, 10);
        builder.hole()
        builder.hole(new LineStringBuilder()
                .point(-177, 10)
                .point(-178, -10)
                .point(-180, -5)
                .point(-180, 5)
                .point(-177, 10);
        builder.hole()
                .point(-177, 10));
        builder.hole(new LineStringBuilder()
                .point(172, 0)
                .point(176, 10)
                .point(176, -5)
                .point(172, 0);
                .point(172, 0));
        Shape shape = builder.close().build();
        assertMultiPolygon(shape);
    }

@@ -552,12 +552,12 @@ public class ShapeBuilderTests extends ESTestCase {
                .point(176, -15)
                .point(-177, -10)
                .point(-177, 10);
        builder.hole()
        builder.hole(new LineStringBuilder()
                .point(-177, 10)
                .point(172, 0)
                .point(180, -5)
                .point(176, -10)
                .point(-177, 10);
                .point(-177, 10));
        try {
            builder.close().build();
            fail("Expected InvalidShapeException");
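Note: the recurring edit in the test file above is an API shift in the geo shape builders. Holes and linestrings are no longer opened with a bare hole()/linestring() call on the parent builder's fluent chain; instead each ring is built as a standalone LineStringBuilder and passed in as an argument. A minimal sketch of the before/after pattern (coordinates are illustrative; the builder names are the ones used throughout this diff):

    // before: the ring is opened and closed on the polygon's own chain
    //     builder.hole()
    //         .point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
    //         .close();
    // after: the ring is a self-contained LineStringBuilder argument
    builder.hole(new LineStringBuilder()
        .point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
        .close());
    Shape shape = builder.close().build();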
@@ -0,0 +1,110 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.AbstractSearchScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.NativeScriptFactory;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.test.ESIntegTestCase;

import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.index.query.QueryBuilders.scriptQuery;
import static org.hamcrest.Matchers.equalTo;

/**
 */
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE)
public class SearchTimeoutIT extends ESIntegTestCase {

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return Collections.singleton(ScriptedTimeoutPlugin.class);
    }

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)).build();
    }

    public void testSimpleTimeout() throws Exception {
        client().prepareIndex("test", "type", "1").setSource("field", "value").setRefresh(true).execute().actionGet();

        SearchResponse searchResponse = client().prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS))
                .setQuery(scriptQuery(new Script(NativeTestScriptedTimeout.TEST_NATIVE_SCRIPT_TIMEOUT, ScriptType.INLINE, "native", null)))
                .execute().actionGet();
        assertThat(searchResponse.isTimedOut(), equalTo(true));
    }

    public static class ScriptedTimeoutPlugin extends Plugin {
        @Override
        public String name() {
            return "test-scripted-search-timeout";
        }

        @Override
        public String description() {
            return "Test for scripted timeouts on searches";
        }

        public void onModule(ScriptModule module) {
            module.registerScript(NativeTestScriptedTimeout.TEST_NATIVE_SCRIPT_TIMEOUT, NativeTestScriptedTimeout.Factory.class);
        }
    }

    public static class NativeTestScriptedTimeout extends AbstractSearchScript {

        public static final String TEST_NATIVE_SCRIPT_TIMEOUT = "native_test_search_timeout_script";

        public static class Factory implements NativeScriptFactory {

            @Override
            public ExecutableScript newScript(Map<String, Object> params) {
                return new NativeTestScriptedTimeout();
            }

            @Override
            public boolean needsScores() {
                return false;
            }
        }

        @Override
        public Object run() {
            try {
                Thread.sleep(500);
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
            return true;
        }
    }

}
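The new SearchTimeoutIT above works because every hit evaluates the registered native script, which sleeps for 500 ms, comfortably past the 10 ms search timeout, so the request returns partial results with isTimedOut() == true instead of failing. As an illustrative counterpart (a sketch, not part of the commit), a generous timeout on a plain query should leave the flag unset:

    SearchResponse ok = client().prepareSearch("test")
            .setTimeout(new TimeValue(10, TimeUnit.SECONDS))
            .setQuery(QueryBuilders.matchAllQuery())
            .execute().actionGet();
    assertThat(ok.isTimedOut(), equalTo(false));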
@@ -16,8 +16,7 @@
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.messy.tests;
package org.elasticsearch.search.aggregations.bucket;

import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;

@@ -33,9 +32,7 @@ import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;

@@ -83,23 +80,16 @@ import static org.hamcrest.Matchers.is;
 *
 */
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE)
public class SignificantTermsSignificanceScoreTests extends ESIntegTestCase {
public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
    static final String INDEX_NAME = "testidx";
    static final String DOC_TYPE = "doc";
    static final String TEXT_FIELD = "text";
    static final String CLASS_FIELD = "class";

    @Override
    public Settings nodeSettings(int nodeOrdinal) {
        return settingsBuilder()
                .put(super.nodeSettings(nodeOrdinal))
                .put("path.conf", this.getDataPath("conf"))
                .build();
    }

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return pluginList(CustomSignificanceHeuristicPlugin.class, GroovyPlugin.class);
        return pluginList(CustomSignificanceHeuristicPlugin.class);
    }

    public String randomExecutionHint() {

@@ -505,91 +495,15 @@ public class SignificantTermsSignificanceScoreTests extends ESIntegTestCase {
        }
    }

    public void testNoNumberFormatExceptionWithDefaultScriptingEngine() throws ExecutionException, InterruptedException, IOException {
        assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder().put("index.number_of_shards", 1)));
        index("test", "doc", "1", "{\"field\":\"a\"}");
        index("test", "doc", "11", "{\"field\":\"a\"}");
        index("test", "doc", "2", "{\"field\":\"b\"}");
        index("test", "doc", "22", "{\"field\":\"b\"}");
        index("test", "doc", "3", "{\"field\":\"a b\"}");
        index("test", "doc", "33", "{\"field\":\"a b\"}");
        ScriptHeuristic.ScriptHeuristicBuilder scriptHeuristicBuilder = new ScriptHeuristic.ScriptHeuristicBuilder();
        scriptHeuristicBuilder.setScript(new Script("_subset_freq/(_superset_freq - _subset_freq + 1)"));
        ensureYellow();
        refresh();
        SearchResponse response = client()
                .prepareSearch("test")
                .addAggregation(
                        new TermsBuilder("letters").field("field").subAggregation(
                                new SignificantTermsBuilder("mySignificantTerms").field("field").executionHint(randomExecutionHint())
                                        .significanceHeuristic(scriptHeuristicBuilder).minDocCount(1).shardSize(2).size(2))).execute()
                .actionGet();
        assertSearchResponse(response);
        assertThat(((Terms) response.getAggregations().get("letters")).getBuckets().size(), equalTo(2));
        for (Terms.Bucket classBucket : ((Terms) response.getAggregations().get("letters")).getBuckets()) {
            assertThat(((SignificantStringTerms) classBucket.getAggregations().get("mySignificantTerms")).getBuckets().size(), equalTo(2));
            for (SignificantTerms.Bucket bucket : ((SignificantTerms) classBucket.getAggregations().get("mySignificantTerms")).getBuckets()) {
                assertThat(bucket.getSignificanceScore(),
                        closeTo((double) bucket.getSubsetDf() / (bucket.getSupersetDf() - bucket.getSubsetDf() + 1), 1.e-6));
            }
        }
    }

    private ScriptHeuristic.ScriptHeuristicBuilder getScriptSignificanceHeuristicBuilder() throws IOException {
        Map<String, Object> params = null;
        Script script = null;
        String lang = null;
        if (randomBoolean()) {
            Map<String, Object> params = null;
            params = new HashMap<>();
            params.put("param", randomIntBetween(1, 100));
        }
        int randomScriptKind = randomIntBetween(0, 3);
        if (randomBoolean()) {
            lang = "groovy";
        }
        switch (randomScriptKind) {
            case 0: {
                if (params == null) {
                    script = new Script("return _subset_freq + _subset_size + _superset_freq + _superset_size");
                } else {
                    script = new Script("return param*(_subset_freq + _subset_size + _superset_freq + _superset_size)/param",
                            ScriptType.INLINE, lang, params);
                }
                break;
            }
            case 1: {
                String scriptString;
                if (params == null) {
                    scriptString = "return _subset_freq + _subset_size + _superset_freq + _superset_size";
                } else {
                    scriptString = "return param*(_subset_freq + _subset_size + _superset_freq + _superset_size)/param";
                }
                client().prepareIndex().setIndex(ScriptService.SCRIPT_INDEX).setType(ScriptService.DEFAULT_LANG).setId("my_script")
                        .setSource(XContentFactory.jsonBuilder().startObject().field("script", scriptString).endObject()).get();
                refresh();
                script = new Script("my_script", ScriptType.INDEXED, lang, params);
                break;
            }
            case 2: {
                if (params == null) {
                    script = new Script("significance_script_no_params", ScriptType.FILE, lang, null);
                } else {
                    script = new Script("significance_script_with_params", ScriptType.FILE, lang, params);
                }
                break;
            }
            case 3: {
                logger.info("NATIVE SCRIPT");
                if (params == null) {
                    script = new Script("native_significance_score_script_no_params", ScriptType.INLINE, "native", null);
                } else {
                    script = new Script("native_significance_score_script_with_params", ScriptType.INLINE, "native", params);
                }
                lang = "native";
                if (randomBoolean()) {
                }
                break;
            }
                script = new Script("native_significance_score_script_with_params", ScriptType.INLINE, "native", params);
            } else {
                script = new Script("native_significance_score_script_no_params", ScriptType.INLINE, "native", null);
            }
        ScriptHeuristic.ScriptHeuristicBuilder builder = new ScriptHeuristic.ScriptHeuristicBuilder().setScript(script);
@@ -16,21 +16,33 @@
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.messy.tests;
package org.elasticsearch.search.aggregations.metrics;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.metrics.AbstractNumericTestCase;
import org.elasticsearch.search.aggregations.metrics.avg.Avg;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;

@@ -45,10 +57,13 @@ import static org.hamcrest.Matchers.notNullValue;
/**
 *
 */
public class AvgTests extends AbstractNumericTestCase {
public class AvgIT extends AbstractNumericTestCase {

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return Collections.singleton(GroovyPlugin.class);
        return Arrays.asList(
                ExtractFieldScriptPlugin.class,
                FieldValueScriptPlugin.class);
    }

    @Override

@@ -145,7 +160,8 @@ public class AvgTests extends AbstractNumericTestCase {
    public void testSingleValuedFieldWithValueScript() throws Exception {
        SearchResponse searchResponse = client().prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(avg("avg").field("value").script(new Script("_value + 1")))
                .addAggregation(avg("avg").field("value")
                        .script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null)))
                .execute().actionGet();

        assertHitCount(searchResponse, 10);

@@ -153,16 +169,16 @@ public class AvgTests extends AbstractNumericTestCase {
        Avg avg = searchResponse.getAggregations().get("avg");
        assertThat(avg, notNullValue());
        assertThat(avg.getName(), equalTo("avg"));
        assertThat(avg.getValue(), equalTo((double) (2+3+4+5+6+7+8+9+10+11) / 10));
        assertThat(avg.getValue(), equalTo((double) (1+2+3+4+5+6+7+8+9+10) / 10));
    }

    @Override
    public void testSingleValuedFieldWithValueScriptWithParams() throws Exception {
        Map<String, Object> params = new HashMap<>();
        params.put("inc", 1);
        Map<String, Object> params = Collections.singletonMap("inc", 1);
        SearchResponse searchResponse = client().prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(avg("avg").field("value").script(new Script("_value + inc", ScriptType.INLINE, null, params)))
                .addAggregation(avg("avg").field("value")
                        .script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, params)))
                .execute().actionGet();

        assertHitCount(searchResponse, 10);

@@ -205,7 +221,8 @@ public class AvgTests extends AbstractNumericTestCase {
    public void testMultiValuedFieldWithValueScript() throws Exception {
        SearchResponse searchResponse = client().prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(avg("avg").field("values").script(new Script("_value + 1")))
                .addAggregation(avg("avg").field("values")
                        .script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null)))
                .execute().actionGet();

        assertHitCount(searchResponse, 10);

@@ -213,16 +230,16 @@ public class AvgTests extends AbstractNumericTestCase {
        Avg avg = searchResponse.getAggregations().get("avg");
        assertThat(avg, notNullValue());
        assertThat(avg.getName(), equalTo("avg"));
        assertThat(avg.getValue(), equalTo((double) (3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11+11+12+12+13) / 20));
        assertThat(avg.getValue(), equalTo((double) (2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11+11+12) / 20));
    }

    @Override
    public void testMultiValuedFieldWithValueScriptWithParams() throws Exception {
        Map<String, Object> params = new HashMap<>();
        params.put("inc", 1);
        Map<String, Object> params = Collections.singletonMap("inc", 1);
        SearchResponse searchResponse = client().prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(avg("avg").field("values").script(new Script("_value + inc", ScriptType.INLINE, null, params)))
                .addAggregation(avg("avg").field("values")
                        .script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, params)))
                .execute().actionGet();

        assertHitCount(searchResponse, 10);

@@ -237,7 +254,8 @@ public class AvgTests extends AbstractNumericTestCase {
    public void testScriptSingleValued() throws Exception {
        SearchResponse searchResponse = client().prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(avg("avg").script(new Script("doc['value'].value")))
                .addAggregation(avg("avg")
                        .script(new Script("value", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)))
                .execute().actionGet();

        assertHitCount(searchResponse, 10);

@@ -250,28 +268,11 @@ public class AvgTests extends AbstractNumericTestCase {

    @Override
    public void testScriptSingleValuedWithParams() throws Exception {
        Map<String, Object> params = new HashMap<>();
        params.put("inc", 1);
        Map<String, Object> params = Collections.singletonMap("inc", 1);
        SearchResponse searchResponse = client().prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(avg("avg").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)))
                .execute().actionGet();

        assertHitCount(searchResponse, 10);

        Avg avg = searchResponse.getAggregations().get("avg");
        assertThat(avg, notNullValue());
        assertThat(avg.getName(), equalTo("avg"));
        assertThat(avg.getValue(), equalTo((double) (2+3+4+5+6+7+8+9+10+11) / 10));
    }

    @Override
    public void testScriptExplicitSingleValuedWithParams() throws Exception {
        Map<String, Object> params = new HashMap<>();
        params.put("inc", 1);
        SearchResponse searchResponse = client().prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(avg("avg").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)))
                .addAggregation(avg("avg")
                        .script(new Script("value", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, params)))
                .execute().actionGet();

        assertHitCount(searchResponse, 10);

@@ -286,7 +287,8 @@ public class AvgTests extends AbstractNumericTestCase {
    public void testScriptMultiValued() throws Exception {
        SearchResponse searchResponse = client().prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(avg("avg").script(new Script("[ doc['value'].value, doc['value'].value + 1 ]")))
                .addAggregation(avg("avg")
                        .script(new Script("values", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)))
                .execute().actionGet();

        assertHitCount(searchResponse, 10);

@@ -294,32 +296,16 @@ public class AvgTests extends AbstractNumericTestCase {
        Avg avg = searchResponse.getAggregations().get("avg");
        assertThat(avg, notNullValue());
        assertThat(avg.getName(), equalTo("avg"));
        assertThat(avg.getValue(), equalTo((double) (1+2+2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11) / 20));
    }

    @Override
    public void testScriptExplicitMultiValued() throws Exception {
        SearchResponse searchResponse = client().prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(avg("avg").script(new Script("[ doc['value'].value, doc['value'].value + 1 ]")))
                .execute().actionGet();

        assertHitCount(searchResponse, 10);

        Avg avg = searchResponse.getAggregations().get("avg");
        assertThat(avg, notNullValue());
        assertThat(avg.getName(), equalTo("avg"));
        assertThat(avg.getValue(), equalTo((double) (1+2+2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11) / 20));
        assertThat(avg.getValue(), equalTo((double) (2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11+11+12) / 20));
    }

    @Override
    public void testScriptMultiValuedWithParams() throws Exception {
        Map<String, Object> params = new HashMap<>();
        params.put("inc", 1);
        Map<String, Object> params = Collections.singletonMap("inc", 1);
        SearchResponse searchResponse = client().prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(
                        avg("avg").script(new Script("[ doc['value'].value, doc['value'].value + inc ]", ScriptType.INLINE, null, params)))
                .addAggregation(avg("avg")
                        .script(new Script("values", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, params)))
                .execute().actionGet();

        assertHitCount(searchResponse, 10);

@@ -327,6 +313,276 @@ public class AvgTests extends AbstractNumericTestCase {
        Avg avg = searchResponse.getAggregations().get("avg");
        assertThat(avg, notNullValue());
        assertThat(avg.getName(), equalTo("avg"));
        assertThat(avg.getValue(), equalTo((double) (1+2+2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11) / 20));
        assertThat(avg.getValue(), equalTo((double) (3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11+11+12+12+13) / 20));
    }

    /**
     * Mock plugin for the {@link ExtractFieldScriptEngine}
     */
    public static class ExtractFieldScriptPlugin extends Plugin {

        @Override
        public String name() {
            return ExtractFieldScriptEngine.NAME;
        }

        @Override
        public String description() {
            return "Mock script engine for " + AvgIT.class;
        }

        public void onModule(ScriptModule module) {
            module.addScriptEngine(ExtractFieldScriptEngine.class);
        }

    }

    /**
     * This mock script returns the field that is specified by name in the script body
     */
    public static class ExtractFieldScriptEngine implements ScriptEngineService {

        public static final String NAME = "extract_field";

        @Override
        public void close() throws IOException {
        }

        @Override
        public String[] types() {
            return new String[] { NAME };
        }

        @Override
        public String[] extensions() {
            return types();
        }

        @Override
        public boolean sandboxed() {
            return true;
        }

        @Override
        public Object compile(String script) {
            return script;
        }

        @Override
        public ExecutableScript executable(CompiledScript compiledScript, Map<String, Object> params) {
            throw new UnsupportedOperationException();
        }
        @Override
        public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map<String, Object> vars) {
            final long inc;
            if (vars == null || vars.containsKey("inc") == false) {
                inc = 0;
            } else {
                inc = ((Number) vars.get("inc")).longValue();
            }
            return new SearchScript() {

                @Override
                public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException {

                    final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context);

                    return new LeafSearchScript() {

                        @Override
                        public Object unwrap(Object value) {
                            return null;
                        }

                        @Override
                        public void setNextVar(String name, Object value) {
                        }

                        @Override
                        public Object run() {
                            String fieldName = (String) compiledScript.compiled();
                            List<Long> values = new ArrayList<>();
                            for (Object v : (List<?>) leafLookup.doc().get(fieldName)) {
                                values.add(((Number) v).longValue() + inc);
                            }
                            return values;
                        }

                        @Override
                        public void setScorer(Scorer scorer) {
                        }

                        @Override
                        public void setSource(Map<String, Object> source) {
                        }

                        @Override
                        public void setDocument(int doc) {
                            if (leafLookup != null) {
                                leafLookup.setDocument(doc);
                            }
                        }

                        @Override
                        public long runAsLong() {
                            throw new UnsupportedOperationException();
                        }

                        @Override
                        public float runAsFloat() {
                            throw new UnsupportedOperationException();
                        }

                        @Override
                        public double runAsDouble() {
                            throw new UnsupportedOperationException();
                        }
                    };
                }

                @Override
                public boolean needsScores() {
                    return false;
                }
            };
        }

        @Override
        public void scriptRemoved(CompiledScript script) {
        }
    }

    /**
     * Mock plugin for the {@link FieldValueScriptEngine}
     */
    public static class FieldValueScriptPlugin extends Plugin {

        @Override
        public String name() {
            return FieldValueScriptEngine.NAME;
        }

        @Override
        public String description() {
            return "Mock script engine for " + AvgIT.class;
        }

        public void onModule(ScriptModule module) {
            module.addScriptEngine(FieldValueScriptEngine.class);
        }

    }

    /**
     * This mock script returns the field value, adding the optional "inc" parameter to the returned value
     */
    public static class FieldValueScriptEngine implements ScriptEngineService {

        public static final String NAME = "field_value";

        @Override
        public void close() throws IOException {
        }

        @Override
        public String[] types() {
            return new String[] { NAME };
        }

        @Override
        public String[] extensions() {
            return types();
        }

        @Override
        public boolean sandboxed() {
            return true;
        }

        @Override
        public Object compile(String script) {
            return script;
        }

        @Override
        public ExecutableScript executable(CompiledScript compiledScript, Map<String, Object> params) {
            throw new UnsupportedOperationException();
        }
        @Override
        public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map<String, Object> vars) {
            final long inc;
            if (vars == null || vars.containsKey("inc") == false) {
                inc = 0;
            } else {
                inc = ((Number) vars.get("inc")).longValue();
            }
            return new SearchScript() {

                private Map<String, Object> vars = new HashMap<>(2);

                @Override
                public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException {

                    final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context);

                    return new LeafSearchScript() {

                        @Override
                        public Object unwrap(Object value) {
                            throw new UnsupportedOperationException();
                        }

                        @Override
                        public void setNextVar(String name, Object value) {
                            vars.put(name, value);
                        }

                        @Override
                        public Object run() {
                            throw new UnsupportedOperationException();
                        }

                        @Override
                        public void setScorer(Scorer scorer) {
                        }

                        @Override
                        public void setSource(Map<String, Object> source) {
                        }

                        @Override
                        public void setDocument(int doc) {
                            if (leafLookup != null) {
                                leafLookup.setDocument(doc);
                            }
                        }

                        @Override
                        public long runAsLong() {
                            return ((Number) vars.get("_value")).longValue() + inc;
                        }

                        @Override
                        public float runAsFloat() {
                            throw new UnsupportedOperationException();
                        }

                        @Override
                        public double runAsDouble() {
                            return ((Number) vars.get("_value")).doubleValue() + inc;
                        }
                    };
                }

                @Override
                public boolean needsScores() {
                    return false;
                }
            };
        }

        @Override
        public void scriptRemoved(CompiledScript script) {
        }
    }
}
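The same substitution is applied in the sum and value_count tests below: the GroovyPlugin dependency is dropped in favor of the purpose-built mock engines. Their calling convention, summarized as a sketch (all names taken from the diff; "params" is a Map containing the keys shown):

    // extract_field: the "script source" is just a document field name to read
    new Script("value", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null);
    // extract_field with params: each extracted value is incremented by params.get("inc")
    new Script("values", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, params);
    // field_value: the source is ignored; the engine transforms the aggregated _value,
    // adding params.get("inc") when present
    new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, params);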
@@ -16,21 +16,32 @@
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.messy.tests;
package org.elasticsearch.search.aggregations.metrics;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.metrics.AbstractNumericTestCase;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;

@@ -44,10 +55,11 @@ import static org.hamcrest.Matchers.notNullValue;
/**
 *
 */
public class SumTests extends AbstractNumericTestCase {
public class SumIT extends AbstractNumericTestCase {

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return Collections.singleton(GroovyPlugin.class);
        return Arrays.asList(ExtractFieldScriptPlugin.class, FieldValueScriptPlugin.class);
    }

    @Override

@@ -157,7 +169,7 @@ public class SumTests extends AbstractNumericTestCase {
    public void testSingleValuedFieldWithValueScript() throws Exception {
        SearchResponse searchResponse = client().prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(sum("sum").field("value").script(new Script("_value + 1")))
                .addAggregation(sum("sum").field("value").script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null)))
                .execute().actionGet();

        assertHitCount(searchResponse, 10);

@@ -165,7 +177,7 @@ public class SumTests extends AbstractNumericTestCase {
        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());
        assertThat(sum.getName(), equalTo("sum"));
        assertThat(sum.getValue(), equalTo((double) 2+3+4+5+6+7+8+9+10+11));
        assertThat(sum.getValue(), equalTo((double) 1+2+3+4+5+6+7+8+9+10));
    }

    @Override

@@ -174,7 +186,7 @@ public class SumTests extends AbstractNumericTestCase {
        params.put("increment", 1);
        SearchResponse searchResponse = client().prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(sum("sum").field("value").script(new Script("_value + increment", ScriptType.INLINE, null, params)))
                .addAggregation(sum("sum").field("value").script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, params)))
                .execute().actionGet();

        assertHitCount(searchResponse, 10);

@@ -182,14 +194,14 @@ public class SumTests extends AbstractNumericTestCase {
        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());
        assertThat(sum.getName(), equalTo("sum"));
        assertThat(sum.getValue(), equalTo((double) 2+3+4+5+6+7+8+9+10+11));
        assertThat(sum.getValue(), equalTo((double) 1+2+3+4+5+6+7+8+9+10));
    }

    @Override
    public void testScriptSingleValued() throws Exception {
        SearchResponse searchResponse = client().prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(sum("sum").script(new Script("doc['value'].value")))
                .addAggregation(sum("sum").script(new Script("value", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)))
                .execute().actionGet();

        assertHitCount(searchResponse, 10);

@@ -206,7 +218,7 @@ public class SumTests extends AbstractNumericTestCase {
        params.put("inc", 1);
        SearchResponse searchResponse = client().prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(sum("sum").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)))
                .addAggregation(sum("sum").script(new Script("value", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, params)))
                .execute().actionGet();

        assertHitCount(searchResponse, 10);

@@ -217,29 +229,11 @@ public class SumTests extends AbstractNumericTestCase {
        assertThat(sum.getValue(), equalTo((double) 2+3+4+5+6+7+8+9+10+11));
    }

    @Override
    public void testScriptExplicitSingleValuedWithParams() throws Exception {
        Map<String, Object> params = new HashMap<>();
        params.put("inc", 1);
        SearchResponse searchResponse = client().prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(sum("sum").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)))
                .execute().actionGet();

        assertHitCount(searchResponse, 10);

        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());
        assertThat(sum.getName(), equalTo("sum"));
        assertThat(sum.getValue(), equalTo((double) 2+3+4+5+6+7+8+9+10+11));
    }


    @Override
    public void testScriptMultiValued() throws Exception {
        SearchResponse searchResponse = client().prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(sum("sum").script(new Script("[ doc['value'].value, doc['value'].value + 1 ]")))
                .addAggregation(sum("sum").script(new Script("values", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)))
                .execute().actionGet();

        assertHitCount(searchResponse, 10);

@@ -247,22 +241,7 @@ public class SumTests extends AbstractNumericTestCase {
        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());
        assertThat(sum.getName(), equalTo("sum"));
        assertThat(sum.getValue(), equalTo((double) 1+2+2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11));
    }

    @Override
    public void testScriptExplicitMultiValued() throws Exception {
        SearchResponse searchResponse = client().prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(sum("sum").script(new Script("[ doc['value'].value, doc['value'].value + 1 ]")))
                .execute().actionGet();

        assertHitCount(searchResponse, 10);

        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());
        assertThat(sum.getName(), equalTo("sum"));
        assertThat(sum.getValue(), equalTo((double) 1+2+2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11));
        assertThat(sum.getValue(), equalTo((double) 2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11+11+12));
    }

    @Override

@@ -272,7 +251,7 @@ public class SumTests extends AbstractNumericTestCase {
        SearchResponse searchResponse = client().prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(
                        sum("sum").script(new Script("[ doc['value'].value, doc['value'].value + inc ]", ScriptType.INLINE, null, params)))
                        sum("sum").script(new Script("values", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, params)))
                .execute().actionGet();

        assertHitCount(searchResponse, 10);

@@ -280,7 +259,7 @@ public class SumTests extends AbstractNumericTestCase {
        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());
        assertThat(sum.getName(), equalTo("sum"));
        assertThat(sum.getValue(), equalTo((double) 1+2+2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11));
        assertThat(sum.getValue(), equalTo((double) 3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11+11+12+12+13));
    }

    @Override

@@ -304,22 +283,7 @@ public class SumTests extends AbstractNumericTestCase {

        SearchResponse searchResponse = client().prepareSearch("idx")
                .setQuery(matchAllQuery())
                .addAggregation(sum("sum").field("values").script(new Script("_value + 1"))).execute().actionGet();

        assertHitCount(searchResponse, 10);

        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());
        assertThat(sum.getName(), equalTo("sum"));
        assertThat(sum.getValue(), equalTo((double) 3 + 4 + 4 + 5 + 5 + 6 + 6 + 7 + 7 + 8 + 8 + 9 + 9 + 10 + 10 + 11 + 11 + 12 + 12 + 13));
    }

    @Override
    public void testMultiValuedFieldWithValueScriptWithParams() throws Exception {
        Map<String, Object> params = new HashMap<>();
        params.put("increment", 1);
        SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
                .addAggregation(sum("sum").field("values").script(new Script("_value + increment", ScriptType.INLINE, null, params)))
                .addAggregation(sum("sum").field("values").script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null)))
                .execute().actionGet();

        assertHitCount(searchResponse, 10);

@@ -327,6 +291,296 @@ public class SumTests extends AbstractNumericTestCase {
        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());
        assertThat(sum.getName(), equalTo("sum"));
        assertThat(sum.getValue(), equalTo((double) 3 + 4 + 4 + 5 + 5 + 6 + 6 + 7 + 7 + 8 + 8 + 9 + 9 + 10 + 10 + 11 + 11 + 12 + 12 + 13));
        assertThat(sum.getValue(), equalTo((double) 2 + 3 + 3 + 4 + 4 + 5 + 5 + 6 + 6 + 7 + 7 + 8 + 8 + 9 + 9 + 10 + 10 + 11 + 11 + 12));
    }

    @Override
    public void testMultiValuedFieldWithValueScriptWithParams() throws Exception {
        Map<String, Object> params = new HashMap<>();
        params.put("increment", 1);
        SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
                .addAggregation(sum("sum").field("values").script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, params)))
                .execute().actionGet();

        assertHitCount(searchResponse, 10);

        Sum sum = searchResponse.getAggregations().get("sum");
        assertThat(sum, notNullValue());
        assertThat(sum.getName(), equalTo("sum"));
        assertThat(sum.getValue(), equalTo((double) 2 + 3 + 3 + 4 + 4 + 5 + 5 + 6 + 6 + 7 + 7 + 8 + 8 + 9 + 9 + 10 + 10 + 11 + 11 + 12));
    }

    /**
     * Mock plugin for the {@link ExtractFieldScriptEngine}
     */
    public static class ExtractFieldScriptPlugin extends Plugin {

        @Override
        public String name() {
            return ExtractFieldScriptEngine.NAME;
        }

        @Override
        public String description() {
            return "Mock script engine for " + SumIT.class;
        }

        public void onModule(ScriptModule module) {
            module.addScriptEngine(ExtractFieldScriptEngine.class);
        }

    }

    /**
     * This mock script returns the field that is specified by name in the
     * script body
     */
    public static class ExtractFieldScriptEngine implements ScriptEngineService {

        public static final String NAME = "extract_field";

        @Override
        public void close() throws IOException {
        }

        @Override
        public String[] types() {
            return new String[] { NAME };
        }

        @Override
        public String[] extensions() {
            return types();
        }

        @Override
        public boolean sandboxed() {
            return true;
        }

        @Override
        public Object compile(String script) {
            return script;
        }

        @Override
        public ExecutableScript executable(CompiledScript compiledScript, Map<String, Object> params) {
            throw new UnsupportedOperationException();
        }

        @Override
        public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map<String, Object> vars) {
            final long inc;
            if (vars == null || vars.containsKey("inc") == false) {
                inc = 0;
            } else {
                inc = ((Number) vars.get("inc")).longValue();
            }
            return new SearchScript() {

                @Override
                public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException {

                    final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context);

                    return new LeafSearchScript() {

                        @Override
                        public Object unwrap(Object value) {
                            return null;
                        }

                        @Override
                        public void setNextVar(String name, Object value) {
                        }

                        @Override
                        public Object run() {
                            String fieldName = (String) compiledScript.compiled();
                            List<Long> values = new ArrayList<>();
                            for (Object v : (List<?>) leafLookup.doc().get(fieldName)) {
                                values.add(((Number) v).longValue() + inc);
                            }
                            return values;
                        }

                        @Override
                        public void setScorer(Scorer scorer) {
                        }

                        @Override
                        public void setSource(Map<String, Object> source) {
                        }

                        @Override
                        public void setDocument(int doc) {
                            if (leafLookup != null) {
                                leafLookup.setDocument(doc);
                            }
                        }

                        @Override
                        public long runAsLong() {
                            throw new UnsupportedOperationException();
                        }

                        @Override
                        public float runAsFloat() {
                            throw new UnsupportedOperationException();
                        }

                        @Override
                        public double runAsDouble() {
                            throw new UnsupportedOperationException();
                        }
                    };
                }

                @Override
                public boolean needsScores() {
                    return false;
                }
            };
        }

        @Override
        public void scriptRemoved(CompiledScript script) {
        }
    }

    /**
     * Mock plugin for the {@link FieldValueScriptEngine}
     */
    public static class FieldValueScriptPlugin extends Plugin {

        @Override
        public String name() {
            return FieldValueScriptEngine.NAME;
        }

        @Override
        public String description() {
            return "Mock script engine for " + SumIT.class;
        }

        public void onModule(ScriptModule module) {
            module.addScriptEngine(FieldValueScriptEngine.class);
        }

    }

    /**
     * This mock script returns the field value and adds one to the returned
     * value
     */
    public static class FieldValueScriptEngine implements ScriptEngineService {

        public static final String NAME = "field_value";

        @Override
        public void close() throws IOException {
        }

        @Override
        public String[] types() {
            return new String[] { NAME };
        }

        @Override
        public String[] extensions() {
            return types();
        }

        @Override
        public boolean sandboxed() {
            return true;
        }

        @Override
        public Object compile(String script) {
            return script;
        }

        @Override
        public ExecutableScript executable(CompiledScript compiledScript, Map<String, Object> params) {
            throw new UnsupportedOperationException();
        }

        @Override
        public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map<String, Object> vars) {
            final long inc;
            if (vars == null || vars.containsKey("inc") == false) {
                inc = 0;
            } else {
                inc = ((Number) vars.get("inc")).longValue();
            }
            return new SearchScript() {

                private Map<String, Object> vars = new HashMap<>(2);

                @Override
                public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException {

                    final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context);

                    return new LeafSearchScript() {

                        @Override
                        public Object unwrap(Object value) {
                            throw new UnsupportedOperationException();
                        }

                        @Override
                        public void setNextVar(String name, Object value) {
                            vars.put(name, value);
                        }

                        @Override
                        public Object run() {
                            throw new UnsupportedOperationException();
                        }

                        @Override
                        public void setScorer(Scorer scorer) {
                        }

                        @Override
                        public void setSource(Map<String, Object> source) {
                        }

                        @Override
                        public void setDocument(int doc) {
                            if (leafLookup != null) {
                                leafLookup.setDocument(doc);
                            }
                        }

                        @Override
                        public long runAsLong() {
                            return ((Number) vars.get("_value")).longValue() + inc;
                        }

                        @Override
                        public float runAsFloat() {
                            throw new UnsupportedOperationException();
                        }

                        @Override
                        public double runAsDouble() {
                            return ((Number) vars.get("_value")).doubleValue() + inc;
                        }
                    };
                }

                @Override
                public boolean needsScores() {
                    return false;
                }
            };
        }

        @Override
        public void scriptRemoved(CompiledScript script) {
        }
    }
}
@ -16,21 +16,22 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.messy.tests;
|
||||
package org.elasticsearch.search.aggregations.metrics;
|
||||
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.search.Scorer;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.*;
|
||||
import org.elasticsearch.script.ScriptService.ScriptType;
|
||||
import org.elasticsearch.script.groovy.GroovyPlugin;
|
||||
import org.elasticsearch.search.aggregations.bucket.global.Global;
|
||||
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCount;
|
||||
import org.elasticsearch.search.lookup.LeafSearchLookup;
|
||||
import org.elasticsearch.search.lookup.SearchLookup;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.io.IOException;
|
||||
import java.util.*;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
|
||||
|
@ -44,13 +45,7 @@ import static org.hamcrest.Matchers.notNullValue;
|
|||
*
|
||||
*/
|
||||
@ESIntegTestCase.SuiteScopeTestCase
|
||||
public class ValueCountTests extends ESIntegTestCase {
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> nodePlugins() {
|
||||
return Collections.singleton(GroovyPlugin.class);
|
||||
}
|
||||
|
||||
public class ValueCountIT extends ESIntegTestCase {
|
||||
@Override
|
||||
public void setupSuiteScopeCluster() throws Exception {
|
||||
createIndex("idx");
|
||||
|
@ -68,6 +63,11 @@ public class ValueCountTests extends ESIntegTestCase {
|
|||
ensureSearchable();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> nodePlugins() {
|
||||
return Collections.singletonList(FieldValueScriptPlugin.class);
|
||||
}
|
||||
|
||||
public void testUnmapped() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("idx_unmapped")
|
||||
.setQuery(matchAllQuery())
|
||||
|
@ -148,7 +148,7 @@ public class ValueCountTests extends ESIntegTestCase {
|
|||
|
||||
public void testSingleValuedScript() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
|
||||
.addAggregation(count("count").script(new Script("doc['value'].value"))).execute().actionGet();
|
||||
.addAggregation(count("count").script(new Script("value", ScriptType.INLINE, FieldValueScriptEngine.NAME, null))).execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
|
@ -160,7 +160,7 @@ public class ValueCountTests extends ESIntegTestCase {
|
|||
|
||||
public void testMultiValuedScript() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
|
||||
.addAggregation(count("count").script(new Script("doc['values'].values"))).execute().actionGet();
|
||||
.addAggregation(count("count").script(new Script("values", ScriptType.INLINE, FieldValueScriptEngine.NAME, null))).execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
|
@ -171,10 +171,9 @@ public class ValueCountTests extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testSingleValuedScriptWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("s", "value");
|
||||
Map<String, Object> params = Collections.singletonMap("s", "value");
|
||||
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
|
||||
.addAggregation(count("count").script(new Script("doc[s].value", ScriptType.INLINE, null, params))).execute().actionGet();
|
||||
.addAggregation(count("count").script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, params))).execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
|
@ -185,10 +184,9 @@ public class ValueCountTests extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testMultiValuedScriptWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("s", "values");
|
||||
Map<String, Object> params = Collections.singletonMap("s", "values");
|
||||
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
|
||||
.addAggregation(count("count").script(new Script("doc[s].values", ScriptType.INLINE, null, params))).execute().actionGet();
|
||||
.addAggregation(count("count").script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, params))).execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
|
@ -197,4 +195,139 @@ public class ValueCountTests extends ESIntegTestCase {
|
|||
assertThat(valueCount.getName(), equalTo("count"));
|
||||
assertThat(valueCount.getValue(), equalTo(20l));
|
||||
}
|
||||
|
||||
/**
|
||||
* Mock plugin for the {@link FieldValueScriptEngine}
|
||||
*/
|
||||
public static class FieldValueScriptPlugin extends Plugin {
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return FieldValueScriptEngine.NAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String description() {
|
||||
return "Mock script engine for " + ValueCountIT.class;
|
||||
}
|
||||
|
||||
public void onModule(ScriptModule module) {
|
||||
module.addScriptEngine(FieldValueScriptEngine.class);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* This mock script returns the field value. If the parameter map contains a parameter "s", the corresponding is used as field name.
|
||||
*/
public static class FieldValueScriptEngine implements ScriptEngineService {

public static final String NAME = "field_value";

@Override
public void close() throws IOException {
}

@Override
public String[] types() {
return new String[] { NAME };
}

@Override
public String[] extensions() {
return types();
}

@Override
public boolean sandboxed() {
return true;
}

@Override
public Object compile(String script) {
return script;
}

@Override
public ExecutableScript executable(CompiledScript compiledScript, Map<String, Object> params) {
throw new UnsupportedOperationException();
}
@Override
public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map<String, Object> vars) {
final String fieldNameParam;
if (vars == null || vars.containsKey("s") == false) {
fieldNameParam = null;
} else {
fieldNameParam = (String) vars.get("s");
}

return new SearchScript() {
private Map<String, Object> vars = new HashMap<>(2);

@Override
public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException {

final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context);

return new LeafSearchScript() {

@Override
public Object unwrap(Object value) {
throw new UnsupportedOperationException();
}

@Override
public void setNextVar(String name, Object value) {
vars.put(name, value);
}

@Override
public Object run() {
String fieldName = (fieldNameParam != null) ? fieldNameParam : (String) compiledScript.compiled();
return leafLookup.doc().get(fieldName);
}

@Override
public void setScorer(Scorer scorer) {
}

@Override
public void setSource(Map<String, Object> source) {
}

@Override
public void setDocument(int doc) {
if (leafLookup != null) {
leafLookup.setDocument(doc);
}
}

@Override
public long runAsLong() {
throw new UnsupportedOperationException();
}

@Override
public float runAsFloat() {
throw new UnsupportedOperationException();
}

@Override
public double runAsDouble() {
throw new UnsupportedOperationException();
}
};
}

@Override
public boolean needsScores() {
return false;
}
};
}

@Override
public void scriptRemoved(CompiledScript script) {
}
}
}
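As an aside for reviewers: the mock engine above only takes effect once a test registers FieldValueScriptPlugin. A minimal sketch of that wiring (not shown in this hunk; nodePlugins() is the standard ESIntegTestCase hook, used the same way by SearchTimeoutTests further down in this commit; Collection, Collections and Plugin imports assumed):

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        // Register the mock plugin so scripts with lang FieldValueScriptEngine.NAME
        // ("field_value") resolve to the engine defined above at search time.
        return Collections.singleton(FieldValueScriptPlugin.class);
    }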

@@ -41,7 +41,7 @@ import org.elasticsearch.common.Priority;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.geo.builders.LineStringBuilder;
import org.elasticsearch.common.geo.builders.MultiPolygonBuilder;
import org.elasticsearch.common.geo.builders.PolygonBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilders;

@@ -129,17 +129,17 @@ public class GeoFilterIT extends ESIntegTestCase {
// polygon with hole
ShapeBuilders.newPolygon()
.point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
.hole()
.hole(new LineStringBuilder()
.point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
.close().close().build();
.close()).close().build();

try {
// polygon with overlapping hole
ShapeBuilders.newPolygon()
.point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
.hole()
.hole(new LineStringBuilder()
.point(-5, -5).point(-5, 11).point(5, 11).point(5, -5)
.close().close().build();
.close()).close().build();

fail("Self intersection not detected");
} catch (InvalidShapeException e) {

@@ -149,12 +149,12 @@ public class GeoFilterIT extends ESIntegTestCase {
// polygon with intersection holes
ShapeBuilders.newPolygon()
.point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
.hole()
.hole(new LineStringBuilder()
.point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
.close()
.hole()
.close())
.hole(new LineStringBuilder()
.point(-5, -6).point(5, -6).point(5, -4).point(-5, -4)
.close()
.close())
.close().build();
fail("Intersection of holes not detected");
} catch (InvalidShapeException e) {

@@ -175,52 +175,27 @@ public class GeoFilterIT extends ESIntegTestCase {
} catch (InvalidShapeException e) {
}

// Not specified
// try {
// // two overlapping polygons within a multipolygon
// ShapeBuilder.newMultiPolygon()
// .polygon()
// .point(-10, -10)
// .point(-10, 10)
// .point(10, 10)
// .point(10, -10)
// .close()
// .polygon()
// .point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
// .close().build();
// fail("Polygon intersection not detected";
// } catch (InvalidShapeException e) {}

// Multipolygon: polygon with hole and polygon within the hole
ShapeBuilders.newMultiPolygon()
.polygon()
.point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
.hole()
.point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
.close()
.close()
.polygon()
.point(-4, -4).point(-4, 4).point(4, 4).point(4, -4)
.close()
ShapeBuilders
.newMultiPolygon()
.polygon(new PolygonBuilder()
.point(-10, -10)
.point(-10, 10)
.point(10, 10)
.point(10, -10)
.hole(new LineStringBuilder().point(-5, -5)
.point(-5, 5)
.point(5, 5)
.point(5, -5)
.close())
.close())
.polygon(new PolygonBuilder()
.point(-4, -4)
.point(-4, 4)
.point(4, 4)
.point(4, -4)
.close())
.build();

// Not supported
// try {
// // Multipolygon: polygon with hole and polygon within the hole but overlapping
// ShapeBuilder.newMultiPolygon()
// .polygon()
// .point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
// .hole()
// .point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
// .close()
// .close()
// .polygon()
// .point(-4, -4).point(-4, 6).point(4, 6).point(4, -4)
// .close()
// .build();
// fail("Polygon intersection not detected";
// } catch (InvalidShapeException e) {}

}
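The migration pattern in the rewritten builders above, summarized as a standalone sketch (same org.elasticsearch.common.geo.builders imports assumed): holes are now constructed as explicit LineStringBuilder rings handed to hole(...), instead of being opened and closed inline on the polygon itself.

    PolygonBuilder squareWithHole = ShapeBuilders.newPolygon()
        .point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
        .hole(new LineStringBuilder()
            .point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
            .close())   // closes the hole ring
        .close();       // closes the outer ring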

public void testShapeRelations() throws Exception {

@@ -248,15 +223,13 @@ public class GeoFilterIT extends ESIntegTestCase {
// with a hole of size 5x5 equidistant from all sides. This hole in turn contains
// the second polygon of size 4x4 equidistant from all sides
MultiPolygonBuilder polygon = ShapeBuilders.newMultiPolygon()
.polygon()
.point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
.hole()
.point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
.close()
.close()
.polygon()
.point(-4, -4).point(-4, 4).point(4, 4).point(4, -4)
.close();
.polygon(new PolygonBuilder()
.point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
.hole(new LineStringBuilder()
.point(-5, -5).point(-5, 5).point(5, 5).point(5, -5).close())
.close())
.polygon(new PolygonBuilder()
.point(-4, -4).point(-4, 4).point(4, 4).point(4, -4).close());

BytesReference data = jsonBuilder().startObject().field("area", polygon).endObject().bytes();

@@ -318,9 +291,8 @@ public class GeoFilterIT extends ESIntegTestCase {
// Create a polygon that fills the empty area of the polygon defined above
PolygonBuilder inverse = ShapeBuilders.newPolygon()
.point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
.hole()
.point(-4, -4).point(-4, 4).point(4, 4).point(4, -4)
.close()
.hole(new LineStringBuilder()
.point(-4, -4).point(-4, 4).point(4, 4).point(4, -4).close())
.close();

data = jsonBuilder().startObject().field("area", inverse).endObject().bytes();

@@ -338,9 +310,8 @@ public class GeoFilterIT extends ESIntegTestCase {
// Create Polygon with hole and common edge
PolygonBuilder builder = ShapeBuilders.newPolygon()
.point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
.hole()
.point(-5, -5).point(-5, 5).point(10, 5).point(10, -5)
.close()
.hole(new LineStringBuilder()
.point(-5, -5).point(-5, 5).point(10, 5).point(10, -5).close())
.close();

if (withinSupport) {

@@ -367,7 +338,7 @@ public class GeoFilterIT extends ESIntegTestCase {
// Create a polygon crossing longitude 180 with hole.
builder = ShapeBuilders.newPolygon()
.point(170, -10).point(190, -10).point(190, 10).point(170, 10)
.hole().point(175, -5).point(185, -5).point(185, 5).point(175, 5).close()
.hole(new LineStringBuilder().point(175, -5).point(185, -5).point(185, 5).point(175, 5).close())
.close();

data = jsonBuilder().startObject().field("area", builder).endObject().bytes();

@@ -25,6 +25,7 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.geo.builders.EnvelopeBuilder;
import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder;
import org.elasticsearch.common.geo.builders.LineStringBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilders;
import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -193,7 +194,7 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase {
public void testReusableBuilder() throws IOException {
ShapeBuilder polygon = ShapeBuilders.newPolygon()
.point(170, -10).point(190, -10).point(190, 10).point(170, 10)
.hole().point(175, -5).point(185, -5).point(185, 5).point(175, 5).close()
.hole(new LineStringBuilder().point(175, -5).point(185, -5).point(185, 5).point(175, 5).close())
.close();
assertUnmodified(polygon);

@@ -27,6 +27,7 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.internal.DefaultSearchContext;
import org.elasticsearch.test.ESIntegTestCase;

@@ -41,6 +42,8 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.containsString;

@@ -104,6 +106,57 @@ public class SimpleSearchIT extends ESIntegTestCase {
assertHitCount(search, 1l);
}

public void testIpCIDR() throws Exception {
createIndex("test");

client().admin().indices().preparePutMapping("test").setType("type1")
.setSource(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("ip").field("type", "ip").endObject()
.endObject().endObject().endObject())
.execute().actionGet();
ensureGreen();

client().prepareIndex("test", "type1", "1").setSource("ip", "192.168.0.1").execute().actionGet();
client().prepareIndex("test", "type1", "2").setSource("ip", "192.168.0.2").execute().actionGet();
client().prepareIndex("test", "type1", "3").setSource("ip", "192.168.0.3").execute().actionGet();
client().prepareIndex("test", "type1", "4").setSource("ip", "192.168.1.4").execute().actionGet();
refresh();

SearchResponse search = client().prepareSearch()
.setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.0.1/32")))
.execute().actionGet();
assertHitCount(search, 1l);

search = client().prepareSearch()
.setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.0.1/24")))
.execute().actionGet();
assertHitCount(search, 3l);

search = client().prepareSearch()
.setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.0.1/8")))
.execute().actionGet();
assertHitCount(search, 4l);

search = client().prepareSearch()
.setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.1.1/24")))
.execute().actionGet();
assertHitCount(search, 1l);

search = client().prepareSearch()
.setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "0.0.0.0/0")))
.execute().actionGet();
assertHitCount(search, 4l);

search = client().prepareSearch()
.setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.1.5/32")))
.execute().actionGet();
assertHitCount(search, 0l);

assertFailures(client().prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "0/0/0/0/0"))),
RestStatus.BAD_REQUEST,
containsString("not a valid ip address"));
}

public void testSimpleId() {
createIndex("test");

@@ -31,7 +31,6 @@ import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.geo.builders.BaseLineStringBuilder;
import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder;
import org.elasticsearch.common.geo.builders.LineStringBuilder;
import org.elasticsearch.common.geo.builders.MultiLineStringBuilder;

@@ -198,7 +197,7 @@ public class RandomShapeGenerator extends RandomGeoGenerator {
case MULTILINESTRING:
MultiLineStringBuilder mlsb = new MultiLineStringBuilder();
for (int i=0; i<RandomInts.randomIntBetween(r, 1, 10); ++i) {
mlsb.linestring((BaseLineStringBuilder) createShape(r, nearPoint, within, ShapeType.LINESTRING, false));
mlsb.linestring((LineStringBuilder) createShape(r, nearPoint, within, ShapeType.LINESTRING, false));
}
return mlsb;
case POLYGON:

@@ -72,27 +72,27 @@ sub dump_issues {
$month++;
$year += 1900;

print <<"HTML";
<html>
<head>
<meta charset="UTF-8">
</head>
<body>
HTML
print <<"ASCIIDOC";
:issue: https://github.com/${User_Repo}issues/
:pull: https://github.com/${User_Repo}pull/

[[release-notes-$version]]
== $version Release Notes

ASCIIDOC

for my $group ( @Groups, 'other' ) {
my $group_issues = $issues->{$group} or next;
print "<h2>$Group_Labels{$group}</h2>\n\n<ul>\n";
print "[[$group-$version]]\n"
. "[float]\n"
. "=== $Group_Labels{$group}\n\n";

for my $header ( sort keys %$group_issues ) {
my $header_issues = $group_issues->{$header};
my $prefix = "<li>";
if ($header) {
print "<li>$header:<ul>";
}
print( $header || 'HEADER MISSING', "::\n" );

for my $issue (@$header_issues) {
my $title = $issue->{title};
$title =~ s{`([^`]+)`}{<code>$1</code>}g;

if ( $issue->{state} eq 'open' ) {
$title .= " [OPEN]";

@@ -102,30 +102,23 @@ HTML
}
my $number = $issue->{number};

print encode_utf8( $prefix
. $title
. qq[ <a href="${Issue_URL}${number}">#${number}</a>] );
print encode_utf8("* $title {pull}${number}[#${number}]");

if ( my $related = $issue->{related_issues} ) {
my %uniq = map { $_ => 1 } @$related;
print keys %uniq > 1
? " (issues: "
: " (issue: ";
print join ", ",
map {qq[<a href="${Issue_URL}${_}">#${_}</a>]}
print join ", ", map {"{issue}${_}[#${_}]"}
sort keys %uniq;
print ")";
}
print "</li>\n";
}
if ($header) {
print "</ul></li>\n";
print "\n";
}
print "\n";
}
print "</ul>";
print "\n\n";
}
print "</body></html>\n";
}

#===================================

@@ -42,7 +42,7 @@ The `phonetic` token filter takes the following settings:
Which phonetic encoder to use. Accepts `metaphone` (default),
`doublemetaphone`, `soundex`, `refinedsoundex`, `caverphone1`,
`caverphone2`, `cologne`, `nysiis`, `koelnerphonetik`, `haasephonetik`,
`beidermorse`.
`beidermorse`, `daitch_mokotoff`.
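A quick way to sanity-check the new encoder outside Elasticsearch is to call the commons-codec class the token filter delegates to (see the PhoneticTokenFilterFactory change further down). A minimal sketch, assuming commons-codec is on the classpath; the expected code for "Moskowitz" comes from the REST test added in this commit:

    import org.apache.commons.codec.language.DaitchMokotoffSoundex;

    public class DaitchMokotoffCheck {
        public static void main(String[] args) {
            // Same encoder the phonetic token filter constructs for "daitch_mokotoff".
            DaitchMokotoffSoundex dm = new DaitchMokotoffSoundex();
            System.out.println(dm.encode("Moskowitz")); // prints 645740
        }
    }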

`replace`::

@@ -218,6 +218,14 @@ The following settings are supported:
You can specify a canned ACL using the `canned_acl` setting. When the S3 repository
creates buckets and objects, it adds the canned ACL into the buckets and objects.

`storage_class`::

Sets the S3 storage class type for the backup files. Values may be
`standard`, `reduced_redundancy`, `standard_ia`. Defaults to `standard`.
Due to the extra complexity with the Glacier class lifecycle, it is not
currently supported by the plugin. For more information about the
different classes, see the http://docs.aws.amazon.com/AmazonS3/latest/dev/storage-class-intro.html[AWS Storage Classes Guide].
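For illustration, this is how the setting is mapped onto the AWS SDK by the S3BlobStore.initStorageClass(...) helper added later in this commit; a sketch of calling it directly, not plugin API:

    import com.amazonaws.services.s3.model.StorageClass;
    import org.elasticsearch.cloud.aws.blobstore.S3BlobStore;

    public class StorageClassCheck {
        public static void main(String[] args) {
            // Mirrors the validation in this commit: input is case-insensitive,
            // null/empty falls back to Standard, and "glacier" is rejected.
            StorageClass sc = S3BlobStore.initStorageClass("standard_ia");
            System.out.println(sc); // StandardInfrequentAccess
        }
    }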

The S3 repositories use the same credentials as the rest of the AWS services
provided by this plugin (`discovery`). See <<repository-s3-usage>> for details.

@@ -58,7 +58,9 @@ With delayed allocation enabled, the above scenario changes to look like this:

NOTE: This setting will not affect the promotion of replicas to primaries, nor
will it affect the assignment of replicas that have not been assigned
previously.
previously. In particular, delayed allocation does not come into effect after a full cluster restart.
Also, in case of a master failover situation, elapsed delay time is forgotten
(i.e. reset to the full initial delay).
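A sketch of what tuning this delay looks like from the Java API; the setting key (index.unassigned.node_left.delayed_timeout) is not shown in this hunk and is an assumption based on the surrounding delayed-allocation docs:

    import org.elasticsearch.common.settings.Settings;

    public class DelayedAllocationExample {
        // Hypothetical per-index override: wait five minutes before reallocating
        // replicas whose node left the cluster.
        static Settings delayedTimeout() {
            return Settings.builder()
                .put("index.unassigned.node_left.delayed_timeout", "5m")
                .build();
        }
    }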

==== Cancellation of shard relocation

@@ -7,6 +7,8 @@
:jdk: 1.8.0_25
:defguide: https://www.elastic.co/guide/en/elasticsearch/guide/current
:plugins: https://www.elastic.co/guide/en/elasticsearch/plugins/master
:issue: https://github.com/elastic/elasticsearch/issues
:pull: https://github.com/elastic/elasticsearch/pull

include::getting-started.asciidoc[]

@@ -42,6 +44,8 @@ include::testing.asciidoc[]

include::glossary.asciidoc[]

// include::release-notes.asciidoc[]

include::redirects.asciidoc[]

@@ -1,7 +1,7 @@
[[token-count]]
=== Token count datatype

A field of type `token_count` is really an <<number,`integer>> field which
A field of type `token_count` is really an <<number,`integer`>> field which
accepts string values, analyzes them, then indexes the number of tokens in the
string.
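A hypothetical mapping sketch for this datatype, written in the same jsonBuilder style the tests in this commit use (field names are illustrative; token_count needs an analyzer to know how the incoming string is tokenized):

    import java.io.IOException;
    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.XContentFactory;

    public class TokenCountMappingSketch {
        // "name" is indexed as a plain string; "name.length" stores its token count.
        static XContentBuilder mapping() throws IOException {
            return XContentFactory.jsonBuilder()
                .startObject()
                    .startObject("properties")
                        .startObject("name")
                            .field("type", "string")
                            .startObject("fields")
                                .startObject("length")
                                    .field("type", "token_count")
                                    .field("analyzer", "standard")
                                .endObject()
                            .endObject()
                        .endObject()
                    .endObject()
                .endObject();
        }
    }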

@@ -6,11 +6,7 @@ Returns documents that have at least one non-`null` value in the original field:
[source,js]
--------------------------------------------------
{
"constant_score" : {
"filter" : {
"exists" : { "field" : "user" }
}
}
"exists" : { "field" : "user" }
}
--------------------------------------------------

@@ -0,0 +1,7 @@
[[es-release-notes]]
= Release Notes

[partintro]
--
This section will summarize the changes in released versions.
--

@@ -30,7 +30,6 @@ import org.apache.lucene.analysis.phonetic.BeiderMorseFilter;
import org.apache.lucene.analysis.phonetic.DoubleMetaphoneFilter;
import org.apache.lucene.analysis.phonetic.PhoneticFilter;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.assistedinject.Assisted;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;

@@ -105,6 +104,8 @@ public class PhoneticTokenFilterFactory extends AbstractTokenFilterFactory {
this.encoder = new HaasePhonetik();
} else if ("nysiis".equalsIgnoreCase(encodername)) {
this.encoder = new Nysiis();
} else if ("daitch_mokotoff".equalsIgnoreCase(encodername)) {
this.encoder = new DaitchMokotoffSoundex();
} else {
throw new IllegalArgumentException("unknown encoder [" + encodername + "] for phonetic token filter");
}

@@ -28,3 +28,6 @@ index:
nysiisfilter:
  type: phonetic
  encoder: nysiis
daitch_mokotoff:
  type: phonetic
  encoder: daitch_mokotoff

@@ -0,0 +1,32 @@
# Integration tests for Phonetic analysis components
#

"Daitch Mokotoff":
    - do:
        indices.create:
          index: phonetic_sample
          body:
            settings:
              index:
                analysis:
                  analyzer:
                    my_analyzer:
                      tokenizer: standard
                      filter: ["standard", "lowercase", "daitch_mokotoff"]
                  filter:
                    daitch_mokotoff:
                      type: phonetic
                      encoder: daitch_mokotoff
    - do:
        cluster.health:
          wait_for_status: yellow
    - do:
        indices.analyze:
          index: phonetic_sample
          analyzer: my_analyzer
          text: Moskowitz

    - length: { tokens: 1 }
    - match: { tokens.0.token: "645740" }

@@ -444,33 +444,6 @@ public class ExtendedStatsTests extends AbstractNumericTestCase {
checkUpperLowerBounds(stats, sigma);
}

@Override
public void testScriptExplicitSingleValuedWithParams() throws Exception {
Map<String, Object> params = new HashMap<>();
params.put("inc", 1);
double sigma = randomDouble() * randomIntBetween(1, 10);
SearchResponse searchResponse = client().prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(
extendedStats("stats").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)).sigma(sigma))
.execute().actionGet();

assertHitCount(searchResponse, 10);

ExtendedStats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
assertThat(stats.getName(), equalTo("stats"));
assertThat(stats.getAvg(), equalTo((double) (2+3+4+5+6+7+8+9+10+11) / 10));
assertThat(stats.getMin(), equalTo(2.0));
assertThat(stats.getMax(), equalTo(11.0));
assertThat(stats.getSum(), equalTo((double) 2+3+4+5+6+7+8+9+10+11));
assertThat(stats.getCount(), equalTo(10l));
assertThat(stats.getSumOfSquares(), equalTo((double) 4+9+16+25+36+49+64+81+100+121));
assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11)));
assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11)));
checkUpperLowerBounds(stats, sigma);
}

@Override
public void testScriptMultiValued() throws Exception {
double sigma = randomDouble() * randomIntBetween(1, 10);

@@ -495,32 +468,6 @@ public class ExtendedStatsTests extends AbstractNumericTestCase {
checkUpperLowerBounds(stats, sigma);
}

@Override
public void testScriptExplicitMultiValued() throws Exception {
double sigma = randomDouble() * randomIntBetween(1, 10);
SearchResponse searchResponse = client().prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(extendedStats("stats").script(new Script("doc['values'].values")).sigma(sigma))
.execute().actionGet();

assertShardExecutionState(searchResponse, 0);
assertHitCount(searchResponse, 10);

ExtendedStats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
assertThat(stats.getName(), equalTo("stats"));
assertThat(stats.getAvg(), equalTo((double) (2+3+4+5+6+7+8+9+10+11+3+4+5+6+7+8+9+10+11+12) / 20));
assertThat(stats.getMin(), equalTo(2.0));
assertThat(stats.getMax(), equalTo(12.0));
assertThat(stats.getSum(), equalTo((double) 2+3+4+5+6+7+8+9+10+11+3+4+5+6+7+8+9+10+11+12));
assertThat(stats.getCount(), equalTo(20l));
assertThat(stats.getSumOfSquares(), equalTo((double) 4+9+16+25+36+49+64+81+100+121+9+16+25+36+49+64+81+100+121+144));
assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 12)));
assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 12)));
checkUpperLowerBounds(stats, sigma);

}

@Override
public void testScriptMultiValuedWithParams() throws Exception {
Map<String, Object> params = new HashMap<>();

@@ -380,26 +380,6 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase {
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
}

@Override
public void testScriptExplicitSingleValuedWithParams() throws Exception {
int sigDigits = randomSignificantDigits();
Map<String, Object> params = new HashMap<>();
params.put("dec", 1);
final double[] pcts = randomPercents(minValue - 1, maxValue - 1);
SearchResponse searchResponse = client()
.prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(
percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits)
.script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params)).percentiles(pcts))
.execute().actionGet();

assertHitCount(searchResponse, 10);

final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
}

@Override
public void testScriptMultiValued() throws Exception {
int sigDigits = randomSignificantDigits();

@@ -417,23 +397,6 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase {
assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits);
}

@Override
public void testScriptExplicitMultiValued() throws Exception {
int sigDigits = randomSignificantDigits();
final double[] pcts = randomPercents(minValues, maxValues);
SearchResponse searchResponse = client()
.prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(
percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits)
.script(new Script("doc['values'].values")).percentiles(pcts)).execute().actionGet();

assertHitCount(searchResponse, 10);

final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits);
}

@Override
public void testScriptMultiValuedWithParams() throws Exception {
int sigDigits = randomSignificantDigits();

@@ -370,26 +370,6 @@ public class HDRPercentilesTests extends AbstractNumericTestCase {
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
}

@Override
public void testScriptExplicitSingleValuedWithParams() throws Exception {
Map<String, Object> params = new HashMap<>();
params.put("dec", 1);
final double[] pcts = randomPercentiles();
int sigDigits = randomSignificantDigits();
SearchResponse searchResponse = client()
.prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(
percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR)
.script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params)).percentiles(pcts))
.execute().actionGet();

assertHitCount(searchResponse, 10);

final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
}

@Override
public void testScriptMultiValued() throws Exception {
final double[] pcts = randomPercentiles();

@@ -407,23 +387,6 @@ public class HDRPercentilesTests extends AbstractNumericTestCase {
assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits);
}

@Override
public void testScriptExplicitMultiValued() throws Exception {
final double[] pcts = randomPercentiles();
int sigDigits = randomSignificantDigits();
SearchResponse searchResponse = client()
.prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(
percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR)
.script(new Script("doc['values'].values")).percentiles(pcts)).execute().actionGet();

assertHitCount(searchResponse, 10);

final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits);
}

@Override
public void testScriptMultiValuedWithParams() throws Exception {
Map<String, Object> params = new HashMap<>();

@@ -262,23 +262,6 @@ public class MaxTests extends AbstractNumericTestCase {
assertThat(max.getValue(), equalTo(11.0));
}

@Override
public void testScriptExplicitSingleValuedWithParams() throws Exception {
Map<String, Object> params = new HashMap<>();
params.put("inc", 1);
SearchResponse searchResponse = client().prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(max("max").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)))
.execute().actionGet();

assertHitCount(searchResponse, 10);

Max max = searchResponse.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat(max.getName(), equalTo("max"));
assertThat(max.getValue(), equalTo(11.0));
}

@Override
public void testScriptMultiValued() throws Exception {
SearchResponse searchResponse = client().prepareSearch("idx")

@@ -294,21 +277,6 @@ public class MaxTests extends AbstractNumericTestCase {
assertThat(max.getValue(), equalTo(12.0));
}

@Override
public void testScriptExplicitMultiValued() throws Exception {
SearchResponse searchResponse = client().prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(max("max").script(new Script("doc['values'].values")))
.execute().actionGet();

assertHitCount(searchResponse, 10);

Max max = searchResponse.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat(max.getName(), equalTo("max"));
assertThat(max.getValue(), equalTo(12.0));
}

@Override
public void testScriptMultiValuedWithParams() throws Exception {
Map<String, Object> params = new HashMap<>();

@@ -272,22 +272,6 @@ public class MinTests extends AbstractNumericTestCase {
assertThat(min.getValue(), equalTo(0.0));
}

@Override
public void testScriptExplicitSingleValuedWithParams() throws Exception {
Map<String, Object> params = new HashMap<>();
params.put("dec", 1);
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
.addAggregation(min("min").script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params))).execute()
.actionGet();

assertHitCount(searchResponse, 10);

Min min = searchResponse.getAggregations().get("min");
assertThat(min, notNullValue());
assertThat(min.getName(), equalTo("min"));
assertThat(min.getValue(), equalTo(0.0));
}

@Override
public void testScriptMultiValued() throws Exception {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())

@@ -301,19 +285,6 @@ public class MinTests extends AbstractNumericTestCase {
assertThat(min.getValue(), equalTo(2.0));
}

@Override
public void testScriptExplicitMultiValued() throws Exception {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
.addAggregation(min("min").script(new Script("doc['values'].values"))).execute().actionGet();

assertHitCount(searchResponse, 10);

Min min = searchResponse.getAggregations().get("min");
assertThat(min, notNullValue());
assertThat(min.getName(), equalTo("min"));
assertThat(min.getValue(), equalTo(2.0));
}

@Override
public void testScriptMultiValuedWithParams() throws Exception {
Map<String, Object> params = new HashMap<>();

@@ -1,61 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.messy.tests;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.test.ESIntegTestCase;

import java.util.Collection;
import java.util.Collections;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.index.query.QueryBuilders.scriptQuery;
import static org.hamcrest.Matchers.equalTo;

/**
*/
@ESIntegTestCase.ClusterScope(scope= ESIntegTestCase.Scope.SUITE)
public class SearchTimeoutTests extends ESIntegTestCase {

@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(GroovyPlugin.class);
}

@Override
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)).build();
}

public void testSimpleTimeout() throws Exception {
client().prepareIndex("test", "type", "1").setSource("field", "value").setRefresh(true).execute().actionGet();

SearchResponse searchResponse = client().prepareSearch("test")
.setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS))
.setQuery(scriptQuery(new Script("Thread.sleep(500); return true;")))
.execute().actionGet();
assertThat(searchResponse.isTimedOut(), equalTo(true));
}
}

@@ -353,29 +353,6 @@ public class StatsTests extends AbstractNumericTestCase {
assertThat(stats.getCount(), equalTo(10l));
}

@Override
public void testScriptExplicitSingleValuedWithParams() throws Exception {
Map<String, Object> params = new HashMap<>();
params.put("inc", 1);
SearchResponse searchResponse = client().prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(stats("stats").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)))
.execute().actionGet();

assertShardExecutionState(searchResponse, 0);

assertHitCount(searchResponse, 10);

Stats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
assertThat(stats.getName(), equalTo("stats"));
assertThat(stats.getAvg(), equalTo((double) (2+3+4+5+6+7+8+9+10+11) / 10));
assertThat(stats.getMin(), equalTo(2.0));
assertThat(stats.getMax(), equalTo(11.0));
assertThat(stats.getSum(), equalTo((double) 2+3+4+5+6+7+8+9+10+11));
assertThat(stats.getCount(), equalTo(10l));
}

@Override
public void testScriptMultiValued() throws Exception {
SearchResponse searchResponse = client().prepareSearch("idx")

@@ -397,27 +374,6 @@ public class StatsTests extends AbstractNumericTestCase {
assertThat(stats.getCount(), equalTo(20l));
}

@Override
public void testScriptExplicitMultiValued() throws Exception {
SearchResponse searchResponse = client().prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(stats("stats").script(new Script("doc['values'].values")))
.execute().actionGet();

assertShardExecutionState(searchResponse, 0);

assertHitCount(searchResponse, 10);

Stats stats = searchResponse.getAggregations().get("stats");
assertThat(stats, notNullValue());
assertThat(stats.getName(), equalTo("stats"));
assertThat(stats.getAvg(), equalTo((double) (2+3+4+5+6+7+8+9+10+11+3+4+5+6+7+8+9+10+11+12) / 20));
assertThat(stats.getMin(), equalTo(2.0));
assertThat(stats.getMax(), equalTo(12.0));
assertThat(stats.getSum(), equalTo((double) 2+3+4+5+6+7+8+9+10+11+3+4+5+6+7+8+9+10+11+12));
assertThat(stats.getCount(), equalTo(20l));
}

@Override
public void testScriptMultiValuedWithParams() throws Exception {
Map<String, Object> params = new HashMap<>();

@@ -363,25 +363,6 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase {
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);
}

@Override
public void testScriptExplicitSingleValuedWithParams() throws Exception {
Map<String, Object> params = new HashMap<>();
params.put("dec", 1);
final double[] pcts = randomPercents(minValue - 1, maxValue - 1);
SearchResponse searchResponse = client().prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(randomCompression(percentileRanks("percentile_ranks"))
.script(
new Script("doc['value'].value - dec", ScriptType.INLINE, null, params))
.percentiles(pcts))
.execute().actionGet();

assertHitCount(searchResponse, 10);

final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);
}

@Override
public void testScriptMultiValued() throws Exception {
final double[] pcts = randomPercents(minValues, maxValues);

@@ -398,22 +379,6 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase {
assertConsistent(pcts, percentiles, minValues, maxValues);
}

@Override
public void testScriptExplicitMultiValued() throws Exception {
final double[] pcts = randomPercents(minValues, maxValues);
SearchResponse searchResponse = client().prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(randomCompression(percentileRanks("percentile_ranks"))
.script(new Script("doc['values'].values"))
.percentiles(pcts))
.execute().actionGet();

assertHitCount(searchResponse, 10);

final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
assertConsistent(pcts, percentiles, minValues, maxValues);
}

@Override
public void testScriptMultiValuedWithParams() throws Exception {
Map<String, Object> params = new HashMap<>();

@@ -347,25 +347,6 @@ public class TDigestPercentilesTests extends AbstractNumericTestCase {
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);
}

@Override
public void testScriptExplicitSingleValuedWithParams() throws Exception {
Map<String, Object> params = new HashMap<>();
params.put("dec", 1);
final double[] pcts = randomPercentiles();
SearchResponse searchResponse = client().prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(randomCompression(percentiles("percentiles"))
.script(
new Script("doc['value'].value - dec", ScriptType.INLINE, null, params))
.percentiles(pcts))
.execute().actionGet();

assertHitCount(searchResponse, 10);

final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);
}

@Override
public void testScriptMultiValued() throws Exception {
final double[] pcts = randomPercentiles();

@@ -382,22 +363,6 @@ public class TDigestPercentilesTests extends AbstractNumericTestCase {
assertConsistent(pcts, percentiles, minValues, maxValues);
}

@Override
public void testScriptExplicitMultiValued() throws Exception {
final double[] pcts = randomPercentiles();
SearchResponse searchResponse = client().prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(randomCompression(percentiles("percentiles"))
.script(new Script("doc['values'].values"))
.percentiles(pcts))
.execute().actionGet();

assertHitCount(searchResponse, 10);

final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
assertConsistent(pcts, percentiles, minValues, maxValues);
}

@Override
public void testScriptMultiValuedWithParams() throws Exception {
Map<String, Object> params = new HashMap<>();

@@ -80,7 +80,6 @@
renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java
renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/TopHitsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TopHitsTests.java
renamed: core/src/test/java/org/elasticsearch/index/mapper/TransformOnIndexMapperIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TransformOnIndexMapperTests.java
renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ValueCountTests.java
renamed: core/src/main/java/org/elasticsearch/script/groovy/GroovyScriptCompilationException.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java
renamed: core/src/test/java/org/elasticsearch/script/GroovyScriptIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyScriptTests.java
renamed: core/src/test/java/org/elasticsearch/script/GroovySecurityIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java

@@ -24,15 +24,12 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;

import static org.elasticsearch.cloud.azure.storage.AzureStorageService.Storage.*;

public class AzureStorageSettingsFilter extends AbstractComponent {

@Inject
public AzureStorageSettingsFilter(Settings settings, SettingsFilter settingsFilter) {
super(settings);
// Cloud storage API settings that need to be hidden
settingsFilter.addFilter(ACCOUNT);
settingsFilter.addFilter(KEY);
settingsFilter.addFilter("cloud.azure.storage.*");
}
}

@@ -0,0 +1,69 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.cloud.azure.storage;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.rest.FakeRestRequest;

import java.io.IOException;

import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.is;

public class AzureStorageSettingsFilterTest extends ESTestCase {
final static Settings settings = Settings.builder()
.put("cloud.azure.storage.azure1.account", "myaccount1")
.put("cloud.azure.storage.azure1.key", "mykey1")
.put("cloud.azure.storage.azure1.default", true)
.put("cloud.azure.storage.azure2.account", "myaccount2")
.put("cloud.azure.storage.azure2.key", "mykey2")
.put("cloud.azure.storage.azure3.account", "myaccount3")
.put("cloud.azure.storage.azure3.key", "mykey3")
.build();

public void testSettingsFiltering() throws IOException {

SettingsFilter settingsFilter = new SettingsFilter(Settings.EMPTY);

// We just add Azure filters
new AzureStorageSettingsFilter(Settings.EMPTY, settingsFilter);

// Test using direct filtering
Settings filteredSettings = SettingsFilter.filterSettings(settingsFilter.getPatterns(), settings);
assertThat(filteredSettings.getAsMap().keySet(), is(empty()));

// Test using toXContent filtering
RestRequest request = new FakeRestRequest();
settingsFilter.addFilterSettingParams(request);
XContentBuilder xContentBuilder = XContentBuilder.builder(JsonXContent.jsonXContent);
xContentBuilder.startObject();
settings.toXContent(xContentBuilder, request);
xContentBuilder.endObject();
String filteredSettingsString = xContentBuilder.string();
filteredSettings = Settings.builder().loadFromSource(filteredSettingsString).build();
assertThat(filteredSettings.getAsMap().keySet(), is(empty()));
}

}

@@ -134,7 +134,9 @@ public class DefaultS3OutputStream extends S3OutputStream {
throw new RuntimeException(impossible);
}

PutObjectRequest putRequest = new PutObjectRequest(bucketName, blobName, inputStream, md).withCannedAcl(blobStore.getCannedACL());
PutObjectRequest putRequest = new PutObjectRequest(bucketName, blobName, inputStream, md)
.withStorageClass(blobStore.getStorageClass())
.withCannedAcl(blobStore.getCannedACL());
PutObjectResult putObjectResult = blobStore.client().putObject(putRequest);

String localMd5 = Base64.encodeAsString(messageDigest.digest());

@@ -167,7 +169,10 @@ public class DefaultS3OutputStream extends S3OutputStream {
}

protected String doInitialize(S3BlobStore blobStore, String bucketName, String blobName, boolean serverSideEncryption) {
InitiateMultipartUploadRequest request = new InitiateMultipartUploadRequest(bucketName, blobName).withCannedACL(blobStore.getCannedACL());
InitiateMultipartUploadRequest request = new InitiateMultipartUploadRequest(bucketName, blobName)
.withCannedACL(blobStore.getCannedACL())
.withStorageClass(blobStore.getStorageClass());

if (serverSideEncryption) {
ObjectMetadata md = new ObjectMetadata();
md.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);

@@ -21,14 +21,8 @@ package org.elasticsearch.cloud.aws.blobstore;

import com.amazonaws.AmazonClientException;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.AmazonS3Exception;
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.amazonaws.services.s3.model.CreateBucketRequest;
import com.amazonaws.services.s3.model.DeleteObjectsRequest;
import com.amazonaws.services.s3.model.*;
import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.S3ObjectSummary;

import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.blobstore.BlobPath;

@@ -40,6 +34,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;

import java.util.ArrayList;
import java.util.Locale;

/**
*

@@ -62,8 +57,10 @@ public class S3BlobStore extends AbstractComponent implements BlobStore {

private final CannedAccessControlList cannedACL;

private final StorageClass storageClass;

public S3BlobStore(Settings settings, AmazonS3 client, String bucket, @Nullable String region, boolean serverSideEncryption,
ByteSizeValue bufferSize, int maxRetries, String cannedACL) {
ByteSizeValue bufferSize, int maxRetries, String cannedACL, String storageClass) {
super(settings);
this.client = client;
this.bucket = bucket;

@@ -77,6 +74,7 @@ public class S3BlobStore extends AbstractComponent implements BlobStore {

this.cannedACL = initCannedACL(cannedACL);
this.numberOfRetries = maxRetries;
this.storageClass = initStorageClass(storageClass);

// Note: the method client.doesBucketExist() may return 'true' if the bucket exists
// but we don't have access to it (ie, 403 Forbidden response code)

@@ -196,6 +194,25 @@ public class S3BlobStore extends AbstractComponent implements BlobStore {
return cannedACL;
}

public StorageClass getStorageClass() { return storageClass; }

public static StorageClass initStorageClass(String storageClass) {
if (storageClass == null || storageClass.equals("")) {
return StorageClass.Standard;
}

try {
StorageClass _storageClass = StorageClass.fromValue(storageClass.toUpperCase(Locale.ENGLISH));
if(_storageClass.equals(StorageClass.Glacier)) {
throw new BlobStoreException("Glacier storage class is not supported");
}

return _storageClass;
} catch (IllegalArgumentException illegalArgumentException) {
throw new BlobStoreException("`" + storageClass + "` is not a valid S3 Storage Class.");
}
}

/**
* Constructs canned acl from string
*/

@@ -118,13 +118,15 @@ public class S3Repository extends BlobStoreRepository {
this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", settings.getAsBytesSize("repositories.s3.chunk_size", new ByteSizeValue(100, ByteSizeUnit.MB)));
this.compress = repositorySettings.settings().getAsBoolean("compress", settings.getAsBoolean("repositories.s3.compress", false));

// Parse and validate the user's S3 Storage Class setting
String storageClass = repositorySettings.settings().get("storage_class", settings.get("repositories.s3.storage_class", null));
String cannedACL = repositorySettings.settings().get("canned_acl", settings.get("repositories.s3.canned_acl", null));

logger.debug("using bucket [{}], region [{}], endpoint [{}], protocol [{}], chunk_size [{}], server_side_encryption [{}], buffer_size [{}], max_retries [{}], cannedACL [{}]",
bucket, region, endpoint, protocol, chunkSize, serverSideEncryption, bufferSize, maxRetries, cannedACL);
logger.debug("using bucket [{}], region [{}], endpoint [{}], protocol [{}], chunk_size [{}], server_side_encryption [{}], buffer_size [{}], max_retries [{}], cannedACL [{}], storageClass [{}]",
bucket, region, endpoint, protocol, chunkSize, serverSideEncryption, bufferSize, maxRetries, cannedACL, storageClass);

blobStore = new S3BlobStore(settings, s3Service.client(endpoint, protocol, region, repositorySettings.settings().get("access_key"), repositorySettings.settings().get("secret_key"), maxRetries),
bucket, region, serverSideEncryption, bufferSize, maxRetries, cannedACL);
bucket, region, serverSideEncryption, bufferSize, maxRetries, cannedACL, storageClass);

String basePath = repositorySettings.settings().get("base_path", settings.get("repositories.s3.base_path"));
if (Strings.hasLength(basePath)) {
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.cloud.aws.blobstore;
|
||||
|
||||
import com.amazonaws.services.s3.model.CannedAccessControlList;
|
||||
import com.amazonaws.services.s3.model.StorageClass;
|
||||
import org.elasticsearch.common.blobstore.BlobStoreException;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
|
@@ -58,4 +59,37 @@ public class S3BlobStoreTests extends ESTestCase {
        assertThat(ex.getMessage(), equalTo("cannedACL is not valid: [test_invalid]"));
    }
}

    public void testInitStorageClass() throws IOException {
        // it should default to `standard`
        assertThat(S3BlobStore.initStorageClass(null), equalTo(StorageClass.Standard));
        assertThat(S3BlobStore.initStorageClass(""), equalTo(StorageClass.Standard));

        // it should accept [standard, standard_ia, reduced_redundancy]
        assertThat(S3BlobStore.initStorageClass("standard"), equalTo(StorageClass.Standard));
        assertThat(S3BlobStore.initStorageClass("standard_ia"), equalTo(StorageClass.StandardInfrequentAccess));
        assertThat(S3BlobStore.initStorageClass("reduced_redundancy"), equalTo(StorageClass.ReducedRedundancy));
    }

    public void testCaseInsensitiveStorageClass() throws IOException {
        assertThat(S3BlobStore.initStorageClass("sTandaRd"), equalTo(StorageClass.Standard));
        assertThat(S3BlobStore.initStorageClass("sTandaRd_Ia"), equalTo(StorageClass.StandardInfrequentAccess));
        assertThat(S3BlobStore.initStorageClass("reduCED_redundancy"), equalTo(StorageClass.ReducedRedundancy));
    }

    public void testInvalidStorageClass() throws IOException {
        try {
            S3BlobStore.initStorageClass("whatever");
        } catch (BlobStoreException ex) {
            assertThat(ex.getMessage(), equalTo("`whatever` is not a valid S3 Storage Class."));
        }
    }

    public void testRejectGlacierStorageClass() throws IOException {
        try {
            S3BlobStore.initStorageClass("glacier");
        } catch (BlobStoreException ex) {
            assertThat(ex.getMessage(), equalTo("Glacier storage class is not supported"));
        }
    }
}
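The tests above pin down the expected behavior of S3BlobStore.initStorageClass (the default, the accepted values, case-insensitivity, rejection of unknown values, and the Glacier rejection), but the method body itself is not part of this excerpt. A minimal sketch consistent with those assertions, built on the AWS SDK's StorageClass.fromValue, could look like the following; the actual implementation may differ:

    // Sketch only: reconstructed from the assertions in S3BlobStoreTests above.
    // Assumes java.util.Locale is imported.
    public static StorageClass initStorageClass(String storageClass) {
        if (storageClass == null || storageClass.equals("")) {
            return StorageClass.Standard; // default when the setting is absent or empty
        }
        try {
            // fromValue expects the AWS wire names (e.g. "STANDARD_IA"), so upper-case
            // the input first; this also makes the setting case-insensitive.
            StorageClass sc = StorageClass.fromValue(storageClass.toUpperCase(Locale.ENGLISH));
            if (sc.equals(StorageClass.Glacier)) {
                throw new BlobStoreException("Glacier storage class is not supported");
            }
            return sc;
        } catch (IllegalArgumentException e) {
            // fromValue throws IllegalArgumentException for unknown values
            throw new BlobStoreException("`" + storageClass + "` is not a valid S3 Storage Class.");
        }
    }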
@@ -12,6 +12,7 @@
            access_key: "AKVAIQBF2RECL7FJWGJQ"
            secret_key: "vExyMThREXeRMm/b/LRzEB8jWwvzQeXgjqMX+6br"
            canned_acl: "public-read"
            storage_class: "standard"

  # Get repository
  - do:
@@ -94,11 +94,7 @@ public abstract class AbstractNumericTestCase extends ESIntegTestCase {

    public abstract void testScriptSingleValuedWithParams() throws Exception;

    public abstract void testScriptExplicitSingleValuedWithParams() throws Exception;

    public abstract void testScriptMultiValued() throws Exception;

    public abstract void testScriptExplicitMultiValued() throws Exception;

    public abstract void testScriptMultiValuedWithParams() throws Exception;
}
@@ -25,10 +25,13 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.EmptyClusterInfoService;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.routing.allocation.StartedRerouteAllocation;
import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocators;
import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;

@@ -36,7 +39,10 @@ import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.DummyTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.gateway.AsyncShardFetch;
import org.elasticsearch.gateway.GatewayAllocator;
import org.elasticsearch.gateway.ReplicaShardAllocator;
import org.elasticsearch.indices.store.TransportNodesListShardStoreMetaData;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.test.gateway.NoopGatewayAllocator;

@@ -46,6 +52,7 @@ import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.function.Function;

import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList;
@@ -179,4 +186,48 @@ public abstract class ESAllocationTestCase extends ESTestCase {
        return decision;
    }
}

    /**
     * Mocks behavior in ReplicaShardAllocator to remove delayed shards from the list of unassigned shards so they don't get reassigned yet.
     * Also computes the delay in UnassignedInfo based on a customizable time source.
     */
    protected static class DelayedShardsMockGatewayAllocator extends GatewayAllocator {
        private final ReplicaShardAllocator replicaShardAllocator = new ReplicaShardAllocator(Settings.EMPTY) {
            @Override
            protected AsyncShardFetch.FetchResult<TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData> fetchData(ShardRouting shard, RoutingAllocation allocation) {
                return new AsyncShardFetch.FetchResult<>(shard.shardId(), null, Collections.<String>emptySet(), Collections.<String>emptySet());
            }
        };

        private volatile Function<ShardRouting, Long> timeSource;

        public DelayedShardsMockGatewayAllocator() {
            super(Settings.EMPTY, null, null);
        }

        public void setTimeSource(Function<ShardRouting, Long> timeSource) {
            this.timeSource = timeSource;
        }

        @Override
        public void applyStartedShards(StartedRerouteAllocation allocation) {}

        @Override
        public void applyFailedShards(FailedRerouteAllocation allocation) {}

        @Override
        public boolean allocateUnassigned(RoutingAllocation allocation) {
            final RoutingNodes.UnassignedShards.UnassignedIterator unassignedIterator = allocation.routingNodes().unassigned().iterator();
            boolean changed = false;
            while (unassignedIterator.hasNext()) {
                ShardRouting shard = unassignedIterator.next();
                if (shard.primary() || shard.allocatedPostIndexCreate() == false) {
                    continue;
                }
                changed |= replicaShardAllocator.ignoreUnassignedIfDelayed(timeSource == null ? System.nanoTime() : timeSource.apply(shard),
                        allocation, unassignedIterator, shard);
            }
            return changed;
        }
    }
}
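A brief usage sketch for the mock above (the scaffolding and variable names here are invented for illustration): pinning the time source makes the delay computation deterministic in a test.

    // Hypothetical usage inside an ESAllocationTestCase subclass.
    DelayedShardsMockGatewayAllocator gatewayAllocator = new DelayedShardsMockGatewayAllocator();
    final long frozenNanos = System.nanoTime();
    // Every shard now sees the same fixed timestamp, so whether a delayed replica
    // stays in the ignored list no longer depends on wall-clock timing.
    gatewayAllocator.setTimeSource(shard -> frozenNanos);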
@@ -0,0 +1,68 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.test.rest;

import java.util.regex.Pattern;

/**
 * Matches blacklist patterns.
 *
 * Currently the following syntax is supported:
 *
 * <ul>
 * <li>Exact matches, as in <code>cat.aliases/10_basic/Empty cluster</code></li>
 * <li>Wildcard matches within the same segment of a path, as in <code>indices.get/10_basic/*allow_no_indices*</code>. This will
 * match <code>indices.get/10_basic/allow_no_indices</code> and <code>indices.get/10_basic/allow_no_indices_at_all</code> but not
 * <code>indices.get/10_basic/advanced/allow_no_indices</code> (contains an additional segment)</li>
 * </ul>
 *
 * Each blacklist pattern is a suffix match on the path. Empty patterns are not allowed.
 */
final class BlacklistedPathPatternMatcher {
    private final Pattern pattern;

    /**
     * Constructs a new <code>BlacklistedPathPatternMatcher</code> instance from the provided suffix pattern.
     *
     * @param p The suffix pattern. Must be a non-empty string.
     */
    BlacklistedPathPatternMatcher(String p) {
        // guard against accidentally matching everything, as an empty string leads to the pattern ".*", which matches everything
        if (p == null || p.trim().isEmpty()) {
            throw new IllegalArgumentException("Empty blacklist patterns are not supported");
        }
        // very simple transformation from wildcard to a proper regex
        String finalPattern = p
                .replaceAll("\\*", "[^/]*") // support wildcard matches (within a single path segment)
                .replaceAll("\\\\,", ","); // restore previously escaped ',' in paths.

        // suffix match
        pattern = Pattern.compile(".*" + finalPattern);
    }

    /**
     * Checks whether the provided path matches the suffix pattern, i.e. "/foo/bar" will match the pattern "bar".
     *
     * @param path The path to match. Must not be null.
     * @return true iff this path is a suffix match.
     */
    public boolean isSuffixMatch(String path) {
        return pattern.matcher(path).matches();
    }
}
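To make the wildcard-to-regex transformation concrete, here is what the constructor above produces for one of the javadoc's own examples (the comment shows the internal regex; illustrative only):

    BlacklistedPathPatternMatcher matcher =
            new BlacklistedPathPatternMatcher("indices.get/10_basic/*allow_no_indices*");
    // internal pattern: ".*indices.get/10_basic/[^/]*allow_no_indices[^/]*"
    matcher.isSuffixMatch("/suite/indices.get/10_basic/we_allow_no_indices");       // true
    matcher.isSuffixMatch("/suite/indices.get/10_basic/advanced/allow_no_indices"); // false: extra segment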
@@ -29,7 +29,6 @@ import org.apache.lucene.util.LuceneTestCase.SuppressFsync;
import org.apache.lucene.util.TimeUnits;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.node.Node;

@@ -65,7 +64,6 @@ import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.PathMatcher;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Collections;
@@ -123,9 +121,18 @@ public abstract class ESRestTestCase extends ESIntegTestCase {
    private static final String DEFAULT_TESTS_PATH = "/rest-api-spec/test";
    private static final String DEFAULT_SPEC_PATH = "/rest-api-spec/api";

    private static final String PATHS_SEPARATOR = ",";
    /**
     * This separator pattern matches ',' except when it is preceded by a '\'. This allows us to support ',' within paths when it is
     * escaped with a backslash.
     *
     * For example, the path string "/a/b/c\,d/e/f,/foo/bar,/baz" is separated into "/a/b/c\,d/e/f", "/foo/bar" and "/baz".
     *
     * For reference, this regular expression feature is known as zero-width negative look-behind.
     */
    private static final String PATHS_SEPARATOR = "(?<!\\\\),";

    private final PathMatcher[] blacklistPathMatchers;
    private final List<BlacklistedPathPatternMatcher> blacklistPathMatchers = new ArrayList<>();
    private static RestTestExecutionContext restTestExecutionContext;

    private final RestTestCandidate testCandidate;
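The look-behind separator can be verified directly with the example from the comment above (a standalone check, not part of the change):

    // Split on ',' only when it is not preceded by a backslash.
    String[] parts = "/a/b/c\\,d/e/f,/foo/bar,/baz".split("(?<!\\\\),");
    // parts == { "/a/b/c\,d/e/f", "/foo/bar", "/baz" } -- the escaped comma is not a split point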
@@ -133,14 +140,8 @@ public abstract class ESRestTestCase extends ESIntegTestCase {
    public ESRestTestCase(RestTestCandidate testCandidate) {
        this.testCandidate = testCandidate;
        String[] blacklist = resolvePathsProperty(REST_TESTS_BLACKLIST, null);
        if (blacklist != null) {
            blacklistPathMatchers = new PathMatcher[blacklist.length];
            int i = 0;
            for (String glob : blacklist) {
                blacklistPathMatchers[i++] = PathUtils.getDefaultFileSystem().getPathMatcher("glob:" + glob);
            }
        } else {
            blacklistPathMatchers = new PathMatcher[0];
        for (String entry : blacklist) {
            this.blacklistPathMatchers.add(new BlacklistedPathPatternMatcher(entry));
        }
        }
@@ -226,7 +227,7 @@ public abstract class ESRestTestCase extends ESIntegTestCase {
    private static String[] resolvePathsProperty(String propertyName, String defaultValue) {
        String property = System.getProperty(propertyName);
        if (!Strings.hasLength(property)) {
            return defaultValue == null ? null : new String[]{defaultValue};
            return defaultValue == null ? Strings.EMPTY_ARRAY : new String[]{defaultValue};
        } else {
            return property.split(PATHS_SEPARATOR);
        }
@@ -324,11 +325,9 @@ public abstract class ESRestTestCase extends ESIntegTestCase {
    @Before
    public void reset() throws IOException, RestException {
        //skip test if it matches one of the blacklist globs
        for (PathMatcher blacklistedPathMatcher : blacklistPathMatchers) {
            //we need to replace a few characters otherwise the test section name can't be parsed as a path on windows
            String testSection = testCandidate.getTestSection().getName().replace("*", "").replace("\\", "/").replaceAll("\\s+/", "/").replace(":", "").trim();
            String testPath = testCandidate.getSuitePath() + "/" + testSection;
            assumeFalse("[" + testCandidate.getTestPath() + "] skipped, reason: blacklisted", blacklistedPathMatcher.matches(PathUtils.get(testPath)));
        for (BlacklistedPathPatternMatcher blacklistedPathMatcher : blacklistPathMatchers) {
            String testPath = testCandidate.getSuitePath() + "/" + testCandidate.getTestSection().getName();
            assumeFalse("[" + testCandidate.getTestPath() + "] skipped, reason: blacklisted", blacklistedPathMatcher.isSuffixMatch(testPath));
        }
        //The client needs non static info to get initialized, therefore it can't be initialized in the before class
        restTestExecutionContext.initClient(cluster().httpAddresses(), restClientSettings());
@@ -0,0 +1,72 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.test.rest;

import org.elasticsearch.test.ESTestCase;

public class BlacklistedPathPatternMatcherTests extends ESTestCase {

    public void testMatchesExact() {
        // suffix match
        assertMatch("cat.aliases/10_basic/Empty cluster", "/some/suite_path/cat.aliases/10_basic/Empty cluster");
        // exact match
        assertMatch("cat.aliases/10_basic/Empty cluster", "cat.aliases/10_basic/Empty cluster");
        // additional text at the end should not match
        assertNoMatch("cat.aliases/10_basic/Empty cluster", "cat.aliases/10_basic/Empty clusters in here");
    }

    public void testMatchesSimpleWildcardPatterns() {
        assertMatch("termvector/20_issue7121/*", "/suite/termvector/20_issue7121/test_first");
        assertMatch("termvector/20_issue7121/*", "/suite/termvector/20_issue7121/");
        // do not cross segment boundaries
        assertNoMatch("termvector/20_issue7121/*", "/suite/termvector/20_issue7121/test/first");
    }

    public void testMatchesMultiWildcardPatterns() {
        assertMatch("indices.get/10_basic/*allow_no_indices*", "/suite/indices.get/10_basic/we_allow_no_indices");
        assertMatch("indices.get/10_basic/*allow_no_indices*", "/suite/indices.get/10_basic/we_allow_no_indices_at_all");
        assertNoMatch("indices.get/10_basic/*allow_no_indices*", "/suite/indices.get/10_basic/we_allow_no_indices_at_all/here");
        assertMatch("indices.get/*/*allow_no_indices*", "/suite/indices.get/10_basic/we_allow_no_indices_at_all");
        assertMatch("indices.get/*/*allow_no_indices*", "/suite/indices.get/20_basic/we_allow_no_indices_at_all");
        assertMatch("*/*/*allow_no_indices*", "/suite/path/to/test/indices.get/20_basic/we_allow_no_indices_at_all");
    }

    public void testMatchesPatternsWithEscapedCommas() {
        assertMatch("indices.get/10_basic\\,20_advanced/foo", "/suite/indices.get/10_basic,20_advanced/foo");
    }

    public void testMatchesMixedPatterns() {
        assertMatch("indices.get/*/10_basic\\,20_advanced/*foo*", "/suite/indices.get/all/10_basic,20_advanced/foo");
        assertMatch("indices.get/*/10_basic\\,20_advanced/*foo*", "/suite/indices.get/all/10_basic,20_advanced/my_foo");
        assertMatch("indices.get/*/10_basic\\,20_advanced/*foo*", "/suite/indices.get/all/10_basic,20_advanced/foo_bar");
    }

    private void assertMatch(String pattern, String path) {
        BlacklistedPathPatternMatcher matcher = new BlacklistedPathPatternMatcher(pattern);
        assertTrue("Pattern [" + pattern + "] should have matched path [" + path + "]", matcher.isSuffixMatch(path));
    }

    private void assertNoMatch(String pattern, String path) {
        BlacklistedPathPatternMatcher matcher = new BlacklistedPathPatternMatcher(pattern);
        assertFalse("Pattern [" + pattern + "] should not have matched path [" + path + "]", matcher.isSuffixMatch(path));
    }
}