Merge branch 'master' into feature/seq_no

commit 04be4e4905

build.gradle | 20
@@ -47,7 +47,7 @@ subprojects {
}
}
extraArchive {
javadoc = false
javadoc = true
tests = false
}
// we have our own username/password prompts so that they only happen once
@@ -87,8 +87,8 @@ allprojects {
}

subprojects {
// include license and notice in jars
gradle.projectsEvaluated {
project.afterEvaluate {
// include license and notice in jars
tasks.withType(Jar) {
into('META-INF') {
from project.rootProject.rootDir
@@ -96,6 +96,15 @@ subprojects {
include 'NOTICE.txt'
}
}
// ignore missing javadocs
tasks.withType(Javadoc) { Javadoc javadoc ->
// the -quiet here is because of a bug in gradle, in that adding a string option
// by itself is not added to the options. By adding quiet, both this option and
// the "value" -quiet is added, separated by a space. This is ok since the javadoc
// command already adds -quiet, so we are just duplicating it
// see https://discuss.gradle.org/t/add-custom-javadoc-option-that-does-not-take-an-argument/5959
javadoc.options.addStringOption('Xdoclint:all,-missing', '-quiet')
}
}

configurations {
@@ -163,11 +172,6 @@ task buildSrcEclipse(type: GradleBuild) {
}
tasks.eclipse.dependsOn(buildSrcEclipse)

task clean(type: GradleBuild) {
buildFile = 'buildSrc/build.gradle'
tasks = ['clean']
}

// we need to add the same --debug-jvm option as
// the real RunTask has, so we can pass it through
class Run extends DefaultTask {
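The Javadoc hunk above works around a Gradle quirk: a string option added with no value is silently dropped, so '-quiet' is supplied as a dummy value to force '-Xdoclint:all,-missing' onto the javadoc command line. A minimal standalone sketch of that same workaround, assuming an ordinary java project (only the option names come from this commit):

// hypothetical standalone build.gradle showing the -Xdoclint/-quiet workaround described above
apply plugin: 'java'

tasks.withType(Javadoc) { Javadoc javadoc ->
    // '-quiet' acts as the option value; javadoc adds -quiet itself, so the duplicate is harmless
    javadoc.options.addStringOption('Xdoclint:all,-missing', '-quiet')
}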
@@ -18,6 +18,8 @@
*/
package org.elasticsearch.gradle

import org.gradle.process.ExecResult

import java.time.ZonedDateTime
import java.time.ZoneOffset

@@ -63,18 +65,35 @@ class BuildPlugin implements Plugin<Project> {
PrecommitTasks.configure(project)
}

/** Performs checks on the build environment and prints information about the build environment. */
static void globalBuildInfo(Project project) {
if (project.rootProject.ext.has('buildChecksDone') == false) {
String javaHome = System.getenv('JAVA_HOME')
String javaHome = findJavaHome()
File gradleJavaHome = Jvm.current().javaHome
String gradleJavaVersionDetails = "${System.getProperty('java.vendor')} ${System.getProperty('java.version')}" +
" [${System.getProperty('java.vm.name')} ${System.getProperty('java.vm.version')}]"

String javaVersionDetails = gradleJavaVersionDetails
String javaVersion = System.getProperty('java.version')
JavaVersion javaVersionEnum = JavaVersion.current()
if (new File(javaHome).canonicalPath != gradleJavaHome.canonicalPath) {
javaVersionDetails = findJavaVersionDetails(project, javaHome)
javaVersionEnum = JavaVersion.toVersion(findJavaSpecificationVersion(project, javaHome))
javaVersion = findJavaVersion(project, javaHome)
}

// Build debugging info
println '======================================='
println 'Elasticsearch Build Hamster says Hello!'
println '======================================='
println " Gradle Version : ${project.gradle.gradleVersion}"
println " JDK Version : ${System.getProperty('java.runtime.version')} (${System.getProperty('java.vendor')})"
println " JAVA_HOME : ${javaHome == null ? 'not set' : javaHome}"
println " OS Info : ${System.getProperty('os.name')} ${System.getProperty('os.version')} (${System.getProperty('os.arch')})"
println " Gradle Version : ${project.gradle.gradleVersion}"
println " OS Info : ${System.getProperty('os.name')} ${System.getProperty('os.version')} (${System.getProperty('os.arch')})"
if (gradleJavaVersionDetails != javaVersionDetails) {
println " JDK Version (gradle) : ${gradleJavaVersionDetails}"
println " JDK Version (compile) : ${javaVersionDetails}"
} else {
println " JDK Version : ${gradleJavaVersionDetails}"
}

// enforce gradle version
GradleVersion minGradle = GradleVersion.version('2.8')
@@ -83,31 +102,74 @@ class BuildPlugin implements Plugin<Project> {
}

// enforce Java version
if (JavaVersion.current() < minimumJava) {
if (javaVersionEnum < minimumJava) {
throw new GradleException("Java ${minimumJava} or above is required to build Elasticsearch")
}

// find java home so eg tests can use it to set java to run with
if (javaHome == null) {
if (System.getProperty("idea.active") != null) {
// intellij doesn't set JAVA_HOME, so we use the jdk gradle was run with
javaHome = Jvm.current().javaHome
} else {
throw new GradleException('JAVA_HOME must be set to build Elasticsearch')
}
}
project.rootProject.ext.javaHome = javaHome
project.rootProject.ext.javaVersion = javaVersion
project.rootProject.ext.buildChecksDone = true
}
project.targetCompatibility = minimumJava
project.sourceCompatibility = minimumJava
// set java home for each project, so they dont have to find it in the root project
project.ext.javaHome = project.rootProject.ext.javaHome
project.ext.javaVersion = project.rootProject.ext.javaVersion
}

/** Return the name
*/
static String transitiveDepConfigName(String groupId, String artifactId, String version) {
/** Finds and enforces JAVA_HOME is set */
private static String findJavaHome() {
String javaHome = System.getenv('JAVA_HOME')
if (javaHome == null) {
if (System.getProperty("idea.active") != null) {
// intellij doesn't set JAVA_HOME, so we use the jdk gradle was run with
javaHome = Jvm.current().javaHome
} else {
throw new GradleException('JAVA_HOME must be set to build Elasticsearch')
}
}
return javaHome
}

/** Finds printable java version of the given JAVA_HOME */
private static String findJavaVersionDetails(Project project, String javaHome) {
String versionInfoScript = 'print(' +
'java.lang.System.getProperty("java.vendor") + " " + java.lang.System.getProperty("java.version") + ' +
'" [" + java.lang.System.getProperty("java.vm.name") + " " + java.lang.System.getProperty("java.vm.version") + "]");'
return runJavascript(project, javaHome, versionInfoScript).trim()
}

/** Finds the parsable java specification version */
private static String findJavaSpecificationVersion(Project project, String javaHome) {
String versionScript = 'print(java.lang.System.getProperty("java.specification.version"));'
return runJavascript(project, javaHome, versionScript)
}

/** Finds the parsable java specification version */
private static String findJavaVersion(Project project, String javaHome) {
String versionScript = 'print(java.lang.System.getProperty("java.version"));'
return runJavascript(project, javaHome, versionScript)
}

/** Runs the given javascript using jjs from the jdk, and returns the output */
private static String runJavascript(Project project, String javaHome, String script) {
File tmpScript = File.createTempFile('es-gradle-tmp', '.js')
tmpScript.setText(script, 'UTF-8')
ByteArrayOutputStream output = new ByteArrayOutputStream()
ExecResult result = project.exec {
executable = new File(javaHome, 'bin/jjs')
args tmpScript.toString()
standardOutput = output
errorOutput = new ByteArrayOutputStream()
ignoreExitValue = true // we do not fail so we can first cleanup the tmp file
}
java.nio.file.Files.delete(tmpScript.toPath())
result.assertNormalExitValue()
return output.toString('UTF-8').trim()
}

/** Return the configuration name used for finding transitive deps of the given dependency. */
private static String transitiveDepConfigName(String groupId, String artifactId, String version) {
return "_transitive_${groupId}:${artifactId}:${version}"
}

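runJavascript() above shells out to the jjs binary of the JDK pointed to by JAVA_HOME, so the build can report on a compile JDK that differs from the JVM running Gradle. A rough standalone sketch of the same probe in plain Groovy, outside project.exec (the JDK path is a placeholder and a JDK 8+ that ships bin/jjs is assumed):

// hypothetical standalone probe mirroring runJavascript() above
String javaHome = '/path/to/other/jdk'   // placeholder; normally taken from JAVA_HOME
File probe = File.createTempFile('es-gradle-tmp', '.js')
probe.setText('print(java.lang.System.getProperty("java.specification.version"));', 'UTF-8')
Process jjs = [new File(javaHome, 'bin/jjs').path, probe.path].execute()
jjs.waitFor()
println jjs.inputStream.getText('UTF-8').trim()   // e.g. 1.8
java.nio.file.Files.delete(probe.toPath())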
@@ -224,7 +286,9 @@ class BuildPlugin implements Plugin<Project> {
project.afterEvaluate {
// fail on all javac warnings
project.tasks.withType(JavaCompile) {
options.compilerArgs << '-Werror' << '-Xlint:all' << '-Xdoclint:all/private' << '-Xdoclint:-missing'
options.fork = true
options.forkOptions.executable = new File(project.javaHome, 'bin/javac')
options.compilerArgs << '-Werror' << '-Xlint:all' << '-Xdoclint:all' << '-Xdoclint:-missing'
options.encoding = 'UTF-8'
}
}
@@ -239,7 +303,8 @@ class BuildPlugin implements Plugin<Project> {
jarTask.manifest.attributes(
'X-Compile-Elasticsearch-Version': VersionProperties.elasticsearch,
'X-Compile-Lucene-Version': VersionProperties.lucene,
'Build-Date': ZonedDateTime.now(ZoneOffset.UTC))
'Build-Date': ZonedDateTime.now(ZoneOffset.UTC),
'Build-Java-Version': project.javaVersion)
if (jarTask.manifest.attributes.containsKey('Change') == false) {
logger.warn('Building without git revision id.')
jarTask.manifest.attributes('Change': 'N/A')
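The manifest hunk above records a Build-Java-Version entry alongside Build-Date in every jar. As an illustration only (the jar path is a placeholder, not from this commit), the entries can be read back with the standard java.util.jar API:

// hypothetical snippet that prints the manifest attributes added above
import java.util.jar.JarFile

def jar = new JarFile('/path/to/some-elasticsearch-module.jar')   // placeholder path
def attrs = jar.manifest.mainAttributes
['X-Compile-Elasticsearch-Version', 'X-Compile-Lucene-Version',
 'Build-Date', 'Build-Java-Version', 'Change'].each { name ->
    println "${name}: ${attrs.getValue(name)}"                    // null when an attribute is absent
}
jar.close()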
@@ -18,9 +18,12 @@
*/
package org.elasticsearch.gradle.precommit

import org.gradle.api.GradleException
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.file.FileCollection
import org.gradle.api.plugins.JavaBasePlugin
import org.gradle.api.tasks.Exec
import org.gradle.api.tasks.TaskContainer

/**
@@ -32,7 +35,8 @@ class PrecommitTasks {
static void configure(Project project) {
List precommitTasks = [
configureForbiddenApis(project),
configureForbiddenPatterns(project.tasks)]
configureForbiddenPatterns(project.tasks),
configureJarHell(project)]

Map precommitOptions = [
name: 'precommit',
@@ -90,4 +94,40 @@ class PrecommitTasks {
rule name: 'tab', pattern: /\t/
}
}

/**
* Adds a task to run jar hell before on the test classpath.
*
* We use a simple "marker" file that we touch when the task succeeds
* as the task output. This is compared against the modified time of the
* inputs (ie the jars/class files).
*/
static Task configureJarHell(Project project) {
File successMarker = new File(project.buildDir, 'markers/jarHell')
Exec task = project.tasks.create(name: 'jarHell', type: Exec)
FileCollection testClasspath = project.sourceSets.test.runtimeClasspath
task.dependsOn(testClasspath)
task.inputs.files(testClasspath)
task.outputs.file(successMarker)
task.executable = new File(project.javaHome, 'bin/java')
task.doFirst({
task.args('-cp', testClasspath.asPath, 'org.elasticsearch.bootstrap.JarHell')
})
if (task.logger.isInfoEnabled() == false) {
task.standardOutput = new ByteArrayOutputStream()
task.errorOutput = task.standardOutput
task.ignoreExitValue = true
task.doLast({
if (execResult.exitValue != 0) {
logger.error(standardOutput.toString())
throw new GradleException("JarHell failed")
}
})
}
task.doLast({
successMarker.parentFile.mkdirs()
successMarker.setText("", 'UTF-8')
})
return task
}
}
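The javadoc above describes the marker-file trick used to make the jar-hell check incremental: the empty marker is declared as the task's only output, so Gradle compares its timestamp against the classpath inputs and skips the task when nothing changed. A minimal Gradle sketch of the same pattern with hypothetical task and file names (not part of this commit):

// hypothetical build.gradle fragment illustrating the marker-file pattern above
task expensiveCheck {
    File marker = new File(buildDir, 'markers/expensiveCheck')
    inputs.files(fileTree('src'))       // assumed inputs; the real task scans the test runtime classpath
    outputs.file(marker)                // the marker is the only declared output
    doLast {
        // ... run the real check here; only touch the marker when it succeeds ...
        marker.parentFile.mkdirs()
        marker.setText('', 'UTF-8')
    }
}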
@@ -213,7 +213,7 @@ class ClusterFormationTasks {
static Task configureStartTask(String name, Project project, Task setup, File cwd, ClusterConfiguration config, String clusterName, File pidFile, File home) {
Map esEnv = [
'JAVA_HOME' : project.javaHome,
'JAVA_OPTS': config.jvmArgs
'ES_GC_OPTS': config.jvmArgs // we pass these with the undocumented gc opts so the argline can set gc, etc
]
List<String> esProps = config.systemProperties.collect { key, value -> "-D${key}=${value}" }
for (Map.Entry<String, String> property : System.properties.entrySet()) {
@@ -242,7 +242,7 @@ class ClusterFormationTasks {
// gradle task options are not processed until the end of the configuration phase
if (config.debug) {
println 'Running elasticsearch in debug mode, suspending until connected on port 8000'
esEnv['JAVA_OPTS'] += ' -agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000'
esEnv['JAVA_OPTS'] = '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000'
}

// Due to how ant exec works with the spawn option, we lose all stdout/stderr from the
@@ -309,10 +309,10 @@ class ClusterFormationTasks {
if (ant.properties.containsKey("failed${name}".toString()) || failedMarker.exists()) {
if (logger.isInfoEnabled() == false) {
// We already log the command at info level. No need to do it twice.
logger.error(esCommandString)
esCommandString.eachLine { line -> logger.error(line) }
}
// the waitfor failed, so dump any output we got (may be empty if info logging, but that is ok)
logger.error(buffer.toString('UTF-8'))
buffer.toString('UTF-8').eachLine { line -> logger.error(line) }
// also dump the log file for the startup script (which will include ES logging output to stdout)
File startLog = new File(cwd, 'run.log')
if (startLog.exists()) {
@@ -28,7 +28,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;

/**
* An {@ClusterInfoRequest} that fetches {@link org.elasticsearch.search.warmer.IndexWarmersMetaData} for
* A {@link ClusterInfoRequest} that fetches {@link org.elasticsearch.search.warmer.IndexWarmersMetaData} for
* a list or all existing index warmers in the cluster-state
*/
public class GetWarmersRequest extends ClusterInfoRequest<GetWarmersRequest> {

@@ -123,7 +123,7 @@ public interface ClusterService extends LifecycleComponent<ClusterService> {
/**
* Returns the maximum wait time for tasks in the queue
*
* @returns A zero time value if the queue is empty, otherwise the time value oldest task waiting in the queue
* @return A zero time value if the queue is empty, otherwise the time value oldest task waiting in the queue
*/
TimeValue getMaxTaskWaitTime();
}

@@ -33,7 +33,7 @@ public class DiskUsage {
final long freeBytes;

/**
* Create a new DiskUsage, if {@code totalBytes} is 0, {@get getFreeDiskAsPercentage}
* Create a new DiskUsage, if {@code totalBytes} is 0, {@link #getFreeDiskAsPercentage()}
* will always return 100.0% free
*/
public DiskUsage(String nodeId, String nodeName, String path, long totalBytes, long freeBytes) {
@ -23,7 +23,6 @@ import java.io.IOException;
|
|||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
|
||||
import com.spatial4j.core.shape.ShapeCollection;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import com.spatial4j.core.shape.Shape;
|
||||
|
@ -34,11 +33,7 @@ import com.vividsolutions.jts.geom.LineString;
|
|||
|
||||
public abstract class BaseLineStringBuilder<E extends BaseLineStringBuilder<E>> extends PointCollection<E> {
|
||||
|
||||
protected BaseLineStringBuilder() {
|
||||
this(new ArrayList<Coordinate>());
|
||||
}
|
||||
|
||||
protected BaseLineStringBuilder(ArrayList<Coordinate> points) {
|
||||
public BaseLineStringBuilder(ArrayList<Coordinate> points) {
|
||||
super(points);
|
||||
}
|
||||
|
||||
|
@ -78,15 +73,15 @@ public abstract class BaseLineStringBuilder<E extends BaseLineStringBuilder<E>>
|
|||
|
||||
/**
|
||||
* Decompose a linestring given as array of coordinates at a vertical line.
|
||||
*
|
||||
*
|
||||
* @param dateline x-axis intercept of the vertical line
|
||||
* @param coordinates coordinates forming the linestring
|
||||
* @return array of linestrings given as coordinate arrays
|
||||
* @return array of linestrings given as coordinate arrays
|
||||
*/
|
||||
protected static Coordinate[][] decompose(double dateline, Coordinate[] coordinates) {
|
||||
int offset = 0;
|
||||
ArrayList<Coordinate[]> parts = new ArrayList<>();
|
||||
|
||||
|
||||
double shift = coordinates[0].x > DATELINE ? DATELINE : (coordinates[0].x < -DATELINE ? -DATELINE : 0);
|
||||
|
||||
for (int i = 1; i < coordinates.length; i++) {
|
||||
|
|
|
@ -42,8 +42,8 @@ import java.util.Iterator;
|
|||
* The {@link BasePolygonBuilder} implements the groundwork to create polygons. This contains
|
||||
* Methods to wrap polygons at the dateline and building shapes from the data held by the
|
||||
* builder.
|
||||
* Since this Builder can be embedded to other builders (i.e. {@link MultiPolygonBuilder})
|
||||
* the class of the embedding builder is given by the generic argument <code>E</code>
|
||||
* Since this Builder can be embedded to other builders (i.e. {@link MultiPolygonBuilder})
|
||||
* the class of the embedding builder is given by the generic argument <code>E</code>
|
||||
|
||||
* @param <E> type of the embedding class
|
||||
*/
|
||||
|
@ -51,11 +51,11 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
|
|||
|
||||
public static final GeoShapeType TYPE = GeoShapeType.POLYGON;
|
||||
|
||||
// Linear ring defining the shell of the polygon
|
||||
protected Ring<E> shell;
|
||||
// line string defining the shell of the polygon
|
||||
protected LineStringBuilder shell;
|
||||
|
||||
// List of linear rings defining the holes of the polygon
|
||||
protected final ArrayList<BaseLineStringBuilder<?>> holes = new ArrayList<>();
|
||||
// List of line strings defining the holes of the polygon
|
||||
protected final ArrayList<LineStringBuilder> holes = new ArrayList<>();
|
||||
|
||||
public BasePolygonBuilder(Orientation orientation) {
|
||||
super(orientation);
|
||||
|
@ -65,7 +65,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
|
|||
private E thisRef() {
|
||||
return (E)this;
|
||||
}
|
||||
|
||||
|
||||
public E point(double longitude, double latitude) {
|
||||
shell.point(longitude, latitude);
|
||||
return thisRef();
|
||||
|
@ -96,27 +96,17 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
|
|||
* @param hole linear ring defining the hole
|
||||
* @return this
|
||||
*/
|
||||
public E hole(BaseLineStringBuilder<?> hole) {
|
||||
public E hole(LineStringBuilder hole) {
|
||||
holes.add(hole);
|
||||
return thisRef();
|
||||
}
|
||||
|
||||
/**
|
||||
* build new hole to the polygon
|
||||
* @return this
|
||||
*/
|
||||
public Ring<E> hole() {
|
||||
Ring<E> hole = new Ring<>(thisRef());
|
||||
this.holes.add(hole);
|
||||
return hole;
|
||||
}
|
||||
|
||||
/**
|
||||
* Close the shell of the polygon
|
||||
* @return parent
|
||||
*/
|
||||
public ShapeBuilder close() {
|
||||
return shell.close();
|
||||
public BasePolygonBuilder close() {
|
||||
shell.close();
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -138,11 +128,11 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
|
|||
* within the polygon.
|
||||
* This Method also wraps the polygons at the dateline. In order to this fact the result may
|
||||
* contains more polygons and less holes than defined in the builder it self.
|
||||
*
|
||||
*
|
||||
* @return coordinates of the polygon
|
||||
*/
|
||||
public Coordinate[][][] coordinates() {
|
||||
int numEdges = shell.points.size()-1; // Last point is repeated
|
||||
int numEdges = shell.points.size()-1; // Last point is repeated
|
||||
for (int i = 0; i < holes.size(); i++) {
|
||||
numEdges += holes.get(i).points.size()-1;
|
||||
validateHole(shell, this.holes.get(i));
|
||||
|
@ -172,12 +162,12 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
|
|||
|
||||
protected XContentBuilder coordinatesArray(XContentBuilder builder, Params params) throws IOException {
|
||||
shell.coordinatesToXcontent(builder, true);
|
||||
for(BaseLineStringBuilder<?> hole : holes) {
|
||||
for(BaseLineStringBuilder hole : holes) {
|
||||
hole.coordinatesToXcontent(builder, true);
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
|
@ -188,7 +178,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
|
|||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
|
||||
public Geometry buildGeometry(GeometryFactory factory, boolean fixDateline) {
|
||||
if(fixDateline) {
|
||||
Coordinate[][][] polygons = coordinates();
|
||||
|
@ -207,7 +197,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
|
|||
protected Polygon toPolygon(GeometryFactory factory) {
|
||||
final LinearRing shell = linearRing(factory, this.shell.points);
|
||||
final LinearRing[] holes = new LinearRing[this.holes.size()];
|
||||
Iterator<BaseLineStringBuilder<?>> iterator = this.holes.iterator();
|
||||
Iterator<LineStringBuilder> iterator = this.holes.iterator();
|
||||
for (int i = 0; iterator.hasNext(); i++) {
|
||||
holes[i] = linearRing(factory, iterator.next().points);
|
||||
}
|
||||
|
@ -226,7 +216,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
|
|||
protected static Polygon polygon(GeometryFactory factory, Coordinate[][] polygon) {
|
||||
LinearRing shell = factory.createLinearRing(polygon[0]);
|
||||
LinearRing[] holes;
|
||||
|
||||
|
||||
if(polygon.length > 1) {
|
||||
holes = new LinearRing[polygon.length-1];
|
||||
for (int i = 0; i < holes.length; i++) {
|
||||
|
@ -243,7 +233,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
|
|||
* in turn contains an array of linestrings. These line Strings are represented as an array of
|
||||
* coordinates. The first linestring will be the shell of the polygon the others define holes
|
||||
* within the polygon.
|
||||
*
|
||||
*
|
||||
* @param factory {@link GeometryFactory} to use
|
||||
* @param polygons definition of polygons
|
||||
* @return a new Multipolygon
|
||||
|
@ -258,19 +248,19 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
|
|||
|
||||
/**
|
||||
* This method sets the component id of all edges in a ring to a given id and shifts the
|
||||
* coordinates of this component according to the dateline
|
||||
*
|
||||
* coordinates of this component according to the dateline
|
||||
*
|
||||
* @param edge An arbitrary edge of the component
|
||||
* @param id id to apply to the component
|
||||
* @param edges a list of edges to which all edges of the component will be added (could be <code>null</code>)
|
||||
* @return number of edges that belong to this component
|
||||
*/
|
||||
private static int component(final Edge edge, final int id, final ArrayList<Edge> edges) {
|
||||
// find a coordinate that is not part of the dateline
|
||||
// find a coordinate that is not part of the dateline
|
||||
Edge any = edge;
|
||||
while(any.coordinate.x == +DATELINE || any.coordinate.x == -DATELINE) {
|
||||
if((any = any.next) == edge) {
|
||||
break;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -362,7 +352,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
|
|||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
private static final Coordinate[][] EMPTY = new Coordinate[0][];
|
||||
|
||||
|
@ -378,7 +368,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
|
|||
}
|
||||
|
||||
return points;
|
||||
}
|
||||
}
|
||||
|
||||
private static Edge[] edges(Edge[] edges, int numHoles, ArrayList<ArrayList<Coordinate[]>> components) {
|
||||
ArrayList<Edge> mainEdges = new ArrayList<>(edges.length);
|
||||
|
@ -412,7 +402,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
|
|||
}
|
||||
for (int i = 0; i < numHoles; i++) {
|
||||
final Edge current = new Edge(holes[i].coordinate, holes[i].next);
|
||||
// the edge intersects with itself at its own coordinate. We need intersect to be set this way so the binary search
|
||||
// the edge intersects with itself at its own coordinate. We need intersect to be set this way so the binary search
|
||||
// will get the correct position in the edge list and therefore the correct component to add the hole
|
||||
current.intersect = current.coordinate;
|
||||
final int intersections = intersections(current.coordinate.x, edges);
|
||||
|
@ -457,20 +447,20 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
|
|||
holes[e2.component-1] = holes[numHoles];
|
||||
holes[numHoles] = null;
|
||||
}
|
||||
// only connect edges if intersections are pairwise
|
||||
// only connect edges if intersections are pairwise
|
||||
// 1. per the comment above, the edge array is sorted by y-value of the intersection
|
||||
// with the dateline. Two edges have the same y intercept when they cross the
|
||||
// with the dateline. Two edges have the same y intercept when they cross the
|
||||
// dateline thus they appear sequentially (pairwise) in the edge array. Two edges
|
||||
// do not have the same y intercept when we're forming a multi-poly from a poly
|
||||
// that wraps the dateline (but there are 2 ordered intercepts).
|
||||
// The connect method creates a new edge for these paired edges in the linked list.
|
||||
// For boundary conditions (e.g., intersect but not crossing) there is no sibling edge
|
||||
// that wraps the dateline (but there are 2 ordered intercepts).
|
||||
// The connect method creates a new edge for these paired edges in the linked list.
|
||||
// For boundary conditions (e.g., intersect but not crossing) there is no sibling edge
|
||||
// to connect. Thus the first logic check enforces the pairwise rule
|
||||
// 2. the second logic check ensures the two candidate edges aren't already connected by an
|
||||
// existing edge along the dateline - this is necessary due to a logic change in
|
||||
// ShapeBuilder.intersection that computes dateline edges as valid intersect points
|
||||
// ShapeBuilder.intersection that computes dateline edges as valid intersect points
|
||||
// in support of OGC standards
|
||||
if (e1.intersect != Edge.MAX_COORDINATE && e2.intersect != Edge.MAX_COORDINATE
|
||||
if (e1.intersect != Edge.MAX_COORDINATE && e2.intersect != Edge.MAX_COORDINATE
|
||||
&& !(e1.next.next.coordinate.equals3D(e2.coordinate) && Math.abs(e1.next.coordinate.x) == DATELINE
|
||||
&& Math.abs(e2.coordinate.x) == DATELINE) ) {
|
||||
connect(e1, e2);
|
||||
|
@ -489,7 +479,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
|
|||
// NOTE: the order of the object creation is crucial here! Don't change it!
|
||||
// first edge has no point on dateline
|
||||
Edge e1 = new Edge(in.intersect, in.next);
|
||||
|
||||
|
||||
if(out.intersect != out.next.coordinate) {
|
||||
// second edge has no point on dateline
|
||||
Edge e2 = new Edge(out.intersect, out.next);
|
||||
|
@ -507,7 +497,7 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
|
|||
// second edge has no point on dateline
|
||||
Edge e1 = new Edge(out.intersect, out.next);
|
||||
in.next = new Edge(in.intersect, e1, in.intersect);
|
||||
|
||||
|
||||
} else {
|
||||
// second edge intersects with dateline
|
||||
in.next = new Edge(in.intersect, out.next, in.intersect);
|
||||
|
@ -516,8 +506,8 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
|
|||
}
|
||||
}
|
||||
|
||||
private static int createEdges(int component, Orientation orientation, BaseLineStringBuilder<?> shell,
|
||||
BaseLineStringBuilder<?> hole,
|
||||
private static int createEdges(int component, Orientation orientation, BaseLineStringBuilder shell,
|
||||
BaseLineStringBuilder hole,
|
||||
Edge[] edges, int offset) {
|
||||
// inner rings (holes) have an opposite direction than the outer rings
|
||||
// XOR will invert the orientation for outer ring cases (Truth Table:, T/T = F, T/F = T, F/T = T, F/F = F)
|
||||
|
@ -527,32 +517,4 @@ public abstract class BasePolygonBuilder<E extends BasePolygonBuilder<E>> extend
|
|||
Edge.ring(component, direction, orientation == Orientation.LEFT, shell, points, 0, edges, offset, points.length-1);
|
||||
return points.length-1;
|
||||
}
|
||||
|
||||
public static class Ring<P extends ShapeBuilder> extends BaseLineStringBuilder<Ring<P>> {
|
||||
|
||||
private final P parent;
|
||||
|
||||
protected Ring(P parent) {
|
||||
this(parent, new ArrayList<Coordinate>());
|
||||
}
|
||||
|
||||
protected Ring(P parent, ArrayList<Coordinate> points) {
|
||||
super(points);
|
||||
this.parent = parent;
|
||||
}
|
||||
|
||||
public P close() {
|
||||
Coordinate start = points.get(0);
|
||||
Coordinate end = points.get(points.size()-1);
|
||||
if(start.x != end.x || start.y != end.y) {
|
||||
points.add(start);
|
||||
}
|
||||
return parent;
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoShapeType type() {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -29,7 +29,7 @@ import java.util.ArrayList;
|
|||
import java.util.List;
|
||||
|
||||
public class GeometryCollectionBuilder extends ShapeBuilder {
|
||||
|
||||
|
||||
public static final GeoShapeType TYPE = GeoShapeType.GEOMETRYCOLLECTION;
|
||||
|
||||
protected final ArrayList<ShapeBuilder> shapes = new ArrayList<>();
|
||||
|
@ -46,42 +46,42 @@ public class GeometryCollectionBuilder extends ShapeBuilder {
|
|||
this.shapes.add(shape);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
public GeometryCollectionBuilder point(PointBuilder point) {
|
||||
this.shapes.add(point);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
public GeometryCollectionBuilder multiPoint(MultiPointBuilder multiPoint) {
|
||||
this.shapes.add(multiPoint);
|
||||
return this;
|
||||
}
|
||||
|
||||
public GeometryCollectionBuilder line(BaseLineStringBuilder<?> line) {
|
||||
|
||||
public GeometryCollectionBuilder line(BaseLineStringBuilder line) {
|
||||
this.shapes.add(line);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
public GeometryCollectionBuilder multiLine(MultiLineStringBuilder multiLine) {
|
||||
this.shapes.add(multiLine);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
public GeometryCollectionBuilder polygon(BasePolygonBuilder<?> polygon) {
|
||||
this.shapes.add(polygon);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
public GeometryCollectionBuilder multiPolygon(MultiPolygonBuilder multiPolygon) {
|
||||
this.shapes.add(multiPolygon);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
public GeometryCollectionBuilder envelope(EnvelopeBuilder envelope) {
|
||||
this.shapes.add(envelope);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
public GeometryCollectionBuilder circle(CircleBuilder circle) {
|
||||
this.shapes.add(circle);
|
||||
return this;
|
||||
|
@ -120,11 +120,11 @@ public class GeometryCollectionBuilder extends ShapeBuilder {
|
|||
public Shape build() {
|
||||
|
||||
List<Shape> shapes = new ArrayList<>(this.shapes.size());
|
||||
|
||||
|
||||
for (ShapeBuilder shape : this.shapes) {
|
||||
shapes.add(shape.build());
|
||||
}
|
||||
|
||||
|
||||
if (shapes.size() == 1)
|
||||
return shapes.get(0);
|
||||
else
|
||||
|
|
|
@ -19,12 +19,23 @@
|
|||
|
||||
package org.elasticsearch.common.geo.builders;
|
||||
|
||||
import com.vividsolutions.jts.geom.Coordinate;
|
||||
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
||||
public class LineStringBuilder extends BaseLineStringBuilder<LineStringBuilder> {
|
||||
|
||||
public LineStringBuilder() {
|
||||
this(new ArrayList<Coordinate>());
|
||||
}
|
||||
|
||||
public LineStringBuilder(ArrayList<Coordinate> points) {
|
||||
super(points);
|
||||
}
|
||||
|
||||
public static final GeoShapeType TYPE = GeoShapeType.LINESTRING;
|
||||
|
||||
@Override
|
||||
|
@ -42,4 +53,16 @@ public class LineStringBuilder extends BaseLineStringBuilder<LineStringBuilder>
|
|||
return TYPE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes the current lineString by adding the starting point as the end point
|
||||
*/
|
||||
public LineStringBuilder close() {
|
||||
Coordinate start = points.get(0);
|
||||
Coordinate end = points.get(points.size()-1);
|
||||
if(start.x != end.x || start.y != end.y) {
|
||||
points.add(start);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -22,7 +22,6 @@ package org.elasticsearch.common.geo.builders;
|
|||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import com.spatial4j.core.shape.Shape;
|
||||
import com.spatial4j.core.shape.jts.JtsGeometry;
|
||||
import com.vividsolutions.jts.geom.Coordinate;
|
||||
import com.vividsolutions.jts.geom.Geometry;
|
||||
import com.vividsolutions.jts.geom.LineString;
|
||||
|
@ -35,15 +34,9 @@ public class MultiLineStringBuilder extends ShapeBuilder {
|
|||
|
||||
public static final GeoShapeType TYPE = GeoShapeType.MULTILINESTRING;
|
||||
|
||||
private final ArrayList<BaseLineStringBuilder<?>> lines = new ArrayList<>();
|
||||
private final ArrayList<LineStringBuilder> lines = new ArrayList<>();
|
||||
|
||||
public InternalLineStringBuilder linestring() {
|
||||
InternalLineStringBuilder line = new InternalLineStringBuilder(this);
|
||||
this.lines.add(line);
|
||||
return line;
|
||||
}
|
||||
|
||||
public MultiLineStringBuilder linestring(BaseLineStringBuilder<?> line) {
|
||||
public MultiLineStringBuilder linestring(LineStringBuilder line) {
|
||||
this.lines.add(line);
|
||||
return this;
|
||||
}
|
||||
|
@ -67,7 +60,7 @@ public class MultiLineStringBuilder extends ShapeBuilder {
|
|||
builder.field(FIELD_TYPE, TYPE.shapename);
|
||||
builder.field(FIELD_COORDINATES);
|
||||
builder.startArray();
|
||||
for(BaseLineStringBuilder<?> line : lines) {
|
||||
for(BaseLineStringBuilder line : lines) {
|
||||
line.coordinatesToXcontent(builder, false);
|
||||
}
|
||||
builder.endArray();
|
||||
|
@ -80,7 +73,7 @@ public class MultiLineStringBuilder extends ShapeBuilder {
|
|||
final Geometry geometry;
|
||||
if(wrapdateline) {
|
||||
ArrayList<LineString> parts = new ArrayList<>();
|
||||
for (BaseLineStringBuilder<?> line : lines) {
|
||||
for (BaseLineStringBuilder line : lines) {
|
||||
BaseLineStringBuilder.decompose(FACTORY, line.coordinates(false), parts);
|
||||
}
|
||||
if(parts.size() == 1) {
|
||||
|
@ -91,7 +84,7 @@ public class MultiLineStringBuilder extends ShapeBuilder {
|
|||
}
|
||||
} else {
|
||||
LineString[] lineStrings = new LineString[lines.size()];
|
||||
Iterator<BaseLineStringBuilder<?>> iterator = lines.iterator();
|
||||
Iterator<LineStringBuilder> iterator = lines.iterator();
|
||||
for (int i = 0; iterator.hasNext(); i++) {
|
||||
lineStrings[i] = FACTORY.createLineString(iterator.next().coordinates(false));
|
||||
}
|
||||
|
@ -99,27 +92,4 @@ public class MultiLineStringBuilder extends ShapeBuilder {
|
|||
}
|
||||
return jtsGeometry(geometry);
|
||||
}
|
||||
|
||||
public static class InternalLineStringBuilder extends BaseLineStringBuilder<InternalLineStringBuilder> {
|
||||
|
||||
private final MultiLineStringBuilder collection;
|
||||
|
||||
public InternalLineStringBuilder(MultiLineStringBuilder collection) {
|
||||
super();
|
||||
this.collection = collection;
|
||||
}
|
||||
|
||||
public MultiLineStringBuilder end() {
|
||||
return collection;
|
||||
}
|
||||
|
||||
public Coordinate[] coordinates() {
|
||||
return super.coordinates(false);
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoShapeType type() {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -48,16 +48,6 @@ public class MultiPolygonBuilder extends ShapeBuilder {
|
|||
return this;
|
||||
}
|
||||
|
||||
public InternalPolygonBuilder polygon() {
|
||||
return polygon(Orientation.RIGHT);
|
||||
}
|
||||
|
||||
public InternalPolygonBuilder polygon(Orientation orientation) {
|
||||
InternalPolygonBuilder polygon = new InternalPolygonBuilder(this, orientation);
|
||||
this.polygon(polygon);
|
||||
return polygon;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
|
@ -81,7 +71,7 @@ public class MultiPolygonBuilder extends ShapeBuilder {
|
|||
public Shape build() {
|
||||
|
||||
List<Shape> shapes = new ArrayList<>(this.polygons.size());
|
||||
|
||||
|
||||
if(wrapdateline) {
|
||||
for (BasePolygonBuilder<?> polygon : this.polygons) {
|
||||
for(Coordinate[][] part : polygon.coordinates()) {
|
||||
|
@ -100,20 +90,5 @@ public class MultiPolygonBuilder extends ShapeBuilder {
|
|||
//note: ShapeCollection is probably faster than a Multi* geom.
|
||||
}
|
||||
|
||||
public static class InternalPolygonBuilder extends BasePolygonBuilder<InternalPolygonBuilder> {
|
||||
|
||||
private final MultiPolygonBuilder collection;
|
||||
|
||||
private InternalPolygonBuilder(MultiPolygonBuilder collection, Orientation orientation) {
|
||||
super(orientation);
|
||||
this.collection = collection;
|
||||
this.shell = new Ring<>(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public MultiPolygonBuilder close() {
|
||||
super.close();
|
||||
return collection;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -29,7 +29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
|||
import com.vividsolutions.jts.geom.Coordinate;
|
||||
|
||||
/**
|
||||
* The {@link PointCollection} is an abstract base implementation for all GeoShapes. It simply handles a set of points.
|
||||
* The {@link PointCollection} is an abstract base implementation for all GeoShapes. It simply handles a set of points.
|
||||
*/
|
||||
public abstract class PointCollection<E extends PointCollection<E>> extends ShapeBuilder {
|
||||
|
||||
|
@ -43,7 +43,7 @@ public abstract class PointCollection<E extends PointCollection<E>> extends Shap
|
|||
protected PointCollection(ArrayList<Coordinate> points) {
|
||||
this.points = points;
|
||||
}
|
||||
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private E thisRef() {
|
||||
return (E)this;
|
||||
|
@ -57,7 +57,7 @@ public abstract class PointCollection<E extends PointCollection<E>> extends Shap
|
|||
*/
|
||||
public E point(double longitude, double latitude) {
|
||||
return this.point(coordinate(longitude, latitude));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a new point to the collection
|
||||
|
@ -71,7 +71,7 @@ public abstract class PointCollection<E extends PointCollection<E>> extends Shap
|
|||
|
||||
/**
|
||||
* Add a array of points to the collection
|
||||
*
|
||||
*
|
||||
* @param coordinates array of {@link Coordinate}s to add
|
||||
* @return this
|
||||
*/
|
||||
|
@ -81,7 +81,7 @@ public abstract class PointCollection<E extends PointCollection<E>> extends Shap
|
|||
|
||||
/**
|
||||
* Add a collection of points to the collection
|
||||
*
|
||||
*
|
||||
* @param coordinates array of {@link Coordinate}s to add
|
||||
* @return this
|
||||
*/
|
||||
|
@ -92,7 +92,7 @@ public abstract class PointCollection<E extends PointCollection<E>> extends Shap
|
|||
|
||||
/**
|
||||
* Copy all points to a new Array
|
||||
*
|
||||
*
|
||||
* @param closed if set to true the first point of the array is repeated as last element
|
||||
* @return Array of coordinates
|
||||
*/
|
||||
|
@ -106,9 +106,9 @@ public abstract class PointCollection<E extends PointCollection<E>> extends Shap
|
|||
|
||||
/**
|
||||
* builds an array of coordinates to a {@link XContentBuilder}
|
||||
*
|
||||
* @param builder builder to use
|
||||
* @param closed repeat the first point at the end of the array if it's not already defines as last element of the array
|
||||
*
|
||||
* @param builder builder to use
|
||||
* @param closed repeat the first point at the end of the array if it's not already defines as last element of the array
|
||||
* @return the builder
|
||||
*/
|
||||
protected XContentBuilder coordinatesToXcontent(XContentBuilder builder, boolean closed) throws IOException {
|
||||
|
|
|
@ -35,7 +35,7 @@ public class PolygonBuilder extends BasePolygonBuilder<PolygonBuilder> {
|
|||
|
||||
protected PolygonBuilder(ArrayList<Coordinate> points, Orientation orientation) {
|
||||
super(orientation);
|
||||
this.shell = new Ring<>(this, points);
|
||||
this.shell = new LineStringBuilder(points);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -444,7 +444,7 @@ public abstract class ShapeBuilder extends ToXContentToBytes {
|
|||
* number of points
|
||||
* @return Array of edges
|
||||
*/
|
||||
protected static Edge[] ring(int component, boolean direction, boolean handedness, BaseLineStringBuilder<?> shell,
|
||||
protected static Edge[] ring(int component, boolean direction, boolean handedness, BaseLineStringBuilder shell,
|
||||
Coordinate[] points, int offset, Edge[] edges, int toffset, int length) {
|
||||
// calculate the direction of the points:
|
||||
// find the point a the top of the set and check its
|
||||
|
|
|
@ -25,11 +25,12 @@ import java.util.concurrent.TimeUnit;
|
|||
|
||||
/**
|
||||
* An exponentially-weighted moving average.
|
||||
*
|
||||
* <p>
|
||||
* Taken from codahale metric module, changed to use LongAdder
|
||||
*
|
||||
* @see <a href="http://www.teamquest.com/pdfs/whitepaper/ldavg1.pdf">UNIX Load Average Part 1: How It Works</a>
|
||||
* @see <a href="http://www.teamquest.com/pdfs/whitepaper/ldavg2.pdf">UNIX Load Average Part 2: Not Your Average Average</a>
|
||||
* <p>
|
||||
* Taken from codahale metric module, changed to use LongAdder
|
||||
*/
|
||||
public class EWMA {
|
||||
private static final double M1_ALPHA = 1 - Math.exp(-5 / 60.0);
|
||||
|
|
|
@ -30,9 +30,10 @@ import java.util.concurrent.TimeUnit;
|
|||
* A meter metric which measures mean throughput and one-, five-, and
|
||||
* fifteen-minute exponentially-weighted moving average throughputs.
|
||||
*
|
||||
* <p>
|
||||
* taken from codahale metric module, replaced with LongAdder
|
||||
*
|
||||
* @see <a href="http://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average">EMA</a>
|
||||
* <p>
|
||||
* taken from codahale metric module, replaced with LongAdder
|
||||
*/
|
||||
public class MeterMetric implements Metric {
|
||||
private static final long INTERVAL = 5; // seconds
|
||||
|
|
|
@ -225,7 +225,7 @@ public enum DistanceUnit {
|
|||
* @param in {@link StreamInput} to read the {@link DistanceUnit} from
|
||||
* @return {@link DistanceUnit} read from the {@link StreamInput}
|
||||
* @throws IOException if no unit can be read from the {@link StreamInput}
|
||||
* @thrown ElasticsearchIllegalArgumentException if no matching {@link DistanceUnit} can be found
|
||||
* @throws IllegalArgumentException if no matching {@link DistanceUnit} can be found
|
||||
*/
|
||||
public static DistanceUnit readDistanceUnit(StreamInput in) throws IOException {
|
||||
byte b = in.readByte();
|
||||
|
|
|
@ -421,8 +421,8 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
|
|||
* Parsing:
|
||||
* Acceptable format:
|
||||
* "STRING" - interpreted as field value (input)
|
||||
* "ARRAY" - each element can be one of {@link #parse(ParseContext, Token, XContentParser, Map)}
|
||||
* "OBJECT" - see {@link #parse(ParseContext, Token, XContentParser, Map)}
|
||||
* "ARRAY" - each element can be one of "OBJECT" (see below)
|
||||
* "OBJECT" - { "input": STRING|ARRAY, "weight": STRING|INT, "contexts": ARRAY|OBJECT }
|
||||
*
|
||||
* Indexing:
|
||||
* if context mappings are defined, delegates to {@link ContextMappings#addField(ParseContext.Document, String, String, int, Map)}
|
||||
|
|
|
@ -28,9 +28,10 @@ import org.apache.lucene.util.BytesRef;
|
|||
import org.apache.lucene.util.BytesRefBuilder;
|
||||
import org.apache.lucene.util.NumericUtils;
|
||||
import org.elasticsearch.common.Explicit;
|
||||
import org.elasticsearch.common.network.InetAddresses;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Numbers;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.network.InetAddresses;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
@ -46,10 +47,13 @@ import org.elasticsearch.index.mapper.ParseContext;
|
|||
import org.elasticsearch.index.mapper.core.LongFieldMapper;
|
||||
import org.elasticsearch.index.mapper.core.LongFieldMapper.CustomLongNumericField;
|
||||
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import static org.elasticsearch.index.mapper.MapperBuilders.ipField;
|
||||
|
@ -61,6 +65,7 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField;
|
|||
public class IpFieldMapper extends NumberFieldMapper {
|
||||
|
||||
public static final String CONTENT_TYPE = "ip";
|
||||
public static final long MAX_IP = 4294967296l;
|
||||
|
||||
public static String longToIp(long longIp) {
|
||||
int octet3 = (int) ((longIp >> 24) % 256);
|
||||
|
@ -71,6 +76,7 @@ public class IpFieldMapper extends NumberFieldMapper {
|
|||
}
|
||||
|
||||
private static final Pattern pattern = Pattern.compile("\\.");
|
||||
private static final Pattern MASK_PATTERN = Pattern.compile("(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})\\.(\\d{1,3})/(\\d{1,3})");
|
||||
|
||||
public static long ipToLong(String ip) {
|
||||
try {
|
||||
|
@ -91,6 +97,64 @@ public class IpFieldMapper extends NumberFieldMapper {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Computes the min & max ip addresses (represented as long values -
|
||||
* same way as stored in index) represented by the given CIDR mask
|
||||
* expression. The returned array has the length of 2, where the first entry
|
||||
* represents the {@code min} address and the second the {@code max}. A
|
||||
* {@code -1} value for either the {@code min} or the {@code max},
|
||||
* represents an unbounded end. In other words:
|
||||
*
|
||||
* <p>
|
||||
* {@code min == -1 == "0.0.0.0" }
|
||||
* </p>
|
||||
*
|
||||
* and
|
||||
*
|
||||
* <p>
|
||||
* {@code max == -1 == "255.255.255.255" }
|
||||
* </p>
|
||||
*/
|
||||
public static long[] cidrMaskToMinMax(String cidr) {
|
||||
Matcher matcher = MASK_PATTERN.matcher(cidr);
|
||||
if (!matcher.matches()) {
|
||||
return null;
|
||||
}
|
||||
int addr = ((Integer.parseInt(matcher.group(1)) << 24) & 0xFF000000) | ((Integer.parseInt(matcher.group(2)) << 16) & 0xFF0000)
|
||||
| ((Integer.parseInt(matcher.group(3)) << 8) & 0xFF00) | (Integer.parseInt(matcher.group(4)) & 0xFF);
|
||||
|
||||
int mask = (-1) << (32 - Integer.parseInt(matcher.group(5)));
|
||||
|
||||
if (Integer.parseInt(matcher.group(5)) == 0) {
|
||||
mask = 0 << 32;
|
||||
}
|
||||
|
||||
int from = addr & mask;
|
||||
long longFrom = intIpToLongIp(from);
|
||||
if (longFrom == 0) {
|
||||
longFrom = -1;
|
||||
}
|
||||
|
||||
int to = from + (~mask);
|
||||
long longTo = intIpToLongIp(to) + 1; // we have to +1 here as the range
|
||||
// is non-inclusive on the "to"
|
||||
// side
|
||||
|
||||
if (longTo == MAX_IP) {
|
||||
longTo = -1;
|
||||
}
|
||||
|
||||
return new long[] { longFrom, longTo };
|
||||
}
|
||||
|
||||
private static long intIpToLongIp(int i) {
|
||||
long p1 = ((long) ((i >> 24) & 0xFF)) << 24;
|
||||
int p2 = ((i >> 16) & 0xFF) << 16;
|
||||
int p3 = ((i >> 8) & 0xFF) << 8;
|
||||
int p4 = i & 0xFF;
|
||||
return p1 + p2 + p3 + p4;
|
||||
}
|
||||
|
||||
public static class Defaults extends NumberFieldMapper.Defaults {
|
||||
public static final String NULL_VALUE = null;
|
||||
|
||||
|
@ -205,6 +269,23 @@ public class IpFieldMapper extends NumberFieldMapper {
|
|||
return bytesRef.get();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query termQuery(Object value, @Nullable QueryShardContext context) {
|
||||
if (value != null) {
|
||||
long[] fromTo;
|
||||
if (value instanceof BytesRef) {
|
||||
fromTo = cidrMaskToMinMax(((BytesRef) value).utf8ToString());
|
||||
} else {
|
||||
fromTo = cidrMaskToMinMax(value.toString());
|
||||
}
|
||||
if (fromTo != null) {
|
||||
return rangeQuery(fromTo[0] < 0 ? null : fromTo[0],
|
||||
fromTo[1] < 0 ? null : fromTo[1], true, false);
|
||||
}
|
||||
}
|
||||
return super.termQuery(value, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
|
||||
return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(),
|
||||
|
|
|
@ -29,14 +29,12 @@ import org.apache.lucene.search.Query;
|
|||
import org.apache.lucene.search.Scorer;
|
||||
import org.apache.lucene.search.TwoPhaseIterator;
|
||||
import org.apache.lucene.search.Weight;
|
||||
import org.apache.lucene.util.Bits;
|
||||
import org.apache.lucene.util.NumericUtils;
|
||||
import org.elasticsearch.common.geo.GeoDistance;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.unit.DistanceUnit;
|
||||
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
|
||||
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
|
||||
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
|
||||
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy;
|
||||
|
||||
import java.io.IOException;
|
||||
|
|
|
@ -190,6 +190,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
|
|||
boolean success = false;
|
||||
ArrayList<ImmutableTranslogReader> foundTranslogs = new ArrayList<>();
|
||||
final Path tempFile = Files.createTempFile(location, TRANSLOG_FILE_PREFIX, TRANSLOG_FILE_SUFFIX); // a temp file to copy checkpoint to - note it must be in on the same FS otherwise atomic move won't work
|
||||
boolean tempFileRenamed = false;
|
||||
try (ReleasableLock lock = writeLock.acquire()) {
|
||||
logger.debug("open uncommitted translog checkpoint {}", checkpoint);
|
||||
final String checkpointTranslogFile = getFilename(checkpoint.generation);
|
||||
|
@ -215,6 +216,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
|
|||
Files.copy(location.resolve(CHECKPOINT_FILE_NAME), tempFile, StandardCopyOption.REPLACE_EXISTING);
|
||||
IOUtils.fsync(tempFile, false);
|
||||
Files.move(tempFile, commitCheckpoint, StandardCopyOption.ATOMIC_MOVE);
|
||||
tempFileRenamed = true;
|
||||
// we only fsync the directory the tempFile was already fsynced
|
||||
IOUtils.fsync(commitCheckpoint.getParent(), true);
|
||||
}
|
||||
|
@ -223,10 +225,12 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
|
|||
if (success == false) {
|
||||
IOUtils.closeWhileHandlingException(foundTranslogs);
|
||||
}
|
||||
try {
|
||||
Files.delete(tempFile);
|
||||
} catch (IOException ex) {
|
||||
logger.warn("failed to delete temp file {}", ex, tempFile);
|
||||
if (tempFileRenamed == false) {
|
||||
try {
|
||||
Files.delete(tempFile);
|
||||
} catch (IOException ex) {
|
||||
logger.warn("failed to delete temp file {}", ex, tempFile);
|
||||
}
|
||||
}
|
||||
}
|
||||
return foundTranslogs;
|
||||
|
@ -253,7 +257,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
|
|||
/**
|
||||
* Extracts the translog generation from a file name.
|
||||
*
|
||||
* @throw IllegalArgumentException if the path doesn't match the expected pattern.
|
||||
* @throws IllegalArgumentException if the path doesn't match the expected pattern.
|
||||
*/
|
||||
public static long parseIdFromFileName(Path translogFile) {
|
||||
final String fileName = translogFile.getFileName().toString();
|
||||
|
|
|
@ -48,12 +48,12 @@ import java.util.*;
|
|||
* The {@link org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider} is only a functional interface that allows to register factory constructors directly like the plugin example below:
|
||||
* <pre>
|
||||
* public class MyAnalysisPlugin extends Plugin {
|
||||
* @Override
|
||||
* \@Override
|
||||
* public String name() {
|
||||
* return "analysis-my-plugin";
|
||||
* }
|
||||
*
|
||||
* @Override
|
||||
* \@Override
|
||||
* public String description() {
|
||||
* return "my very fast and efficient analyzer";
|
||||
* }
|
||||
|
|
|
@ -29,9 +29,9 @@ import java.util.List;
|
|||
public interface GeoHashGrid extends MultiBucketsAggregation {
|
||||
|
||||
/**
|
||||
* A bucket that is associated with a {@code geohash_grid} cell. The key of the bucket is the {@cod geohash} of the cell
|
||||
* A bucket that is associated with a {@code geohash_grid} cell. The key of the bucket is the {@code geohash} of the cell
|
||||
*/
|
||||
public static interface Bucket extends MultiBucketsAggregation.Bucket {
|
||||
interface Bucket extends MultiBucketsAggregation.Bucket {
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -41,7 +41,7 @@ public class RangeBuilder extends AbstractRangeBuilder<RangeBuilder> {
|
|||
*
|
||||
* @param key the key to use for this range in the response
|
||||
* @param from the lower bound on the distances, inclusive
|
||||
* @parap to the upper bound on the distances, exclusive
|
||||
* @param to the upper bound on the distances, exclusive
|
||||
*/
|
||||
public RangeBuilder addRange(String key, double from, double to) {
|
||||
ranges.add(new Range(key, from, to));
|
||||
|
|
|
@ -42,7 +42,7 @@ public class DateRangeBuilder extends AbstractRangeBuilder<DateRangeBuilder> {
|
|||
*
|
||||
* @param key the key to use for this range in the response
|
||||
* @param from the lower bound on the distances, inclusive
|
||||
* @parap to the upper bound on the distances, exclusive
|
||||
* @param to the upper bound on the distances, exclusive
|
||||
*/
|
||||
public DateRangeBuilder addRange(String key, Object from, Object to) {
|
||||
ranges.add(new Range(key, from, to));
|
||||
|
|
|
@ -168,7 +168,7 @@ public class GeoDistanceBuilder extends AggregationBuilder<GeoDistanceBuilder> {
|
|||
*
|
||||
* @param key the key to use for this range in the response
|
||||
* @param from the lower bound on the distances, inclusive
|
||||
* @parap to the upper bound on the distances, exclusive
|
||||
* @param to the upper bound on the distances, exclusive
|
||||
*/
|
||||
public GeoDistanceBuilder addRange(String key, double from, double to) {
|
||||
ranges.add(new Range(key, from, to));
|
||||
|
|
|
@ -22,15 +22,13 @@ package org.elasticsearch.search.aggregations.bucket.range.ipv4;
|
|||
import org.elasticsearch.search.aggregations.bucket.range.AbstractRangeBuilder;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilderException;
|
||||
|
||||
import java.util.regex.Pattern;
|
||||
import static org.elasticsearch.index.mapper.ip.IpFieldMapper.cidrMaskToMinMax;
|
||||
|
||||
/**
|
||||
* Builder for the {@code IPv4Range} aggregation.
|
||||
*/
|
||||
public class IPv4RangeBuilder extends AbstractRangeBuilder<IPv4RangeBuilder> {
|
||||
|
||||
private static final Pattern MASK_PATTERN = Pattern.compile("[\\.|/]");
|
||||
|
||||
/**
|
||||
* Sole constructor.
|
||||
*/
|
||||
|
@ -43,7 +41,7 @@ public class IPv4RangeBuilder extends AbstractRangeBuilder<IPv4RangeBuilder> {
|
|||
*
|
||||
* @param key the key to use for this range in the response
|
||||
* @param from the lower bound on the distances, inclusive
|
||||
* @parap to the upper bound on the distances, exclusive
|
||||
* @param to the upper bound on the distances, exclusive
|
||||
*/
|
||||
public IPv4RangeBuilder addRange(String key, String from, String to) {
|
||||
ranges.add(new Range(key, from, to));
|
||||
|
@@ -109,58 +107,4 @@ public class IPv4RangeBuilder extends AbstractRangeBuilder<IPv4RangeBuilder> {
return addUnboundedFrom(null, from);
}

/**
* Computes the min & max ip addresses (represented as long values - same way as stored in index) represented by the given CIDR mask
* expression. The returned array has the length of 2, where the first entry represents the {@code min} address and the second the {@code max}.
* A {@code -1} value for either the {@code min} or the {@code max}, represents an unbounded end. In other words:
*
* <p>
* {@code min == -1 == "0.0.0.0" }
* </p>
*
* and
*
* <p>
* {@code max == -1 == "255.255.255.255" }
* </p>
*/
static long[] cidrMaskToMinMax(String cidr) {
String[] parts = MASK_PATTERN.split(cidr);
if (parts.length != 5) {
return null;
}
int addr = (( Integer.parseInt(parts[0]) << 24 ) & 0xFF000000)
| (( Integer.parseInt(parts[1]) << 16 ) & 0xFF0000)
| (( Integer.parseInt(parts[2]) << 8 ) & 0xFF00)
| ( Integer.parseInt(parts[3]) & 0xFF);

int mask = (-1) << (32 - Integer.parseInt(parts[4]));

if (Integer.parseInt(parts[4]) == 0) {
mask = 0 << 32;
}

int from = addr & mask;
long longFrom = intIpToLongIp(from);
if (longFrom == 0) {
longFrom = -1;
}

int to = from + (~mask);
long longTo = intIpToLongIp(to) + 1; // we have to +1 here as the range is non-inclusive on the "to" side

if (longTo == InternalIPv4Range.MAX_IP) {
longTo = -1;
}

return new long[] { longFrom, longTo };
}

private static long intIpToLongIp(int i) {
long p1 = ((long) ((i >> 24 ) & 0xFF)) << 24;
int p2 = ((i >> 16 ) & 0xFF) << 16;
int p3 = ((i >> 8 ) & 0xFF) << 8;
int p4 = i & 0xFF;
return p1 + p2 + p3 + p4;
}
}
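
For reference, the arithmetic the deleted helper performed can be sketched as a small standalone class. This is only a sketch of the removed code path; the relocated IpFieldMapper.cidrMaskToMinMax (imported above but not shown in this diff) is assumed to keep the same two-element contract, where -1 marks an unbounded end.

```java
public final class CidrToRange {

    /** Returns {min, max} as longs, or null when the input is not a dotted-quad/prefix CIDR. */
    static long[] cidrMaskToMinMax(String cidr) {
        String[] parts = cidr.split("[\\./]");
        if (parts.length != 5) {
            return null;
        }
        int addr = ((Integer.parseInt(parts[0]) << 24) & 0xFF000000)
                 | ((Integer.parseInt(parts[1]) << 16) & 0xFF0000)
                 | ((Integer.parseInt(parts[2]) << 8) & 0xFF00)
                 | (Integer.parseInt(parts[3]) & 0xFF);
        int prefix = Integer.parseInt(parts[4]);
        int mask = prefix == 0 ? 0 : (-1) << (32 - prefix);

        long from = Integer.toUnsignedLong(addr & mask);
        long to = Integer.toUnsignedLong((addr & mask) + ~mask) + 1; // exclusive upper bound
        return new long[] {
            from == 0 ? -1 : from,       // -1 == "0.0.0.0", i.e. unbounded below
            to == (1L << 32) ? -1 : to   // -1 == "255.255.255.255", i.e. unbounded above
        };
    }

    public static void main(String[] args) {
        long[] range = cidrMaskToMinMax("10.0.0.0/8");
        System.out.println(range[0] + " .. " + range[1]); // 167772160 .. 184549376
    }
}
```
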
@@ -32,13 +32,13 @@ import java.io.IOException;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.index.mapper.ip.IpFieldMapper.MAX_IP;

/**
*
*/
public class InternalIPv4Range extends InternalRange<InternalIPv4Range.Bucket, InternalIPv4Range> {

public static final long MAX_IP = 4294967296l;

public final static Type TYPE = new Type("ip_range", "iprange");

private final static AggregationStreams.Stream STREAM = new AggregationStreams.Stream() {
@@ -19,6 +19,7 @@
package org.elasticsearch.search.aggregations.bucket.range.ipv4;

import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.ip.IpFieldMapper;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactory;

@@ -124,7 +125,7 @@ public class IpRangeParser implements Aggregator.Parser {
}

private static void parseMaskRange(String cidr, RangeAggregator.Range range, String aggregationName, SearchContext ctx) {
long[] fromTo = IPv4RangeBuilder.cidrMaskToMinMax(cidr);
long[] fromTo = IpFieldMapper.cidrMaskToMinMax(cidr);
if (fromTo == null) {
throw new SearchParseException(ctx, "invalid CIDR mask [" + cidr + "] in aggregation [" + aggregationName + "]",
null);
@@ -19,6 +19,7 @@

package org.elasticsearch.cluster.routing;

import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;

@@ -36,6 +37,7 @@ import static org.hamcrest.Matchers.equalTo;
/**
*/
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0)
@LuceneTestCase.AwaitsFix(bugUrl = "http://build-us-00.elastic.co/job/es_core_master_windows-2012-r2/2074/testReport/ (boaz on it)")
public class DelayedAllocationIT extends ESIntegTestCase {

/**
@@ -29,6 +29,7 @@ import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.LineString;
import com.vividsolutions.jts.geom.Polygon;

import org.elasticsearch.common.geo.builders.LineStringBuilder;
import org.elasticsearch.common.geo.builders.PolygonBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilders;

@@ -141,35 +142,34 @@ public class ShapeBuilderTests extends ESTestCase {

public void testMultiLineString() {
ShapeBuilders.newMultiLinestring()
.linestring()
.linestring(new LineStringBuilder()
.point(-100.0, 50.0)
.point(50.0, 50.0)
.point(50.0, 20.0)
.point(-100.0, 20.0)
.end()
.linestring()
)
.linestring(new LineStringBuilder()
.point(-100.0, 20.0)
.point(50.0, 20.0)
.point(50.0, 0.0)
.point(-100.0, 0.0)
.end()
)
.build();

// LineString that needs to be wrappped
ShapeBuilders.newMultiLinestring()
.linestring()
.linestring(new LineStringBuilder()
.point(150.0, 60.0)
.point(200.0, 60.0)
.point(200.0, 40.0)
.point(150.0, 40.0)
.end()
.linestring()
)
.linestring(new LineStringBuilder()
.point(150.0, 20.0)
.point(200.0, 20.0)
.point(200.0, 0.0)
.point(150.0, 0.0)
.end()
)
.build();
}
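
A minimal sketch of the updated multi-linestring API that this hunk migrates the test to: each line string is now passed in as a complete LineStringBuilder instead of being opened with .linestring() and closed with .end(). It assumes an Elasticsearch dependency of this era on the classpath; the fluent calls are exactly those exercised by ShapeBuilderTests above.

```java
import org.elasticsearch.common.geo.builders.LineStringBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilders;

public final class MultiLineStringExample {
    public static void main(String[] args) {
        ShapeBuilders.newMultiLinestring()
                // one self-contained ring of points per .linestring(...) call
                .linestring(new LineStringBuilder()
                        .point(-100.0, 50.0)
                        .point(50.0, 50.0)
                        .point(50.0, 20.0)
                        .point(-100.0, 20.0))
                .linestring(new LineStringBuilder()
                        .point(-100.0, 20.0)
                        .point(50.0, 20.0)
                        .point(50.0, 0.0)
                        .point(-100.0, 0.0))
                .build();
    }
}
```
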
@@ -251,7 +251,7 @@ public class ShapeBuilderTests extends ESTestCase {
.point(174,0);

// 3/4 of an embedded 'c', crossing dateline once
builder.hole()
builder.hole(new LineStringBuilder()
.point(175, 1)
.point(175, 7)
.point(-178, 7)

@@ -260,15 +260,15 @@ public class ShapeBuilderTests extends ESTestCase {
.point(176, 2)
.point(179, 2)
.point(179,1)
.point(175, 1);
.point(175, 1));

// embedded hole right of the dateline
builder.hole()
builder.hole(new LineStringBuilder()
.point(-179, 1)
.point(-179, 2)
.point(-177, 2)
.point(-177,1)
.point(-179,1);
.point(-179,1));

Shape shape = builder.close().build();
assertMultiPolygon(shape);
@@ -292,7 +292,7 @@ public class ShapeBuilderTests extends ESTestCase {
.point(-186,0);

// 3/4 of an embedded 'c', crossing dateline once
builder.hole()
builder.hole(new LineStringBuilder()
.point(-185,1)
.point(-181,1)
.point(-181,2)

@@ -301,15 +301,15 @@ public class ShapeBuilderTests extends ESTestCase {
.point(-178,6)
.point(-178,7)
.point(-185,7)
.point(-185,1);
.point(-185,1));

// embedded hole right of the dateline
builder.hole()
builder.hole(new LineStringBuilder()
.point(-179,1)
.point(-177,1)
.point(-177,2)
.point(-179,2)
.point(-179,1);
.point(-179,1));

Shape shape = builder.close().build();
assertMultiPolygon(shape);
@@ -356,7 +356,7 @@ public class ShapeBuilderTests extends ESTestCase {
.point(-85.0016455,37.1310491)
.point(-85.0018514,37.1311314);

builder.hole()
builder.hole(new LineStringBuilder()
.point(-85.0000002,37.1317672)
.point(-85.0001983,37.1317538)
.point(-85.0003378,37.1317582)

@@ -382,7 +382,7 @@ public class ShapeBuilderTests extends ESTestCase {
.point(-84.9993527,37.1317788)
.point(-84.9994931,37.1318061)
.point(-84.9996815,37.1317979)
.point(-85.0000002,37.1317672);
.point(-85.0000002,37.1317672));

Shape shape = builder.close().build();
assertPolygon(shape);
@@ -398,12 +398,12 @@ public class ShapeBuilderTests extends ESTestCase {
.point(-6, 0)
.point(-4, 2);

builder.hole()
builder.hole(new LineStringBuilder()
.point(4, 1)
.point(4, -1)
.point(-4, -1)
.point(-4, 1)
.point(4, 1);
.point(4, 1));

Shape shape = builder.close().build();
assertPolygon(shape);
@@ -451,12 +451,12 @@ public class ShapeBuilderTests extends ESTestCase {
.point(176, -15)
.point(-177, -10)
.point(-177, 10);
builder.hole()
builder.hole(new LineStringBuilder()
.point(176, 10)
.point(180, 5)
.point(180, -5)
.point(176, -10)
.point(176, 10);
.point(176, 10));
Shape shape = builder.close().build();
assertMultiPolygon(shape);
@@ -467,12 +467,12 @@ public class ShapeBuilderTests extends ESTestCase {
.point(179, -10)
.point(-176, -15)
.point(-172, 0);
builder.hole()
builder.hole(new LineStringBuilder()
.point(-176, 10)
.point(-176, -10)
.point(-180, -5)
.point(-180, 5)
.point(-176, 10);
.point(-176, 10));
shape = builder.close().build();
assertMultiPolygon(shape);
}
@@ -486,12 +486,12 @@ public class ShapeBuilderTests extends ESTestCase {
.point(166, -15)
.point(179, -10)
.point(179, 10);
builder.hole()
builder.hole(new LineStringBuilder()
.point(-177, 10)
.point(-178, -10)
.point(-180, -5)
.point(-180, 5)
.point(-177, 10);
.point(-177, 10));
Shape shape = builder.close().build();
assertMultiPolygon(shape);
}
@@ -505,12 +505,12 @@ public class ShapeBuilderTests extends ESTestCase {
.point(166, -15)
.point(179, -10)
.point(179, 10);
builder.hole()
builder.hole(new LineStringBuilder()
.point(164, 0)
.point(175, 10)
.point(175, 5)
.point(179, -10)
.point(164, 0);
.point(164, 0));
try {
builder.close().build();
fail("Expected InvalidShapeException");
@@ -528,17 +528,17 @@ public class ShapeBuilderTests extends ESTestCase {
.point(176, -15)
.point(-177, -10)
.point(-177, 10);
builder.hole()
builder.hole(new LineStringBuilder()
.point(-177, 10)
.point(-178, -10)
.point(-180, -5)
.point(-180, 5)
.point(-177, 10);
builder.hole()
.point(-177, 10));
builder.hole(new LineStringBuilder()
.point(172, 0)
.point(176, 10)
.point(176, -5)
.point(172, 0);
.point(172, 0));
Shape shape = builder.close().build();
assertMultiPolygon(shape);
}
@@ -552,12 +552,12 @@ public class ShapeBuilderTests extends ESTestCase {
.point(176, -15)
.point(-177, -10)
.point(-177, 10);
builder.hole()
builder.hole(new LineStringBuilder()
.point(-177, 10)
.point(172, 0)
.point(180, -5)
.point(176, -10)
.point(-177, 10);
.point(-177, 10));
try {
builder.close().build();
fail("Expected InvalidShapeException");
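
The polygon hunks above all make the same API migration: holes are supplied as self-contained LineStringBuilder rings instead of the old open-ended builder.hole()....end() chaining. A minimal sketch of the new form, assuming the same builder classes the test exercises (ShapeBuilders.newPolygon(), PolygonBuilder.hole(), close().build()); the coordinates are made up.

```java
import org.elasticsearch.common.geo.builders.LineStringBuilder;
import org.elasticsearch.common.geo.builders.PolygonBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilders;

public final class PolygonHoleExample {
    public static void main(String[] args) {
        // outer shell, closed by repeating the first point
        PolygonBuilder builder = ShapeBuilders.newPolygon()
                .point(-10, -10)
                .point(-10, 10)
                .point(10, 10)
                .point(10, -10)
                .point(-10, -10);
        // hole passed in as one complete ring
        builder.hole(new LineStringBuilder()
                .point(-4, -4)
                .point(-4, 4)
                .point(4, 4)
                .point(4, -4)
                .point(-4, -4));
        builder.close().build();
    }
}
```
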
@@ -0,0 +1,110 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.search;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.AbstractSearchScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.NativeScriptFactory;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.test.ESIntegTestCase;

import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.index.query.QueryBuilders.scriptQuery;
import static org.hamcrest.Matchers.equalTo;

/**
*/
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE)
public class SearchTimeoutIT extends ESIntegTestCase {

@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(ScriptedTimeoutPlugin.class);
}

@Override
protected Settings nodeSettings(int nodeOrdinal) {
return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)).build();
}

public void testSimpleTimeout() throws Exception {
client().prepareIndex("test", "type", "1").setSource("field", "value").setRefresh(true).execute().actionGet();

SearchResponse searchResponse = client().prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS))
.setQuery(scriptQuery(new Script(NativeTestScriptedTimeout.TEST_NATIVE_SCRIPT_TIMEOUT, ScriptType.INLINE, "native", null)))
.execute().actionGet();
assertThat(searchResponse.isTimedOut(), equalTo(true));
}

public static class ScriptedTimeoutPlugin extends Plugin {
@Override
public String name() {
return "test-scripted-search-timeout";
}

@Override
public String description() {
return "Test for scripted timeouts on searches";
}

public void onModule(ScriptModule module) {
module.registerScript(NativeTestScriptedTimeout.TEST_NATIVE_SCRIPT_TIMEOUT, NativeTestScriptedTimeout.Factory.class);
}
}

public static class NativeTestScriptedTimeout extends AbstractSearchScript {

public static final String TEST_NATIVE_SCRIPT_TIMEOUT = "native_test_search_timeout_script";

public static class Factory implements NativeScriptFactory {

@Override
public ExecutableScript newScript(Map<String, Object> params) {
return new NativeTestScriptedTimeout();
}

@Override
public boolean needsScores() {
return false;
}
}

@Override
public Object run() {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
return true;
}
}

}
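
A minimal client-side sketch of the behaviour the new test pins down: a search bounded by setTimeout() can return partial results, and the response flags this via isTimedOut(). Client construction is elided; "client" is assumed to be any connected org.elasticsearch.client.Client, and the index name and timeout value are made up.

```java
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.unit.TimeValue;

import java.util.concurrent.TimeUnit;

public final class TimedSearch {
    public static boolean searchTimedOut(Client client, String index) {
        SearchResponse response = client.prepareSearch(index)
                .setTimeout(new TimeValue(50, TimeUnit.MILLISECONDS)) // give shards at most 50ms
                .get();
        // a timed-out search still returns whatever hits were collected before the deadline
        return response.isTimedOut();
    }
}
```
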
@@ -16,8 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.messy.tests;
package org.elasticsearch.search.aggregations.bucket;

import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;

@@ -33,9 +32,7 @@ import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
@@ -83,23 +80,16 @@ import static org.hamcrest.Matchers.is;
*
*/
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE)
public class SignificantTermsSignificanceScoreTests extends ESIntegTestCase {
public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
static final String INDEX_NAME = "testidx";
static final String DOC_TYPE = "doc";
static final String TEXT_FIELD = "text";
static final String CLASS_FIELD = "class";

@Override
public Settings nodeSettings(int nodeOrdinal) {
return settingsBuilder()
.put(super.nodeSettings(nodeOrdinal))
.put("path.conf", this.getDataPath("conf"))
.build();
}

@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return pluginList(CustomSignificanceHeuristicPlugin.class, GroovyPlugin.class);
return pluginList(CustomSignificanceHeuristicPlugin.class);
}

public String randomExecutionHint() {
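
A sketch of wiring a script-based significance heuristic into a significant_terms aggregation, mirroring the request the test below builds. It assumes the 2.x builder classes this diff shows (ScriptHeuristic.ScriptHeuristicBuilder, SignificantTermsBuilder); the index and field names reuse the constants declared above.

```java
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsBuilder;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ScriptHeuristic;

public final class ScriptedSignificance {
    public static SearchResponse search(Client client) {
        // score = subset frequency relative to the rest of the superset
        ScriptHeuristic.ScriptHeuristicBuilder heuristic = new ScriptHeuristic.ScriptHeuristicBuilder();
        heuristic.setScript(new Script("_subset_freq / (_superset_freq - _subset_freq + 1)"));

        return client.prepareSearch("testidx")
                .addAggregation(new SignificantTermsBuilder("mySignificantTerms")
                        .field("text")
                        .significanceHeuristic(heuristic)
                        .minDocCount(1))
                .get();
    }
}
```
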
@ -505,91 +495,15 @@ public class SignificantTermsSignificanceScoreTests extends ESIntegTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testNoNumberFormatExceptionWithDefaultScriptingEngine() throws ExecutionException, InterruptedException, IOException {
|
||||
assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder().put("index.number_of_shards", 1)));
|
||||
index("test", "doc", "1", "{\"field\":\"a\"}");
|
||||
index("test", "doc", "11", "{\"field\":\"a\"}");
|
||||
index("test", "doc", "2", "{\"field\":\"b\"}");
|
||||
index("test", "doc", "22", "{\"field\":\"b\"}");
|
||||
index("test", "doc", "3", "{\"field\":\"a b\"}");
|
||||
index("test", "doc", "33", "{\"field\":\"a b\"}");
|
||||
ScriptHeuristic.ScriptHeuristicBuilder scriptHeuristicBuilder = new ScriptHeuristic.ScriptHeuristicBuilder();
|
||||
scriptHeuristicBuilder.setScript(new Script("_subset_freq/(_superset_freq - _subset_freq + 1)"));
|
||||
ensureYellow();
|
||||
refresh();
|
||||
SearchResponse response = client()
|
||||
.prepareSearch("test")
|
||||
.addAggregation(
|
||||
new TermsBuilder("letters").field("field").subAggregation(
|
||||
new SignificantTermsBuilder("mySignificantTerms").field("field").executionHint(randomExecutionHint())
|
||||
.significanceHeuristic(scriptHeuristicBuilder).minDocCount(1).shardSize(2).size(2))).execute()
|
||||
.actionGet();
|
||||
assertSearchResponse(response);
|
||||
assertThat(((Terms) response.getAggregations().get("letters")).getBuckets().size(), equalTo(2));
|
||||
for (Terms.Bucket classBucket : ((Terms) response.getAggregations().get("letters")).getBuckets()) {
|
||||
assertThat(((SignificantStringTerms) classBucket.getAggregations().get("mySignificantTerms")).getBuckets().size(), equalTo(2));
|
||||
for (SignificantTerms.Bucket bucket : ((SignificantTerms) classBucket.getAggregations().get("mySignificantTerms")).getBuckets()) {
|
||||
assertThat(bucket.getSignificanceScore(),
|
||||
closeTo((double) bucket.getSubsetDf() / (bucket.getSupersetDf() - bucket.getSubsetDf() + 1), 1.e-6));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private ScriptHeuristic.ScriptHeuristicBuilder getScriptSignificanceHeuristicBuilder() throws IOException {
|
||||
Map<String, Object> params = null;
|
||||
Script script = null;
|
||||
String lang = null;
|
||||
if (randomBoolean()) {
|
||||
Map<String, Object> params = null;
|
||||
params = new HashMap<>();
|
||||
params.put("param", randomIntBetween(1, 100));
|
||||
}
|
||||
int randomScriptKind = randomIntBetween(0, 3);
|
||||
if (randomBoolean()) {
|
||||
lang = "groovy";
|
||||
}
|
||||
switch (randomScriptKind) {
|
||||
case 0: {
|
||||
if (params == null) {
|
||||
script = new Script("return _subset_freq + _subset_size + _superset_freq + _superset_size");
|
||||
} else {
|
||||
script = new Script("return param*(_subset_freq + _subset_size + _superset_freq + _superset_size)/param",
|
||||
ScriptType.INLINE, lang, params);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 1: {
|
||||
String scriptString;
|
||||
if (params == null) {
|
||||
scriptString = "return _subset_freq + _subset_size + _superset_freq + _superset_size";
|
||||
} else {
|
||||
scriptString = "return param*(_subset_freq + _subset_size + _superset_freq + _superset_size)/param";
|
||||
}
|
||||
client().prepareIndex().setIndex(ScriptService.SCRIPT_INDEX).setType(ScriptService.DEFAULT_LANG).setId("my_script")
|
||||
.setSource(XContentFactory.jsonBuilder().startObject().field("script", scriptString).endObject()).get();
|
||||
refresh();
|
||||
script = new Script("my_script", ScriptType.INDEXED, lang, params);
|
||||
break;
|
||||
}
|
||||
case 2: {
|
||||
if (params == null) {
|
||||
script = new Script("significance_script_no_params", ScriptType.FILE, lang, null);
|
||||
} else {
|
||||
script = new Script("significance_script_with_params", ScriptType.FILE, lang, params);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 3: {
|
||||
logger.info("NATIVE SCRIPT");
|
||||
if (params == null) {
|
||||
script = new Script("native_significance_score_script_no_params", ScriptType.INLINE, "native", null);
|
||||
} else {
|
||||
script = new Script("native_significance_score_script_with_params", ScriptType.INLINE, "native", params);
|
||||
}
|
||||
lang = "native";
|
||||
if (randomBoolean()) {
|
||||
}
|
||||
break;
|
||||
}
|
||||
script = new Script("native_significance_score_script_with_params", ScriptType.INLINE, "native", params);
|
||||
} else {
|
||||
script = new Script("native_significance_score_script_no_params", ScriptType.INLINE, "native", null);
|
||||
}
|
||||
ScriptHeuristic.ScriptHeuristicBuilder builder = new ScriptHeuristic.ScriptHeuristicBuilder().setScript(script);
|
||||
|
|
@@ -16,21 +16,33 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.messy.tests;
package org.elasticsearch.search.aggregations.metrics;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.metrics.AbstractNumericTestCase;
import org.elasticsearch.search.aggregations.metrics.avg.Avg;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;

@@ -45,10 +57,13 @@ import static org.hamcrest.Matchers.notNullValue;
/**
*
*/
public class AvgTests extends AbstractNumericTestCase {
public class AvgIT extends AbstractNumericTestCase {

@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(GroovyPlugin.class);
return Arrays.asList(
ExtractFieldScriptPlugin.class,
FieldValueScriptPlugin.class);
}

@Override
@@ -145,7 +160,8 @@ public class AvgTests extends AbstractNumericTestCase {
public void testSingleValuedFieldWithValueScript() throws Exception {
SearchResponse searchResponse = client().prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(avg("avg").field("value").script(new Script("_value + 1")))
.addAggregation(avg("avg").field("value")
.script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null)))
.execute().actionGet();

assertHitCount(searchResponse, 10);

@@ -153,16 +169,16 @@ public class AvgTests extends AbstractNumericTestCase {
Avg avg = searchResponse.getAggregations().get("avg");
assertThat(avg, notNullValue());
assertThat(avg.getName(), equalTo("avg"));
assertThat(avg.getValue(), equalTo((double) (2+3+4+5+6+7+8+9+10+11) / 10));
assertThat(avg.getValue(), equalTo((double) (1+2+3+4+5+6+7+8+9+10) / 10));
}

@Override
public void testSingleValuedFieldWithValueScriptWithParams() throws Exception {
Map<String, Object> params = new HashMap<>();
params.put("inc", 1);
Map<String, Object> params = Collections.singletonMap("inc", 1);
SearchResponse searchResponse = client().prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(avg("avg").field("value").script(new Script("_value + inc", ScriptType.INLINE, null, params)))
.addAggregation(avg("avg").field("value")
.script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, params)))
.execute().actionGet();

assertHitCount(searchResponse, 10);
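
A quick arithmetic check of why the expected average in the hunk above drops from 6.5 to 5.5: the old Groovy "_value + 1" script shifted the ten stored values 1..10 up by one, while the mock FieldValueScriptEngine invoked with a null params map applies an increment of 0 and leaves them unchanged.

```java
import java.util.stream.IntStream;

public final class AvgExpectation {
    public static void main(String[] args) {
        double withGroovyShift = IntStream.rangeClosed(2, 11).average().getAsDouble(); // 6.5
        double withMockEngine = IntStream.rangeClosed(1, 10).average().getAsDouble();  // 5.5
        System.out.println(withGroovyShift + " -> " + withMockEngine);
    }
}
```
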
@ -205,7 +221,8 @@ public class AvgTests extends AbstractNumericTestCase {
|
|||
public void testMultiValuedFieldWithValueScript() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(avg("avg").field("values").script(new Script("_value + 1")))
|
||||
.addAggregation(avg("avg").field("values")
|
||||
.script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null)))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
@ -213,16 +230,16 @@ public class AvgTests extends AbstractNumericTestCase {
|
|||
Avg avg = searchResponse.getAggregations().get("avg");
|
||||
assertThat(avg, notNullValue());
|
||||
assertThat(avg.getName(), equalTo("avg"));
|
||||
assertThat(avg.getValue(), equalTo((double) (3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11+11+12+12+13) / 20));
|
||||
assertThat(avg.getValue(), equalTo((double) (2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11+11+12) / 20));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testMultiValuedFieldWithValueScriptWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("inc", 1);
|
||||
Map<String, Object> params = Collections.singletonMap("inc", 1);
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(avg("avg").field("values").script(new Script("_value + inc", ScriptType.INLINE, null, params)))
|
||||
.addAggregation(avg("avg").field("values")
|
||||
.script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, params)))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
@ -237,7 +254,8 @@ public class AvgTests extends AbstractNumericTestCase {
|
|||
public void testScriptSingleValued() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(avg("avg").script(new Script("doc['value'].value")))
|
||||
.addAggregation(avg("avg")
|
||||
.script(new Script("value", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
@ -250,28 +268,11 @@ public class AvgTests extends AbstractNumericTestCase {
|
|||
|
||||
@Override
|
||||
public void testScriptSingleValuedWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("inc", 1);
|
||||
Map<String, Object> params = Collections.singletonMap("inc", 1);
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(avg("avg").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
Avg avg = searchResponse.getAggregations().get("avg");
|
||||
assertThat(avg, notNullValue());
|
||||
assertThat(avg.getName(), equalTo("avg"));
|
||||
assertThat(avg.getValue(), equalTo((double) (2+3+4+5+6+7+8+9+10+11) / 10));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptExplicitSingleValuedWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("inc", 1);
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(avg("avg").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)))
|
||||
.addAggregation(avg("avg")
|
||||
.script(new Script("value", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, params)))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
@ -286,7 +287,8 @@ public class AvgTests extends AbstractNumericTestCase {
|
|||
public void testScriptMultiValued() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(avg("avg").script(new Script("[ doc['value'].value, doc['value'].value + 1 ]")))
|
||||
.addAggregation(avg("avg")
|
||||
.script(new Script("values", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
@ -294,32 +296,16 @@ public class AvgTests extends AbstractNumericTestCase {
|
|||
Avg avg = searchResponse.getAggregations().get("avg");
|
||||
assertThat(avg, notNullValue());
|
||||
assertThat(avg.getName(), equalTo("avg"));
|
||||
assertThat(avg.getValue(), equalTo((double) (1+2+2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11) / 20));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptExplicitMultiValued() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(avg("avg").script(new Script("[ doc['value'].value, doc['value'].value + 1 ]")))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
Avg avg = searchResponse.getAggregations().get("avg");
|
||||
assertThat(avg, notNullValue());
|
||||
assertThat(avg.getName(), equalTo("avg"));
|
||||
assertThat(avg.getValue(), equalTo((double) (1+2+2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11) / 20));
|
||||
assertThat(avg.getValue(), equalTo((double) (2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11+11+12) / 20));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptMultiValuedWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("inc", 1);
|
||||
Map<String, Object> params = Collections.singletonMap("inc", 1);
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(
|
||||
avg("avg").script(new Script("[ doc['value'].value, doc['value'].value + inc ]", ScriptType.INLINE, null, params)))
|
||||
.addAggregation(avg("avg")
|
||||
.script(new Script("values", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, params)))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
@ -327,6 +313,276 @@ public class AvgTests extends AbstractNumericTestCase {
|
|||
Avg avg = searchResponse.getAggregations().get("avg");
|
||||
assertThat(avg, notNullValue());
|
||||
assertThat(avg.getName(), equalTo("avg"));
|
||||
assertThat(avg.getValue(), equalTo((double) (1+2+2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11) / 20));
|
||||
assertThat(avg.getValue(), equalTo((double) (3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11+11+12+12+13) / 20));
|
||||
}
|
||||
|
||||
/**
|
||||
* Mock plugin for the {@link ExtractFieldScriptEngine}
|
||||
*/
|
||||
public static class ExtractFieldScriptPlugin extends Plugin {
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return ExtractFieldScriptEngine.NAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String description() {
|
||||
return "Mock script engine for " + AvgIT.class;
|
||||
}
|
||||
|
||||
public void onModule(ScriptModule module) {
|
||||
module.addScriptEngine(ExtractFieldScriptEngine.class);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* This mock script returns the field that is specified by name in the script body
|
||||
*/
|
||||
public static class ExtractFieldScriptEngine implements ScriptEngineService {
|
||||
|
||||
public static final String NAME = "extract_field";
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] types() {
|
||||
return new String[] { NAME };
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] extensions() {
|
||||
return types();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean sandboxed() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object compile(String script) {
|
||||
return script;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ExecutableScript executable(CompiledScript compiledScript, Map<String, Object> params) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
@Override
|
||||
public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map<String, Object> vars) {
|
||||
final long inc;
|
||||
if (vars == null || vars.containsKey("inc") == false) {
|
||||
inc = 0;
|
||||
} else {
|
||||
inc = ((Number) vars.get("inc")).longValue();
|
||||
}
|
||||
return new SearchScript() {
|
||||
|
||||
@Override
|
||||
public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException {
|
||||
|
||||
final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context);
|
||||
|
||||
return new LeafSearchScript() {
|
||||
|
||||
@Override
|
||||
public Object unwrap(Object value) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setNextVar(String name, Object value) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object run() {
|
||||
String fieldName = (String) compiledScript.compiled();
|
||||
List<Long> values = new ArrayList<>();
|
||||
for (Object v : (List<?>) leafLookup.doc().get(fieldName)) {
|
||||
values.add(((Number) v).longValue() + inc);
|
||||
}
|
||||
return values;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setScorer(Scorer scorer) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setSource(Map<String, Object> source) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setDocument(int doc) {
|
||||
if (leafLookup != null) {
|
||||
leafLookup.setDocument(doc);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public long runAsLong() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public float runAsFloat() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public double runAsDouble() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean needsScores() {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public void scriptRemoved(CompiledScript script) {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Mock plugin for the {@link FieldValueScriptEngine}
|
||||
*/
|
||||
public static class FieldValueScriptPlugin extends Plugin {
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return FieldValueScriptEngine.NAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String description() {
|
||||
return "Mock script engine for " + AvgIT.class;
|
||||
}
|
||||
|
||||
public void onModule(ScriptModule module) {
|
||||
module.addScriptEngine(FieldValueScriptEngine.class);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* This mock script returns the field value and adds one month to the returned date
|
||||
*/
|
||||
public static class FieldValueScriptEngine implements ScriptEngineService {
|
||||
|
||||
public static final String NAME = "field_value";
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] types() {
|
||||
return new String[] { NAME };
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] extensions() {
|
||||
return types();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean sandboxed() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object compile(String script) {
|
||||
return script;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ExecutableScript executable(CompiledScript compiledScript, Map<String, Object> params) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
@Override
|
||||
public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map<String, Object> vars) {
|
||||
final long inc;
|
||||
if (vars == null || vars.containsKey("inc") == false) {
|
||||
inc = 0;
|
||||
} else {
|
||||
inc = ((Number) vars.get("inc")).longValue();
|
||||
}
|
||||
return new SearchScript() {
|
||||
|
||||
private Map<String, Object> vars = new HashMap<>(2);
|
||||
|
||||
@Override
|
||||
public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException {
|
||||
|
||||
final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context);
|
||||
|
||||
return new LeafSearchScript() {
|
||||
|
||||
@Override
|
||||
public Object unwrap(Object value) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setNextVar(String name, Object value) {
|
||||
vars.put(name, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object run() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setScorer(Scorer scorer) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setSource(Map<String, Object> source) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setDocument(int doc) {
|
||||
if (leafLookup != null) {
|
||||
leafLookup.setDocument(doc);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public long runAsLong() {
|
||||
return ((Number) vars.get("_value")).longValue() + inc;
|
||||
}
|
||||
|
||||
@Override
|
||||
public float runAsFloat() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public double runAsDouble() {
|
||||
return ((Number) vars.get("_value")).doubleValue() + inc;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean needsScores() {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public void scriptRemoved(CompiledScript script) {
|
||||
}
|
||||
}
|
||||
}
|
|
@ -16,21 +16,32 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.messy.tests;
|
||||
package org.elasticsearch.search.aggregations.metrics;
|
||||
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.search.Scorer;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.script.CompiledScript;
|
||||
import org.elasticsearch.script.ExecutableScript;
|
||||
import org.elasticsearch.script.LeafSearchScript;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.ScriptEngineService;
|
||||
import org.elasticsearch.script.ScriptModule;
|
||||
import org.elasticsearch.script.ScriptService.ScriptType;
|
||||
import org.elasticsearch.script.groovy.GroovyPlugin;
|
||||
import org.elasticsearch.script.SearchScript;
|
||||
import org.elasticsearch.search.aggregations.bucket.global.Global;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
|
||||
import org.elasticsearch.search.aggregations.metrics.AbstractNumericTestCase;
|
||||
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
|
||||
import org.elasticsearch.search.lookup.LeafSearchLookup;
|
||||
import org.elasticsearch.search.lookup.SearchLookup;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
|
||||
|
@@ -44,10 +55,11 @@ import static org.hamcrest.Matchers.notNullValue;
/**
*
*/
public class SumTests extends AbstractNumericTestCase {
public class SumIT extends AbstractNumericTestCase {

@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(GroovyPlugin.class);
return Arrays.asList(ExtractFieldScriptPlugin.class, FieldValueScriptPlugin.class);
}

@Override
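
The same bookkeeping applies to the sum assertions rewritten in the hunks below: with the Groovy "_value + 1" script the ten stored values 1..10 summed to 65, while the mock engine invoked without an "inc" parameter leaves them untouched, so the expected sum becomes 55.

```java
import java.util.stream.IntStream;

public final class SumExpectation {
    public static void main(String[] args) {
        int withGroovyShift = IntStream.rangeClosed(2, 11).sum(); // 65
        int withMockEngine = IntStream.rangeClosed(1, 10).sum();  // 55
        System.out.println(withGroovyShift + " -> " + withMockEngine);
    }
}
```
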
@ -157,7 +169,7 @@ public class SumTests extends AbstractNumericTestCase {
|
|||
public void testSingleValuedFieldWithValueScript() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(sum("sum").field("value").script(new Script("_value + 1")))
|
||||
.addAggregation(sum("sum").field("value").script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null)))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
@ -165,7 +177,7 @@ public class SumTests extends AbstractNumericTestCase {
|
|||
Sum sum = searchResponse.getAggregations().get("sum");
|
||||
assertThat(sum, notNullValue());
|
||||
assertThat(sum.getName(), equalTo("sum"));
|
||||
assertThat(sum.getValue(), equalTo((double) 2+3+4+5+6+7+8+9+10+11));
|
||||
assertThat(sum.getValue(), equalTo((double) 1+2+3+4+5+6+7+8+9+10));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -174,7 +186,7 @@ public class SumTests extends AbstractNumericTestCase {
|
|||
params.put("increment", 1);
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(sum("sum").field("value").script(new Script("_value + increment", ScriptType.INLINE, null, params)))
|
||||
.addAggregation(sum("sum").field("value").script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, params)))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
@ -182,14 +194,14 @@ public class SumTests extends AbstractNumericTestCase {
|
|||
Sum sum = searchResponse.getAggregations().get("sum");
|
||||
assertThat(sum, notNullValue());
|
||||
assertThat(sum.getName(), equalTo("sum"));
|
||||
assertThat(sum.getValue(), equalTo((double) 2+3+4+5+6+7+8+9+10+11));
|
||||
assertThat(sum.getValue(), equalTo((double) 1+2+3+4+5+6+7+8+9+10));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptSingleValued() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(sum("sum").script(new Script("doc['value'].value")))
|
||||
.addAggregation(sum("sum").script(new Script("value", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
@ -206,7 +218,7 @@ public class SumTests extends AbstractNumericTestCase {
|
|||
params.put("inc", 1);
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(sum("sum").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)))
|
||||
.addAggregation(sum("sum").script(new Script("value", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, params)))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
@ -217,29 +229,11 @@ public class SumTests extends AbstractNumericTestCase {
|
|||
assertThat(sum.getValue(), equalTo((double) 2+3+4+5+6+7+8+9+10+11));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptExplicitSingleValuedWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("inc", 1);
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(sum("sum").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
Sum sum = searchResponse.getAggregations().get("sum");
|
||||
assertThat(sum, notNullValue());
|
||||
assertThat(sum.getName(), equalTo("sum"));
|
||||
assertThat(sum.getValue(), equalTo((double) 2+3+4+5+6+7+8+9+10+11));
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void testScriptMultiValued() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(sum("sum").script(new Script("[ doc['value'].value, doc['value'].value + 1 ]")))
|
||||
.addAggregation(sum("sum").script(new Script("values", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, null)))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
@ -247,22 +241,7 @@ public class SumTests extends AbstractNumericTestCase {
|
|||
Sum sum = searchResponse.getAggregations().get("sum");
|
||||
assertThat(sum, notNullValue());
|
||||
assertThat(sum.getName(), equalTo("sum"));
|
||||
assertThat(sum.getValue(), equalTo((double) 1+2+2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptExplicitMultiValued() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(sum("sum").script(new Script("[ doc['value'].value, doc['value'].value + 1 ]")))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
Sum sum = searchResponse.getAggregations().get("sum");
|
||||
assertThat(sum, notNullValue());
|
||||
assertThat(sum.getName(), equalTo("sum"));
|
||||
assertThat(sum.getValue(), equalTo((double) 1+2+2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11));
|
||||
assertThat(sum.getValue(), equalTo((double) 2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11+11+12));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -272,7 +251,7 @@ public class SumTests extends AbstractNumericTestCase {
|
|||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(
|
||||
sum("sum").script(new Script("[ doc['value'].value, doc['value'].value + inc ]", ScriptType.INLINE, null, params)))
|
||||
sum("sum").script(new Script("values", ScriptType.INLINE, ExtractFieldScriptEngine.NAME, params)))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
@ -280,7 +259,7 @@ public class SumTests extends AbstractNumericTestCase {
|
|||
Sum sum = searchResponse.getAggregations().get("sum");
|
||||
assertThat(sum, notNullValue());
|
||||
assertThat(sum.getName(), equalTo("sum"));
|
||||
assertThat(sum.getValue(), equalTo((double) 1+2+2+3+3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11));
|
||||
assertThat(sum.getValue(), equalTo((double) 3+4+4+5+5+6+6+7+7+8+8+9+9+10+10+11+11+12+12+13));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -304,22 +283,7 @@ public class SumTests extends AbstractNumericTestCase {
|
|||
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(sum("sum").field("values").script(new Script("_value + 1"))).execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
Sum sum = searchResponse.getAggregations().get("sum");
|
||||
assertThat(sum, notNullValue());
|
||||
assertThat(sum.getName(), equalTo("sum"));
|
||||
assertThat(sum.getValue(), equalTo((double) 3 + 4 + 4 + 5 + 5 + 6 + 6 + 7 + 7 + 8 + 8 + 9 + 9 + 10 + 10 + 11 + 11 + 12 + 12 + 13));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testMultiValuedFieldWithValueScriptWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("increment", 1);
|
||||
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
|
||||
.addAggregation(sum("sum").field("values").script(new Script("_value + increment", ScriptType.INLINE, null, params)))
|
||||
.addAggregation(sum("sum").field("values").script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, null)))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
@ -327,6 +291,296 @@ public class SumTests extends AbstractNumericTestCase {
|
|||
Sum sum = searchResponse.getAggregations().get("sum");
|
||||
assertThat(sum, notNullValue());
|
||||
assertThat(sum.getName(), equalTo("sum"));
|
||||
assertThat(sum.getValue(), equalTo((double) 3 + 4 + 4 + 5 + 5 + 6 + 6 + 7 + 7 + 8 + 8 + 9 + 9 + 10 + 10 + 11 + 11 + 12 + 12 + 13));
|
||||
assertThat(sum.getValue(), equalTo((double) 2 + 3 + 3 + 4 + 4 + 5 + 5 + 6 + 6 + 7 + 7 + 8 + 8 + 9 + 9 + 10 + 10 + 11 + 11 + 12));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testMultiValuedFieldWithValueScriptWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("increment", 1);
|
||||
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
|
||||
.addAggregation(sum("sum").field("values").script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, params)))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
Sum sum = searchResponse.getAggregations().get("sum");
|
||||
assertThat(sum, notNullValue());
|
||||
assertThat(sum.getName(), equalTo("sum"));
|
||||
assertThat(sum.getValue(), equalTo((double) 2 + 3 + 3 + 4 + 4 + 5 + 5 + 6 + 6 + 7 + 7 + 8 + 8 + 9 + 9 + 10 + 10 + 11 + 11 + 12));
|
||||
}
|
||||
|
||||
/**
|
||||
* Mock plugin for the {@link ExtractFieldScriptEngine}
|
||||
*/
|
||||
public static class ExtractFieldScriptPlugin extends Plugin {
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return ExtractFieldScriptEngine.NAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String description() {
|
||||
return "Mock script engine for " + SumIT.class;
|
||||
}
|
||||
|
||||
public void onModule(ScriptModule module) {
|
||||
module.addScriptEngine(ExtractFieldScriptEngine.class);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* This mock script returns the field that is specified by name in the
|
||||
* script body
|
||||
*/
|
||||
public static class ExtractFieldScriptEngine implements ScriptEngineService {
|
||||
|
||||
public static final String NAME = "extract_field";
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] types() {
|
||||
return new String[] { NAME };
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] extensions() {
|
||||
return types();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean sandboxed() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object compile(String script) {
|
||||
return script;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ExecutableScript executable(CompiledScript compiledScript, Map<String, Object> params) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map<String, Object> vars) {
|
||||
final long inc;
|
||||
if (vars == null || vars.containsKey("inc") == false) {
|
||||
inc = 0;
|
||||
} else {
|
||||
inc = ((Number) vars.get("inc")).longValue();
|
||||
}
|
||||
return new SearchScript() {
|
||||
|
||||
@Override
|
||||
public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException {
|
||||
|
||||
final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context);
|
||||
|
||||
return new LeafSearchScript() {
|
||||
|
||||
@Override
|
||||
public Object unwrap(Object value) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setNextVar(String name, Object value) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object run() {
|
||||
String fieldName = (String) compiledScript.compiled();
|
||||
List<Long> values = new ArrayList<>();
|
||||
for (Object v : (List<?>) leafLookup.doc().get(fieldName)) {
|
||||
values.add(((Number) v).longValue() + inc);
|
||||
}
|
||||
return values;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setScorer(Scorer scorer) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setSource(Map<String, Object> source) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setDocument(int doc) {
|
||||
if (leafLookup != null) {
|
||||
leafLookup.setDocument(doc);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public long runAsLong() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public float runAsFloat() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public double runAsDouble() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean needsScores() {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public void scriptRemoved(CompiledScript script) {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Mock plugin for the {@link FieldValueScriptEngine}
|
||||
*/
|
||||
public static class FieldValueScriptPlugin extends Plugin {
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return FieldValueScriptEngine.NAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String description() {
|
||||
return "Mock script engine for " + SumIT.class;
|
||||
}
|
||||
|
||||
public void onModule(ScriptModule module) {
|
||||
module.addScriptEngine(FieldValueScriptEngine.class);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* This mock script returns the field value and adds one to the returned
|
||||
* value
|
||||
*/
|
||||
public static class FieldValueScriptEngine implements ScriptEngineService {
|
||||
|
||||
public static final String NAME = "field_value";
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] types() {
|
||||
return new String[] { NAME };
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] extensions() {
|
||||
return types();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean sandboxed() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object compile(String script) {
|
||||
return script;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ExecutableScript executable(CompiledScript compiledScript, Map<String, Object> params) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map<String, Object> vars) {
|
||||
final long inc;
|
||||
if (vars == null || vars.containsKey("inc") == false) {
|
||||
inc = 0;
|
||||
} else {
|
||||
inc = ((Number) vars.get("inc")).longValue();
|
||||
}
|
||||
return new SearchScript() {
|
||||
|
||||
private Map<String, Object> vars = new HashMap<>(2);
|
||||
|
||||
@Override
|
||||
public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException {
|
||||
|
||||
final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context);
|
||||
|
||||
return new LeafSearchScript() {
|
||||
|
||||
@Override
|
||||
public Object unwrap(Object value) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setNextVar(String name, Object value) {
|
||||
vars.put(name, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object run() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setScorer(Scorer scorer) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setSource(Map<String, Object> source) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setDocument(int doc) {
|
||||
if (leafLookup != null) {
|
||||
leafLookup.setDocument(doc);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public long runAsLong() {
|
||||
return ((Number) vars.get("_value")).longValue() + inc;
|
||||
}
|
||||
|
||||
@Override
|
||||
public float runAsFloat() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public double runAsDouble() {
|
||||
return ((Number) vars.get("_value")).doubleValue() + inc;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean needsScores() {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public void scriptRemoved(CompiledScript script) {
|
||||
}
|
||||
}
|
||||
}
|
|
@ -16,21 +16,22 @@
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.messy.tests;
package org.elasticsearch.search.aggregations.metrics;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.*;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.script.groovy.GroovyPlugin;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCount;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.ESIntegTestCase;

import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.io.IOException;
import java.util.*;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
@ -44,13 +45,7 @@ import static org.hamcrest.Matchers.notNullValue;
 *
 */
@ESIntegTestCase.SuiteScopeTestCase
public class ValueCountTests extends ESIntegTestCase {

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return Collections.singleton(GroovyPlugin.class);
    }

public class ValueCountIT extends ESIntegTestCase {
    @Override
    public void setupSuiteScopeCluster() throws Exception {
        createIndex("idx");
@ -68,6 +63,11 @@ public class ValueCountTests extends ESIntegTestCase {
|
|||
ensureSearchable();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> nodePlugins() {
|
||||
return Collections.singletonList(FieldValueScriptPlugin.class);
|
||||
}
|
||||
|
||||
public void testUnmapped() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("idx_unmapped")
|
||||
.setQuery(matchAllQuery())
|
||||
|
@ -148,7 +148,7 @@ public class ValueCountTests extends ESIntegTestCase {
|
|||
|
||||
public void testSingleValuedScript() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
|
||||
.addAggregation(count("count").script(new Script("doc['value'].value"))).execute().actionGet();
|
||||
.addAggregation(count("count").script(new Script("value", ScriptType.INLINE, FieldValueScriptEngine.NAME, null))).execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
|
@ -160,7 +160,7 @@ public class ValueCountTests extends ESIntegTestCase {
|
|||
|
||||
public void testMultiValuedScript() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
|
||||
.addAggregation(count("count").script(new Script("doc['values'].values"))).execute().actionGet();
|
||||
.addAggregation(count("count").script(new Script("values", ScriptType.INLINE, FieldValueScriptEngine.NAME, null))).execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
|
@ -171,10 +171,9 @@ public class ValueCountTests extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testSingleValuedScriptWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("s", "value");
|
||||
Map<String, Object> params = Collections.singletonMap("s", "value");
|
||||
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
|
||||
.addAggregation(count("count").script(new Script("doc[s].value", ScriptType.INLINE, null, params))).execute().actionGet();
|
||||
.addAggregation(count("count").script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, params))).execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
|
@ -185,10 +184,9 @@ public class ValueCountTests extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testMultiValuedScriptWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("s", "values");
|
||||
Map<String, Object> params = Collections.singletonMap("s", "values");
|
||||
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
|
||||
.addAggregation(count("count").script(new Script("doc[s].values", ScriptType.INLINE, null, params))).execute().actionGet();
|
||||
.addAggregation(count("count").script(new Script("", ScriptType.INLINE, FieldValueScriptEngine.NAME, params))).execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
|
@ -197,4 +195,139 @@ public class ValueCountTests extends ESIntegTestCase {
|
|||
assertThat(valueCount.getName(), equalTo("count"));
|
||||
assertThat(valueCount.getValue(), equalTo(20l));
|
||||
}
|
||||
|
||||
/**
|
||||
* Mock plugin for the {@link FieldValueScriptEngine}
|
||||
*/
|
||||
public static class FieldValueScriptPlugin extends Plugin {
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return FieldValueScriptEngine.NAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String description() {
|
||||
return "Mock script engine for " + ValueCountIT.class;
|
||||
}
|
||||
|
||||
public void onModule(ScriptModule module) {
|
||||
module.addScriptEngine(FieldValueScriptEngine.class);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* This mock script returns the field value. If the parameter map contains a parameter "s", its value is used as the field name.
|
||||
*/
|
||||
public static class FieldValueScriptEngine implements ScriptEngineService {
|
||||
|
||||
public static final String NAME = "field_value";
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] types() {
|
||||
return new String[] { NAME };
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] extensions() {
|
||||
return types();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean sandboxed() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object compile(String script) {
|
||||
return script;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ExecutableScript executable(CompiledScript compiledScript, Map<String, Object> params) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
@Override
|
||||
public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, Map<String, Object> vars) {
|
||||
final String fieldNameParam;
|
||||
if (vars == null || vars.containsKey("s") == false) {
|
||||
fieldNameParam = null;
|
||||
} else {
|
||||
fieldNameParam = (String) vars.get("s");
|
||||
}
|
||||
|
||||
return new SearchScript() {
|
||||
private Map<String, Object> vars = new HashMap<>(2);
|
||||
|
||||
@Override
|
||||
public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException {
|
||||
|
||||
final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context);
|
||||
|
||||
return new LeafSearchScript() {
|
||||
|
||||
@Override
|
||||
public Object unwrap(Object value) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setNextVar(String name, Object value) {
|
||||
vars.put(name, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object run() {
|
||||
String fieldName = (fieldNameParam != null) ? fieldNameParam : (String) compiledScript.compiled();
|
||||
return leafLookup.doc().get(fieldName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setScorer(Scorer scorer) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setSource(Map<String, Object> source) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setDocument(int doc) {
|
||||
if (leafLookup != null) {
|
||||
leafLookup.setDocument(doc);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public long runAsLong() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public float runAsFloat() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public double runAsDouble() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean needsScores() {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public void scriptRemoved(CompiledScript script) {
|
||||
}
|
||||
}
|
||||
}
|
|
@ -41,7 +41,7 @@ import org.elasticsearch.common.Priority;
|
|||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.geo.GeoUtils;
|
||||
import org.elasticsearch.common.geo.builders.LineStringBuilder;
|
||||
import org.elasticsearch.common.geo.builders.MultiPolygonBuilder;
|
||||
import org.elasticsearch.common.geo.builders.PolygonBuilder;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilders;
|
||||
|
@ -129,17 +129,17 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
// polygon with hole
|
||||
ShapeBuilders.newPolygon()
|
||||
.point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
|
||||
.hole()
|
||||
.hole(new LineStringBuilder()
|
||||
.point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
|
||||
.close().close().build();
|
||||
.close()).close().build();
|
||||
|
||||
try {
|
||||
// polygon with overlapping hole
|
||||
ShapeBuilders.newPolygon()
|
||||
.point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
|
||||
.hole()
|
||||
.hole(new LineStringBuilder()
|
||||
.point(-5, -5).point(-5, 11).point(5, 11).point(5, -5)
|
||||
.close().close().build();
|
||||
.close()).close().build();
|
||||
|
||||
fail("Self intersection not detected");
|
||||
} catch (InvalidShapeException e) {
|
||||
|
@ -149,12 +149,12 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
// polygon with intersection holes
|
||||
ShapeBuilders.newPolygon()
|
||||
.point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
|
||||
.hole()
|
||||
.hole(new LineStringBuilder()
|
||||
.point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
|
||||
.close()
|
||||
.hole()
|
||||
.close())
|
||||
.hole(new LineStringBuilder()
|
||||
.point(-5, -6).point(5, -6).point(5, -4).point(-5, -4)
|
||||
.close()
|
||||
.close())
|
||||
.close().build();
|
||||
fail("Intersection of holes not detected");
|
||||
} catch (InvalidShapeException e) {
|
||||
|
@ -175,52 +175,27 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
} catch (InvalidShapeException e) {
|
||||
}
|
||||
|
||||
// Not specified
|
||||
// try {
|
||||
// // two overlapping polygons within a multipolygon
|
||||
// ShapeBuilder.newMultiPolygon()
|
||||
// .polygon()
|
||||
// .point(-10, -10)
|
||||
// .point(-10, 10)
|
||||
// .point(10, 10)
|
||||
// .point(10, -10)
|
||||
// .close()
|
||||
// .polygon()
|
||||
// .point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
|
||||
// .close().build();
|
||||
// fail("Polygon intersection not detected";
|
||||
// } catch (InvalidShapeException e) {}
|
||||
|
||||
// Multipolygon: polygon with hole and polygon within the whole
|
||||
ShapeBuilders.newMultiPolygon()
|
||||
.polygon()
|
||||
.point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
|
||||
.hole()
|
||||
.point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
|
||||
.close()
|
||||
.close()
|
||||
.polygon()
|
||||
.point(-4, -4).point(-4, 4).point(4, 4).point(4, -4)
|
||||
.close()
|
||||
ShapeBuilders
|
||||
.newMultiPolygon()
|
||||
.polygon(new PolygonBuilder()
|
||||
.point(-10, -10)
|
||||
.point(-10, 10)
|
||||
.point(10, 10)
|
||||
.point(10, -10)
|
||||
.hole(new LineStringBuilder().point(-5, -5)
|
||||
.point(-5, 5)
|
||||
.point(5, 5)
|
||||
.point(5, -5)
|
||||
.close())
|
||||
.close())
|
||||
.polygon(new PolygonBuilder()
|
||||
.point(-4, -4)
|
||||
.point(-4, 4)
|
||||
.point(4, 4)
|
||||
.point(4, -4)
|
||||
.close())
|
||||
.build();
|
||||
|
||||
// Not supported
|
||||
// try {
|
||||
// // Multipolygon: polygon with hole and polygon within the hole but overlapping
|
||||
// ShapeBuilder.newMultiPolygon()
|
||||
// .polygon()
|
||||
// .point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
|
||||
// .hole()
|
||||
// .point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
|
||||
// .close()
|
||||
// .close()
|
||||
// .polygon()
|
||||
// .point(-4, -4).point(-4, 6).point(4, 6).point(4, -4)
|
||||
// .close()
|
||||
// .build();
|
||||
// fail("Polygon intersection not detected";
|
||||
// } catch (InvalidShapeException e) {}
|
||||
|
||||
}
|
||||
|
||||
public void testShapeRelations() throws Exception {
|
||||
|
@ -248,15 +223,13 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
// with a hole of size 5x5 equidistant from all sides. This hole in turn contains
|
||||
// the second polygon of size 4x4 equidistant from all sides
|
||||
MultiPolygonBuilder polygon = ShapeBuilders.newMultiPolygon()
|
||||
.polygon()
|
||||
.point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
|
||||
.hole()
|
||||
.point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
|
||||
.close()
|
||||
.close()
|
||||
.polygon()
|
||||
.point(-4, -4).point(-4, 4).point(4, 4).point(4, -4)
|
||||
.close();
|
||||
.polygon(new PolygonBuilder()
|
||||
.point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
|
||||
.hole(new LineStringBuilder()
|
||||
.point(-5, -5).point(-5, 5).point(5, 5).point(5, -5).close())
|
||||
.close())
|
||||
.polygon(new PolygonBuilder()
|
||||
.point(-4, -4).point(-4, 4).point(4, 4).point(4, -4).close());
|
||||
|
||||
BytesReference data = jsonBuilder().startObject().field("area", polygon).endObject().bytes();
|
||||
|
||||
|
@ -318,9 +291,8 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
// Create a polygon that fills the empty area of the polygon defined above
|
||||
PolygonBuilder inverse = ShapeBuilders.newPolygon()
|
||||
.point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
|
||||
.hole()
|
||||
.point(-4, -4).point(-4, 4).point(4, 4).point(4, -4)
|
||||
.close()
|
||||
.hole(new LineStringBuilder()
|
||||
.point(-4, -4).point(-4, 4).point(4, 4).point(4, -4).close())
|
||||
.close();
|
||||
|
||||
data = jsonBuilder().startObject().field("area", inverse).endObject().bytes();
|
||||
|
@ -338,9 +310,8 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
// Create Polygon with hole and common edge
|
||||
PolygonBuilder builder = ShapeBuilders.newPolygon()
|
||||
.point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
|
||||
.hole()
|
||||
.point(-5, -5).point(-5, 5).point(10, 5).point(10, -5)
|
||||
.close()
|
||||
.hole(new LineStringBuilder()
|
||||
.point(-5, -5).point(-5, 5).point(10, 5).point(10, -5).close())
|
||||
.close();
|
||||
|
||||
if (withinSupport) {
|
||||
|
@ -367,7 +338,7 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
// Create a polygon crossing longitude 180 with hole.
|
||||
builder = ShapeBuilders.newPolygon()
|
||||
.point(170, -10).point(190, -10).point(190, 10).point(170, 10)
|
||||
.hole().point(175, -5).point(185, -5).point(185, 5).point(175, 5).close()
|
||||
.hole(new LineStringBuilder().point(175, -5).point(185, -5).point(185, 5).point(175, 5).close())
|
||||
.close();
|
||||
|
||||
data = jsonBuilder().startObject().field("area", builder).endObject().bytes();
|
||||
|
|
|
@ -25,6 +25,7 @@ import org.elasticsearch.action.search.SearchResponse;
|
|||
import org.elasticsearch.common.geo.ShapeRelation;
|
||||
import org.elasticsearch.common.geo.builders.EnvelopeBuilder;
|
||||
import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder;
|
||||
import org.elasticsearch.common.geo.builders.LineStringBuilder;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilder;
|
||||
import org.elasticsearch.common.geo.builders.ShapeBuilders;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
@ -193,7 +194,7 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase {
|
|||
public void testReusableBuilder() throws IOException {
|
||||
ShapeBuilder polygon = ShapeBuilders.newPolygon()
|
||||
.point(170, -10).point(190, -10).point(190, 10).point(170, 10)
|
||||
.hole().point(175, -5).point(185, -5).point(185, 5).point(175, 5).close()
|
||||
.hole(new LineStringBuilder().point(175, -5).point(185, -5).point(185, 5).point(175, 5).close())
|
||||
.close();
|
||||
assertUnmodified(polygon);
|
||||
|
||||
|
|
|
@ -27,6 +27,7 @@ import org.elasticsearch.action.search.SearchResponse;
|
|||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.search.internal.DefaultSearchContext;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
|
||||
|
@ -41,6 +42,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
|||
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
|
@ -104,6 +106,57 @@ public class SimpleSearchIT extends ESIntegTestCase {
|
|||
assertHitCount(search, 1l);
|
||||
}
|
||||
|
||||
public void testIpCIDR() throws Exception {
|
||||
createIndex("test");
|
||||
|
||||
client().admin().indices().preparePutMapping("test").setType("type1")
|
||||
.setSource(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
|
||||
.startObject("ip").field("type", "ip").endObject()
|
||||
.endObject().endObject().endObject())
|
||||
.execute().actionGet();
|
||||
ensureGreen();
|
||||
|
||||
client().prepareIndex("test", "type1", "1").setSource("ip", "192.168.0.1").execute().actionGet();
|
||||
client().prepareIndex("test", "type1", "2").setSource("ip", "192.168.0.2").execute().actionGet();
|
||||
client().prepareIndex("test", "type1", "3").setSource("ip", "192.168.0.3").execute().actionGet();
|
||||
client().prepareIndex("test", "type1", "4").setSource("ip", "192.168.1.4").execute().actionGet();
|
||||
refresh();
|
||||
|
||||
SearchResponse search = client().prepareSearch()
|
||||
.setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.0.1/32")))
|
||||
.execute().actionGet();
|
||||
assertHitCount(search, 1l);
|
||||
|
||||
search = client().prepareSearch()
|
||||
.setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.0.1/24")))
|
||||
.execute().actionGet();
|
||||
assertHitCount(search, 3l);
|
||||
|
||||
search = client().prepareSearch()
|
||||
.setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.0.1/8")))
|
||||
.execute().actionGet();
|
||||
assertHitCount(search, 4l);
|
||||
|
||||
search = client().prepareSearch()
|
||||
.setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.1.1/24")))
|
||||
.execute().actionGet();
|
||||
assertHitCount(search, 1l);
|
||||
|
||||
search = client().prepareSearch()
|
||||
.setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "0.0.0.0/0")))
|
||||
.execute().actionGet();
|
||||
assertHitCount(search, 4l);
|
||||
|
||||
search = client().prepareSearch()
|
||||
.setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.1.5/32")))
|
||||
.execute().actionGet();
|
||||
assertHitCount(search, 0l);
|
||||
|
||||
assertFailures(client().prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "0/0/0/0/0"))),
|
||||
RestStatus.BAD_REQUEST,
|
||||
containsString("not a valid ip address"));
|
||||
}
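The assertions above rely on `ip` fields accepting CIDR notation in a plain `term` query; in the query DSL the same lookup would be written roughly like this (a sketch using the field and address from the test above):

[source,js]
--------------------------------------------------
{ "term" : { "ip" : "192.168.0.1/24" } }
--------------------------------------------------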
|
||||
|
||||
public void testSimpleId() {
|
||||
createIndex("test");
|
||||
|
||||
|
|
|
@ -31,7 +31,6 @@ import com.vividsolutions.jts.geom.Coordinate;
|
|||
import com.vividsolutions.jts.geom.Geometry;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.geo.builders.BaseLineStringBuilder;
|
||||
import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder;
|
||||
import org.elasticsearch.common.geo.builders.LineStringBuilder;
|
||||
import org.elasticsearch.common.geo.builders.MultiLineStringBuilder;
|
||||
|
@ -198,7 +197,7 @@ public class RandomShapeGenerator extends RandomGeoGenerator {
|
|||
case MULTILINESTRING:
|
||||
MultiLineStringBuilder mlsb = new MultiLineStringBuilder();
|
||||
for (int i=0; i<RandomInts.randomIntBetween(r, 1, 10); ++i) {
|
||||
mlsb.linestring((BaseLineStringBuilder) createShape(r, nearPoint, within, ShapeType.LINESTRING, false));
|
||||
mlsb.linestring((LineStringBuilder) createShape(r, nearPoint, within, ShapeType.LINESTRING, false));
|
||||
}
|
||||
return mlsb;
|
||||
case POLYGON:
|
||||
|
|
|
@ -72,27 +72,27 @@ sub dump_issues {
    $month++;
    $year += 1900;

    print <<"HTML";
<html>
<head>
<meta charset="UTF-8">
</head>
<body>
HTML
    print <<"ASCIIDOC";
:issue: https://github.com/${User_Repo}issues/
:pull: https://github.com/${User_Repo}pull/

[[release-notes-$version]]
== $version Release Notes

ASCIIDOC

    for my $group ( @Groups, 'other' ) {
        my $group_issues = $issues->{$group} or next;
        print "<h2>$Group_Labels{$group}</h2>\n\n<ul>\n";
        print "[[$group-$version]]\n"
            . "[float]\n"
            . "=== $Group_Labels{$group}\n\n";

        for my $header ( sort keys %$group_issues ) {
            my $header_issues = $group_issues->{$header};
            my $prefix = "<li>";
            if ($header) {
                print "<li>$header:<ul>";
            }
            print( $header || 'HEADER MISSING', "::\n" );

            for my $issue (@$header_issues) {
                my $title = $issue->{title};
                $title =~ s{`([^`]+)`}{<code>$1</code>}g;

                if ( $issue->{state} eq 'open' ) {
                    $title .= " [OPEN]";

@ -102,30 +102,23 @@ HTML
                }
                my $number = $issue->{number};

                print encode_utf8( $prefix
                    . $title
                    . qq[ <a href="${Issue_URL}${number}">#${number}</a>] );
                print encode_utf8("* $title {pull}${number}[#${number}]");

                if ( my $related = $issue->{related_issues} ) {
                    my %uniq = map { $_ => 1 } @$related;
                    print keys %uniq > 1
                        ? " (issues: "
                        : " (issue: ";
                    print join ", ",
                        map {qq[<a href="${Issue_URL}${_}">#${_}</a>]}
                    print join ", ", map {"{issue}${_}[#${_}]"}
                        sort keys %uniq;
                    print ")";
                }
                print "</li>\n";
            }
            if ($header) {
                print "</ul></li>\n";
                print "\n";
            }
            print "\n";
        }
        print "</ul>";
        print "\n\n";
    }
    print "</body></html>\n";
}
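Tracing the new print statements above, the script now emits asciidoc rather than HTML, along these lines (a sketch: the version, group, title, and issue numbers are placeholders, not real entries):

:issue: https://github.com/elastic/elasticsearch/issues/
:pull: https://github.com/elastic/elasticsearch/pull/

[[release-notes-X.Y.Z]]
== X.Y.Z Release Notes

[[enhancements-X.Y.Z]]
[float]
=== Enhancements

Aggregations::
* Example issue title {pull}12345[#12345] (issue: {issue}12340[#12340])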
#===================================
|
||||
|
|
|
@ -18,6 +18,7 @@
|
|||
*/
|
||||
|
||||
import org.apache.tools.ant.filters.FixCrLfFilter
|
||||
import org.apache.tools.ant.taskdefs.condition.Os
|
||||
import org.elasticsearch.gradle.precommit.DependencyLicensesTask
|
||||
import org.elasticsearch.gradle.test.RunTask
|
||||
import org.elasticsearch.gradle.EmptyDirTask
|
||||
|
@ -142,6 +143,7 @@ configure(subprojects.findAll { it.name == 'zip' || it.name == 'tar' }) {
|
|||
* MavenFilteringHack or any other copy-style action.
|
||||
*/
|
||||
configure(subprojects.findAll { it.name == 'deb' || it.name == 'rpm' }) {
|
||||
integTest.enabled = Os.isFamily(Os.FAMILY_WINDOWS) == false
|
||||
File packagingFiles = new File(buildDir, 'packaging')
|
||||
project.ext.packagingFiles = packagingFiles
|
||||
task processPackagingFiles(type: Copy) {
|
||||
|
|
|
@ -7,6 +7,8 @@
:jdk: 1.8.0_25
:defguide: https://www.elastic.co/guide/en/elasticsearch/guide/current
:plugins: https://www.elastic.co/guide/en/elasticsearch/plugins/master
:issue: https://github.com/elastic/elasticsearch/issues
:pull: https://github.com/elastic/elasticsearch/pull

include::getting-started.asciidoc[]

@ -42,6 +44,10 @@ include::testing.asciidoc[]

include::glossary.asciidoc[]

//////////////////////////////////////////
include::release-notes.asciidoc[]
//////////////////////////////////////////

include::redirects.asciidoc[]
@ -1,7 +1,7 @@
[[token-count]]
=== Token count datatype

A field of type `token_count` is really an <<number,`integer>> field which
A field of type `token_count` is really an <<number,`integer`>> field which
accepts string values, analyzes them, then indexes the number of tokens in the
string.
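For context, a `token_count` field is normally declared as a multi-field with an explicit analyzer; a minimal mapping sketch (the index, type, and field names below are illustrative, not part of this commit):

[source,js]
--------------------------------------------------
{
  "mappings": {
    "my_type": {
      "properties": {
        "name": {
          "type": "string",
          "fields": {
            "length": {
              "type": "token_count",
              "analyzer": "standard"
            }
          }
        }
      }
    }
  }
}
--------------------------------------------------

The `name.length` sub-field can then be queried and aggregated on like any other integer field.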
@ -6,11 +6,7 @@ Returns documents that have at least one non-`null` value in the original field:
[source,js]
--------------------------------------------------
{
    "constant_score" : {
        "filter" : {
            "exists" : { "field" : "user" }
        }
    }
    "exists" : { "field" : "user" }
}
--------------------------------------------------
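The same query expressed through the Java API used by the tests in this commit would look roughly like this (a sketch; the index name is illustrative):

[source,java]
--------------------------------------------------
SearchResponse response = client().prepareSearch("my_index")
        .setQuery(QueryBuilders.existsQuery("user"))
        .get();
--------------------------------------------------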
@ -0,0 +1,7 @@
[[es-release-notes]]
= Release Notes

[partintro]
--
This section will summarize the changes in released versions.
--
@ -37,7 +37,6 @@ import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; // ja
|
|||
* final content char.
|
||||
* <p>
|
||||
*
|
||||
* @lucene.experimental
|
||||
* @deprecated Implement {@link TermToBytesRefAttribute} and store bytes directly
|
||||
* instead. This class WAS removed in Lucene 5.0
|
||||
*/
|
||||
|
|
|
@ -444,33 +444,6 @@ public class ExtendedStatsTests extends AbstractNumericTestCase {
|
|||
checkUpperLowerBounds(stats, sigma);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptExplicitSingleValuedWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("inc", 1);
|
||||
double sigma = randomDouble() * randomIntBetween(1, 10);
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(
|
||||
extendedStats("stats").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)).sigma(sigma))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
ExtendedStats stats = searchResponse.getAggregations().get("stats");
|
||||
assertThat(stats, notNullValue());
|
||||
assertThat(stats.getName(), equalTo("stats"));
|
||||
assertThat(stats.getAvg(), equalTo((double) (2+3+4+5+6+7+8+9+10+11) / 10));
|
||||
assertThat(stats.getMin(), equalTo(2.0));
|
||||
assertThat(stats.getMax(), equalTo(11.0));
|
||||
assertThat(stats.getSum(), equalTo((double) 2+3+4+5+6+7+8+9+10+11));
|
||||
assertThat(stats.getCount(), equalTo(10l));
|
||||
assertThat(stats.getSumOfSquares(), equalTo((double) 4+9+16+25+36+49+64+81+100+121));
|
||||
assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11)));
|
||||
assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11)));
|
||||
checkUpperLowerBounds(stats, sigma);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptMultiValued() throws Exception {
|
||||
double sigma = randomDouble() * randomIntBetween(1, 10);
|
||||
|
@ -495,32 +468,6 @@ public class ExtendedStatsTests extends AbstractNumericTestCase {
|
|||
checkUpperLowerBounds(stats, sigma);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptExplicitMultiValued() throws Exception {
|
||||
double sigma = randomDouble() * randomIntBetween(1, 10);
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(extendedStats("stats").script(new Script("doc['values'].values")).sigma(sigma))
|
||||
.execute().actionGet();
|
||||
|
||||
assertShardExecutionState(searchResponse, 0);
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
ExtendedStats stats = searchResponse.getAggregations().get("stats");
|
||||
assertThat(stats, notNullValue());
|
||||
assertThat(stats.getName(), equalTo("stats"));
|
||||
assertThat(stats.getAvg(), equalTo((double) (2+3+4+5+6+7+8+9+10+11+3+4+5+6+7+8+9+10+11+12) / 20));
|
||||
assertThat(stats.getMin(), equalTo(2.0));
|
||||
assertThat(stats.getMax(), equalTo(12.0));
|
||||
assertThat(stats.getSum(), equalTo((double) 2+3+4+5+6+7+8+9+10+11+3+4+5+6+7+8+9+10+11+12));
|
||||
assertThat(stats.getCount(), equalTo(20l));
|
||||
assertThat(stats.getSumOfSquares(), equalTo((double) 4+9+16+25+36+49+64+81+100+121+9+16+25+36+49+64+81+100+121+144));
|
||||
assertThat(stats.getVariance(), equalTo(variance(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 12)));
|
||||
assertThat(stats.getStdDeviation(), equalTo(stdDev(2, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 3, 4, 5, 6, 7, 8 ,9, 10, 11, 12)));
|
||||
checkUpperLowerBounds(stats, sigma);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptMultiValuedWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
|
|
|
@ -380,26 +380,6 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase {
|
|||
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptExplicitSingleValuedWithParams() throws Exception {
|
||||
int sigDigits = randomSignificantDigits();
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("dec", 1);
|
||||
final double[] pcts = randomPercents(minValue - 1, maxValue - 1);
|
||||
SearchResponse searchResponse = client()
|
||||
.prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(
|
||||
percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits)
|
||||
.script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params)).percentiles(pcts))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
|
||||
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptMultiValued() throws Exception {
|
||||
int sigDigits = randomSignificantDigits();
|
||||
|
@ -417,23 +397,6 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase {
|
|||
assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptExplicitMultiValued() throws Exception {
|
||||
int sigDigits = randomSignificantDigits();
|
||||
final double[] pcts = randomPercents(minValues, maxValues);
|
||||
SearchResponse searchResponse = client()
|
||||
.prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(
|
||||
percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits)
|
||||
.script(new Script("doc['values'].values")).percentiles(pcts)).execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
|
||||
assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptMultiValuedWithParams() throws Exception {
|
||||
int sigDigits = randomSignificantDigits();
|
||||
|
|
|
@ -370,26 +370,6 @@ public class HDRPercentilesTests extends AbstractNumericTestCase {
|
|||
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptExplicitSingleValuedWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("dec", 1);
|
||||
final double[] pcts = randomPercentiles();
|
||||
int sigDigits = randomSignificantDigits();
|
||||
SearchResponse searchResponse = client()
|
||||
.prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(
|
||||
percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR)
|
||||
.script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params)).percentiles(pcts))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
|
||||
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1, sigDigits);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptMultiValued() throws Exception {
|
||||
final double[] pcts = randomPercentiles();
|
||||
|
@ -407,23 +387,6 @@ public class HDRPercentilesTests extends AbstractNumericTestCase {
|
|||
assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptExplicitMultiValued() throws Exception {
|
||||
final double[] pcts = randomPercentiles();
|
||||
int sigDigits = randomSignificantDigits();
|
||||
SearchResponse searchResponse = client()
|
||||
.prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(
|
||||
percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR)
|
||||
.script(new Script("doc['values'].values")).percentiles(pcts)).execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
|
||||
assertConsistent(pcts, percentiles, minValues, maxValues, sigDigits);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptMultiValuedWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
|
|
|
@ -262,23 +262,6 @@ public class MaxTests extends AbstractNumericTestCase {
|
|||
assertThat(max.getValue(), equalTo(11.0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptExplicitSingleValuedWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("inc", 1);
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(max("max").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
Max max = searchResponse.getAggregations().get("max");
|
||||
assertThat(max, notNullValue());
|
||||
assertThat(max.getName(), equalTo("max"));
|
||||
assertThat(max.getValue(), equalTo(11.0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptMultiValued() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
|
@ -294,21 +277,6 @@ public class MaxTests extends AbstractNumericTestCase {
|
|||
assertThat(max.getValue(), equalTo(12.0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptExplicitMultiValued() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(max("max").script(new Script("doc['values'].values")))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
Max max = searchResponse.getAggregations().get("max");
|
||||
assertThat(max, notNullValue());
|
||||
assertThat(max.getName(), equalTo("max"));
|
||||
assertThat(max.getValue(), equalTo(12.0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptMultiValuedWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
|
|
|
@ -272,22 +272,6 @@ public class MinTests extends AbstractNumericTestCase {
|
|||
assertThat(min.getValue(), equalTo(0.0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptExplicitSingleValuedWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("dec", 1);
|
||||
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
|
||||
.addAggregation(min("min").script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params))).execute()
|
||||
.actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
Min min = searchResponse.getAggregations().get("min");
|
||||
assertThat(min, notNullValue());
|
||||
assertThat(min.getName(), equalTo("min"));
|
||||
assertThat(min.getValue(), equalTo(0.0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptMultiValued() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
|
||||
|
@ -301,19 +285,6 @@ public class MinTests extends AbstractNumericTestCase {
|
|||
assertThat(min.getValue(), equalTo(2.0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptExplicitMultiValued() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
|
||||
.addAggregation(min("min").script(new Script("doc['values'].values"))).execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
Min min = searchResponse.getAggregations().get("min");
|
||||
assertThat(min, notNullValue());
|
||||
assertThat(min.getName(), equalTo("min"));
|
||||
assertThat(min.getValue(), equalTo(2.0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptMultiValuedWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
|
|
|
@ -1,61 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.messy.tests;
|
||||
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.groovy.GroovyPlugin;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import static org.elasticsearch.index.query.QueryBuilders.scriptQuery;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
/**
|
||||
*/
|
||||
@ESIntegTestCase.ClusterScope(scope= ESIntegTestCase.Scope.SUITE)
|
||||
public class SearchTimeoutTests extends ESIntegTestCase {
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> nodePlugins() {
|
||||
return Collections.singleton(GroovyPlugin.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Settings nodeSettings(int nodeOrdinal) {
|
||||
return Settings.settingsBuilder().put(super.nodeSettings(nodeOrdinal)).build();
|
||||
}
|
||||
|
||||
public void testSimpleTimeout() throws Exception {
|
||||
client().prepareIndex("test", "type", "1").setSource("field", "value").setRefresh(true).execute().actionGet();
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch("test")
|
||||
.setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS))
|
||||
.setQuery(scriptQuery(new Script("Thread.sleep(500); return true;")))
|
||||
.execute().actionGet();
|
||||
assertThat(searchResponse.isTimedOut(), equalTo(true));
|
||||
}
|
||||
}
|
|
@ -353,29 +353,6 @@ public class StatsTests extends AbstractNumericTestCase {
|
|||
assertThat(stats.getCount(), equalTo(10l));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptExplicitSingleValuedWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("inc", 1);
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(stats("stats").script(new Script("doc['value'].value + inc", ScriptType.INLINE, null, params)))
|
||||
.execute().actionGet();
|
||||
|
||||
assertShardExecutionState(searchResponse, 0);
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
Stats stats = searchResponse.getAggregations().get("stats");
|
||||
assertThat(stats, notNullValue());
|
||||
assertThat(stats.getName(), equalTo("stats"));
|
||||
assertThat(stats.getAvg(), equalTo((double) (2+3+4+5+6+7+8+9+10+11) / 10));
|
||||
assertThat(stats.getMin(), equalTo(2.0));
|
||||
assertThat(stats.getMax(), equalTo(11.0));
|
||||
assertThat(stats.getSum(), equalTo((double) 2+3+4+5+6+7+8+9+10+11));
|
||||
assertThat(stats.getCount(), equalTo(10l));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptMultiValued() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
|
@ -397,27 +374,6 @@ public class StatsTests extends AbstractNumericTestCase {
|
|||
assertThat(stats.getCount(), equalTo(20l));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptExplicitMultiValued() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(stats("stats").script(new Script("doc['values'].values")))
|
||||
.execute().actionGet();
|
||||
|
||||
assertShardExecutionState(searchResponse, 0);
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
Stats stats = searchResponse.getAggregations().get("stats");
|
||||
assertThat(stats, notNullValue());
|
||||
assertThat(stats.getName(), equalTo("stats"));
|
||||
assertThat(stats.getAvg(), equalTo((double) (2+3+4+5+6+7+8+9+10+11+3+4+5+6+7+8+9+10+11+12) / 20));
|
||||
assertThat(stats.getMin(), equalTo(2.0));
|
||||
assertThat(stats.getMax(), equalTo(12.0));
|
||||
assertThat(stats.getSum(), equalTo((double) 2+3+4+5+6+7+8+9+10+11+3+4+5+6+7+8+9+10+11+12));
|
||||
assertThat(stats.getCount(), equalTo(20l));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptMultiValuedWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
|
|
|
@ -363,25 +363,6 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase {
|
|||
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptExplicitSingleValuedWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("dec", 1);
|
||||
final double[] pcts = randomPercents(minValue -1 , maxValue - 1);
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(randomCompression(percentileRanks("percentile_ranks"))
|
||||
.script(
|
||||
new Script("doc['value'].value - dec", ScriptType.INLINE, null, params))
|
||||
.percentiles(pcts))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
|
||||
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptMultiValued() throws Exception {
|
||||
final double[] pcts = randomPercents(minValues, maxValues);
|
||||
|
@ -398,22 +379,6 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase {
|
|||
assertConsistent(pcts, percentiles, minValues, maxValues);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptExplicitMultiValued() throws Exception {
|
||||
final double[] pcts = randomPercents(minValues, maxValues);
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(randomCompression(percentileRanks("percentile_ranks"))
|
||||
.script(new Script("doc['values'].values"))
|
||||
.percentiles(pcts))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
final PercentileRanks percentiles = searchResponse.getAggregations().get("percentile_ranks");
|
||||
assertConsistent(pcts, percentiles, minValues, maxValues);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptMultiValuedWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
|
|
|
@ -347,25 +347,6 @@ public class TDigestPercentilesTests extends AbstractNumericTestCase {
|
|||
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptExplicitSingleValuedWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("dec", 1);
|
||||
final double[] pcts = randomPercentiles();
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(randomCompression(percentiles("percentiles"))
|
||||
.script(
|
||||
new Script("doc['value'].value - dec", ScriptType.INLINE, null, params))
|
||||
.percentiles(pcts))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
|
||||
assertConsistent(pcts, percentiles, minValue - 1, maxValue - 1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptMultiValued() throws Exception {
|
||||
final double[] pcts = randomPercentiles();
|
||||
|
@ -382,22 +363,6 @@ public class TDigestPercentilesTests extends AbstractNumericTestCase {
|
|||
assertConsistent(pcts, percentiles, minValues, maxValues);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptExplicitMultiValued() throws Exception {
|
||||
final double[] pcts = randomPercentiles();
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setQuery(matchAllQuery())
|
||||
.addAggregation(randomCompression(percentiles("percentiles"))
|
||||
.script(new Script("doc['values'].values"))
|
||||
.percentiles(pcts))
|
||||
.execute().actionGet();
|
||||
|
||||
assertHitCount(searchResponse, 10);
|
||||
|
||||
final Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
|
||||
assertConsistent(pcts, percentiles, minValues, maxValues);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testScriptMultiValuedWithParams() throws Exception {
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
|
|
|
@ -80,7 +80,6 @@
|
|||
renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java
|
||||
renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/TopHitsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TopHitsTests.java
|
||||
renamed: core/src/test/java/org/elasticsearch/index/mapper/TransformOnIndexMapperIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TransformOnIndexMapperTests.java
|
||||
renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ValueCountTests.java
|
||||
renamed: core/src/main/java/org/elasticsearch/script/groovy/GroovyScriptCompilationException.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java
|
||||
renamed: core/src/test/java/org/elasticsearch/script/GroovyScriptIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyScriptTests.java
|
||||
renamed: core/src/test/java/org/elasticsearch/script/GroovySecurityIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovySecurityTests.java
|
||||
|
|
|
@ -94,11 +94,7 @@ public abstract class AbstractNumericTestCase extends ESIntegTestCase {

    public abstract void testScriptSingleValuedWithParams() throws Exception;

    public abstract void testScriptExplicitSingleValuedWithParams() throws Exception;

    public abstract void testScriptMultiValued() throws Exception;

    public abstract void testScriptExplicitMultiValued() throws Exception;

    public abstract void testScriptMultiValuedWithParams() throws Exception;
}
@ -192,7 +192,7 @@ import static org.hamcrest.Matchers.startsWith;
 * should be used, here is an example:
 * <pre>
 *
 * @ClusterScope(scope=Scope.TEST) public class SomeIT extends ESIntegTestCase {
 * {@literal @}ClusterScope(scope=Scope.TEST) public class SomeIT extends ESIntegTestCase {
 * public void testMethod() {}
 * }
 * </pre>

@ -203,7 +203,7 @@ import static org.hamcrest.Matchers.startsWith;
 * determined at random and can change across tests. The {@link ClusterScope} allows configuring the initial number of nodes
 * that are created before the tests start.
 * <pre>
 * @ClusterScope(scope=Scope.SUITE, numDataNodes=3)
 * {@literal @}ClusterScope(scope=Scope.SUITE, numDataNodes=3)
 * public class SomeIT extends ESIntegTestCase {
 * public void testMethod() {}
 * }