Merge branch 'master' into close-index-api-refactoring

This commit is contained in:
Tanguy Leroux 2018-12-19 09:34:59 +01:00
commit c99fd6a53b
336 changed files with 16566 additions and 4767 deletions

View File

@ -24,7 +24,7 @@ mainClassName = 'org.openjdk.jmh.Main'
assemble.enabled = false
archivesBaseName = 'elasticsearch-benchmarks'
test.enabled = false
unitTest.enabled = false
dependencies {
compile("org.elasticsearch:elasticsearch:${version}") {

View File

@ -163,8 +163,8 @@ task verifyVersions {
* the enabled state of every bwc task. It should be set back to true
* after the backport of the backcompat code is complete.
*/
final boolean bwc_tests_enabled = false
final String bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/pull/36555" /* place a PR link here when committing bwc changes */
final boolean bwc_tests_enabled = true
final String bwc_tests_disabled_issue = "" /* place a PR link here when committing bwc changes */
if (bwc_tests_enabled == false) {
if (bwc_tests_disabled_issue.isEmpty()) {
throw new GradleException("bwc_tests_disabled_issue must be set when bwc_tests_enabled == false")

View File

@ -179,9 +179,7 @@ if (project != rootProject) {
jarHell.enabled = false
thirdPartyAudit.enabled = false
test {
include "**/*Tests.class"
exclude "**/*IT.class"
unitTest {
// The test task is configured to use the runtimeJava version, but build-tools doesn't support all of them, so test
// with the compiler version instead on the ones that are too old.
if (project.runtimeJavaVersion <= JavaVersion.VERSION_1_10) {

View File

@ -4,30 +4,14 @@ import com.carrotsearch.ant.tasks.junit4.JUnit4
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.UnknownTaskException
import org.gradle.api.plugins.JavaBasePlugin
import org.gradle.api.tasks.TaskContainer
import org.gradle.api.tasks.TaskProvider
import org.gradle.api.tasks.testing.Test
class RandomizedTestingPlugin implements Plugin<Project> {
void apply(Project project) {
setupSeed(project)
replaceTestTask(project.tasks)
createUnitTestTask(project.tasks)
configureAnt(project.ant)
configureSanityCheck(project)
}
private static void configureSanityCheck(Project project) {
// Check the task graph to confirm tasks were indeed replaced
// https://github.com/elastic/elasticsearch/issues/31324
project.rootProject.getGradle().getTaskGraph().whenReady {
Task test = project.getTasks().findByName("test")
if (test != null && (test instanceof RandomizedTestingTask) == false) {
throw new IllegalStateException("Test task was not replaced in project ${project.path}. Found ${test.getClass()}")
}
}
}
/**
@ -57,35 +41,15 @@ class RandomizedTestingPlugin implements Plugin<Project> {
}
}
static void replaceTestTask(TaskContainer tasks) {
// Gradle 4.8 introduced lazy tasks, thus we deal both with the `test` task as well as its provider
// https://github.com/gradle/gradle/issues/5730#issuecomment-398822153
// since we can't be sure if the task was ever realized, we remove both the provider and the task
TaskProvider<Test> oldTestProvider
try {
oldTestProvider = tasks.named('test')
} catch (UnknownTaskException unused) {
// no test task, ok, user will use testing task on their own
return
static void createUnitTestTask(TaskContainer tasks) {
// only create a unitTest task if the `test` task exists as some projects don't make use of it.
tasks.matching { it.name == "test" }.all {
// We don't want to run any tests with the Gradle test runner since we add our own randomized runner
it.enabled = false
RandomizedTestingTask unitTest = tasks.create('unitTest', RandomizedTestingTask)
unitTest.description = 'Runs unit tests with the randomized testing framework'
it.dependsOn unitTest
}
Test oldTestTask = oldTestProvider.get()
// we still have to use replace here despite the remove above because the task container knows about the provider
// by the same name
RandomizedTestingTask newTestTask = tasks.replace('test', RandomizedTestingTask)
newTestTask.configure{
group = JavaBasePlugin.VERIFICATION_GROUP
description = 'Runs unit tests with the randomized testing framework'
dependsOn oldTestTask.dependsOn, 'testClasses'
classpath = oldTestTask.classpath
testClassesDirs = oldTestTask.project.sourceSets.test.output.classesDirs
}
// hack so check task depends on custom test
Task checkTask = tasks.getByName('check')
checkTask.dependsOn.remove(oldTestProvider)
checkTask.dependsOn.remove(oldTestTask)
checkTask.dependsOn.add(newTestTask)
}
static void configureAnt(AntBuilder ant) {

View File

@ -40,6 +40,7 @@ import org.gradle.api.artifacts.ProjectDependency
import org.gradle.api.artifacts.ResolvedArtifact
import org.gradle.api.artifacts.dsl.RepositoryHandler
import org.gradle.api.execution.TaskExecutionGraph
import org.gradle.api.plugins.JavaBasePlugin
import org.gradle.api.plugins.JavaPlugin
import org.gradle.api.publish.maven.MavenPublication
import org.gradle.api.publish.maven.plugins.MavenPublishPlugin
@ -888,15 +889,22 @@ class BuildPlugin implements Plugin<Project> {
parallelism System.getProperty('tests.jvms', project.rootProject.ext.defaultParallel)
onNonEmptyWorkDirectory 'wipe'
leaveTemporary true
project.sourceSets.matching { it.name == "test" }.all { test ->
task.testClassesDirs = test.output.classesDirs
task.classpath = test.runtimeClasspath
}
group = JavaBasePlugin.VERIFICATION_GROUP
dependsOn 'testClasses'
// Make sure all test tasks are configured properly
if (name != "test") {
project.tasks.matching { it.name == "test"}.all { testTask ->
task.testClassesDirs = testTask.testClassesDirs
task.classpath = testTask.classpath
task.shouldRunAfter testTask
}
}
if (name == "unitTest") {
include("**/*Tests.class")
}
// TODO: why are we not passing maxmemory to junit4?
jvmArg '-Xmx' + System.getProperty('tests.heap.size', '512m')
@ -986,8 +994,6 @@ class BuildPlugin implements Plugin<Project> {
exclude '**/*$*.class'
dependsOn(project.tasks.testClasses)
project.plugins.withType(ShadowPlugin).whenPluginAdded {
// Test against a shadow jar if we made one
classpath -= project.tasks.compileJava.outputs.files

View File

@ -55,8 +55,6 @@ public class RestIntegTestTask extends DefaultTask {
super.dependsOn(runner)
clusterInit = project.tasks.create(name: "${name}Cluster#init", dependsOn: project.testClasses)
runner.dependsOn(clusterInit)
runner.classpath = project.sourceSets.test.runtimeClasspath
runner.testClassesDirs = project.sourceSets.test.output.classesDirs
clusterConfig = project.extensions.create("${name}Cluster", ClusterConfiguration.class, project)
// override/add more for rest tests

View File

@ -48,7 +48,6 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]monitor[/\\]jvm[/\\]GcNames.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]monitor[/\\]jvm[/\\]HotThreads.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]node[/\\]Node.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugins[/\\]PluginsService.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]RepositoriesService.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]Repository.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]repositories[/\\]VerifyNodeRepositoryAction.java" checks="LineLength" />
@ -63,12 +62,10 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SnapshotsService.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]threadpool[/\\]ThreadPool.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]aliases[/\\]IndexAliasesIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]deps[/\\]joda[/\\]SimpleJodaTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]explain[/\\]ExplainActionIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]get[/\\]GetActionIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]indexing[/\\]IndexActionIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]monitor[/\\]jvm[/\\]JvmGcMonitorServiceSettingsTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]plugins[/\\]PluginsServiceTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]BytesRestResponseTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]routing[/\\]AliasRoutingIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]routing[/\\]SimpleRoutingIT.java" checks="LineLength" />

View File

@ -147,3 +147,15 @@ org.apache.logging.log4j.Logger#error(java.lang.Object)
org.apache.logging.log4j.Logger#error(java.lang.Object, java.lang.Throwable)
org.apache.logging.log4j.Logger#fatal(java.lang.Object)
org.apache.logging.log4j.Logger#fatal(java.lang.Object, java.lang.Throwable)
# Remove once Lucene 7.7 is integrated
@defaultMessage Use org.apache.lucene.document.XLatLonShape classes instead
org.apache.lucene.document.LatLonShape
org.apache.lucene.document.LatLonShapeBoundingBoxQuery
org.apache.lucene.document.LatLonShapeLineQuery
org.apache.lucene.document.LatLonShapePolygonQuery
org.apache.lucene.document.LatLonShapeQuery
org.apache.lucene.geo.Rectangle2D @ use @org.apache.lucene.geo.XRectangle2D instead
org.apache.lucene.geo.Tessellator @ use @org.apache.lucene.geo.XTessellator instead

View File

@ -27,7 +27,7 @@ forbiddenApisTest.enabled = false
// requires dependency on testing fw
jarHell.enabled = false
// we don't have tests for now
test.enabled = false
unitTest.enabled = false
task hello {
doFirst {

View File

@ -29,7 +29,7 @@ archivesBaseName = 'client-benchmarks'
mainClassName = 'org.elasticsearch.client.benchmark.BenchmarkMain'
// never try to invoke tests on the benchmark project - there aren't any
test.enabled = false
unitTest.enabled = false
dependencies {
compile 'org.apache.commons:commons-math3:3.2'

View File

@ -36,5 +36,5 @@ dependenciesInfo.enabled = false
compileJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked"
// no unit tests
test.enabled = false
unitTest.enabled = false
integTest.enabled = false

View File

@ -59,7 +59,6 @@ public class Job implements ToXContentObject {
public static final ParseField DATA_DESCRIPTION = new ParseField("data_description");
public static final ParseField DESCRIPTION = new ParseField("description");
public static final ParseField FINISHED_TIME = new ParseField("finished_time");
public static final ParseField ESTABLISHED_MODEL_MEMORY = new ParseField("established_model_memory");
public static final ParseField MODEL_PLOT_CONFIG = new ParseField("model_plot_config");
public static final ParseField RENORMALIZATION_WINDOW_DAYS = new ParseField("renormalization_window_days");
public static final ParseField BACKGROUND_PERSIST_INTERVAL = new ParseField("background_persist_interval");
@ -84,7 +83,6 @@ public class Job implements ToXContentObject {
(p) -> TimeUtil.parseTimeField(p, FINISHED_TIME.getPreferredName()),
FINISHED_TIME,
ValueType.VALUE);
PARSER.declareLong(Builder::setEstablishedModelMemory, ESTABLISHED_MODEL_MEMORY);
PARSER.declareObject(Builder::setAnalysisConfig, AnalysisConfig.PARSER, ANALYSIS_CONFIG);
PARSER.declareObject(Builder::setAnalysisLimits, AnalysisLimits.PARSER, ANALYSIS_LIMITS);
PARSER.declareObject(Builder::setDataDescription, DataDescription.PARSER, DATA_DESCRIPTION);
@ -107,7 +105,6 @@ public class Job implements ToXContentObject {
private final String description;
private final Date createTime;
private final Date finishedTime;
private final Long establishedModelMemory;
private final AnalysisConfig analysisConfig;
private final AnalysisLimits analysisLimits;
private final DataDescription dataDescription;
@ -122,7 +119,7 @@ public class Job implements ToXContentObject {
private final Boolean deleting;
private Job(String jobId, String jobType, List<String> groups, String description,
Date createTime, Date finishedTime, Long establishedModelMemory,
Date createTime, Date finishedTime,
AnalysisConfig analysisConfig, AnalysisLimits analysisLimits, DataDescription dataDescription,
ModelPlotConfig modelPlotConfig, Long renormalizationWindowDays, TimeValue backgroundPersistInterval,
Long modelSnapshotRetentionDays, Long resultsRetentionDays, Map<String, Object> customSettings,
@ -134,7 +131,6 @@ public class Job implements ToXContentObject {
this.description = description;
this.createTime = createTime;
this.finishedTime = finishedTime;
this.establishedModelMemory = establishedModelMemory;
this.analysisConfig = analysisConfig;
this.analysisLimits = analysisLimits;
this.dataDescription = dataDescription;
@ -204,16 +200,6 @@ public class Job implements ToXContentObject {
return finishedTime;
}
/**
* The established model memory of the job, or <code>null</code> if model
* memory has not reached equilibrium yet.
*
* @return The established model memory of the job
*/
public Long getEstablishedModelMemory() {
return establishedModelMemory;
}
/**
* The analysis configuration object
*
@ -306,9 +292,6 @@ public class Job implements ToXContentObject {
builder.timeField(FINISHED_TIME.getPreferredName(), FINISHED_TIME.getPreferredName() + humanReadableSuffix,
finishedTime.getTime());
}
if (establishedModelMemory != null) {
builder.field(ESTABLISHED_MODEL_MEMORY.getPreferredName(), establishedModelMemory);
}
builder.field(ANALYSIS_CONFIG.getPreferredName(), analysisConfig, params);
if (analysisLimits != null) {
builder.field(ANALYSIS_LIMITS.getPreferredName(), analysisLimits, params);
@ -364,7 +347,6 @@ public class Job implements ToXContentObject {
&& Objects.equals(this.description, that.description)
&& Objects.equals(this.createTime, that.createTime)
&& Objects.equals(this.finishedTime, that.finishedTime)
&& Objects.equals(this.establishedModelMemory, that.establishedModelMemory)
&& Objects.equals(this.analysisConfig, that.analysisConfig)
&& Objects.equals(this.analysisLimits, that.analysisLimits)
&& Objects.equals(this.dataDescription, that.dataDescription)
@ -381,7 +363,7 @@ public class Job implements ToXContentObject {
@Override
public int hashCode() {
return Objects.hash(jobId, jobType, groups, description, createTime, finishedTime, establishedModelMemory,
return Objects.hash(jobId, jobType, groups, description, createTime, finishedTime,
analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays,
backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings,
modelSnapshotId, resultsIndexName, deleting);
@ -407,7 +389,6 @@ public class Job implements ToXContentObject {
private DataDescription dataDescription;
private Date createTime;
private Date finishedTime;
private Long establishedModelMemory;
private ModelPlotConfig modelPlotConfig;
private Long renormalizationWindowDays;
private TimeValue backgroundPersistInterval;
@ -435,7 +416,6 @@ public class Job implements ToXContentObject {
this.dataDescription = job.getDataDescription();
this.createTime = job.getCreateTime();
this.finishedTime = job.getFinishedTime();
this.establishedModelMemory = job.getEstablishedModelMemory();
this.modelPlotConfig = job.getModelPlotConfig();
this.renormalizationWindowDays = job.getRenormalizationWindowDays();
this.backgroundPersistInterval = job.getBackgroundPersistInterval();
@ -496,11 +476,6 @@ public class Job implements ToXContentObject {
return this;
}
public Builder setEstablishedModelMemory(Long establishedModelMemory) {
this.establishedModelMemory = establishedModelMemory;
return this;
}
public Builder setDataDescription(DataDescription.Builder description) {
dataDescription = Objects.requireNonNull(description, DATA_DESCRIPTION.getPreferredName()).build();
return this;
@ -555,7 +530,7 @@ public class Job implements ToXContentObject {
Objects.requireNonNull(id, "[" + ID.getPreferredName() + "] must not be null");
Objects.requireNonNull(jobType, "[" + JOB_TYPE.getPreferredName() + "] must not be null");
return new Job(
id, jobType, groups, description, createTime, finishedTime, establishedModelMemory,
id, jobType, groups, description, createTime, finishedTime,
analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays,
backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings,
modelSnapshotId, resultsIndexName, deleting);

View File

@ -125,9 +125,6 @@ public class JobTests extends AbstractXContentTestCase<Job> {
if (randomBoolean()) {
builder.setFinishedTime(new Date(randomNonNegativeLong()));
}
if (randomBoolean()) {
builder.setEstablishedModelMemory(randomNonNegativeLong());
}
builder.setAnalysisConfig(AnalysisConfigTests.createRandomized());
builder.setAnalysisLimits(AnalysisLimitsTests.createRandomized());

View File

@ -55,4 +55,4 @@ namingConventions.enabled = false
//we aren't releasing this jar
thirdPartyAudit.enabled = false
test.enabled = false
unitTest.enabled = false

View File

@ -39,7 +39,6 @@ bwcVersions.forPreviousUnreleased { VersionCollection.UnreleasedVersionInfo unre
apply plugin: 'distribution'
// Not published so no need to assemble
assemble.enabled = false
assemble.dependsOn.remove('buildBwcVersion')
File checkoutDir = file("${buildDir}/bwc/checkout-${bwcBranch}")

View File

@ -7,7 +7,7 @@ forbiddenApisMain {
replaceSignatureFiles 'jdk-signatures'
}
test.enabled = false
unitTest.enabled = false
namingConventions.enabled = false
javadoc.enabled = false
loggerUsageCheck.enabled = false

View File

@ -35,7 +35,7 @@ dependencyLicenses {
mapping from: /bc.*/, to: 'bouncycastle'
}
test {
unitTest {
// TODO: find a way to add permissions for the tests in this module
systemProperty 'tests.security.manager', 'false'
}

View File

@ -14,8 +14,9 @@ release-state can be: released | prerelease | unreleased
:release-state: prerelease
:issue: https://github.com/elastic/elasticsearch/issues/
:ml-issue: https://github.com/elastic/ml-cpp/issues/
:pull: https://github.com/elastic/elasticsearch/pull/
:ml-pull: https://github.com/elastic/ml-cpp/pull/
:docker-repo: docker.elastic.co/elasticsearch/elasticsearch
:docker-image: {docker-repo}:{version}
:plugin_url: https://artifacts.elastic.co/downloads/elasticsearch-plugins

View File

@ -430,8 +430,8 @@ include-tagged::{doc-tests-file}[{api}-request-profiling-queries-results]
<3> Retrieve the time in millis spent executing the Lucene query
<4> Retrieve the profile results for the sub-queries (if any)
The Rest API documentation contains more information about {ref}/_profiling_queries.html[Profiling Queries] with
a description of the {ref}/_profiling_queries.html#_literal_query_literal_section[query profiling information]
The Rest API documentation contains more information about {ref}/search-profile-queries.html[Profiling Queries] with
a description of the query profiling information.
The `QueryProfileShardResult` also gives access to the profiling information for the Lucene collectors:
@ -445,7 +445,7 @@ include-tagged::{doc-tests-file}[{api}-request-profiling-queries-collectors]
<4> Retrieve the profile results for the sub-collectors (if any)
The Rest API documentation contains more information about profiling information
{ref}/_profiling_queries.html#_literal_collectors_literal_section[for Lucene collectors].
for Lucene collectors. See {ref}/search-profile-queries.html[Profiling queries].
In a very similar manner to the query tree execution, the `QueryProfileShardResult` object gives access
to the detailed aggregations tree execution:

View File

@ -7,8 +7,8 @@ Single terms are still indexed. It can be used as an alternative to the
Token Filter>> when we don't want to completely ignore common terms.
For example, the text "the quick brown is a fox" will be tokenized as
"the", "the_quick", "quick", "brown", "brown_is", "is_a", "a_fox",
"fox". Assuming "the", "is" and "a" are common words.
"the", "the_quick", "quick", "brown", "brown_is", "is", "is_a", "a",
"a_fox", "fox". Assuming "the", "is" and "a" are common words.
When `query_mode` is enabled, the token filter removes common words and
single terms followed by a common word. This parameter should be enabled
@ -45,7 +45,7 @@ PUT /common_grams_example
{
"settings": {
"analysis": {
"my_analyzer": {
"analyzer": {
"index_grams": {
"tokenizer": "whitespace",
"filter": ["common_grams"]

View File

@ -31,7 +31,8 @@ include::{docdir}/rest-api/timeoutparms.asciidoc[]
==== Authorization
include::ilm-cluster-mgt-privilege.asciidoc[]
You must have the `manage_ilm` cluster privilege to use this API.
For more information, see {stack-ov}/security-privileges.html[Security Privileges].
==== Examples

View File

@ -32,7 +32,9 @@ include::{docdir}/rest-api/timeoutparms.asciidoc[]
==== Authorization
include::ilm-index-mgt-privilege.asciidoc[]
You must have the `view_index_metadata` or `manage_ilm` privilege, or both, on the
indices being managed to use this API.
For more information, see {stack-ov}/security-privileges.html[Security Privileges].
==== Examples

View File

@ -31,7 +31,8 @@ include::{docdir}/rest-api/timeoutparms.asciidoc[]
==== Authorization
include::ilm-cluster-mgt-privilege.asciidoc[]
You must have the `manage_ilm` or `read_ilm` cluster privilege, or both, to use this API.
For more information, see {stack-ov}/security-privileges.html[Security Privileges].
==== Examples

View File

@ -27,7 +27,8 @@ include::{docdir}/rest-api/timeoutparms.asciidoc[]
==== Authorization
include::ilm-cluster-mgt-privilege.asciidoc[]
You must have the `manage_ilm` or `read_ilm` cluster privilege, or both, to use this API.
For more information, see {stack-ov}/security-privileges.html[Security Privileges].
==== Examples

View File

@ -1,2 +0,0 @@
You must have the cluster `manage` privilege to use this API.
For more information, see {stack-ov}/security-privileges.html[Security Privileges].

View File

@ -1,2 +0,0 @@
You must have the `manage` privilege on the indices being managed to use this API.
For more information, see {stack-ov}/security-privileges.html[Security Privileges].

View File

@ -40,7 +40,8 @@ include::{docdir}/rest-api/timeoutparms.asciidoc[]
==== Authorization
include::ilm-index-mgt-privilege.asciidoc[]
You must have the `manage_ilm` privilege on the indices being managed to use this API.
For more information, see {stack-ov}/security-privileges.html[Security Privileges].
==== Examples

View File

@ -33,7 +33,11 @@ include::{docdir}/rest-api/timeoutparms.asciidoc[]
==== Authorization
include::ilm-cluster-mgt-privilege.asciidoc[]
You must have the `manage_ilm` cluster privilege to use this API. You must
also have the `manage` index privilege on all indices being managed by `policy`.
All operations executed by {Ilm} for a policy are executed as the user that
put the latest version of the policy.
For more information, see {stack-ov}/security-privileges.html[Security Privileges].
==== Examples

View File

@ -31,7 +31,8 @@ include::{docdir}/rest-api/timeoutparms.asciidoc[]
==== Authorization
include::ilm-cluster-mgt-privilege.asciidoc[]
You must have the `manage_ilm` privilege on the indices being managed to use this API.
For more information, see {stack-ov}/security-privileges.html[Security Privileges].
==== Examples

View File

@ -31,7 +31,8 @@ include::{docdir}/rest-api/timeoutparms.asciidoc[]
==== Authorization
include::ilm-index-mgt-privilege.asciidoc[]
You must have the `manage_ilm` privilege on the indices being managed to use this API.
For more information, see {stack-ov}/security-privileges.html[Security Privileges].
==== Examples

View File

@ -26,7 +26,8 @@ include::{docdir}/rest-api/timeoutparms.asciidoc[]
==== Authorization
include::ilm-cluster-mgt-privilege.asciidoc[]
You must have the `manage_ilm` cluster privilege to use this API.
For more information, see {stack-ov}/security-privileges.html[Security Privileges].
==== Examples

View File

@ -31,7 +31,8 @@ include::{docdir}/rest-api/timeoutparms.asciidoc[]
==== Authorization
include::ilm-cluster-mgt-privilege.asciidoc[]
You must have the `manage_ilm` cluster privilege to use this API.
For more information, see {stack-ov}/security-privileges.html[Security Privileges].
==== Examples

View File

@ -21,48 +21,59 @@ type.
|=======================================================================
|Option |Description| Default
|`tree` |Name of the PrefixTree implementation to be used: `geohash` for
GeohashPrefixTree and `quadtree` for QuadPrefixTree.
| `geohash`
|`tree` |deprecated[6.6, PrefixTrees no longer used] Name of the PrefixTree
implementation to be used: `geohash` for GeohashPrefixTree and `quadtree`
for QuadPrefixTree. Note: This parameter is only relevant for `term` and
`recursive` strategies.
| `quadtree`
|`precision` |This parameter may be used instead of `tree_levels` to set
an appropriate value for the `tree_levels` parameter. The value
specifies the desired precision and Elasticsearch will calculate the
best tree_levels value to honor this precision. The value should be a
number followed by an optional distance unit. Valid distance units
include: `in`, `inch`, `yd`, `yard`, `mi`, `miles`, `km`, `kilometers`,
`m`,`meters`, `cm`,`centimeters`, `mm`, `millimeters`.
|`precision` |deprecated[6.6, PrefixTrees no longer used] This parameter may
be used instead of `tree_levels` to set an appropriate value for the
`tree_levels` parameter. The value specifies the desired precision and
Elasticsearch will calculate the best tree_levels value to honor this
precision. The value should be a number followed by an optional distance
unit. Valid distance units include: `in`, `inch`, `yd`, `yard`, `mi`,
`miles`, `km`, `kilometers`, `m`,`meters`, `cm`,`centimeters`, `mm`,
`millimeters`. Note: This parameter is only relevant for `term` and
`recursive` strategies.
| `50m`
|`tree_levels` |Maximum number of layers to be used by the PrefixTree.
This can be used to control the precision of shape representations and
therefore how many terms are indexed. Defaults to the default value of
the chosen PrefixTree implementation. Since this parameter requires a
certain level of understanding of the underlying implementation, users
may use the `precision` parameter instead. However, Elasticsearch only
uses the tree_levels parameter internally and this is what is returned
via the mapping API even if you use the precision parameter.
|`tree_levels` |deprecated[6.6, PrefixTrees no longer used] Maximum number
of layers to be used by the PrefixTree. This can be used to control the
precision of shape representations and therefore how many terms are
indexed. Defaults to the default value of the chosen PrefixTree
implementation. Since this parameter requires a certain level of
understanding of the underlying implementation, users may use the
`precision` parameter instead. However, Elasticsearch only uses the
tree_levels parameter internally and this is what is returned via the
mapping API even if you use the precision parameter. Note: This parameter
is only relevant for `term` and `recursive` strategies.
| various
|`strategy` |The strategy parameter defines the approach for how to
represent shapes at indexing and search time. It also influences the
capabilities available so it is recommended to let Elasticsearch set
this parameter automatically. There are two strategies available:
`recursive` and `term`. Term strategy supports point types only (the
`points_only` parameter will be automatically set to true) while
Recursive strategy supports all shape types. (IMPORTANT: see
<<prefix-trees, Prefix trees>> for more detailed information)
|`strategy` |deprecated[6.6, PrefixTrees no longer used] The strategy
parameter defines the approach for how to represent shapes at indexing
and search time. It also influences the capabilities available so it
is recommended to let Elasticsearch set this parameter automatically.
There are two strategies available: `recursive` and `term`.
Recursive and Term strategies are deprecated and will be removed in a
future version. While they are still available, the Term strategy
supports point types only (the `points_only` parameter will be
automatically set to true) while Recursive strategy supports all
shape types. (IMPORTANT: see <<prefix-trees, Prefix trees>> for more
detailed information about these strategies)
| `recursive`
|`distance_error_pct` |Used as a hint to the PrefixTree about how
precise it should be. Defaults to 0.025 (2.5%) with 0.5 as the maximum
supported value. PERFORMANCE NOTE: This value will default to 0 if a `precision` or
`tree_level` definition is explicitly defined. This guarantees spatial precision
at the level defined in the mapping. This can lead to significant memory usage
for high resolution shapes with low error (e.g., large shapes at 1m with < 0.001 error).
To improve indexing performance (at the cost of query accuracy) explicitly define
`tree_level` or `precision` along with a reasonable `distance_error_pct`, noting
that large shapes will have greater false positives.
|`distance_error_pct` |deprecated[6.6, PrefixTrees no longer used] Used as a
hint to the PrefixTree about how precise it should be. Defaults to 0.025 (2.5%)
with 0.5 as the maximum supported value. PERFORMANCE NOTE: This value will
default to 0 if a `precision` or `tree_level` definition is explicitly defined.
This guarantees spatial precision at the level defined in the mapping. This can
lead to significant memory usage for high resolution shapes with low error
(e.g., large shapes at 1m with < 0.001 error). To improve indexing performance
(at the cost of query accuracy) explicitly define `tree_level` or `precision`
along with a reasonable `distance_error_pct`, noting that large shapes will have
greater false positives. Note: This parameter is only relevant for `term` and
`recursive` strategies.
| `0.025`
|`orientation` |Optionally define how to interpret vertex order for
@ -77,13 +88,13 @@ sets vertex order for the coordinate list of a geo_shape field but can be
overridden in each individual GeoJSON or WKT document.
| `ccw`
|`points_only` |Setting this option to `true` (defaults to `false`) configures
the `geo_shape` field type for point shapes only (NOTE: Multi-Points are not
yet supported). This optimizes index and search performance for the `geohash` and
`quadtree` when it is known that only points will be indexed. At present geo_shape
queries can not be executed on `geo_point` field types. This option bridges the gap
by improving point performance on a `geo_shape` field so that `geo_shape` queries are
optimal on a point only field.
|`points_only` |deprecated[6.6, PrefixTrees no longer used] Setting this option to
`true` (defaults to `false`) configures the `geo_shape` field type for point
shapes only (NOTE: Multi-Points are not yet supported). This optimizes index and
search performance for the `geohash` and `quadtree` when it is known that only points
will be indexed. At present geo_shape queries can not be executed on `geo_point`
field types. This option bridges the gap by improving point performance on a
`geo_shape` field so that `geo_shape` queries are optimal on a point only field.
| `false`
|`ignore_malformed` |If true, malformed GeoJSON or WKT shapes are ignored. If
@ -100,16 +111,35 @@ and reject the whole document.
|=======================================================================
[[geoshape-indexing-approach]]
[float]
==== Indexing approach
GeoShape types are indexed by decomposing the shape into a triangular mesh and
indexing each triangle as a 7 dimension point in a BKD tree. This provides
near perfect spatial resolution (down to 1e-7 decimal degree precision) since all
spatial relations are computed using an encoded vector representation of the
original shape instead of a raster-grid representation as used by the
<<prefix-trees>> indexing approach. Performance of the tessellator primarily
depends on the number of vertices that define the polygon/multi-polygon. While
this is the default indexing technique, prefix trees can still be used by setting
the `tree` or `strategy` parameters according to the appropriate
<<geo-shape-mapping-options>>. Note that these parameters are now deprecated
and will be removed in a future version.
[[prefix-trees]]
[float]
==== Prefix trees
To efficiently represent shapes in the index, Shapes are converted into
a series of hashes representing grid squares (commonly referred to as "rasters")
using implementations of a PrefixTree. The tree notion comes from the fact that
the PrefixTree uses multiple grid layers, each with an increasing level of
precision to represent the Earth. This can be thought of as increasing the level
of detail of a map or image at higher zoom levels.
deprecated[6.6, PrefixTrees no longer used] To efficiently represent shapes in
an inverted index, shapes are converted into a series of hashes representing
grid squares (commonly referred to as "rasters") using implementations of a
PrefixTree. The tree notion comes from the fact that the PrefixTree uses multiple
grid layers, each with an increasing level of precision to represent the Earth.
This can be thought of as increasing the level of detail of a map or image at higher
zoom levels. Since this approach causes precision issues with indexed shapes, it has
been deprecated in favor of a vector indexing approach that indexes the shapes as a
triangular mesh (see <<geoshape-indexing-approach>>).
Multiple PrefixTree implementations are provided:
@ -131,9 +161,10 @@ number of levels for the quad trees in Elasticsearch is 29; the default is 21.
[[spatial-strategy]]
[float]
===== Spatial strategies
The PrefixTree implementations rely on a SpatialStrategy for decomposing
the provided Shape(s) into approximated grid squares. Each strategy answers
the following:
deprecated[6.6, PrefixTrees no longer used] The indexing implementation
selected relies on a SpatialStrategy for choosing how to decompose the shapes
(either as grid squares or a tessellated triangular mesh). Each strategy
answers the following:
* What type of Shapes can be indexed?
* What types of Query Operations and Shapes can be used?
@ -146,7 +177,7 @@ are provided:
|=======================================================================
|Strategy |Supported Shapes |Supported Queries |Multiple Shapes
|`recursive` |<<input-structure, All>> |`INTERSECTS`, `DISJOINT`, `WITHIN`, `CONTAINS` |Yes
|`term` |<<point, Points>> |`INTERSECTS` |Yes
|=======================================================================
@ -154,13 +185,13 @@ are provided:
[float]
===== Accuracy
Geo_shape does not provide 100% accuracy and depending on how it is configured
it may return some false positives for `INTERSECTS`, `WITHIN` and `CONTAINS`
queries, and some false negatives for `DISJOINT` queries. To mitigate this, it
is important to select an appropriate value for the tree_levels parameter and
to adjust expectations accordingly. For example, a point may be near the border
of a particular grid cell and may thus not match a query that only matches the
cell right next to it -- even though the shape is very close to the point.
`Recursive` and `Term` strategies do not provide 100% accuracy and depending on
how they are configured they may return some false positives for `INTERSECTS`,
`WITHIN` and `CONTAINS` queries, and some false negatives for `DISJOINT` queries.
To mitigate this, it is important to select an appropriate value for the tree_levels
parameter and to adjust expectations accordingly. For example, a point may be near
the border of a particular grid cell and may thus not match a query that only matches
the cell right next to it -- even though the shape is very close to the point.
[float]
===== Example
@ -173,9 +204,7 @@ PUT /example
"doc": {
"properties": {
"location": {
"type": "geo_shape",
"tree": "quadtree",
"precision": "100m"
"type": "geo_shape"
}
}
}
@ -185,22 +214,23 @@ PUT /example
// CONSOLE
// TESTSETUP
This mapping maps the location field to the geo_shape type using the
quad_tree implementation and a precision of 100m. Elasticsearch translates
this into a tree_levels setting of 20.
This mapping definition maps the location field to the geo_shape
type using the default vector implementation. It provides
approximately 1e-7 decimal degree precision.
[float]
===== Performance considerations
===== Performance considerations with Prefix Trees
Elasticsearch uses the paths in the prefix tree as terms in the index
and in queries. The higher the level is (and thus the precision), the
more terms are generated. Of course, calculating the terms, keeping them in
deprecated[6.6, PrefixTrees no longer used] With prefix trees,
Elasticsearch uses the paths in the tree as terms in the inverted index
and in queries. The higher the level (and thus the precision), the more
terms are generated. Of course, calculating the terms, keeping them in
memory, and storing them on disk all have a price. Especially with higher
tree levels, indices can become extremely large even with a modest
amount of data. Additionally, the size of the features also matters.
Big, complex polygons can take up a lot of space at higher tree levels.
Which setting is right depends on the use case. Generally one trades off
accuracy against index size and query performance.
tree levels, indices can become extremely large even with a modest amount
of data. Additionally, the size of the features also matters. Big, complex
polygons can take up a lot of space at higher tree levels. Which setting
is right depends on the use case. Generally one trades off accuracy against
index size and query performance.
The defaults in Elasticsearch for both implementations are a compromise
between index size and a reasonable level of precision of 50m at the
@ -598,7 +628,10 @@ POST /example/doc
===== Circle
Elasticsearch supports a `circle` type, which consists of a center
point with a radius:
point with a radius. Note that this circle representation can only
be indexed when using the `recursive` Prefix Tree strategy. For
the default <<geoshape-indexing-approach>>, circles should be approximated using
a `POLYGON`.
[source,js]
--------------------------------------------------
@ -612,6 +645,7 @@ POST /example/doc
}
--------------------------------------------------
// CONSOLE
// TEST[skip:not supported in default]
Note: The inner `radius` field is required. If not specified, then
the units of the `radius` will default to `METERS`.
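As a sketch of the polygon workaround suggested above, a circle of roughly 1km around `[-45.0, 45.0]` could be indexed as a closed ring (the hexagon below is illustrative; a realistic approximation would use many more vertices):
[source,js]
--------------------------------------------------
POST /example/doc
{
  "location": {
    "type": "polygon",
    "coordinates": [[
      [-44.99, 45.0], [-44.995, 45.00866], [-45.005, 45.00866],
      [-45.01, 45.0], [-45.005, 44.99134], [-44.995, 44.99134],
      [-44.99, 45.0]
    ]]
  }
}
--------------------------------------------------
// CONSOLE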

View File

@ -52,3 +52,19 @@ as a better alternative.
An error will now be thrown when unknown configuration options are provided
to similarities. Such unknown parameters were ignored before.
[float]
==== deprecated `geo_shape` Prefix Tree indexing
`geo_shape` types now default to using a vector indexing approach based on Lucene's new
`LatLonShape` field type. This indexes shapes as a triangular mesh instead of decomposing
them into individual grid cells. To index using legacy prefix trees, the `recursive` or `term`
strategy must be explicitly defined. Note that these strategies are now deprecated and will
be removed in a future version.
[float]
==== deprecated `geo_shape` parameters
The following type parameters are deprecated for the `geo_shape` field type: `tree`,
`precision`, `tree_levels`, `distance_error_pct`, `points_only`, and `strategy`. They
will be removed in a future version.

View File

@ -65,9 +65,10 @@ A {dfeed} resource has the following properties:
releases earlier than 6.0.0. For more information, see <<removal-of-types>>.
`delayed_data_check_config`::
(object) Specifies if and with how large a window should the data feed check
for missing data. See <<ml-datafeed-delayed-data-check-config>>.
For example: `{"enabled": true, "check_window": "1h"}`
(object) Specifies whether the data feed checks for missing data and
the size of the window. For example:
`{"enabled": true, "check_window": "1h"}`. See
<<ml-datafeed-delayed-data-check-config>>.
[[ml-datafeed-chunking-config]]
==== Chunking Configuration Objects
@ -97,7 +98,8 @@ A chunking configuration object has the following properties:
The {dfeed} can optionally search over indices that have already been read in
an effort to find if any data has since been added to the index. If missing data
is found, it is a good indication that the `query_delay` option is set too low and
the data is being indexed after the {dfeed} has passed that moment in time.
the data is being indexed after the {dfeed} has passed that moment in time. See
{stack-ov}/ml-delayed-data-detection.html[Working with delayed data].
This check only runs on real-time {dfeeds}.

View File

@ -42,11 +42,6 @@ so do not set the `background_persist_interval` value too low.
`description`::
(string) An optional description of the job.
`established_model_memory`::
(long) The approximate amount of memory resources that have been used for
analytical processing. This field is present only when the analytics have used
a stable amount of memory for several consecutive buckets.
`finished_time`::
(string) If the job closed or failed, this is the time the job finished,
otherwise it is `null`. This property is informational; you cannot change its

View File

@ -32,9 +32,10 @@ The scenarios in this section describe some best practices for generating useful
* <<ml-configuring-url>>
* <<ml-configuring-aggregation>>
* <<ml-configuring-categories>>
* <<ml-configuring-detector-custom-rules>>
* <<ml-configuring-pop>>
* <<ml-configuring-transform>>
* <<ml-configuring-detector-custom-rules>>
* <<ml-delayed-data-detection>>
:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/ml/customurl.asciidoc
include::customurl.asciidoc[]
@ -42,6 +43,9 @@ include::customurl.asciidoc[]
:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/ml/aggregations.asciidoc
include::aggregations.asciidoc[]
:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/ml/detector-custom-rules.asciidoc
include::detector-custom-rules.asciidoc[]
:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/ml/categories.asciidoc
include::categories.asciidoc[]
@ -51,5 +55,5 @@ include::populations.asciidoc[]
:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/ml/transforms.asciidoc
include::transforms.asciidoc[]
:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/ml/detector-custom-rules.asciidoc
include::detector-custom-rules.asciidoc[]
:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/docs/reference/ml/delayed-data-detection.asciidoc
include::delayed-data-detection.asciidoc[]

View File

@ -0,0 +1,42 @@
[role="xpack"]
[[ml-delayed-data-detection]]
=== Handling delayed data
Delayed data are documents that are indexed late. That is to say, it is data
related to a time that the {dfeed} has already processed.
When you create a datafeed, you can specify a {ref}/ml-datafeed-resource.html[`query_delay`] setting.
This setting enables the datafeed to wait for some time past real-time, which means any "late" data in this period
is fully indexed before the datafeed tries to gather it. However, if the setting is set too low, the datafeed may query
for data before it has been indexed and consequently miss those documents. Conversely, if it is set too high,
analysis drifts farther away from real-time. The balance that is struck depends upon each use case and
the environmental factors of the cluster.
==== Why worry about delayed data?
This is a particularly pertinent question. If data are delayed randomly (and consequently missing from analysis),
the results of certain types of functions are not really affected. It all comes out OK in the end
as the delayed data is distributed randomly. An example would be a `mean` metric for a field in a large collection of data.
In this case, checking for delayed data may not provide much benefit. If data are consistently delayed, however, jobs with a `low_count` function may
provide false positives. In this situation, it would be useful to see if data
comes in after an anomaly is recorded so that you can determine a next course of action.
==== How do we detect delayed data?
In addition to the `query_delay` field, there is a
{ref}/ml-datafeed-resource.html#ml-datafeed-delayed-data-check-config[delayed data check config], which enables you to
configure the datafeed to look in the past for delayed data. Every 15 minutes or every `check_window`,
whichever is smaller, the datafeed triggers a document search over the configured indices. This search looks over a
time span with a length of `check_window` ending with the latest finalized bucket. That time span is partitioned into buckets,
whose length equals the bucket span of the associated job. The `doc_count` of those buckets is then compared with the
job's finalized analysis buckets to see whether any data has arrived since the analysis. If there is indeed missing data
due to ingest delay, the end user is notified.
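A minimal sketch of enabling this check when creating a datafeed (the datafeed ID, job ID, and index name are placeholders, and the `_ml` endpoint root is assumed here):
[source,js]
--------------------------------------------------
PUT _ml/datafeeds/datafeed-example
{
  "job_id": "example-job",
  "indices": ["event-data"],
  "delayed_data_check_config": {
    "enabled": true,
    "check_window": "2h"
  }
}
--------------------------------------------------
// CONSOLE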
==== What to do about delayed data?
The most common course of action is simply to do nothing. For many functions and situations, ignoring the data is
acceptable. However, if the amount of delayed data is too great or the situation calls for it, the next course
of action to consider is to increase the `query_delay` of the datafeed. This increased delay allows more time for data to be
indexed. If you have real-time constraints, however, an increased delay might not be desirable,
in which case you would have to {ref}/tune-for-indexing-speed.html[tune for better indexing speed].

View File

@ -43,7 +43,7 @@ The list of hosts is set using the `discovery.zen.ping.unicast.hosts` static
setting. This is either an array of hosts or a comma-delimited string. Each
value should be in the form of `host:port` or `host` (where `port` defaults to
the setting `transport.profiles.default.port` falling back to
`transport.tcp.port` if not set). Note that IPv6 hosts must be bracketed. The
`transport.port` if not set). Note that IPv6 hosts must be bracketed. The
default for this setting is `127.0.0.1, [::1]`
Additionally, the `discovery.zen.ping.unicast.resolve_timeout` configures the

View File

@ -50,7 +50,7 @@ range.
+
Defaults to `9200-9300`.
`transport.tcp.port`::
`transport.port`::
Port to bind for communication between nodes. Accepts a single value or a
range. If a range is specified, the node will bind to the first available port

View File

@ -161,6 +161,14 @@ PUT _cluster/settings
are sent according to the global `transport.ping_schedule` setting, which
defaults to `-1` meaning that pings are not sent.
`cluster.remote.${cluster_alias}.transport.compress`::
Per cluster boolean setting that enables you to configure compression for
requests to a specific remote cluster. This setting impacts only requests
sent to the remote cluster. If the inbound request is compressed,
Elasticsearch compresses the response. If unset, the global
`transport.compress` is used as the fallback setting.
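For example, the setting can be applied with the cluster settings API (a sketch; `cluster_one` stands in for a configured remote cluster alias):
[source,js]
--------------------------------------------------
PUT _cluster/settings
{
  "persistent": {
    "cluster.remote.cluster_one.transport.compress": true
  }
}
--------------------------------------------------
// CONSOLE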
[float]
[[retrieve-remote-clusters-info]]
=== Retrieving remote clusters info

View File

@ -15,21 +15,21 @@ being the ideal solution for scatter (broadcast) / gather operations such
as search in Elasticsearch.
[float]
=== TCP Transport
=== Transport Settings
The TCP transport is an implementation of the transport module using
TCP. It allows for the following settings:
The internal transport communicates over TCP. You can configure it with the
following settings:
[cols="<,<",options="header",]
|=======================================================================
|Setting |Description
|`transport.tcp.port` |A bind port range. Defaults to `9300-9400`.
|`transport.port` |A bind port range. Defaults to `9300-9400`.
|`transport.publish_port` |The port that other nodes in the cluster
should use when communicating with this node. Useful when a cluster node
is behind a proxy or firewall and the `transport.tcp.port` is not directly
is behind a proxy or firewall and the `transport.port` is not directly
addressable from the outside. Defaults to the actual port assigned via
`transport.tcp.port`.
`transport.port`.
|`transport.bind_host` |The host address to bind the transport service to. Defaults to `transport.host` (if set) or `network.bind_host`.
@ -38,11 +38,11 @@ addressable from the outside. Defaults to the actual port assigned via
|`transport.host` |Used to set the `transport.bind_host` and the `transport.publish_host`. Defaults to `transport.host` or `network.host`.
|`transport.tcp.connect_timeout` |The socket connect timeout setting (in
|`transport.connect_timeout` |The connect timeout for initiating a new connection (in
time setting format). Defaults to `30s`.
|`transport.tcp.compress` |Set to `true` to enable compression (`DEFLATE`)
between all nodes. Defaults to `false`.
|`transport.compress` |Set to `true` to enable compression (`DEFLATE`) between
all nodes. Defaults to `false`.
|`transport.ping_schedule` | Schedule a regular application-level ping message
to ensure that transport connections between nodes are kept alive. Defaults to
@ -57,7 +57,7 @@ It also uses the common
<<modules-network,network settings>>.
[float]
==== TCP Transport Profiles
==== Transport Profiles
Elasticsearch allows you to bind to multiple ports on different interfaces by
the use of transport profiles. See this example configuration
@ -82,11 +82,11 @@ example above:
* `port`: The port to bind to
* `bind_host`: The host to bind
* `publish_host`: The host which is published in informational APIs
* `tcp_no_delay`: Configures the `TCP_NO_DELAY` option for this socket
* `tcp_keep_alive`: Configures the `SO_KEEPALIVE` option for this socket
* `reuse_address`: Configures the `SO_REUSEADDR` option for this socket
* `tcp_send_buffer_size`: Configures the send buffer size of the socket
* `tcp_receive_buffer_size`: Configures the receive buffer size of the socket
* `tcp.no_delay`: Configures the `TCP_NO_DELAY` option for this socket
* `tcp.keep_alive`: Configures the `SO_KEEPALIVE` option for this socket
* `tcp.reuse_address`: Configures the `SO_REUSEADDR` option for this socket
* `tcp.send_buffer_size`: Configures the send buffer size of the socket
* `tcp.receive_buffer_size`: Configures the receive buffer size of the socket
[float]
==== Long-lived idle connections
@ -97,7 +97,7 @@ period of time. Nonetheless, Elasticsearch requires these connections to remain
open, and it can disrupt the operation of the cluster if any inter-node
connections are closed by an external influence such as a firewall. It is
important to configure your network to preserve long-lived idle connections
between Elasticsearch nodes, for instance by leaving `tcp_keep_alive` enabled
between Elasticsearch nodes, for instance by leaving `tcp.keep_alive` enabled
and ensuring that the keepalive interval is shorter than any timeout that might
cause idle connections to be closed, or by setting `transport.ping_schedule` if
keepalives cannot be configured.

View File

@ -7,7 +7,7 @@ Requires the <<geo-shape,`geo_shape` Mapping>>.
The `geo_shape` query uses the same grid square representation as the
`geo_shape` mapping to find documents that have a shape that intersects
with the query shape. It will also use the same PrefixTree configuration
with the query shape. It will also use the same Prefix Tree configuration
as defined for the field mapping.
The query supports two ways of defining the query shape, either by
@ -157,7 +157,8 @@ has nothing in common with the query geometry.
* `WITHIN` - Return all documents whose `geo_shape` field
is within the query geometry.
* `CONTAINS` - Return all documents whose `geo_shape` field
contains the query geometry.
contains the query geometry. Note: this is only supported using the
`recursive` Prefix Tree Strategy deprecated[6.6].
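A sketch of a search using one of these spatial relations, assuming the `example` index and `location` field from the mapping documentation:
[source,js]
--------------------------------------------------
GET /example/_search
{
  "query": {
    "geo_shape": {
      "location": {
        "shape": {
          "type": "envelope",
          "coordinates": [[13.0, 53.0], [14.0, 52.0]]
        },
        "relation": "within"
      }
    }
  }
}
--------------------------------------------------
// CONSOLE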
[float]
==== Ignore Unmapped

View File

@ -29,4 +29,11 @@ Suggesters::
* Plugins that register suggesters can now define their own types of suggestions and must
explicitly indicate the type of suggestion that they produce. Existing plugins will
require changes to their plugin registration. See the `custom-suggester` example
plugin {pull}30284[#30284]
[float]
[[enhancement-7.0.0-alpha1]]
=== Enhancements
Machine learning::
* Adds categorical filter type to detector rules. {ml-pull}27[#27]

View File

@ -2,3 +2,9 @@
== {es} version 7.0.0-alpha2
coming[7.0.0-alpha2]
[float]
[[bug-7.0.0-alpha2]]
=== Bug fixes
* Fixes CPoissonMeanConjugate sampling error. {ml-pull}335[#335]

View File

@ -221,6 +221,7 @@ NOTE: As with other statistics apis, the Profile API supports human readable out
`?human=true` to the query string. In this case, the output contains the additional `time` field containing rounded,
human readable timing information (e.g. `"time": "391.9ms"`, `"time": "123.3micros"`).
[[search-profile-queries]]
=== Profiling Queries
[NOTE]
@ -678,6 +679,7 @@ ignore its children if you find the details too tricky to interpret.
Hopefully this will be fixed in future iterations, but it is a tricky problem to solve and still in-progress :)
[[search-profile-aggregations]]
=== Profiling Aggregations
==== `aggregations` Section
@ -851,6 +853,7 @@ The meaning of the stats are as follows:
Records the number of invocations of the particular method. For example, `"collect_count": 2,`
means the `collect()` method was called on two different documents.
[[search-profile-considerations]]
=== Profiling Considerations
==== Performance Notes

View File

@ -2,7 +2,7 @@
[[separating-node-client-traffic]]
=== Separating node-to-node and client traffic
Elasticsearch has the feature of so called {ref}/modules-transport.html#_tcp_transport_profiles[TCP transport profiles]
Elasticsearch has the feature of so-called {ref}/modules-transport.html[TCP transport profiles]
that allows it to bind to several ports and addresses. {security} extends on this
functionality to enhance the security of the cluster by enabling the separation
of node-to-node transport traffic from client transport traffic. This is important

View File

@ -176,8 +176,8 @@ To index audit events to a remote {es} cluster, you configure the following
`xpack.security.audit.index.client.hosts`::
Specifies a comma-separated list of `host:port` pairs. These hosts should be
nodes in the remote cluster. If you are using default values for the
<<common-network-settings,`transport.tcp.port`>> setting, you can omit the
`port` value. Otherwise, it must match the `transport.tcp.port` setting.
<<common-network-settings,`transport.port`>> setting, you can omit the
`port` value. Otherwise, it must match the `transport.port` setting.
`xpack.security.audit.index.client.cluster.name`::
Specifies the name of the remote cluster.

View File

@ -26,7 +26,7 @@ discovery.zen.ping.unicast.hosts:
- seeds.mydomain.com <2>
--------------------------------------------------
<1> The port will default to `transport.profiles.default.port` and fallback to
`transport.tcp.port` if not specified.
`transport.port` if not specified.
<2> A hostname that resolves to multiple IP addresses will try all resolved
addresses.

View File

@ -26,7 +26,7 @@ dependencies {
compile "org.elasticsearch:elasticsearch-core:${version}"
}
test.enabled = false
unitTest.enabled = false
// Since CLI does not depend on :server, it cannot run the jarHell task
jarHell.enabled = false

View File

@ -17,7 +17,7 @@
* under the License.
*/
test.enabled = false
unitTest.enabled = false
// test depend on ES core...
forbiddenApisMain.enabled = false

View File

@ -37,7 +37,7 @@ dependencyLicenses {
mapping from: /asm-.*/, to: 'asm'
}
test {
unitTest {
jvmArg '-XX:-OmitStackTraceInFastThrow'
}

View File

@ -37,4 +37,4 @@ dependencies {
}
// no tests...yet?
test.enabled = false
unitTest.enabled = false

View File

@ -508,4 +508,242 @@ public class BoxedCastTests extends ScriptTestCase {
() -> exec("def u = (double)1; def b = Float.valueOf((float)1); b.compareTo(u);"));
assertEquals(0, exec("def u = (double)1; def b = Double.valueOf((double)1); b.compareTo(u);"));
}
public void testReturnToByteBoxedCasts() {
assertEquals((byte)1, exec("Byte rtn() {return (byte)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return (short)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return (char)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return (int)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return (long)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return (float)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return (double)1} rtn()"));
assertEquals((byte)1, exec("Byte rtn() {return Byte.valueOf((byte)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return Short.valueOf((short)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return Character.valueOf((char)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return Integer.valueOf((int)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return Long.valueOf((long)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return Float.valueOf((float)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {return Double.valueOf((double)1)} rtn()"));
assertEquals((byte)1, exec("Byte rtn() {def d = (byte)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {def d = (short)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {def d = (char)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {def d = (int)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {def d = (long)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {def d = (float)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {def d = (double)1; return d} rtn()"));
assertEquals((byte)1, exec("Byte rtn() {def d = Byte.valueOf((byte)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {def d = Short.valueOf((short)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {def d = Character.valueOf((char)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {def d = Integer.valueOf((int)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {def d = Long.valueOf((long)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {def d = Float.valueOf((float)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Byte rtn() {def d = Double.valueOf((double)1); return d} rtn()"));
}
public void testReturnToShortBoxedCasts() {
assertEquals((short)1, exec("Short rtn() {return (byte)1} rtn()"));
assertEquals((short)1, exec("Short rtn() {return (short)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {return (char)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {return (int)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {return (long)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {return (float)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {return (double)1} rtn()"));
assertEquals((short)1, exec("Short rtn() {return Byte.valueOf((byte)1)} rtn()"));
assertEquals((short)1, exec("Short rtn() {return Short.valueOf((short)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {return Character.valueOf((char)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {return Integer.valueOf((int)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {return Long.valueOf((long)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {return Float.valueOf((float)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {return Double.valueOf((double)1)} rtn()"));
assertEquals((short)1, exec("Short rtn() {def d = (byte)1; return d} rtn()"));
assertEquals((short)1, exec("Short rtn() {def d = (short)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {def d = (char)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {def d = (int)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {def d = (long)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {def d = (float)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {def d = (double)1; return d} rtn()"));
assertEquals((short)1, exec("Short rtn() {def d = Byte.valueOf((byte)1); return d} rtn()"));
assertEquals((short)1, exec("Short rtn() {def d = Short.valueOf((short)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {def d = Character.valueOf((char)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {def d = Integer.valueOf((int)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {def d = Long.valueOf((long)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {def d = Float.valueOf((float)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Short rtn() {def d = Double.valueOf((double)1); return d} rtn()"));
}
public void testReturnToCharacterBoxedCasts() {
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return (byte)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return (short)1} rtn()"));
assertEquals((char)1, exec("Character rtn() {return (char)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return (int)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return (long)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return (float)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return (double)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return Byte.valueOf((byte)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return Short.valueOf((short)1)} rtn()"));
assertEquals((char)1, exec("Character rtn() {return Character.valueOf((char)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return Integer.valueOf((int)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return Long.valueOf((long)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return Float.valueOf((float)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {return Double.valueOf((double)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = (byte)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = (short)1; return d} rtn()"));
assertEquals((char)1, exec("Character rtn() {def d = (char)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = (int)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = (long)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = (float)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = (double)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = Byte.valueOf((byte)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = Short.valueOf((short)1); return d} rtn()"));
assertEquals((char)1, exec("Character rtn() {def d = Character.valueOf((char)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = Integer.valueOf((int)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = Long.valueOf((long)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = Float.valueOf((float)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Character rtn() {def d = Double.valueOf((double)1); return d} rtn()"));
}
public void testReturnToIntegerBoxedCasts() {
assertEquals(1, exec("Integer rtn() {return (byte)1} rtn()"));
assertEquals(1, exec("Integer rtn() {return (short)1} rtn()"));
assertEquals(1, exec("Integer rtn() {return (char)1} rtn()"));
assertEquals(1, exec("Integer rtn() {return (int)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Integer rtn() {return (long)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Integer rtn() {return (float)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Integer rtn() {return (double)1} rtn()"));
assertEquals(1, exec("Integer rtn() {return Byte.valueOf((byte)1)} rtn()"));
assertEquals(1, exec("Integer rtn() {return Short.valueOf((short)1)} rtn()"));
assertEquals(1, exec("Integer rtn() {return Character.valueOf((char)1)} rtn()"));
assertEquals(1, exec("Integer rtn() {return Integer.valueOf((int)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Integer rtn() {return Long.valueOf((long)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Integer rtn() {return Float.valueOf((float)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Integer rtn() {return Double.valueOf((double)1)} rtn()"));
assertEquals(1, exec("Integer rtn() {def d = (byte)1; return d} rtn()"));
assertEquals(1, exec("Integer rtn() {def d = (short)1; return d} rtn()"));
assertEquals(1, exec("Integer rtn() {def d = (char)1; return d} rtn()"));
assertEquals(1, exec("Integer rtn() {def d = (int)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Integer rtn() {def d = (long)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Integer rtn() {def d = (float)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Integer rtn() {def d = (double)1; return d} rtn()"));
assertEquals(1, exec("Integer rtn() {def d = Byte.valueOf((byte)1); return d} rtn()"));
assertEquals(1, exec("Integer rtn() {def d = Short.valueOf((short)1); return d} rtn()"));
assertEquals(1, exec("Integer rtn() {def d = Character.valueOf((char)1); return d} rtn()"));
assertEquals(1, exec("Integer rtn() {def d = Integer.valueOf((int)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Integer rtn() {def d = Long.valueOf((long)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Integer rtn() {def d = Float.valueOf((float)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Integer rtn() {def d = Double.valueOf((double)1); return d} rtn()"));
}
public void testReturnToLongBoxedCasts() {
assertEquals((long)1, exec("Long rtn() {return (byte)1} rtn()"));
assertEquals((long)1, exec("Long rtn() {return (short)1} rtn()"));
assertEquals((long)1, exec("Long rtn() {return (char)1} rtn()"));
assertEquals((long)1, exec("Long rtn() {return (int)1} rtn()"));
assertEquals((long)1, exec("Long rtn() {return (long)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Long rtn() {return (float)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Long rtn() {return (double)1} rtn()"));
assertEquals((long)1, exec("Long rtn() {return Byte.valueOf((byte)1)} rtn()"));
assertEquals((long)1, exec("Long rtn() {return Short.valueOf((short)1)} rtn()"));
assertEquals((long)1, exec("Long rtn() {return Character.valueOf((char)1)} rtn()"));
assertEquals((long)1, exec("Long rtn() {return Integer.valueOf((int)1)} rtn()"));
assertEquals((long)1, exec("Long rtn() {return Long.valueOf((long)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Long rtn() {return Float.valueOf((float)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Long rtn() {return Double.valueOf((double)1)} rtn()"));
assertEquals((long)1, exec("Long rtn() {def d = (byte)1; return d} rtn()"));
assertEquals((long)1, exec("Long rtn() {def d = (short)1; return d} rtn()"));
assertEquals((long)1, exec("Long rtn() {def d = (char)1; return d} rtn()"));
assertEquals((long)1, exec("Long rtn() {def d = (int)1; return d} rtn()"));
assertEquals((long)1, exec("Long rtn() {def d = (long)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Long rtn() {def d = (float)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Long rtn() {def d = (double)1; return d} rtn()"));
assertEquals((long)1, exec("Long rtn() {def d = Byte.valueOf((byte)1); return d} rtn()"));
assertEquals((long)1, exec("Long rtn() {def d = Short.valueOf((short)1); return d} rtn()"));
assertEquals((long)1, exec("Long rtn() {def d = Character.valueOf((char)1); return d} rtn()"));
assertEquals((long)1, exec("Long rtn() {def d = Integer.valueOf((int)1); return d} rtn()"));
assertEquals((long)1, exec("Long rtn() {def d = Long.valueOf((long)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Long rtn() {def d = Float.valueOf((float)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Long rtn() {def d = Double.valueOf((double)1); return d} rtn()"));
}
public void testReturnToFloatBoxedCasts() {
assertEquals((float)1, exec("Float rtn() {return (byte)1} rtn()"));
assertEquals((float)1, exec("Float rtn() {return (short)1} rtn()"));
assertEquals((float)1, exec("Float rtn() {return (char)1} rtn()"));
assertEquals((float)1, exec("Float rtn() {return (int)1} rtn()"));
assertEquals((float)1, exec("Float rtn() {return (long)1} rtn()"));
assertEquals((float)1, exec("Float rtn() {return (float)1} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Float rtn() {return (double)1} rtn()"));
assertEquals((float)1, exec("Float rtn() {return Byte.valueOf((byte)1)} rtn()"));
assertEquals((float)1, exec("Float rtn() {return Short.valueOf((short)1)} rtn()"));
assertEquals((float)1, exec("Float rtn() {return Character.valueOf((char)1)} rtn()"));
assertEquals((float)1, exec("Float rtn() {return Integer.valueOf((int)1)} rtn()"));
assertEquals((float)1, exec("Float rtn() {return Long.valueOf((long)1)} rtn()"));
assertEquals((float)1, exec("Float rtn() {return Float.valueOf((float)1)} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Float rtn() {return Double.valueOf((double)1)} rtn()"));
assertEquals((float)1, exec("Float rtn() {def d = (byte)1; return d} rtn()"));
assertEquals((float)1, exec("Float rtn() {def d = (short)1; return d} rtn()"));
assertEquals((float)1, exec("Float rtn() {def d = (char)1; return d} rtn()"));
assertEquals((float)1, exec("Float rtn() {def d = (int)1; return d} rtn()"));
assertEquals((float)1, exec("Float rtn() {def d = (long)1; return d} rtn()"));
assertEquals((float)1, exec("Float rtn() {def d = (float)1; return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Float rtn() {def d = (double)1; return d} rtn()"));
assertEquals((float)1, exec("Float rtn() {def d = Byte.valueOf((byte)1); return d} rtn()"));
assertEquals((float)1, exec("Float rtn() {def d = Short.valueOf((short)1); return d} rtn()"));
assertEquals((float)1, exec("Float rtn() {def d = Character.valueOf((char)1); return d} rtn()"));
assertEquals((float)1, exec("Float rtn() {def d = Integer.valueOf((int)1); return d} rtn()"));
assertEquals((float)1, exec("Float rtn() {def d = Long.valueOf((long)1); return d} rtn()"));
assertEquals((float)1, exec("Float rtn() {def d = Float.valueOf((float)1); return d} rtn()"));
expectScriptThrows(ClassCastException.class, () -> exec("Float rtn() {def d = Double.valueOf((double)1); return d} rtn()"));
}
public void testReturnToDoubleBoxedCasts() {
assertEquals((double)1, exec("Double rtn() {return (byte)1} rtn()"));
assertEquals((double)1, exec("Double rtn() {return (short)1} rtn()"));
assertEquals((double)1, exec("Double rtn() {return (char)1} rtn()"));
assertEquals((double)1, exec("Double rtn() {return (int)1} rtn()"));
assertEquals((double)1, exec("Double rtn() {return (long)1} rtn()"));
assertEquals((double)1, exec("Double rtn() {return (float)1} rtn()"));
assertEquals((double)1, exec("Double rtn() {return (double)1} rtn()"));
assertEquals((double)1, exec("Double rtn() {return Byte.valueOf((byte)1)} rtn()"));
assertEquals((double)1, exec("Double rtn() {return Short.valueOf((short)1)} rtn()"));
assertEquals((double)1, exec("Double rtn() {return Character.valueOf((char)1)} rtn()"));
assertEquals((double)1, exec("Double rtn() {return Integer.valueOf((int)1)} rtn()"));
assertEquals((double)1, exec("Double rtn() {return Long.valueOf((long)1)} rtn()"));
assertEquals((double)1, exec("Double rtn() {return Float.valueOf((float)1)} rtn()"));
assertEquals((double)1, exec("Double rtn() {return Double.valueOf((double)1)} rtn()"));
assertEquals((double)1, exec("Double rtn() {def d = (byte)1; return d} rtn()"));
assertEquals((double)1, exec("Double rtn() {def d = (short)1; return d} rtn()"));
assertEquals((double)1, exec("Double rtn() {def d = (char)1; return d} rtn()"));
assertEquals((double)1, exec("Double rtn() {def d = (int)1; return d} rtn()"));
assertEquals((double)1, exec("Double rtn() {def d = (long)1; return d} rtn()"));
assertEquals((double)1, exec("Double rtn() {def d = (float)1; return d} rtn()"));
assertEquals((double)1, exec("Double rtn() {def d = (double)1; return d} rtn()"));
assertEquals((double)1, exec("Double rtn() {def d = Byte.valueOf((byte)1); return d} rtn()"));
assertEquals((double)1, exec("Double rtn() {def d = Short.valueOf((short)1); return d} rtn()"));
assertEquals((double)1, exec("Double rtn() {def d = Character.valueOf((char)1); return d} rtn()"));
assertEquals((double)1, exec("Double rtn() {def d = Integer.valueOf((int)1); return d} rtn()"));
assertEquals((double)1, exec("Double rtn() {def d = Long.valueOf((long)1); return d} rtn()"));
assertEquals((double)1, exec("Double rtn() {def d = Float.valueOf((float)1); return d} rtn()"));
assertEquals((double)1, exec("Double rtn() {def d = Double.valueOf((double)1); return d} rtn()"));
}
}

View File

@ -45,7 +45,7 @@ run {
setting 'reindex.remote.whitelist', '127.0.0.1:*'
}
test {
unitTest {
/*
* We have to disable setting the number of available processors as tests in the
* same JVM randomize processors and will step on each other if we allow them to

View File

@ -47,7 +47,7 @@ dependencyLicenses {
mapping from: /netty-.*/, to: 'netty'
}
test {
unitTest {
/*
* We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each
* other if we allow them to set the number of available processors as it's set-once in Netty.

View File

@ -64,7 +64,7 @@ task writeTestJavaPolicy {
}
}
test {
unitTest {
dependsOn writeTestJavaPolicy
// this is needed for insecure plugins, remove if possible!
systemProperty 'tests.artifact', project.name

View File

@ -31,7 +31,7 @@ check {
dependsOn 'qa:gce:check'
}
test {
unitTest {
// this is needed for insecure plugins, remove if possible!
systemProperty 'tests.artifact', project.name
}

View File

@ -32,4 +32,4 @@ integTestCluster {
}
// this plugin has no unit tests, only rest tests
tasks.test.enabled = false
tasks.unitTest.enabled = false

View File

@ -35,4 +35,4 @@ if (System.getProperty('tests.distribution') == null) {
integTestCluster.distribution = 'oss-zip'
}
test.enabled = false
unitTest.enabled = false

View File

@ -27,7 +27,7 @@ esplugin {
}
// No unit tests in this example
test.enabled = false
unitTest.enabled = false
task exampleFixture(type: org.elasticsearch.gradle.test.AntFixture) {
dependsOn testClasses

View File

@ -26,5 +26,5 @@ esplugin {
noticeFile rootProject.file('NOTICE.txt')
}
test.enabled = false
unitTest.enabled = false

View File

@ -16,6 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.ingest.geoip;
import com.maxmind.geoip2.DatabaseReader;
@ -27,30 +28,120 @@ import org.elasticsearch.core.internal.io.IOUtils;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Objects;
/**
* Facilitates lazy loading of the database reader, so that when the geoip plugin is installed but not used,
* no memory is wasted on the database reader.
*/
final class DatabaseReaderLazyLoader implements Closeable {
class DatabaseReaderLazyLoader implements Closeable {
private static final Logger LOGGER = LogManager.getLogger(DatabaseReaderLazyLoader.class);
private final String databaseFileName;
private final Path databasePath;
private final CheckedSupplier<DatabaseReader, IOException> loader;
// package protected for testing only:
final SetOnce<DatabaseReader> databaseReader;
DatabaseReaderLazyLoader(String databaseFileName, CheckedSupplier<DatabaseReader, IOException> loader) {
this.databaseFileName = databaseFileName;
this.loader = loader;
// cache the database type so that we do not re-read it on every pipeline execution
final SetOnce<String> databaseType;
DatabaseReaderLazyLoader(final Path databasePath, final CheckedSupplier<DatabaseReader, IOException> loader) {
this.databasePath = Objects.requireNonNull(databasePath);
this.loader = Objects.requireNonNull(loader);
this.databaseReader = new SetOnce<>();
this.databaseType = new SetOnce<>();
}
synchronized DatabaseReader get() throws IOException {
/**
* Read the database type from the database. We do this manually instead of relying on the built-in mechanism to avoid reading the
* entire database into memory merely to read the type. This is especially important on master nodes, where pipelines are
* validated: reading the entire database into memory there could run into low-memory constraints, and loading the data would be
* wasteful on nodes that are not also ingest nodes.
*
* @return the database type
* @throws IOException if an I/O exception occurs reading the database type
*/
final String getDatabaseType() throws IOException {
if (databaseType.get() == null) {
synchronized (databaseType) {
if (databaseType.get() == null) {
final long fileSize = databaseFileSize();
if (fileSize <= 512) {
throw new IOException("unexpected file length [" + fileSize + "] for [" + databasePath + "]");
}
final int[] databaseTypeMarker = {'d', 'a', 't', 'a', 'b', 'a', 's', 'e', '_', 't', 'y', 'p', 'e'};
try (InputStream in = databaseInputStream()) {
// read the last 512 bytes
final long skipped = in.skip(fileSize - 512);
if (skipped != fileSize - 512) {
throw new IOException("failed to skip [" + (fileSize - 512) + "] bytes while reading [" + databasePath + "]");
}
final byte[] tail = new byte[512];
int read = 0;
do {
final int actualBytesRead = in.read(tail, read, 512 - read);
if (actualBytesRead == -1) {
throw new IOException("unexpected end of stream [" + databasePath + "] after reading [" + read + "] bytes");
}
read += actualBytesRead;
} while (read != 512);
// find the database_type header
int metadataOffset = -1;
int markerOffset = 0;
for (int i = 0; i < tail.length; i++) {
byte b = tail[i];
if (b == databaseTypeMarker[markerOffset]) {
markerOffset++;
} else {
// restart the match at the current byte; otherwise an occurrence of the marker
// that begins inside a partial match would be skipped
markerOffset = b == databaseTypeMarker[0] ? 1 : 0;
}
if (markerOffset == databaseTypeMarker.length) {
metadataOffset = i + 1;
break;
}
}
if (metadataOffset == -1) {
throw new IOException("database type marker not found");
}
// read the database type
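// per the MaxMind-DB format, the byte after the marker is a control byte: the top
// three bits encode the field type (2 = UTF-8 string) and, for short strings, the
// low five bits encode the payload length in bytes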
final int offsetByte = tail[metadataOffset] & 0xFF;
final int type = offsetByte >>> 5;
if (type != 2) {
throw new IOException("type must be UTF-8 string");
}
int size = offsetByte & 0x1f;
databaseType.set(new String(tail, metadataOffset + 1, size, StandardCharsets.UTF_8));
}
}
}
}
return databaseType.get();
}
long databaseFileSize() throws IOException {
return Files.size(databasePath);
}
InputStream databaseInputStream() throws IOException {
return Files.newInputStream(databasePath);
}
DatabaseReader get() throws IOException {
if (databaseReader.get() == null) {
databaseReader.set(loader.get());
LOGGER.debug("Loaded [{}] geoip database", databaseFileName);
synchronized (databaseReader) {
if (databaseReader.get() == null) {
databaseReader.set(loader.get());
LOGGER.debug("loaded [{}] geo-IP database", databasePath);
}
}
}
return databaseReader.get();
}
@ -59,4 +150,5 @@ final class DatabaseReaderLazyLoader implements Closeable {
public synchronized void close() throws IOException {
IOUtils.close(databaseReader.get());
}
}
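Taken together, a caller in the same package might use the loader as in this minimal sketch; the database path is an assumption for the example:

package org.elasticsearch.ingest.geoip;

import com.maxmind.geoip2.DatabaseReader;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;

public class DatabaseReaderLazyLoaderSketch {
    public static void main(final String[] args) throws IOException {
        final Path database = Paths.get("/etc/elasticsearch/ingest-geoip/GeoLite2-City.mmdb"); // assumed path
        final DatabaseReaderLazyLoader loader =
                new DatabaseReaderLazyLoader(database, () -> new DatabaseReader.Builder(database.toFile()).build());
        System.out.println(loader.getDatabaseType());                 // cheap: reads only the tail of the file
        final DatabaseReader reader = loader.get();                   // expensive: builds the reader, once
        System.out.println(reader.getMetadata().getDatabaseType());   // now served from the loaded reader
        loader.close();
    }
}

Note that `getDatabaseType()` never forces the full reader to load, which is what keeps pipeline validation on master nodes cheap.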

View File

@ -19,7 +19,6 @@
package org.elasticsearch.ingest.geoip;
import com.maxmind.geoip2.DatabaseReader;
import com.maxmind.geoip2.exception.AddressNotFoundException;
import com.maxmind.geoip2.model.AsnResponse;
import com.maxmind.geoip2.model.CityResponse;
@ -38,6 +37,7 @@ import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.Processor;
import org.elasticsearch.ingest.geoip.IngestGeoIpPlugin.GeoIpCache;
import java.io.IOException;
import java.net.InetAddress;
import java.security.AccessController;
import java.security.PrivilegedAction;
@ -64,18 +64,34 @@ public final class GeoIpProcessor extends AbstractProcessor {
private final String field;
private final String targetField;
private final DatabaseReader dbReader;
private final DatabaseReaderLazyLoader lazyLoader;
private final Set<Property> properties;
private final boolean ignoreMissing;
private final GeoIpCache cache;
GeoIpProcessor(String tag, String field, DatabaseReader dbReader, String targetField, Set<Property> properties, boolean ignoreMissing,
GeoIpCache cache) {
/**
* Construct a geo-IP processor.
*
* @param tag the processor tag
* @param field the source field to geo-IP map
* @param lazyLoader a supplier of a geo-IP database reader; ideally this is lazily-loaded once on first use
* @param targetField the target field
* @param properties the properties to extract from the geo-IP database
* @param ignoreMissing true if documents with a missing value for the field should be ignored
* @param cache a geo-IP cache
*/
GeoIpProcessor(
final String tag,
final String field,
final DatabaseReaderLazyLoader lazyLoader,
final String targetField,
final Set<Property> properties,
final boolean ignoreMissing,
final GeoIpCache cache) {
super(tag);
this.field = field;
this.targetField = targetField;
this.dbReader = dbReader;
this.lazyLoader = lazyLoader;
this.properties = properties;
this.ignoreMissing = ignoreMissing;
this.cache = cache;
@ -86,7 +102,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
}
@Override
public IngestDocument execute(IngestDocument ingestDocument) {
public IngestDocument execute(IngestDocument ingestDocument) throws IOException {
String ip = ingestDocument.getFieldValue(field, String.class, ignoreMissing);
if (ip == null && ignoreMissing) {
@ -98,7 +114,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
final InetAddress ipAddress = InetAddresses.forString(ip);
Map<String, Object> geoData;
String databaseType = dbReader.getMetadata().getDatabaseType();
String databaseType = lazyLoader.getDatabaseType();
if (databaseType.endsWith(CITY_DB_SUFFIX)) {
try {
@ -119,7 +135,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
geoData = Collections.emptyMap();
}
} else {
throw new ElasticsearchParseException("Unsupported database type [" + dbReader.getMetadata().getDatabaseType()
throw new ElasticsearchParseException("Unsupported database type [" + lazyLoader.getDatabaseType()
+ "]", new IllegalStateException());
}
if (geoData.isEmpty() == false) {
@ -141,8 +157,8 @@ public final class GeoIpProcessor extends AbstractProcessor {
return targetField;
}
DatabaseReader getDbReader() {
return dbReader;
String getDatabaseType() throws IOException {
return lazyLoader.getDatabaseType();
}
Set<Property> getProperties() {
@ -154,7 +170,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
CityResponse response = AccessController.doPrivileged((PrivilegedAction<CityResponse>) () ->
cache.putIfAbsent(ipAddress, CityResponse.class, ip -> {
try {
return dbReader.city(ip);
return lazyLoader.get().city(ip);
} catch (AddressNotFoundException e) {
throw new AddressNotFoundRuntimeException(e);
} catch (Exception e) {
@ -240,7 +256,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
CountryResponse response = AccessController.doPrivileged((PrivilegedAction<CountryResponse>) () ->
cache.putIfAbsent(ipAddress, CountryResponse.class, ip -> {
try {
return dbReader.country(ip);
return lazyLoader.get().country(ip);
} catch (AddressNotFoundException e) {
throw new AddressNotFoundRuntimeException(e);
} catch (Exception e) {
@ -285,7 +301,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
AsnResponse response = AccessController.doPrivileged((PrivilegedAction<AsnResponse>) () ->
cache.putIfAbsent(ipAddress, AsnResponse.class, ip -> {
try {
return dbReader.asn(ip);
return lazyLoader.get().asn(ip);
} catch (AddressNotFoundException e) {
throw new AddressNotFoundRuntimeException(e);
} catch (Exception e) {
@ -318,18 +334,23 @@ public final class GeoIpProcessor extends AbstractProcessor {
}
public static final class Factory implements Processor.Factory {
static final Set<Property> DEFAULT_CITY_PROPERTIES = EnumSet.of(
static final Set<Property> DEFAULT_CITY_PROPERTIES = Collections.unmodifiableSet(EnumSet.of(
Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE, Property.REGION_ISO_CODE,
Property.REGION_NAME, Property.CITY_NAME, Property.LOCATION
);
static final Set<Property> DEFAULT_COUNTRY_PROPERTIES = EnumSet.of(
));
static final Set<Property> DEFAULT_COUNTRY_PROPERTIES = Collections.unmodifiableSet(EnumSet.of(
Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE
);
static final Set<Property> DEFAULT_ASN_PROPERTIES = EnumSet.of(
));
static final Set<Property> DEFAULT_ASN_PROPERTIES = Collections.unmodifiableSet(EnumSet.of(
Property.IP, Property.ASN, Property.ORGANIZATION_NAME
);
));
private final Map<String, DatabaseReaderLazyLoader> databaseReaders;
Map<String, DatabaseReaderLazyLoader> databaseReaders() {
return Collections.unmodifiableMap(databaseReaders);
}
private final GeoIpCache cache;
public Factory(Map<String, DatabaseReaderLazyLoader> databaseReaders, GeoIpCache cache) {
@ -338,8 +359,10 @@ public final class GeoIpProcessor extends AbstractProcessor {
}
@Override
public GeoIpProcessor create(Map<String, Processor.Factory> registry, String processorTag,
Map<String, Object> config) throws Exception {
public GeoIpProcessor create(
final Map<String, Processor.Factory> registry,
final String processorTag,
final Map<String, Object> config) throws IOException {
String ipField = readStringProperty(TYPE, processorTag, config, "field");
String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "geoip");
String databaseFile = readStringProperty(TYPE, processorTag, config, "database_file", "GeoLite2-City.mmdb");
@ -352,19 +375,19 @@ public final class GeoIpProcessor extends AbstractProcessor {
"database_file", "database file [" + databaseFile + "] doesn't exist");
}
DatabaseReader databaseReader = lazyLoader.get();
String databaseType = databaseReader.getMetadata().getDatabaseType();
final String databaseType = lazyLoader.getDatabaseType();
final Set<Property> properties;
if (propertyNames != null) {
properties = EnumSet.noneOf(Property.class);
Set<Property> modifiableProperties = EnumSet.noneOf(Property.class);
for (String fieldName : propertyNames) {
try {
properties.add(Property.parseProperty(databaseType, fieldName));
modifiableProperties.add(Property.parseProperty(databaseType, fieldName));
} catch (IllegalArgumentException e) {
throw newConfigurationException(TYPE, processorTag, "properties", e.getMessage());
}
}
properties = Collections.unmodifiableSet(modifiableProperties);
} else {
if (databaseType.endsWith(CITY_DB_SUFFIX)) {
properties = DEFAULT_CITY_PROPERTIES;
@ -378,7 +401,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
}
}
return new GeoIpProcessor(processorTag, ipField, databaseReader, targetField, properties, ignoreMissing, cache);
return new GeoIpProcessor(processorTag, ipField, lazyLoader, targetField, properties, ignoreMissing, cache);
}
}

View File

@ -90,16 +90,17 @@ public class IngestGeoIpPlugin extends Plugin implements IngestPlugin, Closeable
Path databasePath = iterator.next();
if (Files.isRegularFile(databasePath) && pathMatcher.matches(databasePath)) {
String databaseFileName = databasePath.getFileName().toString();
DatabaseReaderLazyLoader holder = new DatabaseReaderLazyLoader(databaseFileName,
() -> {
DatabaseReader.Builder builder = createDatabaseBuilder(databasePath).withCache(NoCache.getInstance());
if (loadDatabaseOnHeap) {
builder.fileMode(Reader.FileMode.MEMORY);
} else {
builder.fileMode(Reader.FileMode.MEMORY_MAPPED);
}
return builder.build();
});
DatabaseReaderLazyLoader holder = new DatabaseReaderLazyLoader(
databasePath,
() -> {
DatabaseReader.Builder builder = createDatabaseBuilder(databasePath).withCache(NoCache.getInstance());
if (loadDatabaseOnHeap) {
builder.fileMode(Reader.FileMode.MEMORY);
} else {
builder.fileMode(Reader.FileMode.MEMORY_MAPPED);
}
return builder.build();
});
databaseReaders.put(databaseFileName, holder);
}
}

View File

@ -23,6 +23,8 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.lucene.util.Constants;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.geoip.IngestGeoIpPlugin.GeoIpCache;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.StreamsUtils;
@ -100,7 +102,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
assertThat(processor.getTag(), equalTo(processorTag));
assertThat(processor.getField(), equalTo("_field"));
assertThat(processor.getTargetField(), equalTo("geoip"));
assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-City"));
assertThat(processor.getDatabaseType(), equalTo("GeoLite2-City"));
assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_CITY_PROPERTIES));
assertFalse(processor.isIgnoreMissing());
}
@ -120,7 +122,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
assertThat(processor.getTag(), equalTo(processorTag));
assertThat(processor.getField(), equalTo("_field"));
assertThat(processor.getTargetField(), equalTo("geoip"));
assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-City"));
assertThat(processor.getDatabaseType(), equalTo("GeoLite2-City"));
assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_CITY_PROPERTIES));
assertTrue(processor.isIgnoreMissing());
}
@ -141,7 +143,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
assertThat(processor.getTag(), equalTo(processorTag));
assertThat(processor.getField(), equalTo("_field"));
assertThat(processor.getTargetField(), equalTo("geoip"));
assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-Country"));
assertThat(processor.getDatabaseType(), equalTo("GeoLite2-Country"));
assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_COUNTRY_PROPERTIES));
assertFalse(processor.isIgnoreMissing());
}
@ -162,7 +164,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
assertThat(processor.getTag(), equalTo(processorTag));
assertThat(processor.getField(), equalTo("_field"));
assertThat(processor.getTargetField(), equalTo("geoip"));
assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-ASN"));
assertThat(processor.getDatabaseType(), equalTo("GeoLite2-ASN"));
assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_ASN_PROPERTIES));
assertFalse(processor.isIgnoreMissing());
}
@ -192,7 +194,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
GeoIpProcessor processor = factory.create(null, null, config);
assertThat(processor.getField(), equalTo("_field"));
assertThat(processor.getTargetField(), equalTo("geoip"));
assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-Country"));
assertThat(processor.getDatabaseType(), equalTo("GeoLite2-Country"));
assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_COUNTRY_PROPERTIES));
assertFalse(processor.isIgnoreMissing());
}
@ -315,21 +317,41 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
assertNull(lazyLoader.databaseReader.get());
}
final Map<String, Object> field = Collections.singletonMap("_field", "1.1.1.1");
final IngestDocument document = new IngestDocument("index", "type", "id", "routing", 1L, VersionType.EXTERNAL, field);
Map<String, Object> config = new HashMap<>();
config.put("field", "_field");
config.put("database_file", "GeoLite2-City.mmdb");
factory.create(null, "_tag", config);
final GeoIpProcessor city = factory.create(null, "_tag", config);
// the database is lazily loaded, so we expect null here before first use
assertNull(databaseReaders.get("GeoLite2-City.mmdb").databaseReader.get());
city.execute(document);
// the first ingest should trigger a database load
assertNotNull(databaseReaders.get("GeoLite2-City.mmdb").databaseReader.get());
config = new HashMap<>();
config.put("field", "_field");
config.put("database_file", "GeoLite2-Country.mmdb");
factory.create(null, "_tag", config);
final GeoIpProcessor country = factory.create(null, "_tag", config);
// the database is lazily loaded, so we expect null here before first use
assertNull(databaseReaders.get("GeoLite2-Country.mmdb").databaseReader.get());
country.execute(document);
// the first ingest should trigger a database load
assertNotNull(databaseReaders.get("GeoLite2-Country.mmdb").databaseReader.get());
config = new HashMap<>();
config.put("field", "_field");
config.put("database_file", "GeoLite2-ASN.mmdb");
factory.create(null, "_tag", config);
final GeoIpProcessor asn = factory.create(null, "_tag", config);
for (DatabaseReaderLazyLoader lazyLoader : databaseReaders.values()) {
assertNotNull(lazyLoader.databaseReader.get());
}
// the database is lazily loaded, so we expect null here before first use
assertNull(databaseReaders.get("GeoLite2-ASN.mmdb").databaseReader.get());
asn.execute(document);
// the first ingest should trigger a database load
assertNotNull(databaseReaders.get("GeoLite2-ASN.mmdb").databaseReader.get());
}
}

View File

@ -0,0 +1,167 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.ingest.geoip;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.ingest.PutPipelineRequest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.ingest.IngestService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.StreamsUtils;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;
public class GeoIpProcessorNonIngestNodeTests extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(IngestGeoIpPlugin.class);
}
@Override
protected Settings nodeSettings(final int nodeOrdinal) {
return Settings.builder().put("node.ingest", false).put(super.nodeSettings(nodeOrdinal)).build();
}
@Override
protected Path nodeConfigPath(final int nodeOrdinal) {
final Path configPath = createTempDir();
try {
final Path databasePath = configPath.resolve("ingest-geoip");
Files.createDirectories(databasePath);
Files.copy(
new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-City.mmdb")),
databasePath.resolve("GeoLite2-City.mmdb"));
Files.copy(
new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-Country.mmdb")),
databasePath.resolve("GeoLite2-Country.mmdb"));
Files.copy(
new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-ASN.mmdb")),
databasePath.resolve("GeoLite2-ASN.mmdb"));
} catch (final IOException e) {
throw new UncheckedIOException(e);
}
return configPath;
}
/**
* This test shows that we do not load the geo-IP databases on non-ingest nodes, and that ingest nodes load them only on first use.
*
* @throws IOException if an I/O exception occurs building the JSON
*/
public void testLazyLoading() throws IOException {
final BytesReference bytes;
try (XContentBuilder builder = JsonXContent.contentBuilder()) {
builder.startObject();
{
builder.field("description", "test");
builder.startArray("processors");
{
builder.startObject();
{
builder.startObject("geoip");
{
builder.field("field", "ip");
builder.field("target_field", "ip-city");
builder.field("database_file", "GeoLite2-City.mmdb");
}
builder.endObject();
}
builder.endObject();
builder.startObject();
{
builder.startObject("geoip");
{
builder.field("field", "ip");
builder.field("target_field", "ip-country");
builder.field("database_file", "GeoLite2-Country.mmdb");
}
builder.endObject();
}
builder.endObject();
builder.startObject();
{
builder.startObject("geoip");
{
builder.field("field", "ip");
builder.field("target_field", "ip-asn");
builder.field("database_file", "GeoLite2-ASN.mmdb");
}
builder.endObject();
}
builder.endObject();
}
builder.endArray();
}
builder.endObject();
bytes = BytesReference.bytes(builder);
}
assertAcked(client().admin().cluster().putPipeline(new PutPipelineRequest("geoip", bytes, XContentType.JSON)).actionGet());
// the geo-IP databases should not be loaded on any nodes as they are all non-ingest nodes
Arrays.stream(internalCluster().getNodeNames()).forEach(node -> assertDatabaseLoadStatus(node, false));
// start an ingest node
final String ingestNode = internalCluster().startNode(Settings.builder().put("node.ingest", true).build());
internalCluster().getInstance(IngestService.class, ingestNode);
// the geo-IP database should not be loaded yet as we have not indexed any documents using a pipeline that has a geo-IP processor
assertDatabaseLoadStatus(ingestNode, false);
final IndexRequest indexRequest = new IndexRequest("index", "_doc");
indexRequest.setPipeline("geoip");
indexRequest.source(Collections.singletonMap("ip", "1.1.1.1"));
final IndexResponse indexResponse = client().index(indexRequest).actionGet();
assertThat(indexResponse.status(), equalTo(RestStatus.CREATED));
// now the geo-IP database should be loaded on the ingest node
assertDatabaseLoadStatus(ingestNode, true);
// the geo-IP database should still not be loaded on the non-ingest nodes
Arrays.stream(internalCluster().getNodeNames())
.filter(node -> node.equals(ingestNode) == false)
.forEach(node -> assertDatabaseLoadStatus(node, false));
}
private void assertDatabaseLoadStatus(final String node, final boolean loaded) {
final IngestService ingestService = internalCluster().getInstance(IngestService.class, node);
final GeoIpProcessor.Factory factory = (GeoIpProcessor.Factory) ingestService.getProcessorFactories().get("geoip");
for (final DatabaseReaderLazyLoader loader : factory.databaseReaders().values()) {
if (loaded) {
assertNotNull(loader.databaseReader.get());
} else {
assertNull(loader.databaseReader.get());
}
}
}
}

View File

@ -20,16 +20,20 @@
package org.elasticsearch.ingest.geoip;
import com.maxmind.geoip2.DatabaseReader;
import org.elasticsearch.common.CheckedSupplier;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.RandomDocumentPicks;
import org.elasticsearch.ingest.geoip.IngestGeoIpPlugin.GeoIpCache;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;
import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument;
import static org.hamcrest.Matchers.containsString;
@ -39,10 +43,9 @@ import static org.hamcrest.Matchers.is;
public class GeoIpProcessorTests extends ESTestCase {
public void testCity() throws Exception {
InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb");
GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field",
new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false,
new GeoIpCache(1000));
loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false,
new GeoIpCache(1000));
Map<String, Object> document = new HashMap<>();
document.put("source_field", "8.8.8.8");
@ -64,10 +67,9 @@ public class GeoIpProcessorTests extends ESTestCase {
}
public void testNullValueWithIgnoreMissing() throws Exception {
InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb");
GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field",
new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), true,
new GeoIpCache(1000));
loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), true,
new GeoIpCache(1000));
IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(),
Collections.singletonMap("source_field", null));
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
@ -76,10 +78,9 @@ public class GeoIpProcessorTests extends ESTestCase {
}
public void testNonExistentWithIgnoreMissing() throws Exception {
InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb");
GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field",
new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), true,
new GeoIpCache(1000));
loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), true,
new GeoIpCache(1000));
IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
processor.execute(ingestDocument);
@ -87,10 +88,9 @@ public class GeoIpProcessorTests extends ESTestCase {
}
public void testNullWithoutIgnoreMissing() throws Exception {
InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb");
GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field",
new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false,
new GeoIpCache(1000));
loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false,
new GeoIpCache(1000));
IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(),
Collections.singletonMap("source_field", null));
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
@ -99,9 +99,8 @@ public class GeoIpProcessorTests extends ESTestCase {
}
public void testNonExistentWithoutIgnoreMissing() throws Exception {
InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb");
GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field",
new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false,
loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false,
new GeoIpCache(1000));
IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
@ -110,10 +109,9 @@ public class GeoIpProcessorTests extends ESTestCase {
}
public void testCity_withIpV6() throws Exception {
InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb");
GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field",
new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false,
new GeoIpCache(1000));
loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false,
new GeoIpCache(1000));
String address = "2602:306:33d3:8000::3257:9652";
Map<String, Object> document = new HashMap<>();
@ -140,10 +138,9 @@ public class GeoIpProcessorTests extends ESTestCase {
}
public void testCityWithMissingLocation() throws Exception {
InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb");
GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field",
new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false,
new GeoIpCache(1000));
loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false,
new GeoIpCache(1000));
Map<String, Object> document = new HashMap<>();
document.put("source_field", "80.231.5.0");
@ -158,10 +155,9 @@ public class GeoIpProcessorTests extends ESTestCase {
}
public void testCountry() throws Exception {
InputStream database = getDatabaseFileInputStream("/GeoLite2-Country.mmdb");
GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field",
new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false,
new GeoIpCache(1000));
loader("/GeoLite2-Country.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false,
new GeoIpCache(1000));
Map<String, Object> document = new HashMap<>();
document.put("source_field", "82.170.213.79");
@ -179,10 +175,9 @@ public class GeoIpProcessorTests extends ESTestCase {
}
public void testCountryWithMissingLocation() throws Exception {
InputStream database = getDatabaseFileInputStream("/GeoLite2-Country.mmdb");
GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field",
new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false,
new GeoIpCache(1000));
loader("/GeoLite2-Country.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false,
new GeoIpCache(1000));
Map<String, Object> document = new HashMap<>();
document.put("source_field", "80.231.5.0");
@ -198,10 +193,9 @@ public class GeoIpProcessorTests extends ESTestCase {
public void testAsn() throws Exception {
String ip = "82.171.64.0";
InputStream database = getDatabaseFileInputStream("/GeoLite2-ASN.mmdb");
GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field",
new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false,
new GeoIpCache(1000));
loader("/GeoLite2-ASN.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false,
new GeoIpCache(1000));
Map<String, Object> document = new HashMap<>();
document.put("source_field", ip);
@ -218,10 +212,9 @@ public class GeoIpProcessorTests extends ESTestCase {
}
public void testAddressIsNotInTheDatabase() throws Exception {
InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb");
GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field",
new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false,
new GeoIpCache(1000));
loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false,
new GeoIpCache(1000));
Map<String, Object> document = new HashMap<>();
document.put("source_field", "127.0.0.1");
@ -232,10 +225,9 @@ public class GeoIpProcessorTests extends ESTestCase {
/** Don't silently do DNS lookups or anything trappy on bogus data */
public void testInvalid() throws Exception {
InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb");
GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field",
new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false,
new GeoIpCache(1000));
loader("/GeoLite2-City.mmdb"), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false,
new GeoIpCache(1000));
Map<String, Object> document = new HashMap<>();
document.put("source_field", "www.google.com");
@ -244,8 +236,32 @@ public class GeoIpProcessorTests extends ESTestCase {
assertThat(e.getMessage(), containsString("not an IP string literal"));
}
private static InputStream getDatabaseFileInputStream(String path) {
return GeoIpProcessor.class.getResourceAsStream(path);
private DatabaseReaderLazyLoader loader(final String path) {
final Supplier<InputStream> databaseInputStreamSupplier = () -> GeoIpProcessor.class.getResourceAsStream(path);
final CheckedSupplier<DatabaseReader, IOException> loader =
() -> new DatabaseReader.Builder(databaseInputStreamSupplier.get()).build();
return new DatabaseReaderLazyLoader(PathUtils.get(path), loader) {
@Override
long databaseFileSize() throws IOException {
try (InputStream is = databaseInputStreamSupplier.get()) {
// allocate the scratch buffer once and stream to the end of the file to count the bytes
final byte[] bytes = new byte[1 << 10];
long bytesRead = 0;
int read;
while ((read = is.read(bytes)) != -1) {
bytesRead += read;
}
return bytesRead;
}
}
@Override
InputStream databaseInputStream() throws IOException {
return databaseInputStreamSupplier.get();
}
};
}
}

View File

@ -76,7 +76,7 @@ task testRepositoryCreds(type: RandomizedTestingTask) {
}
project.check.dependsOn(testRepositoryCreds)
test {
unitTest {
// these are tested explicitly in separate test tasks
exclude '**/*CredentialsTests.class'
exclude '**/S3BlobStoreRepositoryTests.class'

View File

@ -32,6 +32,6 @@ integTestRunner {
systemProperty 'runtime.java.home', "${project.runtimeJavaHome}"
}
test.enabled = false
unitTest.enabled = false
check.dependsOn integTest

View File

@ -31,7 +31,7 @@ dependencies {
// TODO: give each evil test its own fresh JVM for more isolation.
test {
unitTest {
systemProperty 'tests.security.manager', 'false'
}

View File

@ -93,7 +93,7 @@ for (Version version : bwcVersions.indexCompatible) {
}
}
test.enabled = false // no unit tests for rolling upgrades, only the rest integration test
unitTest.enabled = false // no unit tests for rolling upgrades, only the rest integration test
// basic integ tests includes testing bwc against the most recent version
task integTest {

View File

@ -60,7 +60,7 @@ for (Version version : bwcVersions.wireCompatible) {
}
}
test.enabled = false // no unit tests for rolling upgrades, only the rest integration test
unitTest.enabled = false // no unit tests for rolling upgrades, only the rest integration test
// basic integ tests includes testing bwc against the most recent version
task integTest {

View File

@ -53,6 +53,6 @@ task integTest {
dependsOn = [mixedClusterTest]
}
test.enabled = false // no unit tests for multi-cluster-search, only the rest integration test
unitTest.enabled = false // no unit tests for multi-cluster-search, only the rest integration test
check.dependsOn(integTest)

View File

@ -140,7 +140,7 @@ for (Version version : bwcVersions.wireCompatible) {
}
}
test.enabled = false // no unit tests for rolling upgrades, only the rest integration test
unitTest.enabled = false // no unit tests for rolling upgrades, only the rest integration test
// basic integ tests include testing bwc against the most recent version
task integTest {

View File

@ -71,7 +71,7 @@ forbiddenApisMain {
}
// we don't have additional tests for the tests themselves
tasks.test.enabled = false
tasks.unitTest.enabled = false
// this project doesn't get published
tasks.dependencyLicenses.enabled = false

View File

@ -53,7 +53,7 @@ for (Version version : bwcVersions.indexCompatible) {
bwcTest.dependsOn(versionBwcTest)
}
test.enabled = false
unitTest.enabled = false
task integTest {
if (project.bwc_tests_enabled) {

View File

@ -211,7 +211,7 @@ if (!Os.isFamily(Os.FAMILY_WINDOWS)) {
check.dependsOn(integTest)
test.enabled = false
unitTest.enabled = false
dependencyLicenses.enabled = false
dependenciesInfo.enabled = false

View File

@ -2,5 +2,5 @@ apply plugin: 'elasticsearch.build'
apply plugin: 'nebula.maven-base-publish'
apply plugin: 'nebula.maven-scm'
test.enabled = false
unitTest.enabled = false
jarHell.enabled = false

View File

@ -322,7 +322,7 @@ if (isEclipse == false || project.path == ":server-tests") {
task integTest(type: RandomizedTestingTask,
group: JavaBasePlugin.VERIFICATION_GROUP,
description: 'Multi-node tests',
dependsOn: test.dependsOn) {
dependsOn: unitTest.dependsOn) {
include '**/*IT.class'
}
check.dependsOn integTest

View File

@ -0,0 +1,373 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.document;
import org.apache.lucene.geo.GeoUtils;
import org.apache.lucene.geo.Line;
import org.apache.lucene.geo.Polygon;
import org.apache.lucene.geo.XTessellator;
import org.apache.lucene.geo.XTessellator.Triangle;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import java.util.ArrayList;
import java.util.List;
import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude;
import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitude;
/**
* An indexed shape utility class.
* <p>
* {@link Polygon}s are decomposed into a triangular mesh using the {@link XTessellator} utility class.
* Each {@link Triangle} is encoded and indexed as a multi-value field.
* <p>
* Finding all shapes that intersect a range (e.g., bounding box) at search time is efficient.
* <p>
* This class defines static factory methods for common operations:
* <ul>
* <li>{@link #createIndexableFields(String, Polygon)} for indexing a polygon.
* <li>{@link #newBoxQuery newBoxQuery()} for matching polygons that intersect a bounding box.
* </ul>
* <b>WARNING</b>: Like {@link LatLonPoint}, vertex values are indexed with some loss of precision from the
* original {@code double} values (4.190951585769653E-8 for the latitude component
* and 8.381903171539307E-8 for longitude).
* @see PointValues
* @see LatLonDocValuesField
*
* @lucene.experimental
*/
public class XLatLonShape {
public static final int BYTES = LatLonPoint.BYTES;
protected static final FieldType TYPE = new FieldType();
static {
TYPE.setDimensions(7, 4, BYTES);
TYPE.freeze();
}
// no instance:
private XLatLonShape() {
}
/** create indexable fields for polygon geometry */
public static Field[] createIndexableFields(String fieldName, Polygon polygon) {
// the lion's share of the indexing is done by the tessellator
List<Triangle> tessellation = XTessellator.tessellate(polygon);
List<LatLonTriangle> fields = new ArrayList<>();
for (Triangle t : tessellation) {
fields.add(new LatLonTriangle(fieldName, t));
}
return fields.toArray(new Field[fields.size()]);
}
/** create indexable fields for line geometry */
public static Field[] createIndexableFields(String fieldName, Line line) {
int numPoints = line.numPoints();
Field[] fields = new Field[numPoints - 1];
// create "flat" triangles
for (int i = 0, j = 1; j < numPoints; ++i, ++j) {
fields[i] = new LatLonTriangle(fieldName, line.getLat(i), line.getLon(i), line.getLat(j), line.getLon(j),
line.getLat(i), line.getLon(i));
}
return fields;
}
/** create indexable fields for point geometry */
public static Field[] createIndexableFields(String fieldName, double lat, double lon) {
return new Field[] {new LatLonTriangle(fieldName, lat, lon, lat, lon, lat, lon)};
}
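// Illustrative sketch only, not part of the upstream class: shows the intended
// indexing flow. The field name "shape" and the unit-square polygon are
// hypothetical; Document and Field resolve from this package (org.apache.lucene.document).
static Document exampleIndexing() {
Document doc = new Document();
Polygon polygon = new Polygon(new double[] {0d, 0d, 1d, 1d, 0d},
new double[] {0d, 1d, 1d, 0d, 0d});
// each tessellated triangle becomes one multi-valued field on the document
for (Field field : createIndexableFields("shape", polygon)) {
doc.add(field);
}
return doc;
}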
/** create a query to find all polygons that intersect a defined bounding box
**/
public static Query newBoxQuery(String field, QueryRelation queryRelation,
double minLatitude, double maxLatitude, double minLongitude, double maxLongitude) {
return new XLatLonShapeBoundingBoxQuery(field, queryRelation, minLatitude, maxLatitude, minLongitude, maxLongitude);
}
/** create a query to find all polygons that intersect a provided linestring (or array of linestrings)
* note: does not support dateline crossing
**/
public static Query newLineQuery(String field, QueryRelation queryRelation, Line... lines) {
return new XLatLonShapeLineQuery(field, queryRelation, lines);
}
/** create a query to find all polygons that intersect a provided polygon (or array of polygons)
* note: does not support dateline crossing
**/
public static Query newPolygonQuery(String field, QueryRelation queryRelation, Polygon... polygons) {
return new XLatLonShapePolygonQuery(field, queryRelation, polygons);
}
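// Illustrative sketch only (the field name "shape" and the geometries are
// hypothetical): the three factory methods above are used identically, only
// the geometry and the desired QueryRelation differ.
static Query[] exampleQueries() {
Query box = newBoxQuery("shape", QueryRelation.INTERSECTS, -10d, 10d, -10d, 10d);
Query line = newLineQuery("shape", QueryRelation.INTERSECTS,
new Line(new double[] {0d, 1d}, new double[] {0d, 1d}));
Query polygon = newPolygonQuery("shape", QueryRelation.WITHIN,
new Polygon(new double[] {0d, 0d, 1d, 1d, 0d}, new double[] {0d, 1d, 1d, 0d, 0d}));
return new Query[] {box, line, polygon};
}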
/** polygons are decomposed into tessellated triangles using {@link XTessellator}
* these triangles are encoded and inserted as separate indexed POINT fields
*/
private static class LatLonTriangle extends Field {
LatLonTriangle(String name, double aLat, double aLon, double bLat, double bLon, double cLat, double cLon) {
super(name, TYPE);
setTriangleValue(encodeLongitude(aLon), encodeLatitude(aLat), encodeLongitude(bLon), encodeLatitude(bLat),
encodeLongitude(cLon), encodeLatitude(cLat));
}
LatLonTriangle(String name, Triangle t) {
super(name, TYPE);
setTriangleValue(t.getEncodedX(0), t.getEncodedY(0), t.getEncodedX(1), t.getEncodedY(1),
t.getEncodedX(2), t.getEncodedY(2));
}
public void setTriangleValue(int aX, int aY, int bX, int bY, int cX, int cY) {
final byte[] bytes;
if (fieldsData == null) {
bytes = new byte[7 * BYTES];
fieldsData = new BytesRef(bytes);
} else {
bytes = ((BytesRef) fieldsData).bytes;
}
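// pack the vertices and the reconstruction bits into the BytesRef backing fieldsData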
encodeTriangle(bytes, aY, aX, bY, bX, cY, cX);
}
}
/** Query Relation Types **/
public enum QueryRelation {
INTERSECTS, WITHIN, DISJOINT
}
private static final int MINY_MINX_MAXY_MAXX_Y_X = 0;
private static final int MINY_MINX_Y_X_MAXY_MAXX = 1;
private static final int MAXY_MINX_Y_X_MINY_MAXX = 2;
private static final int MAXY_MINX_MINY_MAXX_Y_X = 3;
private static final int Y_MINX_MINY_X_MAXY_MAXX = 4;
private static final int Y_MINX_MINY_MAXX_MAXY_X = 5;
private static final int MAXY_MINX_MINY_X_Y_MAXX = 6;
private static final int MINY_MINX_Y_MAXX_MAXY_X = 7;
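// each constant name lists the decoded vertex order; e.g. MINY_MINX_MAXY_MAXX_Y_X
// decodes to a = (minY, minX), b = (maxY, maxX), c = (y, x)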
/**
* A triangle is encoded using its six vertex values plus one extra value whose lowest three bits describe how to reconstruct it.
* Triangles are encoded with CCW orientation and might be rotated to limit the number of possible reconstructions to 2^3.
* Reconstruction always happens from west to east.
*/
public static void encodeTriangle(byte[] bytes, int aLat, int aLon, int bLat, int bLon, int cLat, int cLon) {
assert bytes.length == 7 * BYTES;
int aX;
int bX;
int cX;
int aY;
int bY;
int cY;
//change orientation if CW
if (GeoUtils.orient(aLon, aLat, bLon, bLat, cLon, cLat) == -1) {
aX = cLon;
bX = bLon;
cX = aLon;
aY = cLat;
bY = bLat;
cY = aLat;
} else {
aX = aLon;
bX = bLon;
cX = cLon;
aY = aLat;
bY = bLat;
cY = cLat;
}
//rotate edges and place minX at the beginning
if (bX < aX || cX < aX) {
if (bX < cX) {
int tempX = aX;
int tempY = aY;
aX = bX;
aY = bY;
bX = cX;
bY = cY;
cX = tempX;
cY = tempY;
} else if (cX < aX) {
int tempX = aX;
int tempY = aY;
aX = cX;
aY = cY;
cX = bX;
cY = bY;
bX = tempX;
bY = tempY;
}
} else if (aX == bX && aX == cX) {
//degenerated case, all points with same longitude
//we need to prevent that aX is in the middle (not part of the MBS)
if (bY < aY || cY < aY) {
if (bY < cY) {
int tempX = aX;
int tempY = aY;
aX = bX;
aY = bY;
bX = cX;
bY = cY;
cX = tempX;
cY = tempY;
} else if (cY < aY) {
int tempX = aX;
int tempY = aY;
aX = cX;
aY = cY;
cX = bX;
cY = bY;
bX = tempX;
bY = tempY;
}
}
}
int minX = aX;
int minY = StrictMath.min(aY, StrictMath.min(bY, cY));
int maxX = StrictMath.max(aX, StrictMath.max(bX, cX));
int maxY = StrictMath.max(aY, StrictMath.max(bY, cY));
int bits, x, y;
if (minY == aY) {
if (maxY == bY && maxX == bX) {
y = cY;
x = cX;
bits = MINY_MINX_MAXY_MAXX_Y_X;
} else if (maxY == cY && maxX == cX) {
y = bY;
x = bX;
bits = MINY_MINX_Y_X_MAXY_MAXX;
} else {
y = bY;
x = cX;
bits = MINY_MINX_Y_MAXX_MAXY_X;
}
} else if (maxY == aY) {
if (minY == bY && maxX == bX) {
y = cY;
x = cX;
bits = MAXY_MINX_MINY_MAXX_Y_X;
} else if (minY == cY && maxX == cX) {
y = bY;
x = bX;
bits = MAXY_MINX_Y_X_MINY_MAXX;
} else {
y = cY;
x = bX;
bits = MAXY_MINX_MINY_X_Y_MAXX;
}
} else if (maxX == bX && minY == bY) {
y = aY;
x = cX;
bits = Y_MINX_MINY_MAXX_MAXY_X;
} else if (maxX == cX && maxY == cY) {
y = aY;
x = bX;
bits = Y_MINX_MINY_X_MAXY_MAXX;
} else {
throw new IllegalArgumentException("Could not encode the provided triangle");
}
NumericUtils.intToSortableBytes(minY, bytes, 0);
NumericUtils.intToSortableBytes(minX, bytes, BYTES);
NumericUtils.intToSortableBytes(maxY, bytes, 2 * BYTES);
NumericUtils.intToSortableBytes(maxX, bytes, 3 * BYTES);
NumericUtils.intToSortableBytes(y, bytes, 4 * BYTES);
NumericUtils.intToSortableBytes(x, bytes, 5 * BYTES);
NumericUtils.intToSortableBytes(bits, bytes, 6 * BYTES);
}
/**
* Decode a triangle encoded by {@link XLatLonShape#encodeTriangle(byte[], int, int, int, int, int, int)}.
*/
public static void decodeTriangle(byte[] t, int[] triangle) {
assert triangle.length == 6;
int bits = NumericUtils.sortableBytesToInt(t, 6 * XLatLonShape.BYTES);
//extract the first three bits
int tCode = (((1 << 3) - 1) & (bits >> 0));
switch (tCode) {
case MINY_MINX_MAXY_MAXX_Y_X:
triangle[0] = NumericUtils.sortableBytesToInt(t, 0 * XLatLonShape.BYTES);
triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * XLatLonShape.BYTES);
triangle[2] = NumericUtils.sortableBytesToInt(t, 2 * XLatLonShape.BYTES);
triangle[3] = NumericUtils.sortableBytesToInt(t, 3 * XLatLonShape.BYTES);
triangle[4] = NumericUtils.sortableBytesToInt(t, 4 * XLatLonShape.BYTES);
triangle[5] = NumericUtils.sortableBytesToInt(t, 5 * XLatLonShape.BYTES);
break;
case MINY_MINX_Y_X_MAXY_MAXX:
triangle[0] = NumericUtils.sortableBytesToInt(t, 0 * XLatLonShape.BYTES);
triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * XLatLonShape.BYTES);
triangle[2] = NumericUtils.sortableBytesToInt(t, 4 * XLatLonShape.BYTES);
triangle[3] = NumericUtils.sortableBytesToInt(t, 5 * XLatLonShape.BYTES);
triangle[4] = NumericUtils.sortableBytesToInt(t, 2 * XLatLonShape.BYTES);
triangle[5] = NumericUtils.sortableBytesToInt(t, 3 * XLatLonShape.BYTES);
break;
case MAXY_MINX_Y_X_MINY_MAXX:
triangle[0] = NumericUtils.sortableBytesToInt(t, 2 * XLatLonShape.BYTES);
triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * XLatLonShape.BYTES);
triangle[2] = NumericUtils.sortableBytesToInt(t, 4 * XLatLonShape.BYTES);
triangle[3] = NumericUtils.sortableBytesToInt(t, 5 * XLatLonShape.BYTES);
triangle[4] = NumericUtils.sortableBytesToInt(t, 0 * XLatLonShape.BYTES);
triangle[5] = NumericUtils.sortableBytesToInt(t, 3 * XLatLonShape.BYTES);
break;
case MAXY_MINX_MINY_MAXX_Y_X:
triangle[0] = NumericUtils.sortableBytesToInt(t, 2 * XLatLonShape.BYTES);
triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * XLatLonShape.BYTES);
triangle[2] = NumericUtils.sortableBytesToInt(t, 0 * XLatLonShape.BYTES);
triangle[3] = NumericUtils.sortableBytesToInt(t, 3 * XLatLonShape.BYTES);
triangle[4] = NumericUtils.sortableBytesToInt(t, 4 * XLatLonShape.BYTES);
triangle[5] = NumericUtils.sortableBytesToInt(t, 5 * XLatLonShape.BYTES);
break;
case Y_MINX_MINY_X_MAXY_MAXX:
triangle[0] = NumericUtils.sortableBytesToInt(t, 4 * XLatLonShape.BYTES);
triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * XLatLonShape.BYTES);
triangle[2] = NumericUtils.sortableBytesToInt(t, 0 * XLatLonShape.BYTES);
triangle[3] = NumericUtils.sortableBytesToInt(t, 5 * XLatLonShape.BYTES);
triangle[4] = NumericUtils.sortableBytesToInt(t, 2 * XLatLonShape.BYTES);
triangle[5] = NumericUtils.sortableBytesToInt(t, 3 * XLatLonShape.BYTES);
break;
case Y_MINX_MINY_MAXX_MAXY_X:
triangle[0] = NumericUtils.sortableBytesToInt(t, 4 * XLatLonShape.BYTES);
triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * XLatLonShape.BYTES);
triangle[2] = NumericUtils.sortableBytesToInt(t, 0 * XLatLonShape.BYTES);
triangle[3] = NumericUtils.sortableBytesToInt(t, 3 * XLatLonShape.BYTES);
triangle[4] = NumericUtils.sortableBytesToInt(t, 2 * XLatLonShape.BYTES);
triangle[5] = NumericUtils.sortableBytesToInt(t, 5 * XLatLonShape.BYTES);
break;
case MAXY_MINX_MINY_X_Y_MAXX:
triangle[0] = NumericUtils.sortableBytesToInt(t, 2 * XLatLonShape.BYTES);
triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * XLatLonShape.BYTES);
triangle[2] = NumericUtils.sortableBytesToInt(t, 0 * XLatLonShape.BYTES);
triangle[3] = NumericUtils.sortableBytesToInt(t, 5 * XLatLonShape.BYTES);
triangle[4] = NumericUtils.sortableBytesToInt(t, 4 * XLatLonShape.BYTES);
triangle[5] = NumericUtils.sortableBytesToInt(t, 3 * XLatLonShape.BYTES);
break;
case MINY_MINX_Y_MAXX_MAXY_X:
triangle[0] = NumericUtils.sortableBytesToInt(t, 0 * XLatLonShape.BYTES);
triangle[1] = NumericUtils.sortableBytesToInt(t, 1 * XLatLonShape.BYTES);
triangle[2] = NumericUtils.sortableBytesToInt(t, 4 * XLatLonShape.BYTES);
triangle[3] = NumericUtils.sortableBytesToInt(t, 3 * XLatLonShape.BYTES);
triangle[4] = NumericUtils.sortableBytesToInt(t, 2 * XLatLonShape.BYTES);
triangle[5] = NumericUtils.sortableBytesToInt(t, 5 * XLatLonShape.BYTES);
break;
default:
throw new IllegalArgumentException("Could not decode the provided triangle");
}
//Points of the decoded triangle must be co-planar or CCW oriented
assert GeoUtils.orient(triangle[1], triangle[0], triangle[3], triangle[2], triangle[5], triangle[4]) >= 0;
}
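// Minimal round-trip sketch (hypothetical helper, not in the upstream file):
// encodes one triangle into the 7-value layout and decodes it back. Inputs are
// already-encoded ints in (lat, lon) order, matching encodeTriangle's signature.
static int[] exampleEncodeDecodeRoundTrip() {
final byte[] bytes = new byte[7 * BYTES];
encodeTriangle(bytes, 10, 10, 20, 30, 30, 10); // aLat, aLon, bLat, bLon, cLat, cLon
final int[] triangle = new int[6];
decodeTriangle(bytes, triangle);
return triangle; // decoded as {aLat, aLon, bLat, bLon, cLat, cLon}, possibly rotated
}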
}

View File

@ -0,0 +1,96 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.document;
import org.apache.lucene.geo.Rectangle;
import org.apache.lucene.geo.XRectangle2D;
import org.apache.lucene.index.PointValues.Relation;
/**
* Finds all previously indexed shapes that intersect the specified bounding box.
*
* <p>The field must be indexed using
* {@link XLatLonShape#createIndexableFields}, with the resulting fields added to the document.
*
* @lucene.experimental
**/
final class XLatLonShapeBoundingBoxQuery extends XLatLonShapeQuery {
final XRectangle2D rectangle2D;
XLatLonShapeBoundingBoxQuery(String field, XLatLonShape.QueryRelation queryRelation,
double minLat, double maxLat, double minLon, double maxLon) {
super(field, queryRelation);
Rectangle rectangle = new Rectangle(minLat, maxLat, minLon, maxLon);
this.rectangle2D = XRectangle2D.create(rectangle);
}
@Override
protected Relation relateRangeBBoxToQuery(int minXOffset, int minYOffset, byte[] minTriangle,
int maxXOffset, int maxYOffset, byte[] maxTriangle) {
return rectangle2D.relateRangeBBox(minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle);
}
/** returns true if the query matches the encoded triangle */
@Override
protected boolean queryMatches(byte[] t, int[] scratchTriangle) {
// decode indexed triangle
XLatLonShape.decodeTriangle(t, scratchTriangle);
int aY = scratchTriangle[0];
int aX = scratchTriangle[1];
int bY = scratchTriangle[2];
int bX = scratchTriangle[3];
int cY = scratchTriangle[4];
int cX = scratchTriangle[5];
if (queryRelation == XLatLonShape.QueryRelation.WITHIN) {
return rectangle2D.containsTriangle(aX, aY, bX, bY, cX, cY);
}
return rectangle2D.intersectsTriangle(aX, aY, bX, bY, cX, cY);
}
@Override
public boolean equals(Object o) {
return sameClassAs(o) && equalsTo(getClass().cast(o));
}
@Override
protected boolean equalsTo(Object o) {
return super.equalsTo(o) && rectangle2D.equals(((XLatLonShapeBoundingBoxQuery)o).rectangle2D);
}
@Override
public int hashCode() {
int hash = super.hashCode();
hash = 31 * hash + rectangle2D.hashCode();
return hash;
}
@Override
public String toString(String field) {
final StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName());
sb.append(':');
if (this.field.equals(field) == false) {
sb.append(" field=");
sb.append(this.field);
sb.append(':');
}
sb.append(rectangle2D.toString());
return sb.toString();
}
}

View File

@ -0,0 +1,134 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.document;
import org.apache.lucene.document.XLatLonShape.QueryRelation;
import org.apache.lucene.geo.GeoEncodingUtils;
import org.apache.lucene.geo.Line;
import org.apache.lucene.geo.Line2D;
import org.apache.lucene.index.PointValues.Relation;
import org.apache.lucene.util.NumericUtils;
import java.util.Arrays;
/**
* Finds all previously indexed shapes that intersect the specified arbitrary {@code Line}.
* <p>
* Note:
* <ul>
* <li>{@code QueryRelation.WITHIN} queries are not yet supported</li>
* <li>Dateline crossing is not yet supported</li>
* </ul>
* <p>
* todo:
* <ul>
* <li>Add distance support for buffered queries</li>
* </ul>
* <p>The field must be indexed using
* {@link XLatLonShape#createIndexableFields}, with the resulting fields added to the document.
*
* @lucene.experimental
**/
final class XLatLonShapeLineQuery extends XLatLonShapeQuery {
final Line[] lines;
private final Line2D line2D;
XLatLonShapeLineQuery(String field, QueryRelation queryRelation, Line... lines) {
super(field, queryRelation);
// line queries do not support WITHIN relations, only INTERSECTS and DISJOINT
if (queryRelation == QueryRelation.WITHIN) {
throw new IllegalArgumentException("LatLonShapeLineQuery does not support " + QueryRelation.WITHIN + " queries");
}
if (lines == null) {
throw new IllegalArgumentException("lines must not be null");
}
if (lines.length == 0) {
throw new IllegalArgumentException("lines must not be empty");
}
for (int i = 0; i < lines.length; ++i) {
if (lines[i] == null) {
throw new IllegalArgumentException("line[" + i + "] must not be null");
} else if (lines[i].minLon > lines[i].maxLon) {
throw new IllegalArgumentException("LatLonShapeLineQuery does not currently support querying across dateline.");
}
}
this.lines = lines.clone();
this.line2D = Line2D.create(lines);
}
@Override
protected Relation relateRangeBBoxToQuery(int minXOffset, int minYOffset, byte[] minTriangle,
int maxXOffset, int maxYOffset, byte[] maxTriangle) {
double minLat = GeoEncodingUtils.decodeLatitude(NumericUtils.sortableBytesToInt(minTriangle, minYOffset));
double minLon = GeoEncodingUtils.decodeLongitude(NumericUtils.sortableBytesToInt(minTriangle, minXOffset));
double maxLat = GeoEncodingUtils.decodeLatitude(NumericUtils.sortableBytesToInt(maxTriangle, maxYOffset));
double maxLon = GeoEncodingUtils.decodeLongitude(NumericUtils.sortableBytesToInt(maxTriangle, maxXOffset));
// check internal node against query
return line2D.relate(minLat, maxLat, minLon, maxLon);
}
@Override
protected boolean queryMatches(byte[] t, int[] scratchTriangle) {
XLatLonShape.decodeTriangle(t, scratchTriangle);
double alat = GeoEncodingUtils.decodeLatitude(scratchTriangle[0]);
double alon = GeoEncodingUtils.decodeLongitude(scratchTriangle[1]);
double blat = GeoEncodingUtils.decodeLatitude(scratchTriangle[2]);
double blon = GeoEncodingUtils.decodeLongitude(scratchTriangle[3]);
double clat = GeoEncodingUtils.decodeLatitude(scratchTriangle[4]);
double clon = GeoEncodingUtils.decodeLongitude(scratchTriangle[5]);
if (queryRelation == XLatLonShape.QueryRelation.WITHIN) {
return line2D.relateTriangle(alon, alat, blon, blat, clon, clat) == Relation.CELL_INSIDE_QUERY;
}
// INTERSECTS
return line2D.relateTriangle(alon, alat, blon, blat, clon, clat) != Relation.CELL_OUTSIDE_QUERY;
}
@Override
public String toString(String field) {
final StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName());
sb.append(':');
if (this.field.equals(field) == false) {
sb.append(" field=");
sb.append(this.field);
sb.append(':');
}
sb.append("Line(" + lines[0].toGeoJSON() + ")");
return sb.toString();
}
@Override
public boolean equals(Object o) {
return super.equals(o);
}
@Override
protected boolean equalsTo(Object o) {
return super.equalsTo(o) && Arrays.equals(lines, ((XLatLonShapeLineQuery)o).lines);
}
@Override
public int hashCode() {
int hash = super.hashCode();
hash = 31 * hash + Arrays.hashCode(lines);
return hash;
}
}

View File

@ -0,0 +1,123 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.document;
import org.apache.lucene.document.XLatLonShape.QueryRelation;
import org.apache.lucene.geo.GeoEncodingUtils;
import org.apache.lucene.geo.Polygon;
import org.apache.lucene.geo.Polygon2D;
import org.apache.lucene.index.PointValues.Relation;
import org.apache.lucene.util.NumericUtils;
import java.util.Arrays;
/**
* Finds all previously indexed shapes that intersect the specified arbitrary {@code Polygon}.
*
* <p>The field must be indexed using
* {@link XLatLonShape#createIndexableFields}, with the resulting fields added to the document.
*
* @lucene.experimental
**/
final class XLatLonShapePolygonQuery extends XLatLonShapeQuery {
final Polygon[] polygons;
private final Polygon2D poly2D;
/**
* Creates a query that matches all indexed shapes to the provided polygons
*/
XLatLonShapePolygonQuery(String field, QueryRelation queryRelation, Polygon... polygons) {
super(field, queryRelation);
if (polygons == null) {
throw new IllegalArgumentException("polygons must not be null");
}
if (polygons.length == 0) {
throw new IllegalArgumentException("polygons must not be empty");
}
for (int i = 0; i < polygons.length; i++) {
if (polygons[i] == null) {
throw new IllegalArgumentException("polygon[" + i + "] must not be null");
} else if (polygons[i].minLon > polygons[i].maxLon) {
throw new IllegalArgumentException("LatLonShapePolygonQuery does not currently support querying across dateline.");
}
}
this.polygons = polygons.clone();
this.poly2D = Polygon2D.create(polygons);
}
@Override
protected Relation relateRangeBBoxToQuery(int minXOffset, int minYOffset, byte[] minTriangle,
int maxXOffset, int maxYOffset, byte[] maxTriangle) {
double minLat = GeoEncodingUtils.decodeLatitude(NumericUtils.sortableBytesToInt(minTriangle, minYOffset));
double minLon = GeoEncodingUtils.decodeLongitude(NumericUtils.sortableBytesToInt(minTriangle, minXOffset));
double maxLat = GeoEncodingUtils.decodeLatitude(NumericUtils.sortableBytesToInt(maxTriangle, maxYOffset));
double maxLon = GeoEncodingUtils.decodeLongitude(NumericUtils.sortableBytesToInt(maxTriangle, maxXOffset));
// check internal node against query
return poly2D.relate(minLat, maxLat, minLon, maxLon);
}
@Override
protected boolean queryMatches(byte[] t, int[] scratchTriangle) {
XLatLonShape.decodeTriangle(t, scratchTriangle);
double alat = GeoEncodingUtils.decodeLatitude(scratchTriangle[0]);
double alon = GeoEncodingUtils.decodeLongitude(scratchTriangle[1]);
double blat = GeoEncodingUtils.decodeLatitude(scratchTriangle[2]);
double blon = GeoEncodingUtils.decodeLongitude(scratchTriangle[3]);
double clat = GeoEncodingUtils.decodeLatitude(scratchTriangle[4]);
double clon = GeoEncodingUtils.decodeLongitude(scratchTriangle[5]);
if (queryRelation == QueryRelation.WITHIN) {
return poly2D.relateTriangle(alon, alat, blon, blat, clon, clat) == Relation.CELL_INSIDE_QUERY;
}
// INTERSECTS
return poly2D.relateTriangle(alon, alat, blon, blat, clon, clat) != Relation.CELL_OUTSIDE_QUERY;
}
@Override
public String toString(String field) {
final StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName());
sb.append(':');
if (this.field.equals(field) == false) {
sb.append(" field=");
sb.append(this.field);
sb.append(':');
}
sb.append("Polygon(" + polygons[0].toGeoJSON() + ")");
return sb.toString();
}
@Override
public boolean equals(Object o) {
return super.equals(o);
}
@Override
protected boolean equalsTo(Object o) {
return super.equalsTo(o) && Arrays.equals(polygons, ((XLatLonShapePolygonQuery)o).polygons);
}
@Override
public int hashCode() {
int hash = super.hashCode();
hash = 31 * hash + Arrays.hashCode(polygons);
return hash;
}
}

View File

@ -0,0 +1,364 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.document;
import org.apache.lucene.document.XLatLonShape.QueryRelation;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.index.PointValues.IntersectVisitor;
import org.apache.lucene.index.PointValues.Relation;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.ConstantScoreWeight;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.ScorerSupplier;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.BitSetIterator;
import org.apache.lucene.util.DocIdSetBuilder;
import org.apache.lucene.util.FixedBitSet;
import java.io.IOException;
import java.util.Objects;
/**
* Base LatLonShape Query class providing common query logic for
* {@link XLatLonShapeBoundingBoxQuery}, {@link XLatLonShapeLineQuery} and {@link XLatLonShapePolygonQuery}
*
* Note: this class implements the majority of the INTERSECTS, WITHIN, DISJOINT relation logic
*
* @lucene.experimental
**/
abstract class XLatLonShapeQuery extends Query {
/** field name */
final String field;
/** query relation
* disjoint: {@code CELL_OUTSIDE_QUERY}
* intersects: {@code CELL_CROSSES_QUERY},
* within: {@code CELL_WITHIN_QUERY} */
final XLatLonShape.QueryRelation queryRelation;
protected XLatLonShapeQuery(String field, final QueryRelation queryType) {
if (field == null) {
throw new IllegalArgumentException("field must not be null");
}
this.field = field;
this.queryRelation = queryType;
}
/**
* relates an internal node (bounding box of a range of triangles) to the target query
* Note: logic is specific to query type
* see {@link XLatLonShapeBoundingBoxQuery#relateRangeBBoxToQuery} and {@link XLatLonShapePolygonQuery#relateRangeBBoxToQuery}
*/
protected abstract Relation relateRangeBBoxToQuery(int minXOffset, int minYOffset, byte[] minTriangle,
int maxXOffset, int maxYOffset, byte[] maxTriangle);
/** returns true if the provided triangle matches the query */
protected abstract boolean queryMatches(byte[] triangle, int[] scratchTriangle);
/** relates a range of triangles (internal node) to the query */
protected Relation relateRangeToQuery(byte[] minTriangle, byte[] maxTriangle) {
// compute bounding box of internal node
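// offsets follow the packed layout written by encodeTriangle: minY @ 0, minX @ BYTES, maxY @ 2*BYTES, maxX @ 3*BYTES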
Relation r = relateRangeBBoxToQuery(XLatLonShape.BYTES, 0, minTriangle, 3 * XLatLonShape.BYTES,
2 * XLatLonShape.BYTES, maxTriangle);
if (queryRelation == QueryRelation.DISJOINT) {
return transposeRelation(r);
}
return r;
}
@Override
public final Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
return new ConstantScoreWeight(this, boost) {
/** create a visitor that adds documents that match the query using a sparse bitset. (Used by INTERSECT) */
protected IntersectVisitor getSparseIntersectVisitor(DocIdSetBuilder result) {
return new IntersectVisitor() {
final int[] scratchTriangle = new int[6];
DocIdSetBuilder.BulkAdder adder;
@Override
public void grow(int count) {
adder = result.grow(count);
}
@Override
public void visit(int docID) throws IOException {
adder.add(docID);
}
@Override
public void visit(int docID, byte[] t) throws IOException {
if (queryMatches(t, scratchTriangle)) {
adder.add(docID);
}
}
@Override
public Relation compare(byte[] minTriangle, byte[] maxTriangle) {
return relateRangeToQuery(minTriangle, maxTriangle);
}
};
}
/** create a visitor that adds documents that match the query using a dense bitset. (Used by WITHIN, DISJOINT) */
protected IntersectVisitor getDenseIntersectVisitor(FixedBitSet intersect, FixedBitSet disjoint) {
return new IntersectVisitor() {
final int[] scratchTriangle = new int[6];
@Override
public void visit(int docID) throws IOException {
if (queryRelation == QueryRelation.DISJOINT) {
// if DISJOINT query set the doc in the disjoint bitset
disjoint.set(docID);
} else {
// for INTERSECTS and WITHIN queries we set the intersect bitset
intersect.set(docID);
}
}
@Override
public void visit(int docID, byte[] t) throws IOException {
if (queryMatches(t, scratchTriangle)) {
intersect.set(docID);
} else {
disjoint.set(docID);
}
}
@Override
public Relation compare(byte[] minTriangle, byte[] maxTriangle) {
return relateRangeToQuery(minTriangle, maxTriangle);
}
};
}
/** get a scorer supplier for INTERSECT queries */
protected ScorerSupplier getIntersectScorerSupplier(LeafReader reader, PointValues values, Weight weight,
ScoreMode scoreMode) throws IOException {
DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc(), values, field);
IntersectVisitor visitor = getSparseIntersectVisitor(result);
return new RelationScorerSupplier(values, visitor) {
@Override
public Scorer get(long leadCost) throws IOException {
return getIntersectsScorer(XLatLonShapeQuery.this, reader, weight, result, score(), scoreMode);
}
};
}
/** get a scorer supplier for all other queries (DISJOINT, WITHIN) */
protected ScorerSupplier getScorerSupplier(LeafReader reader, PointValues values, Weight weight,
ScoreMode scoreMode) throws IOException {
if (queryRelation == QueryRelation.INTERSECTS) {
return getIntersectScorerSupplier(reader, values, weight, scoreMode);
}
FixedBitSet intersect = new FixedBitSet(reader.maxDoc());
FixedBitSet disjoint = new FixedBitSet(reader.maxDoc());
IntersectVisitor visitor = getDenseIntersectVisitor(intersect, disjoint);
return new RelationScorerSupplier(values, visitor) {
@Override
public Scorer get(long leadCost) throws IOException {
return getScorer(XLatLonShapeQuery.this, weight, intersect, disjoint, score(), scoreMode);
}
};
}
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
LeafReader reader = context.reader();
PointValues values = reader.getPointValues(field);
if (values == null) {
// No docs in this segment had any points fields
return null;
}
FieldInfo fieldInfo = reader.getFieldInfos().fieldInfo(field);
if (fieldInfo == null) {
// No docs in this segment indexed this field at all
return null;
}
boolean allDocsMatch = true;
if (values.getDocCount() != reader.maxDoc() ||
relateRangeToQuery(values.getMinPackedValue(), values.getMaxPackedValue()) != Relation.CELL_INSIDE_QUERY) {
allDocsMatch = false;
}
final Weight weight = this;
if (allDocsMatch) {
return new ScorerSupplier() {
@Override
public Scorer get(long leadCost) throws IOException {
return new ConstantScoreScorer(weight, score(), scoreMode, DocIdSetIterator.all(reader.maxDoc()));
}
@Override
public long cost() {
return reader.maxDoc();
}
};
} else {
return getScorerSupplier(reader, values, weight, scoreMode);
}
}
@Override
public Scorer scorer(LeafReaderContext context) throws IOException {
ScorerSupplier scorerSupplier = scorerSupplier(context);
if (scorerSupplier == null) {
return null;
}
return scorerSupplier.get(Long.MAX_VALUE);
}
@Override
public boolean isCacheable(LeafReaderContext ctx) {
return true;
}
};
}
/** returns the field name */
public String getField() {
return field;
}
/** returns the query relation */
public QueryRelation getQueryRelation() {
return queryRelation;
}
@Override
public int hashCode() {
int hash = classHash();
hash = 31 * hash + field.hashCode();
hash = 31 * hash + queryRelation.hashCode();
return hash;
}
@Override
public boolean equals(Object o) {
return sameClassAs(o) && equalsTo(o);
}
protected boolean equalsTo(Object o) {
return Objects.equals(field, ((XLatLonShapeQuery)o).field) && this.queryRelation == ((XLatLonShapeQuery)o).queryRelation;
}
/** transpose the relation; INSIDE becomes OUTSIDE, OUTSIDE becomes INSIDE, CROSSES remains unchanged */
private static Relation transposeRelation(Relation r) {
if (r == Relation.CELL_INSIDE_QUERY) {
return Relation.CELL_OUTSIDE_QUERY;
} else if (r == Relation.CELL_OUTSIDE_QUERY) {
return Relation.CELL_INSIDE_QUERY;
}
return Relation.CELL_CROSSES_QUERY;
}
/** utility class for implementing constant score logic specific to INTERSECT, WITHIN, and DISJOINT */
private abstract static class RelationScorerSupplier extends ScorerSupplier {
PointValues values;
IntersectVisitor visitor;
long cost = -1;
RelationScorerSupplier(PointValues values, IntersectVisitor visitor) {
this.values = values;
this.visitor = visitor;
}
/** create a visitor that clears documents that do NOT match the shape query; used with INTERSECTS */
private IntersectVisitor getInverseIntersectVisitor(XLatLonShapeQuery query, FixedBitSet result, int[] cost) {
return new IntersectVisitor() {
int[] scratchTriangle = new int[6];
@Override
public void visit(int docID) {
result.clear(docID);
cost[0]--;
}
@Override
public void visit(int docID, byte[] packedTriangle) {
if (query.queryMatches(packedTriangle, scratchTriangle) == false) {
result.clear(docID);
cost[0]--;
}
}
@Override
public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
return transposeRelation(query.relateRangeToQuery(minPackedValue, maxPackedValue));
}
};
}
/** returns a Scorer for INTERSECT queries that uses a sparse bitset */
protected Scorer getIntersectsScorer(XLatLonShapeQuery query, LeafReader reader, Weight weight,
DocIdSetBuilder docIdSetBuilder, final float boost,
ScoreMode scoreMode) throws IOException {
if (values.getDocCount() == reader.maxDoc()
&& values.getDocCount() == values.size()
&& cost() > reader.maxDoc() / 2) {
// If all docs have exactly one value and the cost is greater
// than half the leaf size then maybe we can make things faster
// by computing the set of documents that do NOT match the query
final FixedBitSet result = new FixedBitSet(reader.maxDoc());
result.set(0, reader.maxDoc());
int[] cost = new int[]{reader.maxDoc()};
values.intersect(getInverseIntersectVisitor(query, result, cost));
final DocIdSetIterator iterator = new BitSetIterator(result, cost[0]);
return new ConstantScoreScorer(weight, boost, scoreMode, iterator);
}
values.intersect(visitor);
DocIdSetIterator iterator = docIdSetBuilder.build().iterator();
return new ConstantScoreScorer(weight, boost, scoreMode, iterator);
}
/** returns a Scorer for all other (non INTERSECT) queries */
protected Scorer getScorer(XLatLonShapeQuery query, Weight weight,
FixedBitSet intersect, FixedBitSet disjoint, final float boost,
ScoreMode scoreMode) throws IOException {
values.intersect(visitor);
DocIdSetIterator iterator;
if (query.queryRelation == QueryRelation.DISJOINT) {
disjoint.andNot(intersect);
iterator = new BitSetIterator(disjoint, cost());
} else if (query.queryRelation == QueryRelation.WITHIN) {
intersect.andNot(disjoint);
iterator = new BitSetIterator(intersect, cost());
} else {
iterator = new BitSetIterator(intersect, cost());
}
return new ConstantScoreScorer(weight, boost, scoreMode, iterator);
}
@Override
public long cost() {
if (cost == -1) {
// Computing the cost may be expensive, so only do it if necessary
cost = values.estimatePointCount(visitor);
assert cost >= 0;
}
return cost;
}
}
}

View File

@ -0,0 +1,317 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.geo;
import org.apache.lucene.document.XLatLonShape;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.util.FutureArrays;
import org.apache.lucene.util.NumericUtils;
import java.util.Arrays;
import static org.apache.lucene.document.XLatLonShape.BYTES;
import static org.apache.lucene.geo.GeoEncodingUtils.MAX_LON_ENCODED;
import static org.apache.lucene.geo.GeoEncodingUtils.MIN_LON_ENCODED;
import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude;
import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude;
import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude;
import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitudeCeil;
import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitude;
import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitudeCeil;
import static org.apache.lucene.geo.GeoUtils.orient;
/**
* 2D rectangle implementation containing spatial logic.
*
* @lucene.internal
*/
public class XRectangle2D {
final byte[] bbox;
final byte[] west;
final int minX;
final int maxX;
final int minY;
final int maxY;
private XRectangle2D(double minLat, double maxLat, double minLon, double maxLon) {
this.bbox = new byte[4 * BYTES];
int minXenc = encodeLongitudeCeil(minLon);
int maxXenc = encodeLongitude(maxLon);
int minYenc = encodeLatitudeCeil(minLat);
int maxYenc = encodeLatitude(maxLat);
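// encodeLatitudeCeil may cause minY to be > maxY iff the delta between the latitudes is smaller than the encoding resolution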
if (minYenc > maxYenc) {
minYenc = maxYenc;
}
this.minY = minYenc;
this.maxY = maxYenc;
if (minLon > maxLon) {
// crossing dateline is split into east/west boxes
this.west = new byte[4 * BYTES];
this.minX = minXenc;
this.maxX = maxXenc;
encode(MIN_LON_ENCODED, this.maxX, this.minY, this.maxY, this.west);
encode(this.minX, MAX_LON_ENCODED, this.minY, this.maxY, this.bbox);
} else {
// encodeLongitudeCeil may cause minX to be > maxX iff
// the delta between the longitudes is smaller than the encoding resolution
if (minXenc > maxXenc) {
minXenc = maxXenc;
}
this.west = null;
this.minX = minXenc;
this.maxX = maxXenc;
encode(this.minX, this.maxX, this.minY, this.maxY, bbox);
}
}
/** Builds a XRectangle2D from rectangle */
public static XRectangle2D create(Rectangle rectangle) {
return new XRectangle2D(rectangle.minLat, rectangle.maxLat, rectangle.minLon, rectangle.maxLon);
}
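// Illustrative sketch only (hypothetical helper): builds a query rectangle and
// tests an encoded point; encodeLatitude/encodeLongitude are the statically
// imported GeoEncodingUtils helpers.
static boolean exampleContainsOrigin() {
XRectangle2D rect = XRectangle2D.create(new Rectangle(-10d, 10d, -10d, 10d));
return rect.queryContainsPoint(encodeLongitude(0d), encodeLatitude(0d)); // x = lon, y = lat
}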
public boolean crossesDateline() {
return minX > maxX;
}
/** Checks if the rectangle contains the provided point **/
public boolean queryContainsPoint(int x, int y) {
if (this.crossesDateline() == true) {
return bboxContainsPoint(x, y, MIN_LON_ENCODED, this.maxX, this.minY, this.maxY)
|| bboxContainsPoint(x, y, this.minX, MAX_LON_ENCODED, this.minY, this.maxY);
}
return bboxContainsPoint(x, y, this.minX, this.maxX, this.minY, this.maxY);
}
/** compare this to a provided range bounding box **/
public PointValues.Relation relateRangeBBox(int minXOffset, int minYOffset, byte[] minTriangle,
int maxXOffset, int maxYOffset, byte[] maxTriangle) {
PointValues.Relation eastRelation = compareBBoxToRangeBBox(this.bbox, minXOffset, minYOffset, minTriangle,
maxXOffset, maxYOffset, maxTriangle);
if (this.crossesDateline() && eastRelation == PointValues.Relation.CELL_OUTSIDE_QUERY) {
return compareBBoxToRangeBBox(this.west, minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle);
}
return eastRelation;
}
/** Checks if the rectangle intersects the provided triangle **/
public boolean intersectsTriangle(int aX, int aY, int bX, int bY, int cX, int cY) {
// 1. query contains any triangle points
if (queryContainsPoint(aX, aY) || queryContainsPoint(bX, bY) || queryContainsPoint(cX, cY)) {
return true;
}
// compute bounding box of triangle
int tMinX = StrictMath.min(StrictMath.min(aX, bX), cX);
int tMaxX = StrictMath.max(StrictMath.max(aX, bX), cX);
int tMinY = StrictMath.min(StrictMath.min(aY, bY), cY);
int tMaxY = StrictMath.max(StrictMath.max(aY, bY), cY);
// 2. check bounding boxes are disjoint
if (this.crossesDateline() == true) {
if (boxesAreDisjoint(tMinX, tMaxX, tMinY, tMaxY, MIN_LON_ENCODED, this.maxX, this.minY, this.maxY)
&& boxesAreDisjoint(tMinX, tMaxX, tMinY, tMaxY, this.minX, MAX_LON_ENCODED, this.minY, this.maxY)) {
return false;
}
} else if (tMaxX < minX || tMinX > maxX || tMinY > maxY || tMaxY < minY) {
return false;
}
// 3. check triangle contains any query points
if (XTessellator.pointInTriangle(minX, minY, aX, aY, bX, bY, cX, cY)) {
return true;
} else if (XTessellator.pointInTriangle(maxX, minY, aX, aY, bX, bY, cX, cY)) {
return true;
} else if (XTessellator.pointInTriangle(maxX, maxY, aX, aY, bX, bY, cX, cY)) {
return true;
} else if (XTessellator.pointInTriangle(minX, maxY, aX, aY, bX, bY, cX, cY)) {
return true;
}
// 4. last ditch effort: check crossings
if (queryIntersects(aX, aY, bX, bY, cX, cY)) {
return true;
}
return false;
}
/** Checks if the rectangle contains the provided triangle **/
public boolean containsTriangle(int ax, int ay, int bx, int by, int cx, int cy) {
if (this.crossesDateline() == true) {
return bboxContainsTriangle(ax, ay, bx, by, cx, cy, MIN_LON_ENCODED, this.maxX, this.minY, this.maxY)
|| bboxContainsTriangle(ax, ay, bx, by, cx, cy, this.minX, MAX_LON_ENCODED, this.minY, this.maxY);
}
return bboxContainsTriangle(ax, ay, bx, by, cx, cy, minX, maxX, minY, maxY);
}
/** static utility method to compare a bbox with a range of triangles (just the bbox of the triangle collection) */
private static PointValues.Relation compareBBoxToRangeBBox(final byte[] bbox,
int minXOffset, int minYOffset, byte[] minTriangle,
int maxXOffset, int maxYOffset, byte[] maxTriangle) {
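// bbox layout (see encode below): [0, BYTES) = minY, [BYTES, 2*BYTES) = minX, [2*BYTES, 3*BYTES) = maxY, [3*BYTES, 4*BYTES) = maxX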
// check bounding box (DISJOINT)
if (FutureArrays.compareUnsigned(minTriangle, minXOffset, minXOffset + BYTES, bbox, 3 * BYTES, 4 * BYTES) > 0 ||
FutureArrays.compareUnsigned(maxTriangle, maxXOffset, maxXOffset + BYTES, bbox, BYTES, 2 * BYTES) < 0 ||
FutureArrays.compareUnsigned(minTriangle, minYOffset, minYOffset + BYTES, bbox, 2 * BYTES, 3 * BYTES) > 0 ||
FutureArrays.compareUnsigned(maxTriangle, maxYOffset, maxYOffset + BYTES, bbox, 0, BYTES) < 0) {
return PointValues.Relation.CELL_OUTSIDE_QUERY;
}
if (FutureArrays.compareUnsigned(minTriangle, minXOffset, minXOffset + BYTES, bbox, BYTES, 2 * BYTES) >= 0 &&
FutureArrays.compareUnsigned(maxTriangle, maxXOffset, maxXOffset + BYTES, bbox, 3 * BYTES, 4 * BYTES) <= 0 &&
FutureArrays.compareUnsigned(minTriangle, minYOffset, minYOffset + BYTES, bbox, 0, BYTES) >= 0 &&
FutureArrays.compareUnsigned(maxTriangle, maxYOffset, maxYOffset + BYTES, bbox, 2 * BYTES, 3 * BYTES) <= 0) {
return PointValues.Relation.CELL_INSIDE_QUERY;
}
return PointValues.Relation.CELL_CROSSES_QUERY;
}
/**
* encodes a bounding box into the provided byte array
*/
private static void encode(final int minX, final int maxX, final int minY, final int maxY, byte[] b) {
assert b != null && b.length == 4 * BYTES; // callers pass a preallocated buffer; allocating here would discard the result
NumericUtils.intToSortableBytes(minY, b, 0);
NumericUtils.intToSortableBytes(minX, b, BYTES);
NumericUtils.intToSortableBytes(maxY, b, 2 * BYTES);
NumericUtils.intToSortableBytes(maxX, b, 3 * BYTES);
}
/** returns true if the query intersects the provided triangle (in encoded space) */
private boolean queryIntersects(int ax, int ay, int bx, int by, int cx, int cy) {
// check each edge of the triangle against the query
if (edgeIntersectsQuery(ax, ay, bx, by) ||
edgeIntersectsQuery(bx, by, cx, cy) ||
edgeIntersectsQuery(cx, cy, ax, ay)) {
return true;
}
return false;
}
/** returns true if the edge (defined by (ax, ay) (bx, by)) intersects the query */
private boolean edgeIntersectsQuery(int ax, int ay, int bx, int by) {
if (this.crossesDateline() == true) {
return edgeIntersectsBox(ax, ay, bx, by, MIN_LON_ENCODED, this.maxX, this.minY, this.maxY)
|| edgeIntersectsBox(ax, ay, bx, by, this.minX, MAX_LON_ENCODED, this.minY, this.maxY);
}
return edgeIntersectsBox(ax, ay, bx, by, this.minX, this.maxX, this.minY, this.maxY);
}
/** static utility method to check if a bounding box contains a point */
private static boolean bboxContainsPoint(int x, int y, int minX, int maxX, int minY, int maxY) {
return (x < minX || x > maxX || y < minY || y > maxY) == false;
}
/** static utility method to check if a bounding box contains a triangle */
private static boolean bboxContainsTriangle(int ax, int ay, int bx, int by, int cx, int cy,
int minX, int maxX, int minY, int maxY) {
return bboxContainsPoint(ax, ay, minX, maxX, minY, maxY)
&& bboxContainsPoint(bx, by, minX, maxX, minY, maxY)
&& bboxContainsPoint(cx, cy, minX, maxX, minY, maxY);
}
/** returns true if the edge (defined by (ax, ay) (bx, by)) intersects the query */
private static boolean edgeIntersectsBox(int ax, int ay, int bx, int by,
int minX, int maxX, int minY, int maxY) {
// shortcut: if edge is a point (occurs w/ Line shapes); simply check bbox w/ point
if (ax == bx && ay == by) {
return Rectangle.containsPoint(ay, ax, minY, maxY, minX, maxX);
}
// shortcut: check if either of the end points fall inside the box
if (bboxContainsPoint(ax, ay, minX, maxX, minY, maxY)
|| bboxContainsPoint(bx, by, minX, maxX, minY, maxY)) {
return true;
}
// shortcut: check bboxes of edges are disjoint
if (boxesAreDisjoint(Math.min(ax, bx), Math.max(ax, bx), Math.min(ay, by), Math.max(ay, by),
minX, maxX, minY, maxY)) {
return false;
}
// top
if (orient(ax, ay, bx, by, minX, maxY) * orient(ax, ay, bx, by, maxX, maxY) <= 0 &&
orient(minX, maxY, maxX, maxY, ax, ay) * orient(minX, maxY, maxX, maxY, bx, by) <= 0) {
return true;
}
// right
if (orient(ax, ay, bx, by, maxX, maxY) * orient(ax, ay, bx, by, maxX, minY) <= 0 &&
orient(maxX, maxY, maxX, minY, ax, ay) * orient(maxX, maxY, maxX, minY, bx, by) <= 0) {
return true;
}
// bottom
if (orient(ax, ay, bx, by, maxX, minY) * orient(ax, ay, bx, by, minX, minY) <= 0 &&
orient(maxX, minY, minX, minY, ax, ay) * orient(maxX, minY, minX, minY, bx, by) <= 0) {
return true;
}
// left
if (orient(ax, ay, bx, by, minX, minY) * orient(ax, ay, bx, by, minX, maxY) <= 0 &&
orient(minX, minY, minX, maxY, ax, ay) * orient(minX, minY, minX, maxY, bx, by) <= 0) {
return true;
}
return false;
}
/** utility method to check if two boxes are disjoint */
private static boolean boxesAreDisjoint(final int aMinX, final int aMaxX, final int aMinY, final int aMaxY,
final int bMinX, final int bMaxX, final int bMinY, final int bMaxY) {
return (aMaxX < bMinX || aMinX > bMaxX || aMaxY < bMinY || aMinY > bMaxY);
}
@Override
public boolean equals(Object o) {
return o instanceof XRectangle2D
&& Arrays.equals(bbox, ((XRectangle2D)o).bbox)
&& Arrays.equals(west, ((XRectangle2D)o).west);
}
@Override
public int hashCode() {
// hash from the class and the encoded boxes so that equal rectangles hash equally
int hash = getClass().hashCode();
hash = 31 * hash + Arrays.hashCode(bbox);
hash = 31 * hash + Arrays.hashCode(west);
return hash;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append("Rectangle(lat=");
sb.append(decodeLatitude(minY));
sb.append(" TO ");
sb.append(decodeLatitude(maxY));
sb.append(" lon=");
sb.append(decodeLongitude(minX));
sb.append(" TO ");
sb.append(decodeLongitude(maxX));
if (maxX < minX) {
sb.append(" [crosses dateline!]");
}
sb.append(")");
return sb.toString();
}
}

View File

@ -0,0 +1,889 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.geo;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.geo.GeoUtils.WindingOrder;
import org.apache.lucene.util.BitUtil;
import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude;
import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitude;
import static org.apache.lucene.geo.GeoUtils.orient;
/**
* Computes a triangular mesh tessellation for a given polygon.
* <p>
* This is inspired by mapbox's earcut algorithm (https://github.com/mapbox/earcut)
* which is a modification to FIST (https://www.cosy.sbg.ac.at/~held/projects/triang/triang.html)
* written by Martin Held, and ear clipping (https://www.geometrictools.com/Documentation/TriangulationByEarClipping.pdf)
* written by David Eberly.
* <p>
* Notes:
* <ul>
* <li>Requires valid polygons:
* <ul>
* <li>No self intersections
* <li>Holes may only touch at one vertex
* <li>Polygon must have an area (e.g., no "line" boxes)
* <li>Sensitive to overflow (e.g., extremely small values such as 1E-200 can cause unexpected behavior)
* </ul>
* </ul>
* <p>
* The code is a modified version of the javascript implementation provided by MapBox
* under the following license:
* <p>
* ISC License
* <p>
* Copyright (c) 2016, Mapbox
* <p>
* Permission to use, copy, modify, and/or distribute this software for any purpose
* with or without fee is hereby granted, provided that the above copyright notice
* and this permission notice appear in all copies.
* <p>
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH'
* REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
* INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
* OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
* TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
* THIS SOFTWARE.
*
* @lucene.experimental
*/
public final class XTessellator {
// this is a dumb heuristic to control whether we cut over to sorted morton values
private static final int VERTEX_THRESHOLD = 80;
/** state of the tessellated split - avoids recursion */
private enum State {
INIT, CURE, SPLIT
}
// No Instance:
private XTessellator() {}
/** Produces an array of vertices representing the triangulated result set of the Points array */
public static List<Triangle> tessellate(final Polygon polygon) {
// Attempt to establish a doubly-linked list of the provided shell points (should be CCW, but this will correct the winding);
// then filter instances of intersections.
Node outerNode = createDoublyLinkedList(polygon, 0, WindingOrder.CW);
// If an outer node hasn't been detected, the shape is malformed. (must comply with OGC SFA specification)
if(outerNode == null) {
throw new IllegalArgumentException("Malformed shape detected in XTessellator!");
}
// Determine if the specified list of points contains holes
if (polygon.numHoles() > 0) {
// Eliminate the hole triangulation.
outerNode = eliminateHoles(polygon, outerNode);
}
// If the shape crosses VERTEX_THRESHOLD, use z-order curve hashing:
final boolean mortonOptimized;
{
int threshold = VERTEX_THRESHOLD - polygon.numPoints();
for (int i = 0; threshold >= 0 && i < polygon.numHoles(); ++i) {
threshold -= polygon.getHole(i).numPoints();
}
// Link polygon nodes in Z-Order
mortonOptimized = threshold < 0;
if (mortonOptimized == true) {
sortByMorton(outerNode);
}
}
// Calculate the tessellation using the doubly LinkedList.
List<Triangle> result = earcutLinkedList(outerNode, new ArrayList<>(), State.INIT, mortonOptimized);
if (result.size() == 0) {
throw new IllegalArgumentException("Unable to Tessellate shape [" + polygon + "]. Possible malformed shape detected.");
}
return result;
}
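// Illustrative sketch only (hypothetical helper): tessellating a unit square
// (a closed ring with no holes) yields a two-triangle mesh.
static List<Triangle> exampleTessellation() {
Polygon square = new Polygon(new double[] {0d, 0d, 1d, 1d, 0d},
new double[] {0d, 1d, 1d, 0d, 0d});
return tessellate(square);
}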
/** Creates a circular doubly linked list using polygon points. The order is governed by the specified winding order */
private static Node createDoublyLinkedList(final Polygon polygon, int startIndex, final WindingOrder windingOrder) {
Node lastNode = null;
// Link points into the circular doubly-linked list in the specified winding order
if (windingOrder == polygon.getWindingOrder()) {
for (int i = 0; i < polygon.numPoints(); ++i) {
lastNode = insertNode(polygon, startIndex++, i, lastNode);
}
} else {
for (int i = polygon.numPoints() - 1; i >= 0; --i) {
lastNode = insertNode(polygon, startIndex++, i, lastNode);
}
}
// if first and last node are the same then remove the end node and set lastNode to the start
if (lastNode != null && isVertexEquals(lastNode, lastNode.next)) {
removeNode(lastNode);
lastNode = lastNode.next;
}
// Return the last node in the Doubly-Linked List
return filterPoints(lastNode, null);
}
/** Links every hole into the outer loop, producing a single-ring polygon without holes. **/
private static Node eliminateHoles(final Polygon polygon, Node outerNode) {
// Define a list to hold a reference to each filtered hole.
final List<Node> holeList = new ArrayList<>();
// Iterate through each array of hole vertices.
Polygon[] holes = polygon.getHoles();
int nodeIndex = polygon.numPoints();
for(int i = 0; i < polygon.numHoles(); ++i) {
// create the doubly-linked hole list
      Node list = createDoublyLinkedList(holes[i], nodeIndex, WindingOrder.CCW);
      // Determine if the resulting hole polygon was successful.
      if(list != null) {
        if (list == list.next) {
          list.isSteiner = true;
        }
        // Add the leftmost vertex of the hole.
        holeList.add(fetchLeftmost(list));
      }
nodeIndex += holes[i].numPoints();
}
// Sort the hole vertices by x coordinate
holeList.sort((Node pNodeA, Node pNodeB) ->
pNodeA.getX() < pNodeB.getX() ? -1 : pNodeA.getX() == pNodeB.getX() ? 0 : 1);
// Process holes from left to right.
for(int i = 0; i < holeList.size(); ++i) {
// Eliminate hole triangles from the result set
final Node holeNode = holeList.get(i);
eliminateHole(holeNode, outerNode);
// Filter the new polygon.
outerNode = filterPoints(outerNode, outerNode.next);
}
// Return a pointer to the list.
return outerNode;
}
/** Finds a bridge between vertices that connects a hole with an outer ring, and links it */
private static void eliminateHole(final Node holeNode, Node outerNode) {
// Attempt to find a logical bridge between the HoleNode and OuterNode.
outerNode = fetchHoleBridge(holeNode, outerNode);
// Determine whether a hole bridge could be fetched.
if(outerNode != null) {
// Split the resulting polygon.
Node node = splitPolygon(outerNode, holeNode);
// Filter the split nodes.
filterPoints(node, node.next);
}
}
/**
* David Eberly's algorithm for finding a bridge between a hole and outer polygon
*
* see: http://www.geometrictools.com/Documentation/TriangulationByEarClipping.pdf
**/
private static Node fetchHoleBridge(final Node holeNode, final Node outerNode) {
Node p = outerNode;
double qx = Double.NEGATIVE_INFINITY;
final double hx = holeNode.getX();
final double hy = holeNode.getY();
Node connection = null;
// 1. find a segment intersected by a ray from the hole's leftmost point to the left;
// segment's endpoint with lesser x will be potential connection point
{
do {
if (hy <= p.getY() && hy >= p.next.getY() && p.next.getY() != p.getY()) {
final double x = p.getX() + (hy - p.getY()) * (p.next.getX() - p.getX()) / (p.next.getY() - p.getY());
if (x <= hx && x > qx) {
qx = x;
if (x == hx) {
if (hy == p.getY()) return p;
if (hy == p.next.getY()) return p.next;
}
connection = p.getX() < p.next.getX() ? p : p.next;
}
}
p = p.next;
} while (p != outerNode);
}
if (connection == null) {
return null;
} else if (hx == qx) {
return connection.previous;
}
// 2. look for points inside the triangle of hole point, segment intersection, and endpoint
    // it's a valid connection iff there are no points found;
// otherwise choose the point of the minimum angle with the ray as the connection point
Node stop = connection;
final double mx = connection.getX();
final double my = connection.getY();
double tanMin = Double.POSITIVE_INFINITY;
double tan;
p = connection.next;
{
while (p != stop) {
if (hx >= p.getX() && p.getX() >= mx && hx != p.getX()
&& pointInEar(p.getX(), p.getY(), hy < my ? hx : qx, hy, mx, my, hy < my ? qx : hx, hy)) {
tan = Math.abs(hy - p.getY()) / (hx - p.getX()); // tangential
if ((tan < tanMin || (tan == tanMin && p.getX() > connection.getX())) && isLocallyInside(p, holeNode)) {
connection = p;
tanMin = tan;
}
}
p = p.next;
}
}
return connection;
}
  /** Finds the left-most vertex of a polygon ring. **/
private static Node fetchLeftmost(final Node start) {
Node node = start;
Node leftMost = start;
do {
// Determine if the current node possesses a lesser X position.
if (node.getX() < leftMost.getX()) {
// Maintain a reference to this Node.
leftMost = node;
}
// Progress the search to the next node in the doubly-linked list.
node = node.next;
} while (node != start);
// Return the node with the smallest X value.
return leftMost;
}
/** Main ear slicing loop which triangulates the vertices of a polygon, provided as a doubly-linked list. **/
private static List<Triangle> earcutLinkedList(Node currEar, final List<Triangle> tessellation,
State state, final boolean mortonOptimized) {
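    // fallback state machine: INIT (filter duplicate/collinear points) -> CURE (fix local
    // self-intersections) -> SPLIT (divide the remaining polygon); each stage is tried once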
earcut : do {
if (currEar == null || currEar.previous == currEar.next) {
return tessellation;
}
Node stop = currEar;
Node prevNode;
Node nextNode;
// Iteratively slice ears
do {
prevNode = currEar.previous;
nextNode = currEar.next;
// Determine whether the current triangle must be cut off.
final boolean isReflex = area(prevNode.getX(), prevNode.getY(), currEar.getX(), currEar.getY(),
nextNode.getX(), nextNode.getY()) >= 0;
if (isReflex == false && isEar(currEar, mortonOptimized) == true) {
// Return the triangulated data
tessellation.add(new Triangle(prevNode, currEar, nextNode));
// Remove the ear node.
removeNode(currEar);
        // Skipping to the next node leaves fewer sliver triangles.
currEar = nextNode.next;
stop = nextNode.next;
continue;
}
currEar = nextNode;
// If the whole polygon has been iterated over and no more ears can be found.
if (currEar == stop) {
switch (state) {
case INIT:
// try filtering points and slicing again
currEar = filterPoints(currEar, null);
state = State.CURE;
continue earcut;
case CURE:
// if this didn't work, try curing all small self-intersections locally
currEar = cureLocalIntersections(currEar, tessellation);
state = State.SPLIT;
continue earcut;
case SPLIT:
// as a last resort, try splitting the remaining polygon into two
if (splitEarcut(currEar, tessellation, mortonOptimized) == false) {
              // we could not process all points; tessellation failed
tessellation.clear();
}
break;
}
break;
}
} while (currEar.previous != currEar.next);
break;
} while (true);
// Return the calculated tessellation
return tessellation;
}
/** Determines whether a polygon node forms a valid ear with adjacent nodes. **/
private static boolean isEar(final Node ear, final boolean mortonOptimized) {
if (mortonOptimized == true) {
return mortonIsEar(ear);
}
// make sure there aren't other points inside the potential ear
Node node = ear.next.next;
while (node != ear.previous) {
if (pointInEar(node.getX(), node.getY(), ear.previous.getX(), ear.previous.getY(), ear.getX(), ear.getY(),
ear.next.getX(), ear.next.getY())
&& area(node.previous.getX(), node.previous.getY(), node.getX(), node.getY(),
node.next.getX(), node.next.getY()) >= 0) {
return false;
}
node = node.next;
}
return true;
}
  /** Uses morton code for speed to determine whether or not a polygon node forms a valid ear w/ adjacent nodes */
private static boolean mortonIsEar(final Node ear) {
// triangle bbox (flip the bits so negative encoded values are < positive encoded values)
int minTX = StrictMath.min(StrictMath.min(ear.previous.x, ear.x), ear.next.x) ^ 0x80000000;
int minTY = StrictMath.min(StrictMath.min(ear.previous.y, ear.y), ear.next.y) ^ 0x80000000;
int maxTX = StrictMath.max(StrictMath.max(ear.previous.x, ear.x), ear.next.x) ^ 0x80000000;
int maxTY = StrictMath.max(StrictMath.max(ear.previous.y, ear.y), ear.next.y) ^ 0x80000000;
// z-order range for the current triangle bbox;
long minZ = BitUtil.interleave(minTX, minTY);
long maxZ = BitUtil.interleave(maxTX, maxTY);
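    // z-order property: any node whose morton code falls outside [minZ, maxZ] is guaranteed
    // to lie outside the triangle's bounding box, so those nodes can be skipped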
// now make sure we don't have other points inside the potential ear;
// look for points inside the triangle in both directions
Node p = ear.previousZ;
Node n = ear.nextZ;
while (p != null && Long.compareUnsigned(p.morton, minZ) >= 0
&& n != null && Long.compareUnsigned(n.morton, maxZ) <= 0) {
if (p.idx != ear.previous.idx && p.idx != ear.next.idx &&
pointInEar(p.getX(), p.getY(), ear.previous.getX(), ear.previous.getY(), ear.getX(), ear.getY(),
ear.next.getX(), ear.next.getY()) &&
area(p.previous.getX(), p.previous.getY(), p.getX(), p.getY(), p.next.getX(), p.next.getY()) >= 0) return false;
p = p.previousZ;
if (n.idx != ear.previous.idx && n.idx != ear.next.idx &&
pointInEar(n.getX(), n.getY(), ear.previous.getX(), ear.previous.getY(), ear.getX(), ear.getY(),
ear.next.getX(), ear.next.getY()) &&
area(n.previous.getX(), n.previous.getY(), n.getX(), n.getY(), n.next.getX(), n.next.getY()) >= 0) return false;
n = n.nextZ;
}
    // continue scanning any remaining points in decreasing z-order
while (p != null && Long.compareUnsigned(p.morton, minZ) >= 0) {
if (p.idx != ear.previous.idx && p.idx != ear.next.idx
&& pointInEar(p.getX(), p.getY(), ear.previous.getX(), ear.previous.getY(), ear.getX(), ear.getY(),
ear.next.getX(), ear.next.getY())
&& area(p.previous.getX(), p.previous.getY(), p.getX(), p.getY(), p.next.getX(), p.next.getY()) >= 0) {
return false;
}
p = p.previousZ;
}
// then look for points in increasing z-order
while (n != null &&
Long.compareUnsigned(n.morton, maxZ) <= 0) {
if (n.idx != ear.previous.idx && n.idx != ear.next.idx
&& pointInEar(n.getX(), n.getY(), ear.previous.getX(), ear.previous.getY(), ear.getX(), ear.getY(),
ear.next.getX(), ear.next.getY())
&& area(n.previous.getX(), n.previous.getY(), n.getX(), n.getY(), n.next.getX(), n.next.getY()) >= 0) {
return false;
}
n = n.nextZ;
}
return true;
}
/** Iterate through all polygon nodes and remove small local self-intersections **/
private static Node cureLocalIntersections(Node startNode, final List<Triangle> tessellation) {
Node node = startNode;
Node nextNode;
do {
nextNode = node.next;
Node a = node.previous;
Node b = nextNode.next;
// a self-intersection where edge (v[i-1],v[i]) intersects (v[i+1],v[i+2])
if (isVertexEquals(a, b) == false
&& isIntersectingPolygon(a, a.getX(), a.getY(), b.getX(), b.getY()) == false
&& linesIntersect(a.getX(), a.getY(), node.getX(), node.getY(), nextNode.getX(), nextNode.getY(), b.getX(), b.getY())
&& isLocallyInside(a, b) && isLocallyInside(b, a)) {
// Return the triangulated vertices to the tessellation
tessellation.add(new Triangle(a, node, b));
// remove two nodes involved
removeNode(node);
removeNode(node.next);
node = startNode = b;
}
node = node.next;
} while (node != startNode);
return node;
}
  /** Attempts to split a polygon and independently triangulate each side. Returns true if the polygon was split **/
private static boolean splitEarcut(final Node start, final List<Triangle> tessellation, final boolean mortonIndexed) {
// Search for a valid diagonal that divides the polygon into two.
Node searchNode = start;
Node nextNode;
do {
nextNode = searchNode.next;
Node diagonal = nextNode.next;
while (diagonal != searchNode.previous) {
if(isValidDiagonal(searchNode, diagonal)) {
// Split the polygon into two at the point of the diagonal
Node splitNode = splitPolygon(searchNode, diagonal);
// Filter the resulting polygon.
searchNode = filterPoints(searchNode, searchNode.next);
splitNode = filterPoints(splitNode, splitNode.next);
// Attempt to earcut both of the resulting polygons
if (mortonIndexed) {
sortByMortonWithReset(searchNode);
sortByMortonWithReset(splitNode);
}
earcutLinkedList(searchNode, tessellation, State.INIT, mortonIndexed);
earcutLinkedList(splitNode, tessellation, State.INIT, mortonIndexed);
// Finish the iterative search
return true;
}
diagonal = diagonal.next;
}
searchNode = searchNode.next;
} while (searchNode != start);
return false;
}
/** Links two polygon vertices using a bridge. **/
private static Node splitPolygon(final Node a, final Node b) {
final Node a2 = new Node(a);
final Node b2 = new Node(b);
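    // a2 and b2 duplicate a and b; the originals stay on the first ring while the copies anchor the second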
final Node an = a.next;
final Node bp = b.previous;
a.next = b;
a.nextZ = b;
b.previous = a;
b.previousZ = a;
a2.next = an;
a2.nextZ = an;
an.previous = a2;
an.previousZ = a2;
b2.next = a2;
b2.nextZ = a2;
a2.previous = b2;
a2.previousZ = b2;
bp.next = b2;
bp.nextZ = b2;
return b2;
}
  /** Determines whether a diagonal between two polygon nodes lies within the polygon interior.
   * (This determines the validity of the diagonal.) **/
private static boolean isValidDiagonal(final Node a, final Node b) {
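    // a diagonal is usable when it does not end on an adjacent vertex, does not cross any
    // polygon edge, is locally inside the polygon at both endpoints, and has its midpoint inside the ring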
return a.next.idx != b.idx && a.previous.idx != b.idx
&& isIntersectingPolygon(a, a.getX(), a.getY(), b.getX(), b.getY()) == false
&& isLocallyInside(a, b) && isLocallyInside(b, a)
&& middleInsert(a, a.getX(), a.getY(), b.getX(), b.getY());
}
private static boolean isLocallyInside(final Node a, final Node b) {
// if a is cw
if (area(a.previous.getX(), a.previous.getY(), a.getX(), a.getY(), a.next.getX(), a.next.getY()) < 0) {
return area(a.getX(), a.getY(), b.getX(), b.getY(), a.next.getX(), a.next.getY()) >= 0
&& area(a.getX(), a.getY(), a.previous.getX(), a.previous.getY(), b.getX(), b.getY()) >= 0;
}
// ccw
return area(a.getX(), a.getY(), b.getX(), b.getY(), a.previous.getX(), a.previous.getY()) < 0
|| area(a.getX(), a.getY(), a.next.getX(), a.next.getY(), b.getX(), b.getY()) < 0;
}
  /** Determines whether the midpoint of a polygon diagonal is contained within the polygon */
private static boolean middleInsert(final Node start, final double x0, final double y0,
final double x1, final double y1) {
Node node = start;
Node nextNode;
boolean lIsInside = false;
final double lDx = (x0 + x1) / 2.0f;
final double lDy = (y0 + y1) / 2.0f;
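    // even-odd ray casting from the diagonal midpoint: each edge crossing toggles the inside flag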
do {
nextNode = node.next;
if (node.getY() > lDy != nextNode.getY() > lDy &&
lDx < (nextNode.getX() - node.getX()) * (lDy - node.getY()) / (nextNode.getY() - node.getY()) + node.getX()) {
lIsInside = !lIsInside;
}
node = node.next;
} while (node != start);
return lIsInside;
}
/** Determines if the diagonal of a polygon is intersecting with any polygon elements. **/
private static boolean isIntersectingPolygon(final Node start, final double x0, final double y0,
final double x1, final double y1) {
Node node = start;
Node nextNode;
do {
nextNode = node.next;
if(isVertexEquals(node, x0, y0) == false && isVertexEquals(node, x1, y1) == false) {
if (linesIntersect(node.getX(), node.getY(), nextNode.getX(), nextNode.getY(), x0, y0, x1, y1)) {
return true;
}
}
node = nextNode;
} while (node != start);
return false;
}
/** Determines whether two line segments intersect. **/
public static boolean linesIntersect(final double aX0, final double aY0, final double aX1, final double aY1,
final double bX0, final double bY0, final double bX1, final double bY1) {
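    // orientation-based segment intersection test: each segment's endpoints must straddle
    // the line through the other segment for the two segments to intersect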
return (area(aX0, aY0, aX1, aY1, bX0, bY0) > 0) != (area(aX0, aY0, aX1, aY1, bX1, bY1) > 0)
&& (area(bX0, bY0, bX1, bY1, aX0, aY0) > 0) != (area(bX0, bY0, bX1, bY1, aX1, aY1) > 0);
}
  /** Interlinks polygon nodes in Z-Order, resetting the z-order links first. **/
private static void sortByMortonWithReset(Node start) {
Node next = start;
do {
next.previousZ = next.previous;
next.nextZ = next.next;
next = next.next;
} while (next != start);
sortByMorton(start);
}
/** Interlinks polygon nodes in Z-Order. **/
private static void sortByMorton(Node start) {
start.previousZ.nextZ = null;
start.previousZ = null;
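    // break the circular z-order list into a linear one so the merge sort below has a well-defined tail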
// Sort the generated ring using Z ordering.
tathamSort(start);
}
/**
* Simon Tatham's doubly-linked list O(n log n) mergesort
* see: http://www.chiark.greenend.org.uk/~sgtatham/algorithms/listsort.html
**/
private static void tathamSort(Node list) {
Node p, q, e, tail;
int i, numMerges, pSize, qSize;
int inSize = 1;
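    // bottom-up merge sort over the z-order links: merge runs of length inSize,
    // doubling inSize each pass until a single sorted run remains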
if (list == null) {
return;
}
do {
p = list;
list = null;
tail = null;
// count number of merges in this pass
numMerges = 0;
while(p != null) {
++numMerges;
// step 'insize' places along from p
q = p;
for (i = 0, pSize = 0; i < inSize && q != null; ++i, ++pSize, q = q.nextZ);
// if q hasn't fallen off end, we have two lists to merge
qSize = inSize;
// now we have two lists; merge
while (pSize > 0 || (qSize > 0 && q != null)) {
if (pSize != 0 && (qSize == 0 || q == null || Long.compareUnsigned(p.morton, q.morton) <= 0)) {
e = p;
p = p.nextZ;
--pSize;
} else {
e = q;
q = q.nextZ;
--qSize;
}
if (tail != null) {
tail.nextZ = e;
} else {
list = e;
}
// maintain reverse pointers
e.previousZ = tail;
tail = e;
}
// now p has stepped 'insize' places along, and q has too
p = q;
}
tail.nextZ = null;
inSize *= 2;
} while (numMerges > 1);
}
  /** Eliminates collinear/duplicate points from the doubly linked list */
private static Node filterPoints(final Node start, Node end) {
if (start == null) {
return start;
}
if(end == null) {
end = start;
}
Node node = start;
Node nextNode;
Node prevNode;
boolean continueIteration;
do {
continueIteration = false;
nextNode = node.next;
prevNode = node.previous;
if (node.isSteiner == false && isVertexEquals(node, nextNode)
|| area(prevNode.getX(), prevNode.getY(), node.getX(), node.getY(), nextNode.getX(), nextNode.getY()) == 0) {
// Remove the node
removeNode(node);
node = end = prevNode;
if (node == nextNode) {
break;
}
continueIteration = true;
} else {
node = nextNode;
}
} while (continueIteration || node != end);
return end;
}
/** Creates a node and optionally links it with a previous node in a circular doubly-linked list */
private static Node insertNode(final Polygon polygon, int index, int vertexIndex, final Node lastNode) {
final Node node = new Node(polygon, index, vertexIndex);
if(lastNode == null) {
node.previous = node;
node.previousZ = node;
node.next = node;
node.nextZ = node;
} else {
node.next = lastNode.next;
node.nextZ = lastNode.next;
node.previous = lastNode;
node.previousZ = lastNode;
lastNode.next.previous = node;
lastNode.nextZ.previousZ = node;
lastNode.next = node;
lastNode.nextZ = node;
}
return node;
}
/** Removes a node from the doubly linked list */
private static void removeNode(Node node) {
node.next.previous = node.previous;
node.previous.next = node.next;
if (node.previousZ != null) {
node.previousZ.nextZ = node.nextZ;
}
if (node.nextZ != null) {
node.nextZ.previousZ = node.previousZ;
}
}
/** Determines if two point vertices are equal. **/
private static boolean isVertexEquals(final Node a, final Node b) {
return isVertexEquals(a, b.getX(), b.getY());
}
/** Determines if two point vertices are equal. **/
private static boolean isVertexEquals(final Node a, final double x, final double y) {
return a.getX() == x && a.getY() == y;
}
/** Compute signed area of triangle */
private static double area(final double aX, final double aY, final double bX, final double bY,
final double cX, final double cY) {
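    // equal to minus twice the signed area of triangle (a, b, c); a non-negative result
    // means the vertices are clockwise or collinear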
return (bY - aY) * (cX - bX) - (bX - aX) * (cY - bY);
}
/** Compute whether point is in a candidate ear */
private static boolean pointInEar(final double x, final double y, final double ax, final double ay,
final double bx, final double by, final double cx, final double cy) {
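    // half-plane tests: the point is inside (or on an edge of) the candidate ear iff it
    // sits on the non-negative side of all three edges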
return (cx - x) * (ay - y) - (ax - x) * (cy - y) >= 0 &&
(ax - x) * (by - y) - (bx - x) * (ay - y) >= 0 &&
(bx - x) * (cy - y) - (cx - x) * (by - y) >= 0;
}
/** compute whether the given x, y point is in a triangle; uses the winding order method */
public static boolean pointInTriangle (double x, double y, double ax, double ay, double bx, double by, double cx, double cy) {
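    // the point is contained iff its orientations against all three edges agree
    // (zero orientations allow points lying exactly on an edge)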
int a = orient(x, y, ax, ay, bx, by);
int b = orient(x, y, bx, by, cx, cy);
if (a == 0 || b == 0 || a < 0 == b < 0) {
int c = orient(x, y, cx, cy, ax, ay);
return c == 0 || (c < 0 == (b < 0 || a < 0));
}
return false;
}
  /** Brute force compute whether a point is in the polygon by traversing the entire triangulation
* todo: speed this up using either binary tree or prefix coding (filtering by bounding box of triangle)
**/
public static boolean pointInPolygon(final List<Triangle> tessellation, double lat, double lon) {
// each triangle
for (int i = 0; i < tessellation.size(); ++i) {
if (tessellation.get(i).containsPoint(lat, lon)) {
return true;
}
}
return false;
}
/** Circular Doubly-linked list used for polygon coordinates */
protected static class Node {
// node index in the linked list
private final int idx;
// vertex index in the polygon
private final int vrtxIdx;
// reference to the polygon for lat/lon values
private final Polygon polygon;
// encoded x value
private final int x;
// encoded y value
private final int y;
// morton code for sorting
private final long morton;
// previous node
private Node previous;
// next node
private Node next;
// previous z node
private Node previousZ;
// next z node
private Node nextZ;
    // indicates whether this node is a Steiner point
private boolean isSteiner = false;
protected Node(final Polygon polygon, final int index, final int vertexIndex) {
this.idx = index;
this.vrtxIdx = vertexIndex;
this.polygon = polygon;
this.y = encodeLatitude(polygon.getPolyLat(vrtxIdx));
this.x = encodeLongitude(polygon.getPolyLon(vrtxIdx));
this.morton = BitUtil.interleave(x ^ 0x80000000, y ^ 0x80000000);
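      // flipping the sign bit maps the signed encodings onto an unsigned range, so the
      // interleaved keys compare correctly with Long.compareUnsigned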
this.previous = null;
this.next = null;
this.previousZ = null;
this.nextZ = null;
}
/** simple deep copy constructor */
protected Node(Node other) {
this.idx = other.idx;
this.vrtxIdx = other.vrtxIdx;
this.polygon = other.polygon;
this.morton = other.morton;
this.x = other.x;
this.y = other.y;
this.previous = other.previous;
this.next = other.next;
this.previousZ = other.previousZ;
this.nextZ = other.nextZ;
this.isSteiner = other.isSteiner;
}
/** get the x value */
public final double getX() {
return polygon.getPolyLon(vrtxIdx);
}
/** get the y value */
public final double getY() {
return polygon.getPolyLat(vrtxIdx);
}
/** get the longitude value */
public final double getLon() {
return polygon.getPolyLon(vrtxIdx);
}
/** get the latitude value */
public final double getLat() {
return polygon.getPolyLat(vrtxIdx);
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
if (this.previous == null)
builder.append("||-");
else
builder.append(this.previous.idx + " <- ");
builder.append(this.idx);
if (this.next == null)
builder.append(" -||");
else
builder.append(" -> " + this.next.idx);
return builder.toString();
}
}
/** Triangle in the tessellated mesh */
public static final class Triangle {
Node[] vertex;
protected Triangle(Node a, Node b, Node c) {
this.vertex = new Node[] {a, b, c};
}
/** get quantized x value for the given vertex */
public int getEncodedX(int vertex) {
return this.vertex[vertex].x;
}
/** get quantized y value for the given vertex */
public int getEncodedY(int vertex) {
return this.vertex[vertex].y;
}
/** get latitude value for the given vertex */
public double getLat(int vertex) {
return this.vertex[vertex].getLat();
}
/** get longitude value for the given vertex */
public double getLon(int vertex) {
return this.vertex[vertex].getLon();
}
/** utility method to compute whether the point is in the triangle */
protected boolean containsPoint(double lat, double lon) {
return pointInTriangle(lon, lat,
vertex[0].getLon(), vertex[0].getLat(),
vertex[1].getLon(), vertex[1].getLat(),
vertex[2].getLon(), vertex[2].getLat());
}
/** pretty print the triangle vertices */
public String toString() {
String result = vertex[0].x + ", " + vertex[0].y + " " +
vertex[1].x + ", " + vertex[1].y + " " +
vertex[2].x + ", " + vertex[2].y;
return result;
}
}
}
View File
@ -282,7 +282,7 @@ public class DeleteRequest extends ReplicatedWriteRequest<DeleteRequest>
}
version = in.readLong();
versionType = VersionType.fromValue(in.readByte());
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
if (in.getVersion().onOrAfter(Version.V_6_6_0)) {
ifSeqNo = in.readZLong();
ifPrimaryTerm = in.readVLong();
} else {
@ -302,7 +302,7 @@ public class DeleteRequest extends ReplicatedWriteRequest<DeleteRequest>
}
out.writeLong(version);
out.writeByte(versionType.getValue());
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
if (out.getVersion().onOrAfter(Version.V_6_6_0)) {
out.writeZLong(ifSeqNo);
out.writeVLong(ifPrimaryTerm);
} else if (ifSeqNo != SequenceNumbers.UNASSIGNED_SEQ_NO || ifPrimaryTerm != 0) {
View File
@ -616,7 +616,7 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
} else {
contentType = null;
}
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
if (in.getVersion().onOrAfter(Version.V_6_6_0)) {
ifSeqNo = in.readZLong();
ifPrimaryTerm = in.readVLong();
} else {
@ -654,7 +654,7 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
} else {
out.writeBoolean(false);
}
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
if (out.getVersion().onOrAfter(Version.V_6_6_0)) {
out.writeZLong(ifSeqNo);
out.writeVLong(ifPrimaryTerm);
} else if (ifSeqNo != SequenceNumbers.UNASSIGNED_SEQ_NO || ifPrimaryTerm != 0) {
View File
@ -163,6 +163,7 @@ public abstract class TransportWriteAction<
* Respond if the refresh has occurred and the listener is ready. Always called while synchronized on {@code this}.
*/
protected void respondIfPossible(Exception ex) {
assert Thread.holdsLock(this);
if (finishedAsyncActions && listener != null) {
if (ex == null) {
super.respond(listener);
@ -206,7 +207,7 @@ public abstract class TransportWriteAction<
}
@Override
public void respond(ActionListener<TransportResponse.Empty> listener) {
public synchronized void respond(ActionListener<TransportResponse.Empty> listener) {
this.listener = listener;
respondIfPossible(null);
}
@ -215,6 +216,7 @@ public abstract class TransportWriteAction<
* Respond if the refresh has occurred and the listener is ready. Always called while synchronized on {@code this}.
*/
protected void respondIfPossible(Exception ex) {
assert Thread.holdsLock(this);
if (finishedAsyncActions && listener != null) {
if (ex == null) {
super.respond(listener);
@ -225,7 +227,7 @@ public abstract class TransportWriteAction<
}
@Override
public void onFailure(Exception ex) {
public synchronized void onFailure(Exception ex) {
finishedAsyncActions = true;
respondIfPossible(ex);
}
View File
@ -19,6 +19,7 @@
package org.elasticsearch.common.geo;
import org.apache.lucene.document.XLatLonShape.QueryRelation;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
@ -62,6 +63,17 @@ public enum ShapeRelation implements Writeable {
return null;
}
/** Maps ShapeRelation to Lucene's LatLonShapeRelation */
public QueryRelation getLuceneRelation() {
switch (this) {
case INTERSECTS: return QueryRelation.INTERSECTS;
case DISJOINT: return QueryRelation.DISJOINT;
case WITHIN: return QueryRelation.WITHIN;
default:
throw new IllegalArgumentException("ShapeRelation [" + this + "] not supported");
}
}
public String getRelationName() {
return relationName;
}
View File
@ -197,9 +197,6 @@ public class GeometryCollectionBuilder extends ShapeBuilder<Shape, GeometryColle
}
}
if (shapes.size() == 1) {
return shapes.get(0);
}
return shapes.toArray(new Object[shapes.size()]);
}
View File
@ -25,10 +25,11 @@ import org.elasticsearch.common.geo.GeoShapeType;
import org.elasticsearch.common.geo.builders.CircleBuilder;
import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentSubParser;
import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
import org.elasticsearch.index.mapper.BaseGeoShapeFieldMapper;
import org.locationtech.jts.geom.Coordinate;
import java.io.IOException;
@ -41,17 +42,22 @@ import java.util.List;
* complies with geojson specification: https://tools.ietf.org/html/rfc7946
*/
abstract class GeoJsonParser {
protected static ShapeBuilder parse(XContentParser parser, GeoShapeFieldMapper shapeMapper)
protected static ShapeBuilder parse(XContentParser parser, BaseGeoShapeFieldMapper shapeMapper)
throws IOException {
GeoShapeType shapeType = null;
DistanceUnit.Distance radius = null;
CoordinateNode coordinateNode = null;
GeometryCollectionBuilder geometryCollections = null;
ShapeBuilder.Orientation requestedOrientation =
(shapeMapper == null) ? ShapeBuilder.Orientation.RIGHT : shapeMapper.fieldType().orientation();
Explicit<Boolean> coerce = (shapeMapper == null) ? GeoShapeFieldMapper.Defaults.COERCE : shapeMapper.coerce();
Explicit<Boolean> ignoreZValue = (shapeMapper == null) ? GeoShapeFieldMapper.Defaults.IGNORE_Z_VALUE : shapeMapper.ignoreZValue();
Orientation orientation = (shapeMapper == null)
? BaseGeoShapeFieldMapper.Defaults.ORIENTATION.value()
: shapeMapper.orientation();
Explicit<Boolean> coerce = (shapeMapper == null)
? BaseGeoShapeFieldMapper.Defaults.COERCE
: shapeMapper.coerce();
Explicit<Boolean> ignoreZValue = (shapeMapper == null)
? BaseGeoShapeFieldMapper.Defaults.IGNORE_Z_VALUE
: shapeMapper.ignoreZValue();
String malformedException = null;
@ -102,7 +108,7 @@ abstract class GeoJsonParser {
malformedException = "cannot have [" + ShapeParser.FIELD_ORIENTATION + "] with type set to [" + shapeType + "]";
}
subParser.nextToken();
requestedOrientation = ShapeBuilder.Orientation.fromString(subParser.text());
orientation = ShapeBuilder.Orientation.fromString(subParser.text());
} else {
subParser.nextToken();
subParser.skipChildren();
@ -128,7 +134,7 @@ abstract class GeoJsonParser {
return geometryCollections;
}
return shapeType.getBuilder(coordinateNode, radius, requestedOrientation, coerce.value());
return shapeType.getBuilder(coordinateNode, radius, orientation, coerce.value());
}
/**
@ -202,7 +208,7 @@ abstract class GeoJsonParser {
* @return Geometry[] geometries of the GeometryCollection
* @throws IOException Thrown if an error occurs while reading from the XContentParser
*/
static GeometryCollectionBuilder parseGeometries(XContentParser parser, GeoShapeFieldMapper mapper) throws
static GeometryCollectionBuilder parseGeometries(XContentParser parser, BaseGeoShapeFieldMapper mapper) throws
IOException {
if (parser.currentToken() != XContentParser.Token.START_ARRAY) {
throw new ElasticsearchParseException("geometries must be an array of geojson objects");
View File
@ -34,7 +34,7 @@ import org.elasticsearch.common.geo.builders.PolygonBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
import org.elasticsearch.index.mapper.BaseGeoShapeFieldMapper;
import org.locationtech.jts.geom.Coordinate;
import java.io.IOException;
@ -63,7 +63,7 @@ public class GeoWKTParser {
// no instance
private GeoWKTParser() {}
public static ShapeBuilder parse(XContentParser parser, final GeoShapeFieldMapper shapeMapper)
public static ShapeBuilder parse(XContentParser parser, final BaseGeoShapeFieldMapper shapeMapper)
throws IOException, ElasticsearchParseException {
return parseExpectedType(parser, null, shapeMapper);
}
@ -75,12 +75,12 @@ public class GeoWKTParser {
/** throws an exception if the parsed geometry type does not match the expected shape type */
public static ShapeBuilder parseExpectedType(XContentParser parser, final GeoShapeType shapeType,
final GeoShapeFieldMapper shapeMapper)
final BaseGeoShapeFieldMapper shapeMapper)
throws IOException, ElasticsearchParseException {
try (StringReader reader = new StringReader(parser.text())) {
Explicit<Boolean> ignoreZValue = (shapeMapper == null) ? GeoShapeFieldMapper.Defaults.IGNORE_Z_VALUE :
Explicit<Boolean> ignoreZValue = (shapeMapper == null) ? BaseGeoShapeFieldMapper.Defaults.IGNORE_Z_VALUE :
shapeMapper.ignoreZValue();
Explicit<Boolean> coerce = (shapeMapper == null) ? GeoShapeFieldMapper.Defaults.COERCE : shapeMapper.coerce();
Explicit<Boolean> coerce = (shapeMapper == null) ? BaseGeoShapeFieldMapper.Defaults.COERCE : shapeMapper.coerce();
// setup the tokenizer; configured to read words w/o numbers
StreamTokenizer tokenizer = new StreamTokenizer(reader);
tokenizer.resetSyntax();
@ -257,7 +257,8 @@ public class GeoWKTParser {
if (nextEmptyOrOpen(stream).equals(EMPTY)) {
return null;
}
PolygonBuilder builder = new PolygonBuilder(parseLinearRing(stream, ignoreZValue, coerce), ShapeBuilder.Orientation.RIGHT);
PolygonBuilder builder = new PolygonBuilder(parseLinearRing(stream, ignoreZValue, coerce),
BaseGeoShapeFieldMapper.Defaults.ORIENTATION.value());
while (nextCloserOrComma(stream).equals(COMMA)) {
builder.hole(parseLinearRing(stream, ignoreZValue, coerce));
}
View File
@ -23,7 +23,7 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
import org.elasticsearch.index.mapper.BaseGeoShapeFieldMapper;
import java.io.IOException;
@ -46,7 +46,7 @@ public interface ShapeParser {
* if the parsers current token has been <code>null</code>
* @throws IOException if the input could not be read
*/
static ShapeBuilder parse(XContentParser parser, GeoShapeFieldMapper shapeMapper) throws IOException {
static ShapeBuilder parse(XContentParser parser, BaseGeoShapeFieldMapper shapeMapper) throws IOException {
if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
return null;
} if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
View File
@ -47,6 +47,7 @@ import java.io.IOException;
import java.io.Writer;
import java.math.BigDecimal;
import java.util.Locale;
import java.util.regex.Pattern;
public class Joda {
@ -321,6 +322,8 @@ public class Joda {
public static class EpochTimeParser implements DateTimeParser {
private static final Pattern scientificNotation = Pattern.compile("[Ee]");
private final boolean hasMilliSecondPrecision;
public EpochTimeParser(boolean hasMilliSecondPrecision) {
@ -348,6 +351,11 @@ public class Joda {
int factor = hasMilliSecondPrecision ? 1 : 1000;
try {
long millis = new BigDecimal(text).longValue() * factor;
// check for deprecation, but after it has parsed correctly so the "e" isn't from something else
if (scientificNotation.matcher(text).find()) {
deprecationLogger.deprecatedAndMaybeLog("epoch-scientific-notation", "Use of scientific notation " +
"in epoch time formats is deprecated and will not be supported in the next major version of Elasticsearch.");
}
DateTime dt = new DateTime(millis, DateTimeZone.UTC);
bucket.saveField(DateTimeFieldType.year(), dt.getYear());
bucket.saveField(DateTimeFieldType.monthOfYear(), dt.getMonthOfYear());
View File
@ -34,7 +34,7 @@ public class Iterables {
public static <T> Iterable<T> concat(Iterable<T>... inputs) {
Objects.requireNonNull(inputs);
return new ConcatenatedIterable(inputs);
return new ConcatenatedIterable<>(inputs);
}
static class ConcatenatedIterable<T> implements Iterable<T> {
View File
@ -384,7 +384,7 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXConten
index = in.readString();
type = in.readOptionalString();
id = in.readString();
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
if (in.getVersion().onOrAfter(Version.V_6_6_0)) {
seqNo = in.readZLong();
primaryTerm = in.readVLong();
} else {
@ -416,7 +416,7 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXConten
out.writeString(index);
out.writeOptionalString(type);
out.writeString(id);
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
if (out.getVersion().onOrAfter(Version.V_6_6_0)) {
out.writeZLong(seqNo);
out.writeVLong(primaryTerm);
}
View File
@ -0,0 +1,348 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.mapper.LegacyGeoShapeFieldMapper.DeprecatedParameters;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.IGNORE_MALFORMED;
/**
* Base class for {@link GeoShapeFieldMapper} and {@link LegacyGeoShapeFieldMapper}
*/
public abstract class BaseGeoShapeFieldMapper extends FieldMapper {
public static final String CONTENT_TYPE = "geo_shape";
public static class Names {
public static final ParseField ORIENTATION = new ParseField("orientation");
public static final ParseField COERCE = new ParseField("coerce");
}
public static class Defaults {
public static final Explicit<Orientation> ORIENTATION = new Explicit<>(Orientation.RIGHT, false);
public static final Explicit<Boolean> COERCE = new Explicit<>(false, false);
public static final Explicit<Boolean> IGNORE_MALFORMED = new Explicit<>(false, false);
public static final Explicit<Boolean> IGNORE_Z_VALUE = new Explicit<>(true, false);
}
public abstract static class Builder<T extends Builder, Y extends BaseGeoShapeFieldMapper>
extends FieldMapper.Builder<T, Y> {
protected Boolean coerce;
protected Boolean ignoreMalformed;
protected Boolean ignoreZValue;
protected Orientation orientation;
/** default builder - used for external mapper */
public Builder(String name, MappedFieldType fieldType, MappedFieldType defaultFieldType) {
super(name, fieldType, defaultFieldType);
}
public Builder(String name, MappedFieldType fieldType, MappedFieldType defaultFieldType,
boolean coerce, boolean ignoreMalformed, Orientation orientation, boolean ignoreZ) {
super(name, fieldType, defaultFieldType);
this.coerce = coerce;
this.ignoreMalformed = ignoreMalformed;
this.orientation = orientation;
this.ignoreZValue = ignoreZ;
}
public Builder coerce(boolean coerce) {
this.coerce = coerce;
return this;
}
protected Explicit<Boolean> coerce(BuilderContext context) {
if (coerce != null) {
return new Explicit<>(coerce, true);
}
if (context.indexSettings() != null) {
return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false);
}
return Defaults.COERCE;
}
public Builder orientation(Orientation orientation) {
this.orientation = orientation;
return this;
}
protected Explicit<Orientation> orientation() {
if (orientation != null) {
return new Explicit<>(orientation, true);
}
return Defaults.ORIENTATION;
}
@Override
protected boolean defaultDocValues(Version indexCreated) {
return false;
}
public Builder ignoreMalformed(boolean ignoreMalformed) {
this.ignoreMalformed = ignoreMalformed;
return this;
}
protected Explicit<Boolean> ignoreMalformed(BuilderContext context) {
if (ignoreMalformed != null) {
return new Explicit<>(ignoreMalformed, true);
}
if (context.indexSettings() != null) {
return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false);
}
return Defaults.IGNORE_MALFORMED;
}
protected Explicit<Boolean> ignoreZValue() {
if (ignoreZValue != null) {
return new Explicit<>(ignoreZValue, true);
}
return Defaults.IGNORE_Z_VALUE;
}
public Builder ignoreZValue(final boolean ignoreZValue) {
this.ignoreZValue = ignoreZValue;
return this;
}
@Override
protected void setupFieldType(BuilderContext context) {
super.setupFieldType(context);
// field mapper handles this at build time
// but prefix tree strategies require a name, so throw a similar exception
if (name().isEmpty()) {
throw new IllegalArgumentException("name cannot be empty string");
}
BaseGeoShapeFieldType ft = (BaseGeoShapeFieldType)fieldType();
ft.setOrientation(orientation().value());
}
}
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Boolean coerce = null;
Boolean ignoreZ = null;
Boolean ignoreMalformed = null;
Orientation orientation = null;
DeprecatedParameters deprecatedParameters = new DeprecatedParameters();
boolean parsedDeprecatedParams = false;
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = entry.getKey();
Object fieldNode = entry.getValue();
if (DeprecatedParameters.parse(name, fieldName, fieldNode, deprecatedParameters)) {
parsedDeprecatedParams = true;
iterator.remove();
} else if (Names.ORIENTATION.match(fieldName, LoggingDeprecationHandler.INSTANCE)) {
orientation = ShapeBuilder.Orientation.fromString(fieldNode.toString());
iterator.remove();
} else if (IGNORE_MALFORMED.equals(fieldName)) {
ignoreMalformed = XContentMapValues.nodeBooleanValue(fieldNode, name + ".ignore_malformed");
iterator.remove();
} else if (Names.COERCE.match(fieldName, LoggingDeprecationHandler.INSTANCE)) {
coerce = XContentMapValues.nodeBooleanValue(fieldNode, name + "." + Names.COERCE.getPreferredName());
iterator.remove();
} else if (GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName().equals(fieldName)) {
ignoreZ = XContentMapValues.nodeBooleanValue(fieldNode,
name + "." + GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName());
iterator.remove();
}
}
final Builder builder;
if (parsedDeprecatedParams || parserContext.indexVersionCreated().before(Version.V_7_0_0)) {
// Legacy index-based shape
builder = new LegacyGeoShapeFieldMapper.Builder(name, deprecatedParameters);
} else {
// BKD-based shape
builder = new GeoShapeFieldMapper.Builder(name);
}
if (coerce != null) {
builder.coerce(coerce);
}
if (ignoreZ != null) {
builder.ignoreZValue(ignoreZ);
}
if (ignoreMalformed != null) {
builder.ignoreMalformed(ignoreMalformed);
}
if (orientation != null) {
builder.orientation(orientation);
}
return builder;
}
}
public abstract static class BaseGeoShapeFieldType extends MappedFieldType {
protected Orientation orientation = Defaults.ORIENTATION.value();
protected BaseGeoShapeFieldType() {
setIndexOptions(IndexOptions.DOCS);
setTokenized(false);
setStored(false);
setStoreTermVectors(false);
setOmitNorms(true);
}
protected BaseGeoShapeFieldType(BaseGeoShapeFieldType ref) {
super(ref);
this.orientation = ref.orientation;
}
@Override
public boolean equals(Object o) {
if (!super.equals(o)) return false;
BaseGeoShapeFieldType that = (BaseGeoShapeFieldType) o;
return orientation == that.orientation;
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), orientation);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts) {
super.checkCompatibility(fieldType, conflicts);
}
public Orientation orientation() { return this.orientation; }
public void setOrientation(Orientation orientation) {
checkIfFrozen();
this.orientation = orientation;
}
@Override
public Query existsQuery(QueryShardContext context) {
return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name()));
}
@Override
public Query termQuery(Object value, QueryShardContext context) {
throw new QueryShardException(context, "Geo fields do not support exact searching, use dedicated geo queries instead");
}
}
protected Explicit<Boolean> coerce;
protected Explicit<Boolean> ignoreMalformed;
protected Explicit<Boolean> ignoreZValue;
protected BaseGeoShapeFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
Explicit<Boolean> ignoreZValue, Settings indexSettings,
MultiFields multiFields, CopyTo copyTo) {
super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
this.coerce = coerce;
this.ignoreMalformed = ignoreMalformed;
this.ignoreZValue = ignoreZValue;
}
@Override
protected void doMerge(Mapper mergeWith) {
super.doMerge(mergeWith);
BaseGeoShapeFieldMapper gsfm = (BaseGeoShapeFieldMapper)mergeWith;
if (gsfm.coerce.explicit()) {
this.coerce = gsfm.coerce;
}
if (gsfm.ignoreMalformed.explicit()) {
this.ignoreMalformed = gsfm.ignoreMalformed;
}
if (gsfm.ignoreZValue.explicit()) {
this.ignoreZValue = gsfm.ignoreZValue;
}
}
@Override
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
}
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
builder.field("type", contentType());
BaseGeoShapeFieldType ft = (BaseGeoShapeFieldType)fieldType();
if (includeDefaults || ft.orientation() != Defaults.ORIENTATION.value()) {
builder.field(Names.ORIENTATION.getPreferredName(), ft.orientation());
}
if (includeDefaults || coerce.explicit()) {
builder.field(Names.COERCE.getPreferredName(), coerce.value());
}
if (includeDefaults || ignoreMalformed.explicit()) {
builder.field(IGNORE_MALFORMED, ignoreMalformed.value());
}
if (includeDefaults || ignoreZValue.explicit()) {
builder.field(GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName(), ignoreZValue.value());
}
}
public Explicit<Boolean> coerce() {
return coerce;
}
public Explicit<Boolean> ignoreMalformed() {
return ignoreMalformed;
}
public Explicit<Boolean> ignoreZValue() {
return ignoreZValue;
}
public Orientation orientation() {
return ((BaseGeoShapeFieldType)fieldType).orientation();
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
}
View File
@ -18,48 +18,24 @@
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.XLatLonShape;
import org.apache.lucene.geo.Line;
import org.apache.lucene.geo.Polygon;
import org.apache.lucene.geo.Rectangle;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.spatial.prefix.PrefixTreeStrategy;
import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
import org.apache.lucene.spatial.prefix.TermQueryPrefixTreeStrategy;
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.prefix.tree.PackedQuadPrefixTree;
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.geo.SpatialStrategy;
import org.elasticsearch.common.geo.XShapeCollection;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation;
import org.elasticsearch.common.geo.parsers.ShapeParser;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.locationtech.spatial4j.shape.Point;
import org.locationtech.spatial4j.shape.Shape;
import org.locationtech.spatial4j.shape.jts.JtsGeometry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.IGNORE_MALFORMED;
/**
* FieldMapper for indexing {@link org.locationtech.spatial4j.shape.Shape}s.
* FieldMapper for indexing {@link XLatLonShape}s.
* <p>
* Currently Shapes can only be indexed and can only be queried using
* {@link org.elasticsearch.index.query.GeoShapeQueryBuilder}, consequently
@ -73,554 +49,123 @@ import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.IGNORE_MA
* [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ]
* ]
* }
* <p>
* or:
* <p>
* "field" : "POLYGON ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0))
*/
public class GeoShapeFieldMapper extends FieldMapper {
public static final String CONTENT_TYPE = "geo_shape";
public static class Names {
public static final String TREE = "tree";
public static final String TREE_GEOHASH = "geohash";
public static final String TREE_QUADTREE = "quadtree";
public static final String TREE_LEVELS = "tree_levels";
public static final String TREE_PRESISION = "precision";
public static final String DISTANCE_ERROR_PCT = "distance_error_pct";
public static final String ORIENTATION = "orientation";
public static final String STRATEGY = "strategy";
public static final String STRATEGY_POINTS_ONLY = "points_only";
public static final String COERCE = "coerce";
}
public static class Defaults {
public static final String TREE = Names.TREE_GEOHASH;
public static final String STRATEGY = SpatialStrategy.RECURSIVE.getStrategyName();
public static final boolean POINTS_ONLY = false;
public static final int GEOHASH_LEVELS = GeoUtils.geoHashLevelsForPrecision("50m");
public static final int QUADTREE_LEVELS = GeoUtils.quadTreeLevelsForPrecision("50m");
public static final Orientation ORIENTATION = Orientation.RIGHT;
public static final double LEGACY_DISTANCE_ERROR_PCT = 0.025d;
public static final Explicit<Boolean> COERCE = new Explicit<>(false, false);
public static final Explicit<Boolean> IGNORE_MALFORMED = new Explicit<>(false, false);
public static final Explicit<Boolean> IGNORE_Z_VALUE = new Explicit<>(true, false);
public static final MappedFieldType FIELD_TYPE = new GeoShapeFieldType();
static {
// setting name here is a hack so freeze can be called...instead all these options should be
// moved to the default ctor for GeoShapeFieldType, and defaultFieldType() should be removed from mappers...
FIELD_TYPE.setName("DoesNotExist");
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setStored(false);
FIELD_TYPE.setStoreTermVectors(false);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.freeze();
}
}
public static class Builder extends FieldMapper.Builder<Builder, GeoShapeFieldMapper> {
private Boolean coerce;
private Boolean ignoreMalformed;
private Boolean ignoreZValue;
public class GeoShapeFieldMapper extends BaseGeoShapeFieldMapper {
public static class Builder extends BaseGeoShapeFieldMapper.Builder<BaseGeoShapeFieldMapper.Builder, GeoShapeFieldMapper> {
public Builder(String name) {
super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
}
@Override
public GeoShapeFieldType fieldType() {
return (GeoShapeFieldType)fieldType;
}
public Builder coerce(boolean coerce) {
this.coerce = coerce;
return this;
}
@Override
protected boolean defaultDocValues(Version indexCreated) {
return false;
}
protected Explicit<Boolean> coerce(BuilderContext context) {
if (coerce != null) {
return new Explicit<>(coerce, true);
}
if (context.indexSettings() != null) {
return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false);
}
return Defaults.COERCE;
}
public Builder ignoreMalformed(boolean ignoreMalformed) {
this.ignoreMalformed = ignoreMalformed;
return this;
}
protected Explicit<Boolean> ignoreMalformed(BuilderContext context) {
if (ignoreMalformed != null) {
return new Explicit<>(ignoreMalformed, true);
}
if (context.indexSettings() != null) {
return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false);
}
return Defaults.IGNORE_MALFORMED;
}
protected Explicit<Boolean> ignoreZValue(BuilderContext context) {
if (ignoreZValue != null) {
return new Explicit<>(ignoreZValue, true);
}
return Defaults.IGNORE_Z_VALUE;
}
public Builder ignoreZValue(final boolean ignoreZValue) {
this.ignoreZValue = ignoreZValue;
return this;
super(name, new GeoShapeFieldType(), new GeoShapeFieldType());
}
@Override
public GeoShapeFieldMapper build(BuilderContext context) {
GeoShapeFieldType geoShapeFieldType = (GeoShapeFieldType)fieldType;
if (geoShapeFieldType.treeLevels() == 0 && geoShapeFieldType.precisionInMeters() < 0) {
geoShapeFieldType.setDefaultDistanceErrorPct(Defaults.LEGACY_DISTANCE_ERROR_PCT);
}
setupFieldType(context);
return new GeoShapeFieldMapper(name, fieldType, ignoreMalformed(context), coerce(context), ignoreZValue(context),
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
return new GeoShapeFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context),
ignoreZValue(), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
}
}
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Builder builder = new Builder(name);
Boolean pointsOnly = null;
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = entry.getKey();
Object fieldNode = entry.getValue();
if (Names.TREE.equals(fieldName)) {
builder.fieldType().setTree(fieldNode.toString());
iterator.remove();
} else if (Names.TREE_LEVELS.equals(fieldName)) {
builder.fieldType().setTreeLevels(Integer.parseInt(fieldNode.toString()));
iterator.remove();
} else if (Names.TREE_PRESISION.equals(fieldName)) {
builder.fieldType().setPrecisionInMeters(DistanceUnit.parse(fieldNode.toString(),
DistanceUnit.DEFAULT, DistanceUnit.DEFAULT));
iterator.remove();
} else if (Names.DISTANCE_ERROR_PCT.equals(fieldName)) {
builder.fieldType().setDistanceErrorPct(Double.parseDouble(fieldNode.toString()));
iterator.remove();
} else if (Names.ORIENTATION.equals(fieldName)) {
builder.fieldType().setOrientation(ShapeBuilder.Orientation.fromString(fieldNode.toString()));
iterator.remove();
} else if (Names.STRATEGY.equals(fieldName)) {
builder.fieldType().setStrategyName(fieldNode.toString());
iterator.remove();
} else if (IGNORE_MALFORMED.equals(fieldName)) {
builder.ignoreMalformed(XContentMapValues.nodeBooleanValue(fieldNode, name + ".ignore_malformed"));
iterator.remove();
} else if (Names.COERCE.equals(fieldName)) {
builder.coerce(XContentMapValues.nodeBooleanValue(fieldNode, name + "." + Names.COERCE));
iterator.remove();
} else if (GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName().equals(fieldName)) {
builder.ignoreZValue(XContentMapValues.nodeBooleanValue(fieldNode,
name + "." + GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName()));
iterator.remove();
} else if (Names.STRATEGY_POINTS_ONLY.equals(fieldName)) {
pointsOnly = XContentMapValues.nodeBooleanValue(fieldNode, name + "." + Names.STRATEGY_POINTS_ONLY);
iterator.remove();
}
}
if (pointsOnly != null) {
if (builder.fieldType().strategyName.equals(SpatialStrategy.TERM.getStrategyName()) && pointsOnly == false) {
throw new IllegalArgumentException("points_only cannot be set to false for term strategy");
} else {
builder.fieldType().setPointsOnly(pointsOnly);
}
}
return builder;
public static final class GeoShapeFieldType extends BaseGeoShapeFieldType {
public GeoShapeFieldType() {
super();
}
}
public static final class GeoShapeFieldType extends MappedFieldType {
private String tree = Defaults.TREE;
private String strategyName = Defaults.STRATEGY;
private boolean pointsOnly = Defaults.POINTS_ONLY;
private int treeLevels = 0;
private double precisionInMeters = -1;
private Double distanceErrorPct;
private double defaultDistanceErrorPct = 0.0;
private Orientation orientation = Defaults.ORIENTATION;
// these are built when the field type is frozen
private PrefixTreeStrategy defaultStrategy;
private RecursivePrefixTreeStrategy recursiveStrategy;
private TermQueryPrefixTreeStrategy termStrategy;
public GeoShapeFieldType() {}
protected GeoShapeFieldType(GeoShapeFieldType ref) {
super(ref);
this.tree = ref.tree;
this.strategyName = ref.strategyName;
this.pointsOnly = ref.pointsOnly;
this.treeLevels = ref.treeLevels;
this.precisionInMeters = ref.precisionInMeters;
this.distanceErrorPct = ref.distanceErrorPct;
this.defaultDistanceErrorPct = ref.defaultDistanceErrorPct;
this.orientation = ref.orientation;
}
@Override
public GeoShapeFieldType clone() {
return new GeoShapeFieldType(this);
}
@Override
public boolean equals(Object o) {
if (!super.equals(o)) return false;
GeoShapeFieldType that = (GeoShapeFieldType) o;
return treeLevels == that.treeLevels &&
precisionInMeters == that.precisionInMeters &&
defaultDistanceErrorPct == that.defaultDistanceErrorPct &&
Objects.equals(tree, that.tree) &&
Objects.equals(strategyName, that.strategyName) &&
pointsOnly == that.pointsOnly &&
Objects.equals(distanceErrorPct, that.distanceErrorPct) &&
orientation == that.orientation;
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), tree, strategyName, pointsOnly, treeLevels, precisionInMeters, distanceErrorPct,
defaultDistanceErrorPct, orientation);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public void freeze() {
super.freeze();
// This is a bit hackish: we need to set up the spatial tree and strategies once the field name is set, which
// must be by the time freeze is called.
SpatialPrefixTree prefixTree;
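// map the configured tree type to the corresponding Lucene spatial prefix tree; "quadtree" uses the
// packed implementation, while "legacyquadtree" keeps the original quadtree encoding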
if ("geohash".equals(tree)) {
prefixTree = new GeohashPrefixTree(ShapeBuilder.SPATIAL_CONTEXT,
getLevels(treeLevels, precisionInMeters, Defaults.GEOHASH_LEVELS, true));
} else if ("legacyquadtree".equals(tree)) {
prefixTree = new QuadPrefixTree(ShapeBuilder.SPATIAL_CONTEXT,
getLevels(treeLevels, precisionInMeters, Defaults.QUADTREE_LEVELS, false));
} else if ("quadtree".equals(tree)) {
prefixTree = new PackedQuadPrefixTree(ShapeBuilder.SPATIAL_CONTEXT,
getLevels(treeLevels, precisionInMeters, Defaults.QUADTREE_LEVELS, false));
} else {
throw new IllegalArgumentException("Unknown prefix tree type [" + tree + "]");
}
recursiveStrategy = new RecursivePrefixTreeStrategy(prefixTree, name());
recursiveStrategy.setDistErrPct(distanceErrorPct());
recursiveStrategy.setPruneLeafyBranches(false);
termStrategy = new TermQueryPrefixTreeStrategy(prefixTree, name());
termStrategy.setDistErrPct(distanceErrorPct());
defaultStrategy = resolveStrategy(strategyName);
defaultStrategy.setPointsOnly(pointsOnly);
}
@Override
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts) {
super.checkCompatibility(fieldType, conflicts);
GeoShapeFieldType other = (GeoShapeFieldType)fieldType;
// prevent user from changing strategies
if (strategyName().equals(other.strategyName()) == false) {
conflicts.add("mapper [" + name() + "] has different [strategy]");
}
// prevent user from changing trees (changes encoding)
if (tree().equals(other.tree()) == false) {
conflicts.add("mapper [" + name() + "] has different [tree]");
}
if (pointsOnly() != other.pointsOnly()) {
conflicts.add("mapper [" + name() + "] has different points_only");
}
// TODO we should allow this, but at the moment levels is used to build bookkeeping variables
// in lucene's SpatialPrefixTree implementations, need a patch to correct that first
if (treeLevels() != other.treeLevels()) {
conflicts.add("mapper [" + name() + "] has different [tree_levels]");
}
if (precisionInMeters() != other.precisionInMeters()) {
conflicts.add("mapper [" + name() + "] has different [precision]");
}
}
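// resolves the tree depth: the deeper of an explicit tree_levels and the depth implied by the
// configured precision, falling back to the built-in default when neither is set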
private static int getLevels(int treeLevels, double precisionInMeters, int defaultLevels, boolean geoHash) {
if (treeLevels > 0 || precisionInMeters >= 0) {
return Math.max(treeLevels, precisionInMeters >= 0 ? (geoHash ? GeoUtils.geoHashLevelsForPrecision(precisionInMeters)
: GeoUtils.quadTreeLevelsForPrecision(precisionInMeters)) : 0);
}
return defaultLevels;
}
public String tree() {
return tree;
}
public void setTree(String tree) {
checkIfFrozen();
this.tree = tree;
}
public String strategyName() {
return strategyName;
}
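// the term strategy only supports points, so selecting it implicitly turns on points_only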
public void setStrategyName(String strategyName) {
checkIfFrozen();
this.strategyName = strategyName;
if (this.strategyName.equals(SpatialStrategy.TERM.getStrategyName())) {
this.pointsOnly = true;
}
}
public boolean pointsOnly() {
return pointsOnly;
}
public void setPointsOnly(boolean pointsOnly) {
checkIfFrozen();
this.pointsOnly = pointsOnly;
}
public int treeLevels() {
return treeLevels;
}
public void setTreeLevels(int treeLevels) {
checkIfFrozen();
this.treeLevels = treeLevels;
}
public double precisionInMeters() {
return precisionInMeters;
}
public void setPrecisionInMeters(double precisionInMeters) {
checkIfFrozen();
this.precisionInMeters = precisionInMeters;
}
public double distanceErrorPct() {
return distanceErrorPct == null ? defaultDistanceErrorPct : distanceErrorPct;
}
public void setDistanceErrorPct(double distanceErrorPct) {
checkIfFrozen();
this.distanceErrorPct = distanceErrorPct;
}
public void setDefaultDistanceErrorPct(double defaultDistanceErrorPct) {
checkIfFrozen();
this.defaultDistanceErrorPct = defaultDistanceErrorPct;
}
public Orientation orientation() { return this.orientation; }
public void setOrientation(Orientation orientation) {
checkIfFrozen();
this.orientation = orientation;
}
public PrefixTreeStrategy defaultStrategy() {
return this.defaultStrategy;
}
public PrefixTreeStrategy resolveStrategy(SpatialStrategy strategy) {
return resolveStrategy(strategy.getStrategyName());
}
public PrefixTreeStrategy resolveStrategy(String strategyName) {
if (SpatialStrategy.RECURSIVE.getStrategyName().equals(strategyName)) {
return recursiveStrategy;
}
if (SpatialStrategy.TERM.getStrategyName().equals(strategyName)) {
return termStrategy;
}
throw new IllegalArgumentException("Unknown prefix tree strategy [" + strategyName + "]");
}
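// geo shapes expose no doc values in this mapper, so exists queries are answered through the
// _field_names field instead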
@Override
public Query existsQuery(QueryShardContext context) {
return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name()));
}
@Override
public Query termQuery(Object value, QueryShardContext context) {
throw new QueryShardException(context, "Geo fields do not support exact searching, use dedicated geo queries instead");
}
}
protected Explicit<Boolean> coerce;
protected Explicit<Boolean> ignoreMalformed;
protected Explicit<Boolean> ignoreZValue;
public GeoShapeFieldMapper(String simpleName, MappedFieldType fieldType, Explicit<Boolean> ignoreMalformed,
Explicit<Boolean> coerce, Explicit<Boolean> ignoreZValue, Settings indexSettings,
MultiFields multiFields, CopyTo copyTo) {
super(simpleName, fieldType, Defaults.FIELD_TYPE, indexSettings, multiFields, copyTo);
this.coerce = coerce;
this.ignoreMalformed = ignoreMalformed;
this.ignoreZValue = ignoreZValue;
}
@Override
public GeoShapeFieldType fieldType() {
return (GeoShapeFieldType) super.fieldType();
}
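// For illustration only (index and field names are hypothetical), a document indexed through this
// parse path could look like:
//   PUT test/_doc/1
//   { "location": { "type": "polygon",
//                   "coordinates": [[[-10, -10], [10, -10], [10, 10], [-10, 10], [-10, -10]]] } }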
/** parsing logic for {@link XLatLonShape} indexing */
@Override
public void parse(ParseContext context) throws IOException {
try {
Shape shape = context.parseExternalValue(Shape.class);
if (shape == null) {
ShapeBuilder shapeBuilder = ShapeParser.parse(context.parser(), this);
if (shapeBuilder == null) {
return;
}
shape = shapeBuilder.buildS4J();
}
if (fieldType().pointsOnly() == true) {
// index configured for pointsOnly
if (shape instanceof XShapeCollection && XShapeCollection.class.cast(shape).pointsOnly()) {
// MULTIPOINT data: index each point separately
List<Shape> shapes = ((XShapeCollection) shape).getShapes();
for (Shape s : shapes) {
indexShape(context, s);
}
return;
} else if (shape instanceof Point == false) {
throw new MapperParsingException("[{" + fieldType().name() + "}] is configured for points only but a "
+ ((shape instanceof JtsGeometry) ? ((JtsGeometry)shape).getGeom().getGeometryType() : shape.getClass())
+ " was found");
}
}
indexShape(context, shape);
} catch (Exception e) {
if (ignoreMalformed.value() == false) {
throw new MapperParsingException("failed to parse field [{}] of type [{}]", e, fieldType().name(),
fieldType().typeName());
}
context.addIgnoredField(fieldType().name());
}
}
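// indexes a Spatial4j shape using the prefix tree strategy configured for this field, and records
// the field name for exists queries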
private void indexShape(ParseContext context, Shape shape) {
List<IndexableField> fields = new ArrayList<>(Arrays.asList(fieldType().defaultStrategy().createIndexableFields(shape)));
createFieldNamesField(context, fields);
for (IndexableField field : fields) {
context.doc().add(field);
}
}
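// no-op: fields are added to the document directly in parse(ParseContext)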
@Override
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
}
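// when merging mappings, explicitly set values on the incoming mapper override the current ones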
@Override
protected void doMerge(Mapper mergeWith) {
super.doMerge(mergeWith);
GeoShapeFieldMapper gsfm = (GeoShapeFieldMapper)mergeWith;
if (gsfm.coerce.explicit()) {
this.coerce = gsfm.coerce;
}
if (gsfm.ignoreMalformed.explicit()) {
this.ignoreMalformed = gsfm.ignoreMalformed;
}
if (gsfm.ignoreZValue.explicit()) {
this.ignoreZValue = gsfm.ignoreZValue;
}
}
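// For illustration only (field name and values are hypothetical): the serialization below
// round-trips a mapping definition along the lines of
//   "location": {
//     "type": "geo_shape",
//     "tree": "quadtree",
//     "precision": "50m",
//     "strategy": "recursive",
//     "points_only": false
//   }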
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
builder.field("type", contentType());
if (includeDefaults || fieldType().tree().equals(Defaults.TREE) == false) {
builder.field(Names.TREE, fieldType().tree());
}
if (fieldType().treeLevels() != 0) {
builder.field(Names.TREE_LEVELS, fieldType().treeLevels());
} else if (includeDefaults && fieldType().precisionInMeters() == -1) { // defaults only make sense if precision is not specified
if ("geohash".equals(fieldType().tree())) {
builder.field(Names.TREE_LEVELS, Defaults.GEOHASH_LEVELS);
} else if ("legacyquadtree".equals(fieldType().tree())) {
builder.field(Names.TREE_LEVELS, Defaults.QUADTREE_LEVELS);
} else if ("quadtree".equals(fieldType().tree())) {
builder.field(Names.TREE_LEVELS, Defaults.QUADTREE_LEVELS);
} else {
throw new IllegalArgumentException("Unknown prefix tree type [" + fieldType().tree() + "]");
}
}
if (fieldType().precisionInMeters() != -1) {
builder.field(Names.TREE_PRESISION, DistanceUnit.METERS.toString(fieldType().precisionInMeters()));
} else if (includeDefaults && fieldType().treeLevels() == 0) { // defaults only make sense if tree levels are not specified
builder.field(Names.TREE_PRESISION, DistanceUnit.METERS.toString(50));
}
if (includeDefaults || fieldType().strategyName().equals(Defaults.STRATEGY) == false) {
builder.field(Names.STRATEGY, fieldType().strategyName());
}
if (includeDefaults || fieldType().distanceErrorPct() != fieldType().defaultDistanceErrorPct) {
builder.field(Names.DISTANCE_ERROR_PCT, fieldType().distanceErrorPct());
}
if (includeDefaults || fieldType().orientation() != Defaults.ORIENTATION) {
builder.field(Names.ORIENTATION, fieldType().orientation());
}
if (fieldType().strategyName().equals(SpatialStrategy.TERM.getStrategyName())) {
// for the TERM strategy the default for points_only changes to true
if (includeDefaults || fieldType().pointsOnly() != true) {
builder.field(Names.STRATEGY_POINTS_ONLY, fieldType().pointsOnly());
}
} else {
if (includeDefaults || fieldType().pointsOnly() != GeoShapeFieldMapper.Defaults.POINTS_ONLY) {
builder.field(Names.STRATEGY_POINTS_ONLY, fieldType().pointsOnly());
}
}
if (includeDefaults || coerce.explicit()) {
builder.field(Names.COERCE, coerce.value());
}
if (includeDefaults || ignoreMalformed.explicit()) {
builder.field(IGNORE_MALFORMED, ignoreMalformed.value());
}
if (includeDefaults || ignoreZValue.explicit()) {
builder.field(GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName(), ignoreZValue.value());
}
}
private void indexShape(ParseContext context, Object luceneShape) {
if (luceneShape instanceof GeoPoint) {
GeoPoint pt = (GeoPoint) luceneShape;
indexFields(context, XLatLonShape.createIndexableFields(name(), pt.lat(), pt.lon()));
} else if (luceneShape instanceof double[]) {
double[] pt = (double[]) luceneShape;
indexFields(context, XLatLonShape.createIndexableFields(name(), pt[1], pt[0]));
} else if (luceneShape instanceof Line) {
indexFields(context, XLatLonShape.createIndexableFields(name(), (Line) luceneShape));
} else if (luceneShape instanceof Polygon) {
indexFields(context, XLatLonShape.createIndexableFields(name(), (Polygon) luceneShape));
} else if (luceneShape instanceof double[][]) {
double[][] pts = (double[][]) luceneShape;
for (int i = 0; i < pts.length; ++i) {
indexFields(context, XLatLonShape.createIndexableFields(name(), pts[i][1], pts[i][0]));
}
} else if (luceneShape instanceof Line[]) {
Line[] lines = (Line[]) luceneShape;
for (int i = 0; i < lines.length; ++i) {
indexFields(context, XLatLonShape.createIndexableFields(name(), lines[i]));
}
} else if (luceneShape instanceof Polygon[]) {
Polygon[] polys = (Polygon[]) luceneShape;
for (int i = 0; i < polys.length; ++i) {
indexFields(context, XLatLonShape.createIndexableFields(name(), polys[i]));
}
} else if (luceneShape instanceof Rectangle) {
// index rectangle as a polygon
Rectangle r = (Rectangle) luceneShape;
Polygon p = new Polygon(new double[]{r.minLat, r.minLat, r.maxLat, r.maxLat, r.minLat},
new double[]{r.minLon, r.maxLon, r.maxLon, r.minLon, r.minLon});
indexFields(context, XLatLonShape.createIndexableFields(name(), p));
} else if (luceneShape instanceof Object[]) {
// recurse to index geometry collection
for (Object o : (Object[]) luceneShape) {
indexShape(context, o);
}
} else {
throw new IllegalArgumentException("invalid shape type found [" + luceneShape.getClass() + "] while indexing shape");
}
}
public Explicit<Boolean> coerce() {
return coerce;
}
public Explicit<Boolean> ignoreMalformed() {
return ignoreMalformed;
}
public Explicit<Boolean> ignoreZValue() {
return ignoreZValue;
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
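// adds the given Lucene shape fields to the document, along with the matching _field_names entries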
private void indexFields(ParseContext context, Field[] fields) {
ArrayList<IndexableField> flist = new ArrayList<>(Arrays.asList(fields));
createFieldNamesField(context, flist);
for (IndexableField f : flist) {
context.doc().add(f);
}
}
}