mirror of https://github.com/apache/lucene.git
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/lucene-solr into feature/autoscaling
commit 310bcd7cbe
@@ -1,5 +1,6 @@
# .
/eclipse-build
/maven-build
/classes
build
/idea-build

@@ -41,6 +41,14 @@ Lucene and Solr, run:

`ant compile`

If you see an error about Ivy missing while invoking Ant (e.g., `.ant/lib does
not exist`), run `ant ivy-bootstrap` and retry.

Sometimes you may face issues with Ivy (e.g., an incompletely downloaded artifact).
Cleaning up the Ivy cache and retrying is a workaround for most such issues:

`rm -rf ~/.ivy2/cache`

The Solr server can then be packaged and prepared for startup by running the
following command from the `solr/` directory:

@@ -66,6 +66,13 @@
    </foaf:Person>
  </maintainer>

  <release>
    <Version>
      <name>lucene-7.0.0</name>
      <created>2017-09-20</created>
      <revision>7.0.0</revision>
    </Version>
  </release>
  <release>
    <Version>
      <name>lucene-6.6.1</name>

@@ -66,6 +66,13 @@
    </foaf:Person>
  </maintainer>

  <release>
    <Version>
      <name>solr-7.0.0</name>
      <created>2017-09-20</created>
      <revision>7.0.0</revision>
    </Version>
  </release>
  <release>
    <Version>
      <name>solr-6.6.1</name>

@@ -71,5 +71,18 @@
        </excludes>
      </testResource>
    </testResources>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-jar-plugin</artifactId>
        <executions>
          <execution>
            <goals>
              <goal>test-jar</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>
</project>

@@ -45,6 +45,13 @@
      <artifactId>lucene-test-framework</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.lucene</groupId>
      <artifactId>lucene-spatial3d</artifactId>
      <version>${project.version}</version>
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>
    @lucene-spatial-extras.internal.dependencies@
    @lucene-spatial-extras.external.dependencies@
    @lucene-spatial-extras.internal.test.dependencies@

@@ -53,5 +53,18 @@
  <build>
    <sourceDirectory>${module-path}/src/java</sourceDirectory>
    <testSourceDirectory>${module-path}/src/test</testSourceDirectory>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-jar-plugin</artifactId>
        <executions>
          <execution>
            <goals>
              <goal>test-jar</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>
</project>

@@ -230,6 +230,11 @@
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-javadoc-plugin</artifactId>
        <version>2.9.1</version>
        <configuration>
          <quiet>true</quiet>
          <additionalparam>-Xdoclint:all</additionalparam>
          <additionalparam>-Xdoclint:-missing</additionalparam>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>

@@ -61,6 +61,13 @@
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.lucene</groupId>
      <artifactId>lucene-backward-codecs</artifactId>
      <version>${project.version}</version>
      <type>test-jar</type>
      <scope>test</scope>
    </dependency>
    @solr-core.internal.test.dependencies@
    @solr-core.external.test.dependencies@
  </dependencies>

@ -31,12 +31,24 @@ import re
|
|||
import shutil
|
||||
|
||||
def create_and_add_index(source, indextype, index_version, current_version, temp_dir):
|
||||
if not current_version.is_back_compat_with(index_version):
|
||||
prefix = 'unsupported'
|
||||
else:
|
||||
prefix = {
|
||||
'cfs': 'index',
|
||||
'nocfs': 'index',
|
||||
'sorted': 'sorted',
|
||||
'moreterms': 'moreterms',
|
||||
'dvupdates': 'dvupdates',
|
||||
'emptyIndex': 'empty'
|
||||
}[indextype]
|
||||
if indextype in ('cfs', 'nocfs'):
|
||||
dirname = 'index.%s' % indextype
|
||||
filename = '%s.%s-%s.zip' % (prefix, index_version, indextype)
|
||||
else:
|
||||
dirname = indextype
|
||||
prefix = 'index' if current_version.is_back_compat_with(index_version) else 'unsupported'
|
||||
filename = '%s.%s-%s.zip' % (prefix, index_version, indextype)
|
||||
filename = '%s.%s.zip' % (prefix, index_version)
|
||||
|
||||
print(' creating %s...' % filename, end='', flush=True)
|
||||
module = 'backward-codecs'
|
||||
index_dir = os.path.join('lucene', module, 'src/test/org/apache/lucene/index')
|
||||
|
@ -47,7 +59,11 @@ def create_and_add_index(source, indextype, index_version, current_version, temp
|
|||
|
||||
test = {
|
||||
'cfs': 'testCreateCFS',
|
||||
'nocfs': 'testCreateNoCFS'
|
||||
'nocfs': 'testCreateNoCFS',
|
||||
'sorted': 'testCreateSortedIndex',
|
||||
'moreterms': 'testCreateMoreTermsIndex',
|
||||
'dvupdates': 'testCreateIndexWithDocValuesUpdates',
|
||||
'emptyIndex': 'testCreateEmptyIndex'
|
||||
}[indextype]
|
||||
ant_args = ' '.join([
|
||||
'-Dtests.bwcdir=%s' % temp_dir,
|
||||
|
@ -83,27 +99,35 @@ def create_and_add_index(source, indextype, index_version, current_version, temp
|
|||
print('done')
|
||||
|
||||
def update_backcompat_tests(types, index_version, current_version):
|
||||
print(' adding new indexes to backcompat tests...', end='', flush=True)
|
||||
print(' adding new indexes %s to backcompat tests...' % types, end='', flush=True)
|
||||
module = 'lucene/backward-codecs'
|
||||
filename = '%s/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java' % module
|
||||
matcher = re.compile(r'final static String\[\] oldNames = {|};' if current_version.is_back_compat_with(index_version)
|
||||
else r'final String\[\] unsupportedNames = {|};')
|
||||
if not current_version.is_back_compat_with(index_version):
|
||||
matcher = re.compile(r'final String\[\] unsupportedNames = {|};'),
|
||||
elif 'sorted' in types:
|
||||
matcher = re.compile(r'final static String\[\] oldSortedNames = {|};')
|
||||
else:
|
||||
matcher = re.compile(r'final static String\[\] oldNames = {|};')
|
||||
|
||||
strip_dash_suffix_re = re.compile(r'-.*')
|
||||
|
||||
def find_version(x):
|
||||
x = x.strip()
|
||||
end = x.index("-")
|
||||
return scriptutil.Version.parse(x[1:end])
|
||||
x = re.sub(strip_dash_suffix_re, '', x) # remove the -suffix if any
|
||||
return scriptutil.Version.parse(x)
|
||||
|
||||
class Edit(object):
|
||||
start = None
|
||||
def __call__(self, buffer, match, line):
|
||||
if self.start:
|
||||
# find where this version should exist
|
||||
i = len(buffer) - 1
|
||||
v = find_version(buffer[i])
|
||||
while i >= self.start and v.on_or_after(index_version):
|
||||
i -= 1
|
||||
i = len(buffer) - 1
|
||||
previous_version_exists = not ('};' in line and buffer[-1].strip().endswith("{"))
|
||||
if previous_version_exists: # Only look if there is a version here
|
||||
v = find_version(buffer[i])
|
||||
while i >= self.start and v.on_or_after(index_version):
|
||||
i -= 1
|
||||
v = find_version(buffer[i])
|
||||
i += 1 # readjust since we skipped past by 1
|
||||
|
||||
# unfortunately python doesn't have a range remove from list...
|
||||
|
@ -111,14 +135,20 @@ def update_backcompat_tests(types, index_version, current_version):
|
|||
while i < len(buffer) and index_version.on_or_after(find_version(buffer[i])):
|
||||
buffer.pop(i)
|
||||
|
||||
if i == len(buffer) and not buffer[-1].strip().endswith(","):
|
||||
if i == len(buffer) and previous_version_exists and not buffer[-1].strip().endswith(","):
|
||||
# add comma
|
||||
buffer[-1] = buffer[-1].rstrip() + ",\n"
|
||||
|
||||
last = buffer[-1]
|
||||
spaces = ' ' * (len(last) - len(last.lstrip()))
|
||||
if previous_version_exists:
|
||||
last = buffer[-1]
|
||||
spaces = ' ' * (len(last) - len(last.lstrip()))
|
||||
else:
|
||||
spaces = ' '
|
||||
for (j, t) in enumerate(types):
|
||||
newline = spaces + ('"%s-%s"' % (index_version, t))
|
||||
if t == 'sorted':
|
||||
newline = spaces + ('"sorted.%s"') % index_version
|
||||
else:
|
||||
newline = spaces + ('"%s-%s"' % (index_version, t))
|
||||
if j < len(types) - 1 or i < len(buffer):
|
||||
newline += ','
|
||||
buffer.insert(i, newline + '\n')
|
||||
|
@ -215,9 +245,16 @@ def main():
|
|||
current_version = scriptutil.Version.parse(scriptutil.find_current_version())
|
||||
create_and_add_index(source, 'cfs', c.version, current_version, c.temp_dir)
|
||||
create_and_add_index(source, 'nocfs', c.version, current_version, c.temp_dir)
|
||||
create_and_add_index(source, 'sorted', c.version, current_version, c.temp_dir)
|
||||
if c.version.minor == 0 and c.version.bugfix == 0 and c.version.major < current_version.major:
|
||||
create_and_add_index(source, 'moreterms', c.version, current_version, c.temp_dir)
|
||||
create_and_add_index(source, 'dvupdates', c.version, current_version, c.temp_dir)
|
||||
create_and_add_index(source, 'emptyIndex', c.version, current_version, c.temp_dir)
|
||||
print ('\nMANUAL UPDATE REQUIRED: edit TestBackwardsCompatibility to enable moreterms, dvupdates, and empty index testing')
|
||||
|
||||
print('\nAdding backwards compatibility tests')
|
||||
update_backcompat_tests(['cfs', 'nocfs'], c.version, current_version)
|
||||
update_backcompat_tests(['sorted'], c.version, current_version)
|
||||
|
||||
print('\nTesting changes')
|
||||
check_backcompat_tests()
|
||||
|
|
|
@@ -53,9 +53,14 @@ def runAndSendGPGPassword(command, password):
      p.stdin.write((password + '\n').encode('UTF-8'))
      p.stdin.write('\n'.encode('UTF-8'))

  result = p.poll()
  if result is not None:
    msg = ' FAILED: %s [see log %s]' % (command, LOG)
  try:
    result = p.wait(timeout=120)
    if result != 0:
      msg = ' FAILED: %s [see log %s]' % (command, LOG)
      print(msg)
      raise RuntimeError(msg)
  except TimeoutExpired:
    msg = ' FAILED: %s [timed out after 2 minutes; see log %s]' % (command, LOG)
    print(msg)
    raise RuntimeError(msg)

@@ -25,6 +25,19 @@ New Features

* LUCENE-7940: Add BengaliAnalyzer. (Md. Abdulla-Al-Sun via Robert Muir)

* LUCENE-7392: Add point based LatLonBoundingBox as new RangeField Type.
  (Nick Knize)

* LUCENE-7951: Spatial-extras has much better Geo3d support by implementing Spatial4j
  abstractions: SpatialContextFactory, ShapeFactory, BinaryCodec, DistanceCalculator.
  (Ignacio Vera, David Smiley)

* LUCENE-7973: Update dictionary version for Ukrainian analyzer to 3.9.0 (Andriy
  Rysin via Dawid Weiss)

* LUCENE-7974: Add FloatPointNearestNeighbor, an N-dimensional FloatPoint
  K-nearest-neighbor search implementation. (Steve Rowe)

Optimizations

* LUCENE-7905: Optimize how OrdinalMap (used by

@@ -73,6 +86,9 @@ Build

* SOLR-11181: Switch order of maven artifact publishing procedure: deploy first
  instead of locally installing first, to work around a double repository push of
  *-sources.jar and *-javadoc.jar files. (Lynn Monson via Steve Rowe)

* LUCENE-6673: Maven build fails for target javadoc:jar.
  (Ramkumar Aiyengar, Daniel Collins via Steve Rowe)

Other

@@ -82,6 +98,12 @@ Other

* LUCENE-7933: LongBitSet now validates the numBits parameter (Won
  Jonghoon, Mike McCandless)

* LUCENE-7978: Add some more documentation about setting up build
  environment. (Anton R. Yuste via Uwe Schindler)

======================= Lucene 7.0.1 =======================
(No Changes)

======================= Lucene 7.0.0 =======================

New Features

@ -152,11 +152,10 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
MockAnalyzer analyzer = new MockAnalyzer(random());
|
||||
analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
|
||||
|
||||
// TODO: remove randomness
|
||||
IndexWriterConfig conf = new IndexWriterConfig(analyzer)
|
||||
.setMergePolicy(mp).setUseCompoundFile(false);
|
||||
IndexWriter writer = new IndexWriter(dir, conf);
|
||||
LineFileDocs docs = new LineFileDocs(null);
|
||||
LineFileDocs docs = new LineFileDocs(new Random(0));
|
||||
for(int i=0;i<50;i++) {
|
||||
writer.addDocument(docs.nextDoc());
|
||||
}
|
||||
|
@ -275,9 +274,34 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
dir.close();
|
||||
}
|
||||
|
||||
public void testCreateEmptyIndex() throws Exception {
|
||||
Path indexDir = getIndexDir().resolve("emptyIndex");
|
||||
Files.deleteIfExists(indexDir);
|
||||
IndexWriterConfig conf = new IndexWriterConfig(new MockAnalyzer(random()))
|
||||
.setUseCompoundFile(false).setMergePolicy(NoMergePolicy.INSTANCE);
|
||||
try (Directory dir = newFSDirectory(indexDir);
|
||||
IndexWriter writer = new IndexWriter(dir, conf)) {
|
||||
writer.flush();
|
||||
}
|
||||
}
|
||||
|
||||
final static String[] oldNames = {
|
||||
"7.0.0-cfs",
|
||||
"7.0.0-nocfs"
|
||||
};
|
||||
|
||||
public static String[] getOldNames() {
|
||||
return oldNames;
|
||||
}
|
||||
|
||||
final static String[] oldSortedNames = {
|
||||
"sorted.7.0.0"
|
||||
};
|
||||
|
||||
public static String[] getOldSortedNames() {
|
||||
return oldSortedNames;
|
||||
}
|
||||
|
||||
final String[] unsupportedNames = {
|
||||
"1.9.0-cfs",
|
||||
"1.9.0-nocfs",
|
||||
|
@ -440,7 +464,11 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
// TODO: on 6.0.0 release, gen the single segment indices and add here:
|
||||
final static String[] oldSingleSegmentNames = {
|
||||
};
|
||||
|
||||
|
||||
public static String[] getOldSingleSegmentNames() {
|
||||
return oldSingleSegmentNames;
|
||||
}
|
||||
|
||||
static Map<String,Directory> oldIndexDirs;
|
||||
|
||||
/**
|
||||
|
@ -757,7 +785,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
Directory targetDir2 = newDirectory();
|
||||
IndexWriter w = new IndexWriter(targetDir2, newIndexWriterConfig(new MockAnalyzer(random())));
|
||||
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> TestUtil.addIndexesSlowly(w, reader));
|
||||
assertEquals(e.getMessage(), "Cannot merge a segment that has been created with major version 6 into this index which has been created by major version 7");
|
||||
assertEquals(e.getMessage(), "Cannot merge a segment that has been created with major version 7 into this index which has been created by major version 8");
|
||||
w.close();
|
||||
targetDir2.close();
|
||||
|
||||
|
@ -1441,14 +1469,13 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
public static final String emptyIndex = "empty.7.0.0.zip";
|
||||
|
||||
public void testUpgradeEmptyOldIndex() throws Exception {
|
||||
assumeTrue("Reenable when 7.0 is released", false);
|
||||
Path oldIndexDir = createTempDir("emptyIndex");
|
||||
TestUtil.unzip(getDataInputStream(emptyIndex), oldIndexDir);
|
||||
Directory dir = newFSDirectory(oldIndexDir);
|
||||
|
||||
newIndexUpgrader(dir).upgrade();
|
||||
|
||||
checkAllSegmentsUpgraded(dir, 6);
|
||||
checkAllSegmentsUpgraded(dir, 7);
|
||||
|
||||
dir.close();
|
||||
}
|
||||
|
@ -1456,7 +1483,6 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
public static final String moreTermsIndex = "moreterms.7.0.0.zip";
|
||||
|
||||
public void testMoreTerms() throws Exception {
|
||||
assumeTrue("Reenable when 7.0 is released", false);
|
||||
Path oldIndexDir = createTempDir("moreterms");
|
||||
TestUtil.unzip(getDataInputStream(moreTermsIndex), oldIndexDir);
|
||||
Directory dir = newFSDirectory(oldIndexDir);
|
||||
|
@ -1501,7 +1527,6 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
}
|
||||
|
||||
public void testDocValuesUpdates() throws Exception {
|
||||
assumeTrue("Reenable when 7.0 is released", false);
|
||||
Path oldIndexDir = createTempDir("dvupdates");
|
||||
TestUtil.unzip(getDataInputStream(dvUpdatesIndex), oldIndexDir);
|
||||
Directory dir = newFSDirectory(oldIndexDir);
|
||||
|
@ -1564,12 +1589,10 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
|
|||
}
|
||||
|
||||
public void testSortedIndex() throws Exception {
|
||||
assumeTrue("Reenable when 7.0 is released", false);
|
||||
String[] versions = new String[] {};
|
||||
for(String version : versions) {
|
||||
for(String name : oldSortedNames) {
|
||||
Path path = createTempDir("sorted");
|
||||
InputStream resource = TestBackwardsCompatibility.class.getResourceAsStream("sorted." + version + ".zip");
|
||||
assertNotNull("Sorted index index " + version + " not found", resource);
|
||||
InputStream resource = TestBackwardsCompatibility.class.getResourceAsStream(name + ".zip");
|
||||
assertNotNull("Sorted index index " + name + " not found", resource);
|
||||
TestUtil.unzip(resource, path);
|
||||
|
||||
// TODO: more tests
|
||||
|
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@@ -88,6 +88,7 @@ public abstract class ClassificationTestBase<T> extends LuceneTestCase {

  protected ClassificationResult<T> checkCorrectClassification(Classifier<T> classifier, String inputDoc, T expectedResult) throws Exception {
    ClassificationResult<T> classificationResult = classifier.assignClass(inputDoc);
    assertNotNull(classificationResult);
    T assignedClass = classificationResult.getAssignedClass();
    assertNotNull(assignedClass);
    assertEquals("got an assigned class of " + assignedClass, expectedResult instanceof BytesRef ? ((BytesRef) expectedResult).utf8ToString() : expectedResult, assignedClass instanceof BytesRef ? ((BytesRef) assignedClass).utf8ToString() : assignedClass);

@@ -39,6 +39,13 @@ public final class Version {
  @Deprecated
  public static final Version LUCENE_7_0_0 = new Version(7, 0, 0);

  /**
   * Match settings and bugs in Lucene's 7.0.1 release.
   * @deprecated Use latest
   */
  @Deprecated
  public static final Version LUCENE_7_0_1 = new Version(7, 0, 1);

  /**
   * Match settings and bugs in Lucene's 7.1.0 release.
   * @deprecated Use latest

@@ -156,7 +156,7 @@ public class TestIntRangeFieldQueries extends BaseRangeFieldQueryTestCase {
  }

  /** IntRange test class implementation - use to validate IntRange */
  private class IntTestRange extends Range {
  protected class IntTestRange extends Range {
    int[] min;
    int[] max;

@@ -809,7 +809,7 @@ public class DirectoryTaxonomyWriter implements TaxonomyWriter {

  /**
   * Mapping from old ordinal to new ordinals, used when merging indexes
   * wit separate taxonomies.
   * with separate taxonomies.
   * <p>
   * addToTaxonomies() merges one or more taxonomies into the given taxonomy
   * (this). An OrdinalMap is filled for each of the added taxonomies,

@@ -189,9 +189,9 @@ org.apache.uima.version = 2.3.1
/org.apache.xmlbeans/xmlbeans = 2.6.0
/org.apache.zookeeper/zookeeper = 3.4.10

# v1.6.0-alpha.3 of asciidoctor-ant includes asciidoctorj-pdf 1.5.0-alpha.15,
# which is the same as asciidoctor-pdf 1.5.0-alpha.15
/org.asciidoctor/asciidoctor-ant = 1.6.0-alpha.3
# v1.6.0-alpha.5 of asciidoctor-ant includes asciidoctorj-pdf 1.5.0-alpha.16,
# which is the same as asciidoctor-pdf 1.5.0-alpha.16
/org.asciidoctor/asciidoctor-ant = 1.6.0-alpha.5

/org.aspectj/aspectjrt = 1.8.0

@@ -278,7 +278,7 @@ org.slf4j.version = 1.7.7
/org.tukaani/xz = 1.5
/rome/rome = 1.0

ua.net.nlp.morfologik-ukrainian-search.version = 3.7.6
ua.net.nlp.morfologik-ukrainian-search.version = 3.9.0
/ua.net.nlp/morfologik-ukrainian-search = ${ua.net.nlp.morfologik-ukrainian-search.version}

/xerces/xercesImpl = 2.9.1

@@ -1 +0,0 @@
8d2c4bf006f59227bcba8885b4602b3a8b5bd799

@@ -0,0 +1 @@
9790241f6d0d6cefd48e0d2532bd59097fb0340a

@ -782,11 +782,7 @@ public final class MoreLikeThis {
|
|||
* @param vector List of terms and their frequencies for a doc/field
|
||||
*/
|
||||
private void addTermFrequencies(Map<String, Map<String, Int>> field2termFreqMap, Terms vector, String fieldName) throws IOException {
|
||||
Map<String, Int> termFreqMap = field2termFreqMap.get(fieldName);
|
||||
if (termFreqMap == null) {
|
||||
termFreqMap = new HashMap<>();
|
||||
field2termFreqMap.put(fieldName, termFreqMap);
|
||||
}
|
||||
Map<String, Int> termFreqMap = field2termFreqMap.computeIfAbsent(fieldName, k -> new HashMap<>());
|
||||
final TermsEnum termsEnum = vector.iterator();
|
||||
final CharsRefBuilder spare = new CharsRefBuilder();
|
||||
BytesRef text;
|
||||
|
@ -823,11 +819,7 @@ public final class MoreLikeThis {
|
|||
throw new UnsupportedOperationException("To use MoreLikeThis without " +
|
||||
"term vectors, you must provide an Analyzer");
|
||||
}
|
||||
Map<String, Int> termFreqMap = perFieldTermFrequencies.get(fieldName);
|
||||
if (termFreqMap == null) {
|
||||
termFreqMap = new HashMap<>();
|
||||
perFieldTermFrequencies.put(fieldName, termFreqMap);
|
||||
}
|
||||
Map<String, Int> termFreqMap = perFieldTermFrequencies.computeIfAbsent(fieldName, k -> new HashMap<>());
|
||||
try (TokenStream ts = analyzer.tokenStream(fieldName, r)) {
|
||||
int tokenCount = 0;
|
||||
// for every token
|
||||
|
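Both hunks above swap the same look-up / null-check / put idiom for Map.computeIfAbsent. A
minimal, self-contained sketch of that refactoring pattern (hypothetical names, not the actual
MoreLikeThis fields):

import java.util.HashMap;
import java.util.Map;

class ComputeIfAbsentSketch {
  // Before: fetch, test for null, create and put explicitly.
  static Map<String, Integer> before(Map<String, Map<String, Integer>> perField, String field) {
    Map<String, Integer> freqs = perField.get(field);
    if (freqs == null) {
      freqs = new HashMap<>();
      perField.put(field, freqs);
    }
    return freqs;
  }

  // After: one call; the mapping function runs only when the key is absent.
  static Map<String, Integer> after(Map<String, Map<String, Integer>> perField, String field) {
    return perField.computeIfAbsent(field, k -> new HashMap<>());
  }
}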
@ -906,7 +898,7 @@ public final class MoreLikeThis {
|
|||
* @see #retrieveInterestingTerms(java.io.Reader, String)
|
||||
*/
|
||||
public String[] retrieveInterestingTerms(int docNum) throws IOException {
|
||||
ArrayList<Object> al = new ArrayList<>(maxQueryTerms);
|
||||
ArrayList<String> al = new ArrayList<>(maxQueryTerms);
|
||||
PriorityQueue<ScoreTerm> pq = retrieveTerms(docNum);
|
||||
ScoreTerm scoreTerm;
|
||||
int lim = maxQueryTerms; // have to be careful, retrieveTerms returns all words but that's probably not useful to our caller...
|
||||
|
@ -929,7 +921,7 @@ public final class MoreLikeThis {
|
|||
* @see #setMaxQueryTerms
|
||||
*/
|
||||
public String[] retrieveInterestingTerms(Reader r, String fieldName) throws IOException {
|
||||
ArrayList<Object> al = new ArrayList<>(maxQueryTerms);
|
||||
ArrayList<String> al = new ArrayList<>(maxQueryTerms);
|
||||
PriorityQueue<ScoreTerm> pq = retrieveTerms(r, fieldName);
|
||||
ScoreTerm scoreTerm;
|
||||
int lim = maxQueryTerms; // have to be careful, retrieveTerms returns all words but that's probably not useful to our caller...
|
||||
|
|
|
@ -0,0 +1,383 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.lucene.document;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.PriorityQueue;
|
||||
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.index.PointValues;
|
||||
import org.apache.lucene.search.FieldDoc;
|
||||
import org.apache.lucene.search.IndexSearcher;
|
||||
import org.apache.lucene.search.ScoreDoc;
|
||||
import org.apache.lucene.search.TopFieldDocs;
|
||||
import org.apache.lucene.util.Bits;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.bkd.BKDReader;
|
||||
|
||||
/**
|
||||
* KNN search on top of N dimensional indexed float points.
|
||||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
public class FloatPointNearestNeighbor {
|
||||
|
||||
static class Cell implements Comparable<Cell> {
|
||||
final int readerIndex;
|
||||
final byte[] minPacked;
|
||||
final byte[] maxPacked;
|
||||
final BKDReader.IndexTree index;
|
||||
|
||||
/** The closest possible distance^2 of all points in this cell */
|
||||
final double distanceSquared;
|
||||
|
||||
Cell(BKDReader.IndexTree index, int readerIndex, byte[] minPacked, byte[] maxPacked, double distanceSquared) {
|
||||
this.index = index;
|
||||
this.readerIndex = readerIndex;
|
||||
this.minPacked = minPacked.clone();
|
||||
this.maxPacked = maxPacked.clone();
|
||||
this.distanceSquared = distanceSquared;
|
||||
}
|
||||
|
||||
public int compareTo(Cell other) {
|
||||
return Double.compare(distanceSquared, other.distanceSquared);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Cell(readerIndex=" + readerIndex + " nodeID=" + index.getNodeID()
|
||||
+ " isLeaf=" + index.isLeafNode() + " distanceSquared=" + distanceSquared + ")";
|
||||
}
|
||||
}
|
||||
|
||||
private static class NearestVisitor implements PointValues.IntersectVisitor {
|
||||
int curDocBase;
|
||||
Bits curLiveDocs;
|
||||
final int topN;
|
||||
final PriorityQueue<NearestHit> hitQueue;
|
||||
final float[] origin;
|
||||
private int dims;
|
||||
private int updateMinMaxCounter;
|
||||
private float[] min;
|
||||
private float[] max;
|
||||
|
||||
|
||||
public NearestVisitor(PriorityQueue<NearestHit> hitQueue, int topN, float[] origin) {
|
||||
this.hitQueue = hitQueue;
|
||||
this.topN = topN;
|
||||
this.origin = origin;
|
||||
dims = origin.length;
|
||||
min = new float[dims];
|
||||
max = new float[dims];
|
||||
Arrays.fill(min, Float.NEGATIVE_INFINITY);
|
||||
Arrays.fill(max, Float.POSITIVE_INFINITY);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(int docID) {
|
||||
throw new AssertionError();
|
||||
}
|
||||
|
||||
private static final int MANTISSA_BITS = 23;
|
||||
|
||||
/**
|
||||
* Returns the minimum value that will change the given distance when added to it.
|
||||
*
|
||||
* This value is calculated from the distance exponent reduced by (at most) 23,
|
||||
* the number of bits in a float mantissa. This is necessary when the result of
|
||||
* subtracting/adding the distance in a single dimension has an exponent that
|
||||
* differs significantly from that of the distance value. Without this fudge
|
||||
* factor (i.e. only subtracting/adding the distance), cells and values can be
|
||||
* inappropriately judged as outside the search radius.
|
||||
*/
|
||||
private float getMinDelta(float distance) {
|
||||
int exponent = Float.floatToIntBits(distance) >> MANTISSA_BITS; // extract biased exponent (distance is positive)
|
||||
if (exponent == 0) {
|
||||
return Float.MIN_VALUE;
|
||||
} else {
|
||||
exponent = exponent <= MANTISSA_BITS ? 1 : exponent - MANTISSA_BITS; // Avoid underflow
|
||||
return Float.intBitsToFloat(exponent << MANTISSA_BITS);
|
||||
}
|
||||
}
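// Note (illustrative, not part of the patch): for a normal-range distance the value returned
// above is one unit in the last place (ulp) of that distance, the smallest increment that
// cannot be rounded away. For example, distance = 1.0e6f has biased exponent 146, so
// getMinDelta returns 2^-4 = 0.0625f: (1.0e6f + 0.01f == 1.0e6f) is true, while adding
// 0.0625f always yields a strictly larger float.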
|
||||
|
||||
private void maybeUpdateMinMax() {
|
||||
if (updateMinMaxCounter < 1024 || (updateMinMaxCounter & 0x3F) == 0x3F) {
|
||||
NearestHit hit = hitQueue.peek();
|
||||
float distance = (float)Math.sqrt(hit.distanceSquared);
|
||||
float minDelta = getMinDelta(distance);
|
||||
// String oldMin = Arrays.toString(min);
|
||||
// String oldMax = Arrays.toString(max);
|
||||
for (int d = 0 ; d < dims ; ++d) {
|
||||
min[d] = (origin[d] - distance) - minDelta;
|
||||
max[d] = (origin[d] + distance) + minDelta;
|
||||
// System.out.println("origin[" + d + "] (" + origin[d] + ") - distance (" + distance + ") - minDelta (" + minDelta + ") = min[" + d + "] (" + min[d] + ")");
|
||||
// System.out.println("origin[" + d + "] (" + origin[d] + ") + distance (" + distance + ") + minDelta (" + minDelta + ") = max[" + d + "] (" + max[d] + ")");
|
||||
}
|
||||
// System.out.println("maybeUpdateMinMax: min: " + oldMin + " -> " + Arrays.toString(min) + " max: " + oldMax + " -> " + Arrays.toString(max));
|
||||
}
|
||||
++updateMinMaxCounter;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void visit(int docID, byte[] packedValue) {
|
||||
// System.out.println("visit docID=" + docID + " liveDocs=" + curLiveDocs);
|
||||
|
||||
if (curLiveDocs != null && curLiveDocs.get(docID) == false) {
|
||||
return;
|
||||
}
|
||||
|
||||
float[] docPoint = new float[dims];
|
||||
for (int d = 0, offset = 0 ; d < dims ; ++d, offset += Float.BYTES) {
|
||||
docPoint[d] = FloatPoint.decodeDimension(packedValue, offset);
|
||||
if (docPoint[d] > max[d] || docPoint[d] < min[d]) {
|
||||
|
||||
// if (docPoint[d] > max[d]) {
|
||||
// System.out.println(" skipped because docPoint[" + d + "] (" + docPoint[d] + ") > max[" + d + "] (" + max[d] + ")");
|
||||
// } else {
|
||||
// System.out.println(" skipped because docPoint[" + d + "] (" + docPoint[d] + ") < min[" + d + "] (" + min[d] + ")");
|
||||
// }
|
||||
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
double distanceSquared = euclideanDistanceSquared(origin, docPoint);
|
||||
|
||||
// System.out.println(" visit docID=" + docID + " distanceSquared=" + distanceSquared + " value: " + Arrays.toString(docPoint));
|
||||
|
||||
int fullDocID = curDocBase + docID;
|
||||
|
||||
if (hitQueue.size() == topN) { // queue already full
|
||||
NearestHit bottom = hitQueue.peek();
|
||||
// System.out.println(" bottom distanceSquared=" + bottom.distanceSquared);
|
||||
if (distanceSquared < bottom.distanceSquared
|
||||
// we don't collect docs in order here, so we must also test the tie-break case ourselves:
|
||||
|| (distanceSquared == bottom.distanceSquared && fullDocID < bottom.docID)) {
|
||||
hitQueue.poll();
|
||||
bottom.docID = fullDocID;
|
||||
bottom.distanceSquared = distanceSquared;
|
||||
hitQueue.offer(bottom);
|
||||
// System.out.println(" ** keep1, now bottom=" + bottom);
|
||||
maybeUpdateMinMax();
|
||||
}
|
||||
} else {
|
||||
NearestHit hit = new NearestHit();
|
||||
hit.docID = fullDocID;
|
||||
hit.distanceSquared = distanceSquared;
|
||||
hitQueue.offer(hit);
|
||||
// System.out.println(" ** keep2, new addition=" + hit);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
|
||||
throw new AssertionError();
|
||||
}
|
||||
}
|
||||
|
||||
/** Holds one hit from {@link FloatPointNearestNeighbor#nearest} */
|
||||
static class NearestHit {
|
||||
public int docID;
|
||||
public double distanceSquared;
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "NearestHit(docID=" + docID + " distanceSquared=" + distanceSquared + ")";
|
||||
}
|
||||
}
|
||||
|
||||
private static NearestHit[] nearest(List<BKDReader> readers, List<Bits> liveDocs, List<Integer> docBases, final int topN, float[] origin) throws IOException {
|
||||
|
||||
// System.out.println("NEAREST: readers=" + readers + " liveDocs=" + liveDocs + " origin: " + Arrays.toString(origin));
|
||||
|
||||
// Holds closest collected points seen so far:
|
||||
// TODO: if we used lucene's PQ we could just updateTop instead of poll/offer:
|
||||
final PriorityQueue<NearestHit> hitQueue = new PriorityQueue<>(topN, (a, b) -> {
|
||||
// sort by opposite distance natural order
|
||||
int cmp = Double.compare(a.distanceSquared, b.distanceSquared);
|
||||
return cmp != 0 ? -cmp : b.docID - a.docID; // tie-break by higher docID
|
||||
});
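// Note (descriptive, added for clarity): because the comparator reverses the natural distance
// order, the head of this queue (peek) is always the current *worst* of the topN hits -- the
// farthest one, with the larger docID losing ties. New candidates (see NearestVisitor.visit
// above) are compared against that head and swapped in via poll/offer when they beat it.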
|
||||
|
||||
// Holds all cells, sorted by closest to the point:
|
||||
PriorityQueue<Cell> cellQueue = new PriorityQueue<>();
|
||||
|
||||
NearestVisitor visitor = new NearestVisitor(hitQueue, topN, origin);
|
||||
List<BKDReader.IntersectState> states = new ArrayList<>();
|
||||
|
||||
// Add root cell for each reader into the queue:
|
||||
int bytesPerDim = -1;
|
||||
|
||||
for (int i = 0 ; i < readers.size() ; ++i) {
|
||||
BKDReader reader = readers.get(i);
|
||||
if (bytesPerDim == -1) {
|
||||
bytesPerDim = reader.getBytesPerDimension();
|
||||
} else if (bytesPerDim != reader.getBytesPerDimension()) {
|
||||
throw new IllegalStateException("bytesPerDim changed from " + bytesPerDim
|
||||
+ " to " + reader.getBytesPerDimension() + " across readers");
|
||||
}
|
||||
byte[] minPackedValue = reader.getMinPackedValue();
|
||||
byte[] maxPackedValue = reader.getMaxPackedValue();
|
||||
BKDReader.IntersectState state = reader.getIntersectState(visitor);
|
||||
states.add(state);
|
||||
|
||||
cellQueue.offer(new Cell(state.index, i, reader.getMinPackedValue(), reader.getMaxPackedValue(),
|
||||
approxBestDistanceSquared(minPackedValue, maxPackedValue, origin)));
|
||||
}
|
||||
|
||||
LOOP_OVER_CELLS: while (cellQueue.size() > 0) {
|
||||
Cell cell = cellQueue.poll();
|
||||
// System.out.println(" visit " + cell);
|
||||
|
||||
// TODO: if we replace approxBestDistance with actualBestDistance, we can put an opto here to break once this "best" cell is fully outside of the hitQueue bottom's radius:
|
||||
BKDReader reader = readers.get(cell.readerIndex);
|
||||
|
||||
if (cell.index.isLeafNode()) {
|
||||
// System.out.println(" leaf");
|
||||
// Leaf block: visit all points and possibly collect them:
|
||||
visitor.curDocBase = docBases.get(cell.readerIndex);
|
||||
visitor.curLiveDocs = liveDocs.get(cell.readerIndex);
|
||||
reader.visitLeafBlockValues(cell.index, states.get(cell.readerIndex));
|
||||
// System.out.println(" now " + hitQueue.size() + " hits");
|
||||
} else {
|
||||
// System.out.println(" non-leaf");
|
||||
// Non-leaf block: split into two cells and put them back into the queue:
|
||||
|
||||
if (hitQueue.size() == topN) {
|
||||
for (int d = 0, offset = 0; d < visitor.dims; ++d, offset += Float.BYTES) {
|
||||
float cellMaxAtDim = FloatPoint.decodeDimension(cell.maxPacked, offset);
|
||||
float cellMinAtDim = FloatPoint.decodeDimension(cell.minPacked, offset);
|
||||
if (cellMaxAtDim < visitor.min[d] || cellMinAtDim > visitor.max[d]) {
|
||||
// this cell is outside our search radius; don't bother exploring any more
|
||||
|
||||
// if (cellMaxAtDim < visitor.min[d]) {
|
||||
// System.out.println(" skipped because cell max at " + d + " (" + cellMaxAtDim + ") < visitor.min[" + d + "] (" + visitor.min[d] + ")");
|
||||
// } else {
|
||||
// System.out.println(" skipped because cell min at " + d + " (" + cellMinAtDim + ") > visitor.max[" + d + "] (" + visitor.max[d] + ")");
|
||||
// }
|
||||
|
||||
continue LOOP_OVER_CELLS;
|
||||
}
|
||||
}
|
||||
}
|
||||
BytesRef splitValue = BytesRef.deepCopyOf(cell.index.getSplitDimValue());
|
||||
int splitDim = cell.index.getSplitDim();
|
||||
|
||||
// we must clone the index so that we we can recurse left and right "concurrently":
|
||||
BKDReader.IndexTree newIndex = cell.index.clone();
|
||||
byte[] splitPackedValue = cell.maxPacked.clone();
|
||||
System.arraycopy(splitValue.bytes, splitValue.offset, splitPackedValue, splitDim * bytesPerDim, bytesPerDim);
|
||||
|
||||
cell.index.pushLeft();
|
||||
cellQueue.offer(new Cell(cell.index, cell.readerIndex, cell.minPacked, splitPackedValue,
|
||||
approxBestDistanceSquared(cell.minPacked, splitPackedValue, origin)));
|
||||
|
||||
splitPackedValue = cell.minPacked.clone();
|
||||
System.arraycopy(splitValue.bytes, splitValue.offset, splitPackedValue, splitDim * bytesPerDim, bytesPerDim);
|
||||
|
||||
newIndex.pushRight();
|
||||
cellQueue.offer(new Cell(newIndex, cell.readerIndex, splitPackedValue, cell.maxPacked,
|
||||
approxBestDistanceSquared(splitPackedValue, cell.maxPacked, origin)));
|
||||
}
|
||||
}
|
||||
|
||||
NearestHit[] hits = new NearestHit[hitQueue.size()];
|
||||
int downTo = hitQueue.size()-1;
|
||||
while (hitQueue.size() != 0) {
|
||||
hits[downTo] = hitQueue.poll();
|
||||
downTo--;
|
||||
}
|
||||
return hits;
|
||||
}
|
||||
|
||||
private static double approxBestDistanceSquared(byte[] minPackedValue, byte[] maxPackedValue, float[] value) {
|
||||
boolean insideCell = true;
|
||||
float[] min = new float[value.length];
|
||||
float[] max = new float[value.length];
|
||||
double[] closest = new double[value.length];
|
||||
for (int i = 0, offset = 0 ; i < value.length ; ++i, offset += Float.BYTES) {
|
||||
min[i] = FloatPoint.decodeDimension(minPackedValue, offset);
|
||||
max[i] = FloatPoint.decodeDimension(maxPackedValue, offset);
|
||||
if (insideCell) {
|
||||
if (value[i] < min[i] || value[i] > max[i]) {
|
||||
insideCell = false;
|
||||
}
|
||||
}
|
||||
double minDiff = Math.abs((double)value[i] - (double)min[i]);
|
||||
double maxDiff = Math.abs((double)value[i] - (double)max[i]);
|
||||
closest[i] = minDiff < maxDiff ? minDiff : maxDiff;
|
||||
}
|
||||
if (insideCell) {
|
||||
return 0.0f;
|
||||
}
|
||||
double sumOfSquaredDiffs = 0.0d;
|
||||
for (int d = 0 ; d < value.length ; ++d) {
|
||||
sumOfSquaredDiffs += closest[d] * closest[d];
|
||||
}
|
||||
return sumOfSquaredDiffs;
|
||||
}
|
||||
|
||||
static double euclideanDistanceSquared(float[] a, float[] b) {
|
||||
double sumOfSquaredDifferences = 0.0d;
|
||||
for (int d = 0 ; d < a.length ; ++d) {
|
||||
double diff = (double)a[d] - (double)b[d];
|
||||
sumOfSquaredDifferences += diff * diff;
|
||||
}
|
||||
return sumOfSquaredDifferences;
|
||||
}
|
||||
|
||||
public static TopFieldDocs nearest(IndexSearcher searcher, String field, int topN, float... origin) throws IOException {
|
||||
if (topN < 1) {
|
||||
throw new IllegalArgumentException("topN must be at least 1; got " + topN);
|
||||
}
|
||||
if (field == null) {
|
||||
throw new IllegalArgumentException("field must not be null");
|
||||
}
|
||||
if (searcher == null) {
|
||||
throw new IllegalArgumentException("searcher must not be null");
|
||||
}
|
||||
List<BKDReader> readers = new ArrayList<>();
|
||||
List<Integer> docBases = new ArrayList<>();
|
||||
List<Bits> liveDocs = new ArrayList<>();
|
||||
int totalHits = 0;
|
||||
for (LeafReaderContext leaf : searcher.getIndexReader().leaves()) {
|
||||
PointValues points = leaf.reader().getPointValues(field);
|
||||
if (points != null) {
|
||||
if (points instanceof BKDReader == false) {
|
||||
throw new IllegalArgumentException("can only run on Lucene60PointsReader points implementation, but got " + points);
|
||||
}
|
||||
totalHits += points.getDocCount();
|
||||
readers.add((BKDReader)points);
|
||||
docBases.add(leaf.docBase);
|
||||
liveDocs.add(leaf.reader().getLiveDocs());
|
||||
}
|
||||
}
|
||||
|
||||
NearestHit[] hits = nearest(readers, liveDocs, docBases, topN, origin);
|
||||
|
||||
// Convert to TopFieldDocs:
|
||||
ScoreDoc[] scoreDocs = new ScoreDoc[hits.length];
|
||||
for(int i=0;i<hits.length;i++) {
|
||||
NearestHit hit = hits[i];
|
||||
scoreDocs[i] = new FieldDoc(hit.docID, 0.0f, new Object[] { (float)Math.sqrt(hit.distanceSquared) });
|
||||
}
|
||||
return new TopFieldDocs(totalHits, scoreDocs, null, 0.0f);
|
||||
}
|
||||
}
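A minimal usage sketch of the public nearest() API above. This is not part of the patch and
assumes a throwaway filesystem index, the default codec (whose points reader is the BKDReader
implementation the method requires), and StandardAnalyzer from lucene-analyzers-common:

import java.nio.file.Files;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.FloatPointNearestNeighbor;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class FloatPointKnnExample {
  public static void main(String[] args) throws Exception {
    try (Directory dir = FSDirectory.open(Files.createTempDirectory("knn"))) {
      try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
        for (float[] p : new float[][] {{40f, 50f}, {45f, 55f}, {-10f, 3f}}) {
          Document doc = new Document();
          doc.add(new FloatPoint("point", p));  // dimensionality is inferred from the values
          writer.addDocument(doc);
        }
      }
      try (DirectoryReader reader = DirectoryReader.open(dir)) {
        IndexSearcher searcher = new IndexSearcher(reader);
        // Two nearest neighbors of (41, 51); each FieldDoc carries the (square-rooted) distance.
        for (ScoreDoc sd : FloatPointNearestNeighbor.nearest(searcher, "point", 2, 41f, 51f).scoreDocs) {
          FieldDoc hit = (FieldDoc) sd;
          System.out.println("doc=" + hit.doc + " distance=" + hit.fields[0]);
        }
      }
    }
  }
}

The tests below disable searcher wrapping for the same reason: the method only works when a
segment's point values are exposed directly by the default (Lucene60) points format.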
|
|
@ -0,0 +1,231 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.lucene.document;
|
||||
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.NumericUtils;
|
||||
|
||||
import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude;
|
||||
import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude;
|
||||
import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude;
|
||||
import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitude;
|
||||
|
||||
/**
|
||||
* An indexed 2-Dimension Bounding Box field for the Geospatial Lat/Lon Coordinate system
|
||||
* <p>
|
||||
* This field indexes 2-dimension Latitude, Longitude based Geospatial Bounding Boxes. The bounding boxes are defined as
|
||||
* {@code minLat, minLon, maxLat, maxLon} where min/max lat,lon pairs using double floating point precision.
|
||||
* <p>
|
||||
* Multiple values for the same field in one document is supported.
|
||||
*
|
||||
* <p>
|
||||
* This field defines the following static factory methods for common search operations over double ranges:
|
||||
* <ul>
|
||||
* <li>{@link #newIntersectsQuery newIntersectsQuery()} matches bounding boxes that intersect the defined search bounding box.
|
||||
* <li>{@link #newWithinQuery newWithinQuery()} matches bounding boxes that are within the defined search bounding box.
|
||||
* <li>{@link #newContainsQuery newContainsQuery()} matches bounding boxes that contain the defined search bounding box.
|
||||
* <li>{@link #newCrossesQuery newCrosses()} matches bounding boxes that cross the defined search bounding box.
|
||||
* </ul>
|
||||
*
|
||||
* <p>
|
||||
* The following Field limitations and restrictions apply:
|
||||
* <ul>
|
||||
* <li>Dateline wrapping is not supported.
|
||||
* <li>Due to an encoding limitation Eastern and Western Hemisphere Bounding Boxes that share the dateline are not supported.
|
||||
* </ul>
|
||||
*/
|
||||
public class LatLonBoundingBox extends Field {
|
||||
/** uses same encoding as {@link LatLonPoint} so numBytes is the same */
|
||||
public static final int BYTES = LatLonPoint.BYTES;
|
||||
|
||||
/**
|
||||
* Create a new 2D GeoBoundingBoxField representing a 2 dimensional geospatial bounding box
|
||||
*
|
||||
* @param name field name. must not be null
|
||||
* @param minLat minimum latitude value (in degrees); valid in [-90.0 : 90.0]
|
||||
* @param minLon minimum longitude value (in degrees); valid in [-180.0 : 180.0]
|
||||
* @param maxLat maximum latitude value (in degrees); valid in [minLat : 90.0]
|
||||
* @param maxLon maximum longitude value (in degrees); valid in [minLon : 180.0]
|
||||
*/
|
||||
public LatLonBoundingBox(String name, final double minLat, final double minLon,
|
||||
final double maxLat, final double maxLon) {
|
||||
super(name, getType(2));
|
||||
setRangeValues(minLat, minLon, maxLat, maxLon);
|
||||
}
|
||||
|
||||
/** set the field type */
|
||||
static FieldType getType(int geoDimensions) {
|
||||
FieldType ft = new FieldType();
|
||||
ft.setDimensions(geoDimensions*2, BYTES);
|
||||
ft.freeze();
|
||||
return ft;
|
||||
}
|
||||
|
||||
/**
|
||||
* Changes the values of the field
|
||||
* @param minLat minimum latitude value (in degrees); valid in [-90.0 : 90.0]
|
||||
* @param minLon minimum longitude value (in degrees); valid in [-180.0 : 180.0]
|
||||
* @param maxLat maximum latitude value (in degrees); valid in [minLat : 90.0]
|
||||
* @param maxLon maximum longitude value (in degrees); valid in [minLon : 180.0]
|
||||
* @throws IllegalArgumentException if {@code min} or {@code max} is invalid
|
||||
*/
|
||||
public void setRangeValues(double minLat, double minLon, double maxLat, double maxLon) {
|
||||
checkArgs(minLat, minLon, maxLat, maxLon);
|
||||
final byte[] bytes;
|
||||
if (fieldsData == null) {
|
||||
bytes = new byte[4*BYTES];
|
||||
fieldsData = new BytesRef(bytes);
|
||||
} else {
|
||||
bytes = ((BytesRef)fieldsData).bytes;
|
||||
}
|
||||
encode(minLat, minLon, bytes, 0);
|
||||
encode(maxLat, maxLon, bytes, 2 * BYTES);
|
||||
}
|
||||
|
||||
/** validate the two-dimension arguments */
|
||||
static void checkArgs(final double minLat, final double minLon, final double maxLat, final double maxLon) {
|
||||
// dateline crossing not supported
|
||||
if (minLon > maxLon) {
|
||||
throw new IllegalArgumentException("cannot have minLon [" + minLon + "] exceed maxLon [" + maxLon + "].");
|
||||
}
|
||||
// pole crossing not supported
|
||||
if (minLat > maxLat) {
|
||||
throw new IllegalArgumentException("cannot have minLat [" + minLat + "] exceed maxLat [" + maxLat + "].");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new 2d query that finds all indexed 2d GeoBoundingBoxField values that intersect the defined
|
||||
* 3d bounding ranges
|
||||
* @param field field name. must not be null
|
||||
* @param minLat minimum latitude value (in degrees); valid in [-90.0 : 90.0]
|
||||
* @param minLon minimum longitude value (in degrees); valid in [-180.0 : 180.0]
|
||||
* @param maxLat maximum latitude value (in degrees); valid in [minLat : 90.0]
|
||||
* @param maxLon maximum longitude value (in degrees); valid in [minLon : 180.0]
|
||||
* @return query for matching intersecting 2d bounding boxes
|
||||
*/
|
||||
public static Query newIntersectsQuery(String field, final double minLat, final double minLon,
|
||||
final double maxLat, final double maxLon) {
|
||||
return newRangeQuery(field, minLat, minLon, maxLat, maxLon, RangeFieldQuery.QueryType.INTERSECTS);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new 2d query that finds all indexed 2d GeoBoundingBoxField values that are within the defined
|
||||
* 2d bounding box
|
||||
* @param field field name. must not be null
|
||||
* @param minLat minimum latitude value (in degrees); valid in [-90.0 : 90.0]
|
||||
* @param minLon minimum longitude value (in degrees); valid in [-180.0 : 180.0]
|
||||
* @param maxLat maximum latitude value (in degrees); valid in [minLat : 90.0]
|
||||
* @param maxLon maximum longitude value (in degrees); valid in [minLon : 180.0]
|
||||
* @return query for matching 3d bounding boxes that are within the defined bounding box
|
||||
*/
|
||||
public static Query newWithinQuery(String field, final double minLat, final double minLon,
|
||||
final double maxLat, final double maxLon) {
|
||||
return newRangeQuery(field, minLat, minLon, maxLat, maxLon, RangeFieldQuery.QueryType.WITHIN);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new 2d query that finds all indexed 2d GeoBoundingBoxField values that contain the defined
|
||||
* 2d bounding box
|
||||
* @param field field name. must not be null
|
||||
* @param minLat minimum latitude value (in degrees); valid in [-90.0 : 90.0]
|
||||
* @param minLon minimum longitude value (in degrees); valid in [-180.0 : 180.0]
|
||||
* @param maxLat maximum latitude value (in degrees); valid in [minLat : 90.0]
|
||||
* @param maxLon maximum longitude value (in degrees); valid in [minLon : 180.0]
|
||||
* @return query for matching 2d bounding boxes that contain the defined bounding box
|
||||
*/
|
||||
public static Query newContainsQuery(String field, final double minLat, final double minLon,
|
||||
final double maxLat, final double maxLon) {
|
||||
return newRangeQuery(field, minLat, minLon, maxLat, maxLon, RangeFieldQuery.QueryType.CONTAINS);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new 2d query that finds all indexed 2d GeoBoundingBoxField values that cross the defined
|
||||
* 3d bounding box
|
||||
* @param field field name. must not be null
|
||||
* @param minLat minimum latitude value (in degrees); valid in [-90.0 : 90.0]
|
||||
* @param minLon minimum longitude value (in degrees); valid in [-180.0 : 180.0]
|
||||
* @param maxLat maximum latitude value (in degrees); valid in [minLat : 90.0]
|
||||
* @param maxLon maximum longitude value (in degrees); valid in [minLon : 180.0]
|
||||
* @return query for matching 2d bounding boxes that cross the defined bounding box
|
||||
*/
|
||||
public static Query newCrossesQuery(String field, final double minLat, final double minLon,
|
||||
final double maxLat, final double maxLon) {
|
||||
return newRangeQuery(field, minLat, minLon, maxLat, maxLon, RangeFieldQuery.QueryType.CROSSES);
|
||||
}
|
||||
|
||||
/** helper method to create a two-dimensional geospatial bounding box query */
|
||||
private static Query newRangeQuery(String field, final double minLat, final double minLon,
|
||||
final double maxLat, final double maxLon, final RangeFieldQuery.QueryType queryType) {
|
||||
checkArgs(minLat, minLon, maxLat, maxLon);
|
||||
return new RangeFieldQuery(field, encode(minLat, minLon, maxLat, maxLon), 2, queryType) {
|
||||
@Override
|
||||
protected String toString(byte[] ranges, int dimension) { return LatLonBoundingBox.toString(ranges, dimension); }
|
||||
};
|
||||
}
|
||||
|
||||
/** encodes a two-dimensional geo bounding box into a byte array */
|
||||
static byte[] encode(double minLat, double minLon, double maxLat, double maxLon) {
|
||||
byte[] b = new byte[BYTES * 4];
|
||||
encode(minLat, minLon, b, 0);
|
||||
encode(maxLat, maxLon, b, BYTES*2);
|
||||
return b;
|
||||
}
|
||||
|
||||
/** encodes a two-dimensional geopoint (lat, lon) into a byte array */
|
||||
static void encode(double lat, double lon, byte[] result, int offset) {
|
||||
if (result == null) {
|
||||
result = new byte[BYTES*4];
|
||||
}
|
||||
NumericUtils.intToSortableBytes(encodeLatitude(lat), result, offset);
|
||||
NumericUtils.intToSortableBytes(encodeLongitude(lon), result, offset + BYTES);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append(getClass().getSimpleName());
|
||||
sb.append(" <");
|
||||
sb.append(name);
|
||||
sb.append(':');
|
||||
byte[] b = ((BytesRef)fieldsData).bytes;
|
||||
toString(b, 0);
|
||||
sb.append('>');
|
||||
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
private static String toString(byte[] ranges, int dimension) {
|
||||
double min, max;
|
||||
int minOfs = 0;
|
||||
int maxOfs = ranges.length/2;
|
||||
switch (dimension) {
|
||||
case 0:
|
||||
min = decodeLatitude(ranges, minOfs);
|
||||
max = decodeLatitude(ranges, maxOfs);
|
||||
break;
|
||||
case 1:
|
||||
min = decodeLongitude(ranges, minOfs);
|
||||
max = decodeLongitude(ranges, maxOfs);
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException("invalid dimension [" + dimension + "] in toString");
|
||||
}
|
||||
return "[" + min + " : " + max + "]";
|
||||
}
|
||||
}
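A short usage sketch for the field and one of its factory queries. This is not from the patch;
the index setup and the coordinate values are hypothetical:

import java.nio.file.Files;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.LatLonBoundingBox;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class BoundingBoxExample {
  public static void main(String[] args) throws Exception {
    try (Directory dir = FSDirectory.open(Files.createTempDirectory("bbox"))) {
      try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
        Document doc = new Document();
        // minLat, minLon, maxLat, maxLon -- one indexed box per LatLonBoundingBox instance
        doc.add(new LatLonBoundingBox("bbox", 50.7, 3.3, 53.6, 7.2));
        writer.addDocument(doc);
      }
      try (DirectoryReader reader = DirectoryReader.open(dir)) {
        IndexSearcher searcher = new IndexSearcher(reader);
        // INTERSECTS matches here because the query box overlaps the indexed one; the
        // WITHIN, CONTAINS and CROSSES variants are built the same way.
        Query q = LatLonBoundingBox.newIntersectsQuery("bbox", 51.0, 4.0, 52.0, 5.0);
        System.out.println("intersecting boxes: " + searcher.search(q, 10).totalHits);
      }
    }
  }
}

Each indexed value packs the min point at offset 0 and the max point at offset 2 * BYTES using
the LatLonPoint integer encoding, i.e. 16 bytes per box.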
|
|
@@ -77,7 +77,8 @@ import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitudeCeil;
// to the field is not actually what gets indexed. Float would be 1E-5 error vs 1E-7, but it might be
// a better tradeoff? then it would be completely transparent to the user and lucene would be "lossless".
public class LatLonPoint extends Field {

  /** LatLonPoint is encoded as integer values so number of bytes is 4 */
  public static final int BYTES = Integer.BYTES;
  /**
   * Type for an indexed LatLonPoint
   * <p>

@ -0,0 +1,239 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.lucene.document;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.apache.lucene.codecs.Codec;
|
||||
import org.apache.lucene.index.DirectoryReader;
|
||||
import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.index.IndexWriterConfig;
|
||||
import org.apache.lucene.index.PointValues;
|
||||
import org.apache.lucene.index.RandomIndexWriter;
|
||||
import org.apache.lucene.index.SerialMergeScheduler;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.FieldDoc;
|
||||
import org.apache.lucene.search.IndexSearcher;
|
||||
import org.apache.lucene.search.ScoreDoc;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.apache.lucene.util.TestUtil;
|
||||
|
||||
public class TestFloatPointNearestNeighbor extends LuceneTestCase {
|
||||
|
||||
public void testNearestNeighborWithDeletedDocs() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
RandomIndexWriter w = new RandomIndexWriter(random(), dir, getIndexWriterConfig());
|
||||
Document doc = new Document();
|
||||
doc.add(new FloatPoint("point", 40.0f, 50.0f));
|
||||
doc.add(new StringField("id", "0", Field.Store.YES));
|
||||
w.addDocument(doc);
|
||||
|
||||
doc = new Document();
|
||||
doc.add(new FloatPoint("point", 45.0f, 55.0f));
|
||||
doc.add(new StringField("id", "1", Field.Store.YES));
|
||||
w.addDocument(doc);
|
||||
|
||||
DirectoryReader r = w.getReader();
|
||||
// can't wrap because we require Lucene60PointsFormat directly but e.g. ParallelReader wraps with its own points impl:
|
||||
IndexSearcher s = newSearcher(r, false);
|
||||
FieldDoc hit = (FieldDoc)FloatPointNearestNeighbor.nearest(s, "point", 1, 40.0f, 50.0f).scoreDocs[0];
|
||||
assertEquals("0", r.document(hit.doc).getField("id").stringValue());
|
||||
r.close();
|
||||
|
||||
w.deleteDocuments(new Term("id", "0"));
|
||||
r = w.getReader();
|
||||
// can't wrap because we require Lucene60PointsFormat directly but e.g. ParallelReader wraps with its own points impl:
|
||||
s = newSearcher(r, false);
|
||||
hit = (FieldDoc)LatLonPoint.nearest(s, "point", 40.0, 50.0, 1).scoreDocs[0];
|
||||
assertEquals("1", r.document(hit.doc).getField("id").stringValue());
|
||||
r.close();
|
||||
w.close();
|
||||
dir.close();
|
||||
}
|
||||
|
||||
public void testNearestNeighborWithAllDeletedDocs() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
RandomIndexWriter w = new RandomIndexWriter(random(), dir, getIndexWriterConfig());
|
||||
Document doc = new Document();
|
||||
doc.add(new FloatPoint("point", 40.0f, 50.0f));
|
||||
doc.add(new StringField("id", "0", Field.Store.YES));
|
||||
w.addDocument(doc);
|
||||
doc = new Document();
|
||||
doc.add(new FloatPoint("point", 45.0f, 55.0f));
|
||||
doc.add(new StringField("id", "1", Field.Store.YES));
|
||||
w.addDocument(doc);
|
||||
|
||||
DirectoryReader r = w.getReader();
|
||||
// can't wrap because we require Lucene60PointsFormat directly but e.g. ParallelReader wraps with its own points impl:
|
||||
IndexSearcher s = newSearcher(r, false);
|
||||
FieldDoc hit = (FieldDoc)FloatPointNearestNeighbor.nearest(s, "point", 1, 40.0f, 50.0f).scoreDocs[0];
|
||||
assertEquals("0", r.document(hit.doc).getField("id").stringValue());
|
||||
r.close();
|
||||
|
||||
w.deleteDocuments(new Term("id", "0"));
|
||||
w.deleteDocuments(new Term("id", "1"));
|
||||
r = w.getReader();
|
||||
// can't wrap because we require Lucene60PointsFormat directly but e.g. ParallelReader wraps with its own points impl:
|
||||
s = newSearcher(r, false);
|
||||
assertEquals(0, FloatPointNearestNeighbor.nearest(s, "point", 1, 40.0f, 50.0f).scoreDocs.length);
|
||||
r.close();
|
||||
w.close();
|
||||
dir.close();
|
||||
}
|
||||
|
||||
public void testTieBreakByDocID() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter w = new IndexWriter(dir, getIndexWriterConfig());
|
||||
Document doc = new Document();
|
||||
doc.add(new FloatPoint("point", 40.0f, 50.0f));
|
||||
doc.add(new StringField("id", "0", Field.Store.YES));
|
||||
w.addDocument(doc);
|
||||
doc = new Document();
|
||||
doc.add(new FloatPoint("point", 40.0f, 50.0f));
|
||||
doc.add(new StringField("id", "1", Field.Store.YES));
|
||||
w.addDocument(doc);
|
||||
|
||||
DirectoryReader r = DirectoryReader.open(w);
|
||||
// can't wrap because we require Lucene60PointsFormat directly but e.g. ParallelReader wraps with its own points impl:
|
||||
ScoreDoc[] hits = FloatPointNearestNeighbor.nearest(newSearcher(r, false), "point", 2, 45.0f, 50.0f).scoreDocs;
|
||||
assertEquals("0", r.document(hits[0].doc).getField("id").stringValue());
|
||||
assertEquals("1", r.document(hits[1].doc).getField("id").stringValue());
|
||||
|
||||
r.close();
|
||||
w.close();
|
||||
dir.close();
|
||||
}
|
||||
|
||||
public void testNearestNeighborWithNoDocs() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
RandomIndexWriter w = new RandomIndexWriter(random(), dir, getIndexWriterConfig());
|
||||
DirectoryReader r = w.getReader();
|
||||
// can't wrap because we require Lucene60PointsFormat directly but e.g. ParallelReader wraps with its own points impl:
|
||||
assertEquals(0, FloatPointNearestNeighbor.nearest(newSearcher(r, false), "point", 1, 40.0f, 50.0f).scoreDocs.length);
|
||||
r.close();
|
||||
w.close();
|
||||
dir.close();
|
||||
}
|
||||
|
||||
public void testNearestNeighborRandom() throws Exception {
|
||||
Directory dir;
|
||||
int numPoints = atLeast(5000);
|
||||
if (numPoints > 100000) {
|
||||
dir = newFSDirectory(createTempDir(getClass().getSimpleName()));
|
||||
} else {
|
||||
dir = newDirectory();
|
||||
}
|
||||
IndexWriterConfig iwc = getIndexWriterConfig();
|
||||
iwc.setMergePolicy(newLogMergePolicy());
|
||||
iwc.setMergeScheduler(new SerialMergeScheduler());
|
||||
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
|
||||
|
||||
int dims = TestUtil.nextInt(random(), 1, PointValues.MAX_DIMENSIONS);
|
||||
float[][] values = new float[numPoints][dims];
|
||||
for (int id = 0 ; id < numPoints ; ++id) {
|
||||
for (int dim = 0 ; dim < dims ; ++dim) {
|
||||
Float f = Float.NaN;
|
||||
while (f.isNaN()) {
|
||||
f = Float.intBitsToFloat(random().nextInt());
|
||||
}
|
||||
values[id][dim] = f;
|
||||
}
|
||||
Document doc = new Document();
|
||||
doc.add(new FloatPoint("point", values[id]));
|
||||
doc.add(new StoredField("id", id));
|
||||
w.addDocument(doc);
|
||||
}
|
||||
|
||||
if (random().nextBoolean()) {
|
||||
w.forceMerge(1);
|
||||
}
|
||||
|
||||
DirectoryReader r = w.getReader();
|
||||
if (VERBOSE) {
|
||||
System.out.println("TEST: reader=" + r);
|
||||
}
|
||||
// can't wrap because we require Lucene60PointsFormat directly but e.g. ParallelReader wraps with its own points impl:
|
||||
IndexSearcher s = newSearcher(r, false);
|
||||
int iters = atLeast(100);
|
||||
for (int iter = 0 ; iter < iters ; ++iter) {
|
||||
if (VERBOSE) {
|
||||
System.out.println("\nTEST: iter=" + iter);
|
||||
}
|
||||
float[] origin = new float[dims];
|
||||
for (int dim = 0 ; dim < dims ; ++dim) {
|
||||
Float f = Float.NaN;
|
||||
while (f.isNaN()) {
|
||||
f = Float.intBitsToFloat(random().nextInt());
|
||||
}
|
||||
origin[dim] = f;
|
||||
}
|
||||
|
||||
// dumb brute force search to get the expected result:
|
||||
FloatPointNearestNeighbor.NearestHit[] expectedHits = new FloatPointNearestNeighbor.NearestHit[numPoints];
|
||||
for (int id = 0 ; id < numPoints ; ++id) {
|
||||
FloatPointNearestNeighbor.NearestHit hit = new FloatPointNearestNeighbor.NearestHit();
|
||||
hit.distanceSquared = FloatPointNearestNeighbor.euclideanDistanceSquared(origin, values[id]);
|
||||
hit.docID = id;
|
||||
expectedHits[id] = hit;
|
||||
}
|
||||
|
||||
Arrays.sort(expectedHits, (a, b) -> {
|
||||
int cmp = Double.compare(a.distanceSquared, b.distanceSquared);
|
||||
return cmp != 0 ? cmp : a.docID - b.docID; // tie break by smaller id
|
||||
});
|
||||
|
||||
int topK = TestUtil.nextInt(random(), 1, numPoints);
|
||||
|
||||
if (VERBOSE) {
|
||||
System.out.println("\nhits for origin=" + Arrays.toString(origin));
|
||||
}
|
||||
|
||||
ScoreDoc[] hits = FloatPointNearestNeighbor.nearest(s, "point", topK, origin).scoreDocs;
|
||||
assertEquals("fewer than expected hits: ", topK, hits.length);
|
||||
|
||||
if (VERBOSE) {
|
||||
for (int i = 0 ; i < topK ; ++i) {
|
||||
FloatPointNearestNeighbor.NearestHit expected = expectedHits[i];
|
||||
FieldDoc actual = (FieldDoc)hits[i];
|
||||
Document actualDoc = r.document(actual.doc);
|
||||
System.out.println("hit " + i);
|
||||
System.out.println(" expected id=" + expected.docID + " " + Arrays.toString(values[expected.docID])
|
||||
+ " distance=" + (float)Math.sqrt(expected.distanceSquared) + " distanceSquared=" + expected.distanceSquared);
|
||||
System.out.println(" actual id=" + actualDoc.getField("id") + " distance=" + actual.fields[0]);
|
||||
}
|
||||
}
|
||||
|
||||
for (int i = 0 ; i < topK ; ++i) {
|
||||
FloatPointNearestNeighbor.NearestHit expected = expectedHits[i];
|
||||
FieldDoc actual = (FieldDoc)hits[i];
|
||||
assertEquals("hit " + i + ":", expected.docID, actual.doc);
|
||||
assertEquals("hit " + i + ":", (float)Math.sqrt(expected.distanceSquared), (Float)actual.fields[0], 0.000001);
|
||||
}
|
||||
}
|
||||
|
||||
r.close();
|
||||
w.close();
|
||||
dir.close();
|
||||
}
|
||||
|
||||
private IndexWriterConfig getIndexWriterConfig() {
|
||||
IndexWriterConfig iwc = newIndexWriterConfig();
|
||||
iwc.setCodec(Codec.forName("Lucene70"));
|
||||
return iwc;
|
||||
}
|
||||
}
|
|
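For reference, a minimal application-side sketch of the sandbox FloatPointNearestNeighbor API exercised by the tests above (not part of the patch). The index path and field name are hypothetical, and the class is assumed to live in org.apache.lucene.search, as the test's own imports suggest; the FieldDoc handling mirrors the assertions in the tests.

import java.nio.file.Paths;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.FloatPointNearestNeighbor;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.store.FSDirectory;

public class FloatPointNearestExample {
  public static void main(String[] args) throws Exception {
    // hypothetical index containing documents with a 2-dimensional FloatPoint field "point"
    try (FSDirectory dir = FSDirectory.open(Paths.get("/path/to/index"));
         DirectoryReader reader = DirectoryReader.open(dir)) {
      IndexSearcher searcher = new IndexSearcher(reader);
      // top 3 documents closest to (40.0f, 50.0f); the distance is reported in FieldDoc.fields[0]
      ScoreDoc[] hits = FloatPointNearestNeighbor.nearest(searcher, "point", 3, 40.0f, 50.0f).scoreDocs;
      for (ScoreDoc scoreDoc : hits) {
        FieldDoc hit = (FieldDoc) scoreDoc;
        System.out.println("doc=" + hit.doc + " distance=" + hit.fields[0]);
      }
    }
  }
}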
@@ -0,0 +1,299 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.lucene.search;
|
||||
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.document.LatLonBoundingBox;
|
||||
import org.apache.lucene.geo.GeoTestUtil;
|
||||
import org.apache.lucene.geo.Rectangle;
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.RandomIndexWriter;
|
||||
import org.apache.lucene.store.Directory;
|
||||
|
||||
import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude;
|
||||
import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude;
|
||||
import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude;
|
||||
import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitude;
|
||||
|
||||
/** Random testing for GeoBoundingBoxField type. */
|
||||
public class TestLatLonBoundingBoxQueries extends BaseRangeFieldQueryTestCase {
|
||||
private static final String FIELD_NAME = "geoBoundingBoxField";
|
||||
|
||||
@Override
|
||||
protected LatLonBoundingBox newRangeField(Range r) {
|
||||
// addRange is called instead of this method
|
||||
throw new UnsupportedOperationException("this method should never be called");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void addRange(Document doc, Range r) {
|
||||
GeoBBox b = (GeoBBox)r;
|
||||
doc.add(new LatLonBoundingBox(FIELD_NAME, b.minLat, b.minLon, b.maxLat, b.maxLon));
|
||||
}
|
||||
|
||||
/** Basic test for 2d boxes */
|
||||
public void testBasics() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
|
||||
|
||||
// Shared meridian test (disjoint)
|
||||
Document document = new Document();
|
||||
document.add(new LatLonBoundingBox(FIELD_NAME, -20d, -180d, 20d, -100d));
|
||||
writer.addDocument(document);
|
||||
|
||||
// intersects (crosses)
|
||||
document = new Document();
|
||||
document.add(new LatLonBoundingBox(FIELD_NAME, 0d, 14.096488952636719d, 10d, 20d));
|
||||
writer.addDocument(document);
|
||||
|
||||
// intersects (contains, crosses)
|
||||
document = new Document();
|
||||
document.add(new LatLonBoundingBox(FIELD_NAME, -10.282592503353953d, -1d, 1d, 14.096488952636719d));
|
||||
writer.addDocument(document);
|
||||
|
||||
// intersects (crosses)
|
||||
document = new Document();
|
||||
document.add(new LatLonBoundingBox(FIELD_NAME, -1d, -11d, 1d, 1d));
|
||||
writer.addDocument(document);
|
||||
|
||||
// intersects (crosses)
|
||||
document = new Document();
|
||||
document.add(new LatLonBoundingBox(FIELD_NAME, -1d, 14.096488952636719d, 5d, 30d));
|
||||
writer.addDocument(document);
|
||||
|
||||
// intersects (within)
|
||||
document = new Document();
|
||||
document.add(new LatLonBoundingBox(FIELD_NAME, -5d, 0d, -1d, 14.096488952636719d));
|
||||
writer.addDocument(document);
|
||||
|
||||
// search
|
||||
IndexReader reader = writer.getReader();
|
||||
IndexSearcher searcher = newSearcher(reader);
|
||||
assertEquals(5, searcher.count(LatLonBoundingBox.newIntersectsQuery(FIELD_NAME,
|
||||
-10.282592503353953d, 0.0d, 0.0d, 14.096488952636719d)));
|
||||
assertEquals(1, searcher.count(LatLonBoundingBox.newWithinQuery(FIELD_NAME,
|
||||
-10.282592503353953d, 0.0d, 0.0d, 14.096488952636719d)));
|
||||
assertEquals(1, searcher.count(LatLonBoundingBox.newContainsQuery(FIELD_NAME,
|
||||
-10.282592503353953d, 0.0d, 0.0d, 14.096488952636719d)));
|
||||
assertEquals(4, searcher.count(LatLonBoundingBox.newCrossesQuery(FIELD_NAME,
|
||||
-10.282592503353953d, 0.0d, 0.0d, 14.096488952636719d)));
|
||||
|
||||
reader.close();
|
||||
writer.close();
|
||||
dir.close();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected int dimension() {
|
||||
return 2;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Range nextRange(int dimensions) {
|
||||
// create a random bounding box in 2 dimensions
|
||||
return new GeoBBox(dimensions);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Query newIntersectsQuery(Range r) {
|
||||
GeoBBox b = (GeoBBox)r;
|
||||
return LatLonBoundingBox.newIntersectsQuery(FIELD_NAME, b.minLat, b.minLon, b.maxLat, b.maxLon);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Query newContainsQuery(Range r) {
|
||||
GeoBBox b = (GeoBBox)r;
|
||||
return LatLonBoundingBox.newContainsQuery(FIELD_NAME, b.minLat, b.minLon, b.maxLat, b.maxLon);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Query newWithinQuery(Range r) {
|
||||
GeoBBox b = (GeoBBox)r;
|
||||
return LatLonBoundingBox.newWithinQuery(FIELD_NAME, b.minLat, b.minLon, b.maxLat, b.maxLon);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Query newCrossesQuery(Range r) {
|
||||
GeoBBox b = (GeoBBox)r;
|
||||
return LatLonBoundingBox.newCrossesQuery(FIELD_NAME, b.minLat, b.minLon, b.maxLat, b.maxLon);
|
||||
}
|
||||
|
||||
protected static class GeoBBox extends Range {
|
||||
protected double minLat, minLon, maxLat, maxLon;
|
||||
protected int dimension;
|
||||
|
||||
GeoBBox(int dimension) {
|
||||
this.dimension = dimension;
|
||||
final Rectangle box = GeoTestUtil.nextBoxNotCrossingDateline();
|
||||
minLat = quantizeLat(box.minLat);
|
||||
minLon = quantizeLon(box.minLon);
|
||||
maxLat = quantizeLat(box.maxLat);
|
||||
maxLon = quantizeLon(box.maxLon);
|
||||
|
||||
// minLat = quantizeLat(Math.min(box.minLat, box.maxLat));
|
||||
// minLon = quantizeLon(Math.max(box.minLat, box.maxLat));
|
||||
// maxLat = quantizeLat(box.maxLat);
|
||||
// maxLon = quantizeLon(box.maxLon);
|
||||
|
||||
// if (maxLon == -180d) {
|
||||
// // index and search handle this fine, but the test validator
|
||||
// // struggles when maxLon == -180; so lets correct
|
||||
// maxLon = 180d;
|
||||
// }
|
||||
}
|
||||
|
||||
protected static double quantizeLat(double lat) {
|
||||
return decodeLatitude(encodeLatitude(lat));
|
||||
}
|
||||
|
||||
protected double quantizeLon(double lon) {
|
||||
return decodeLongitude(encodeLongitude(lon));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected int numDimensions() {
|
||||
return dimension;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Double getMin(int dim) {
|
||||
if (dim == 0) {
|
||||
return minLat;
|
||||
} else if (dim == 1) {
|
||||
return minLon;
|
||||
}
|
||||
throw new IndexOutOfBoundsException("dimension " + dim + " is greater than " + dimension);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setMin(int dim, Object val) {
|
||||
if (dim == 0) {
|
||||
setMinLat((Double)val);
|
||||
} else if (dim == 1) {
|
||||
setMinLon((Double)val);
|
||||
} else {
|
||||
throw new IndexOutOfBoundsException("dimension " + dim + " is greater than " + dimension);
|
||||
}
|
||||
}
|
||||
|
||||
private void setMinLat(double d) {
|
||||
if (d > maxLat) {
|
||||
minLat = maxLat;
|
||||
maxLat = d;
|
||||
} else {
|
||||
minLat = d;
|
||||
}
|
||||
}
|
||||
|
||||
private void setMinLon(double d) {
|
||||
if (d > maxLon) {
|
||||
minLon = maxLon;
|
||||
maxLon = d;
|
||||
} else {
|
||||
minLon = d;
|
||||
}
|
||||
}
|
||||
|
||||
private void setMaxLat(double d) {
|
||||
if (d < minLat) {
|
||||
maxLat = minLat;
|
||||
minLat = d;
|
||||
} else {
|
||||
maxLat = d;
|
||||
}
|
||||
}
|
||||
|
||||
private void setMaxLon(double d) {
|
||||
if (d < minLon) {
|
||||
maxLon = minLon;
|
||||
minLon = d;
|
||||
} else {
|
||||
maxLon = d;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Double getMax(int dim) {
|
||||
if (dim == 0) {
|
||||
return maxLat;
|
||||
} else if (dim == 1) {
|
||||
return maxLon;
|
||||
}
|
||||
throw new IndexOutOfBoundsException("dimension " + dim + " is greater than " + dimension);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setMax(int dim, Object val) {
|
||||
if (dim == 0) {
|
||||
setMaxLat((Double)val);
|
||||
} else if (dim == 1) {
|
||||
setMaxLon((Double)val);
|
||||
} else {
|
||||
throw new IndexOutOfBoundsException("dimension " + dim + " is greater than " + dimension);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean isEqual(Range other) {
|
||||
GeoBBox o = (GeoBBox)other;
|
||||
if (this.dimension != o.dimension) return false;
|
||||
if (this.minLat != o.minLat) return false;
|
||||
if (this.minLon != o.minLon) return false;
|
||||
if (this.maxLat != o.maxLat) return false;
|
||||
if (this.maxLon != o.maxLon) return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean isDisjoint(Range other) {
|
||||
GeoBBox o = (GeoBBox)other;
|
||||
if (minLat > o.maxLat || maxLat < o.minLat) return true;
|
||||
if (minLon > o.maxLon || maxLon < o.minLon) return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean isWithin(Range other) {
|
||||
GeoBBox o = (GeoBBox)other;
|
||||
return o.contains(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean contains(Range other) {
|
||||
GeoBBox o = (GeoBBox)other;
|
||||
if (minLat > o.minLat || maxLat < o.maxLat) return false;
|
||||
if (minLon > o.minLon || maxLon < o.maxLon) return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
StringBuilder b = new StringBuilder();
|
||||
b.append("GeoBoundingBox(lat: ");
|
||||
b.append(minLat);
|
||||
b.append(" TO ");
|
||||
b.append(maxLat);
|
||||
b.append(", lon: ");
|
||||
b.append(minLon);
|
||||
b.append(" TO ");
|
||||
b.append(maxLon);
|
||||
b.append(")");
|
||||
|
||||
return b.toString();
|
||||
}
|
||||
}
|
||||
}
|
|
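A self-contained sketch (not part of the patch) of indexing and querying the LatLonBoundingBox field exercised by testBasics() above. The RAMDirectory setup and the coordinates are illustrative only; the argument order (minLat, minLon, maxLat, maxLon) follows the constructor and query factories used in the test.

import org.apache.lucene.document.Document;
import org.apache.lucene.document.LatLonBoundingBox;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class BoundingBoxExample {
  public static void main(String[] args) throws Exception {
    try (Directory dir = new RAMDirectory();
         IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig())) {
      Document doc = new Document();
      // argument order: minLat, minLon, maxLat, maxLon (degrees)
      doc.add(new LatLonBoundingBox("geoBoundingBoxField", -10.0, 0.0, 10.0, 20.0));
      writer.addDocument(doc);
      writer.commit();
      try (DirectoryReader reader = DirectoryReader.open(dir)) {
        IndexSearcher searcher = new IndexSearcher(reader);
        // query box overlaps the indexed box, so one hit is expected
        int hits = searcher.count(LatLonBoundingBox.newIntersectsQuery(
            "geoBoundingBoxField", -5.0, 5.0, 5.0, 25.0));
        System.out.println(hits); // expected: 1
      }
    }
  }
}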
@@ -77,7 +77,7 @@
|
|||
<ul>
|
||||
<li><a href="changes/Changes.html">Changes</a>: List of changes in this release.</li>
|
||||
<li><a href="SYSTEM_REQUIREMENTS.html">System Requirements</a>: Minimum and supported Java versions.</li>
|
||||
<li><a href="MIGRATE.html">Migration Guide</a>: What changed in Lucene 6; how to migrate code from Lucene 5.x.</li>
|
||||
<li><a href="MIGRATE.html">Migration Guide</a>: What changed in Lucene 8; how to migrate code from Lucene 7.x.</li>
|
||||
<li><a href="JRE_VERSION_MIGRATION.html">JRE Version Migration</a>: Information about upgrading between major JRE versions.</li>
|
||||
<li><a href="core/org/apache/lucene/codecs/{$defaultCodecPackage}/package-summary.html#package.description">File Formats</a>: Guide to the supported index format used by Lucene. This can be customized by using <a href="core/org/apache/lucene/codecs/package-summary.html#package.description">an alternate codec</a>.</li>
|
||||
<li><a href="core/org/apache/lucene/search/package-summary.html#package.description">Search and Scoring in Lucene</a>: Introduction to how Lucene scores documents.</li>
|
||||
|
|
|
@@ -38,10 +38,19 @@
|
|||
<path refid="test.base.classpath" />
|
||||
<path refid="spatialjar"/>
|
||||
<pathelement path="src/test-files" />
|
||||
<pathelement path="${common.dir}/build/spatial3d/classes/test" />
|
||||
</path>
|
||||
|
||||
<target name="compile-core" depends="jar-spatial3d,common.compile-core" />
|
||||
|
||||
<target name="compile-test" depends="compile-spatial3d-tests,common.compile-test" />
|
||||
|
||||
<target name="compile-spatial3d-tests">
|
||||
<ant dir="${common.dir}/spatial3d" target="compile-test" inheritAll="false">
|
||||
<propertyset refid="uptodate.and.compiled.properties"/>
|
||||
</ant>
|
||||
</target>
|
||||
|
||||
<target name="javadocs" depends="javadocs-spatial3d,compile-core,check-javadocs-uptodate"
|
||||
unless="javadocs-uptodate-${name}">
|
||||
<invoke-module-javadoc>
|
||||
|
|
|
@@ -0,0 +1,130 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.lucene.spatial.spatial4j;
|
||||
|
||||
import java.io.DataInput;
|
||||
import java.io.DataOutput;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
|
||||
import org.apache.lucene.spatial3d.geom.GeoAreaShape;
|
||||
import org.apache.lucene.spatial3d.geom.GeoBBox;
|
||||
import org.apache.lucene.spatial3d.geom.GeoCircle;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPointShape;
|
||||
import org.apache.lucene.spatial3d.geom.PlanetModel;
|
||||
import org.apache.lucene.spatial3d.geom.SerializableObject;
|
||||
import org.locationtech.spatial4j.context.SpatialContext;
|
||||
import org.locationtech.spatial4j.context.SpatialContextFactory;
|
||||
import org.locationtech.spatial4j.io.BinaryCodec;
|
||||
import org.locationtech.spatial4j.shape.Circle;
|
||||
import org.locationtech.spatial4j.shape.Point;
|
||||
import org.locationtech.spatial4j.shape.Rectangle;
|
||||
import org.locationtech.spatial4j.shape.Shape;
|
||||
import org.locationtech.spatial4j.shape.ShapeCollection;
|
||||
|
||||
/**
|
||||
* Geo3d implementation of {@link BinaryCodec}
|
||||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
public class Geo3dBinaryCodec extends BinaryCodec {
|
||||
|
||||
private PlanetModel planetModel;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public Geo3dBinaryCodec(SpatialContext ctx, SpatialContextFactory factory) {
|
||||
super(ctx, factory);
|
||||
planetModel = ((Geo3dSpatialContextFactory) factory).planetModel;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Shape readShape(DataInput dataInput) throws IOException {
|
||||
SerializableObject serializableObject = SerializableObject.readObject(planetModel, (InputStream) dataInput);
|
||||
if (serializableObject instanceof GeoAreaShape) {
|
||||
GeoAreaShape shape = (GeoAreaShape) serializableObject;
|
||||
return new Geo3dShape<>(shape, ctx);
|
||||
}
|
||||
throw new IllegalArgumentException("trying to read a not supported shape: " + serializableObject.getClass());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeShape(DataOutput dataOutput, Shape s) throws IOException {
|
||||
if (s instanceof Geo3dShape) {
|
||||
Geo3dShape geoAreaShape = (Geo3dShape) s;
|
||||
SerializableObject.writeObject((OutputStream) dataOutput, geoAreaShape.shape);
|
||||
} else {
|
||||
throw new IllegalArgumentException("trying to write a not supported shape: " + s.getClass().getName());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Point readPoint(DataInput dataInput) throws IOException {
|
||||
SerializableObject serializableObject = SerializableObject.readObject(planetModel, (InputStream) dataInput);
|
||||
if (serializableObject instanceof GeoPointShape) {
|
||||
GeoPointShape shape = (GeoPointShape) serializableObject;
|
||||
return new Geo3dPointShape(shape, ctx);
|
||||
}
|
||||
throw new IllegalArgumentException("trying to read a not supported point shape: " + serializableObject.getClass());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writePoint(DataOutput dataOutput, Point pt) throws IOException {
|
||||
writeShape(dataOutput, pt);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Rectangle readRect(DataInput dataInput) throws IOException {
|
||||
SerializableObject serializableObject = SerializableObject.readObject(planetModel, (InputStream) dataInput);
|
||||
if (serializableObject instanceof GeoBBox) {
|
||||
GeoBBox shape = (GeoBBox) serializableObject;
|
||||
return new Geo3dRectangleShape(shape, ctx);
|
||||
}
|
||||
throw new IllegalArgumentException("trying to read a not supported rectangle shape: " + serializableObject.getClass());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeRect(DataOutput dataOutput, Rectangle r) throws IOException {
|
||||
writeShape(dataOutput, r);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Circle readCircle(DataInput dataInput) throws IOException {
|
||||
SerializableObject serializableObject = SerializableObject.readObject(planetModel, (InputStream) dataInput);
|
||||
if (serializableObject instanceof GeoCircle) {
|
||||
GeoCircle shape = (GeoCircle) serializableObject;
|
||||
return new Geo3dCircleShape(shape, ctx);
|
||||
}
|
||||
throw new IllegalArgumentException("trying to read a not supported circle shape: " + serializableObject.getClass());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeCircle(DataOutput dataOutput, Circle c) throws IOException {
|
||||
writeShape(dataOutput, c);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ShapeCollection readCollection(DataInput dataInput) throws IOException {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeCollection(DataOutput dataOutput, ShapeCollection col) throws IOException {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
}
|
|
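A hedged round-trip sketch (not part of the patch) of the codec above: it assumes Geo3dSpatialContextFactory can be registered through spatial4j's standard "spatialContextFactory" argument and that wrapping the byte streams in Data{Output,Input}Stream satisfies the raw-stream casts in writeShape/readShape shown here.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.util.HashMap;
import java.util.Map;

import org.apache.lucene.spatial.spatial4j.Geo3dSpatialContextFactory;
import org.locationtech.spatial4j.context.SpatialContext;
import org.locationtech.spatial4j.context.SpatialContextFactory;
import org.locationtech.spatial4j.shape.Shape;

public class Geo3dCodecRoundTrip {
  public static void main(String[] args) throws Exception {
    Map<String, String> config = new HashMap<>();
    config.put("spatialContextFactory", Geo3dSpatialContextFactory.class.getName());
    SpatialContext ctx = SpatialContextFactory.makeSpatialContext(config,
        Geo3dCodecRoundTrip.class.getClassLoader());

    // a GeoCircle-backed spatial4j circle: lon, lat, radius in degrees (arbitrary values)
    Shape circle = ctx.getShapeFactory().circle(-70.0, 42.0, 1.0);

    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    ctx.getBinaryCodec().writeShape(new DataOutputStream(bytes), circle);

    Shape copy = ctx.getBinaryCodec().readShape(
        new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
    System.out.println(circle.equals(copy)); // ideally prints true
  }
}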
@@ -0,0 +1,82 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.lucene.spatial.spatial4j;
|
||||
|
||||
import org.apache.lucene.spatial3d.geom.GeoCircle;
|
||||
import org.apache.lucene.spatial3d.geom.GeoCircleFactory;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPointShapeFactory;
|
||||
import org.apache.lucene.spatial3d.geom.PlanetModel;
|
||||
import org.locationtech.spatial4j.context.SpatialContext;
|
||||
import org.locationtech.spatial4j.distance.DistanceUtils;
|
||||
import org.locationtech.spatial4j.shape.Circle;
|
||||
import org.locationtech.spatial4j.shape.Point;
|
||||
import org.locationtech.spatial4j.shape.Shape;
|
||||
import org.locationtech.spatial4j.shape.SpatialRelation;
|
||||
|
||||
/**
|
||||
* Specialization of a {@link Geo3dShape} which represents a {@link Circle}.
|
||||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
public class Geo3dCircleShape extends Geo3dShape<GeoCircle> implements Circle {
|
||||
|
||||
public Geo3dCircleShape(final GeoCircle shape, final SpatialContext spatialcontext) {
|
||||
super(shape, spatialcontext);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void reset(double x, double y, double radiusDEG) {
|
||||
shape = GeoCircleFactory.makeGeoCircle(shape.getPlanetModel(),
|
||||
y * DistanceUtils.DEGREES_TO_RADIANS,
|
||||
x * DistanceUtils.DEGREES_TO_RADIANS,
|
||||
radiusDEG * DistanceUtils.DEGREES_TO_RADIANS);
|
||||
center = null;
|
||||
boundingBox = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public double getRadius() {
|
||||
return shape.getRadius() * DistanceUtils.RADIANS_TO_DEGREES;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Point getCenter() {
|
||||
Point center = this.center;//volatile read once
|
||||
if (center == null) {
|
||||
center = new Geo3dPointShape(
|
||||
GeoPointShapeFactory.makeGeoPointShape(shape.getPlanetModel(),
|
||||
shape.getCenter().getLatitude(),
|
||||
shape.getCenter().getLongitude()),
|
||||
spatialcontext);
|
||||
this.center = center;
|
||||
}
|
||||
return center;
|
||||
}
|
||||
|
||||
//TODO Improve GeoCircle to properly relate a point with WGS84 model -- LUCENE-7970
|
||||
@Override
|
||||
public SpatialRelation relate(Shape other) {
|
||||
if (shape.getPlanetModel() != PlanetModel.SPHERE && other instanceof Point) {
|
||||
if (spatialcontext.getDistCalc().distance((Point) other, getCenter()) <= getRadius()) {
|
||||
return SpatialRelation.CONTAINS;
|
||||
}
|
||||
return SpatialRelation.DISJOINT;
|
||||
}
|
||||
return super.relate(other);
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,154 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.lucene.spatial.spatial4j;
|
||||
|
||||
import org.apache.lucene.spatial3d.geom.GeoPoint;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPointShape;
|
||||
import org.apache.lucene.spatial3d.geom.PlanetModel;
|
||||
import org.locationtech.spatial4j.context.SpatialContext;
|
||||
import org.locationtech.spatial4j.distance.DistanceCalculator;
|
||||
import org.locationtech.spatial4j.distance.DistanceUtils;
|
||||
import org.locationtech.spatial4j.shape.Circle;
|
||||
import org.locationtech.spatial4j.shape.Point;
|
||||
import org.locationtech.spatial4j.shape.Rectangle;
|
||||
|
||||
/**
|
||||
* Geo3d implementation of {@link DistanceCalculator}
|
||||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
public class Geo3dDistanceCalculator implements DistanceCalculator {
|
||||
|
||||
protected final PlanetModel planetModel;
|
||||
|
||||
public Geo3dDistanceCalculator(PlanetModel planetModel) {
|
||||
this.planetModel = planetModel;
|
||||
}
|
||||
|
||||
@Override
|
||||
public double distance(Point from, Point to) {
|
||||
if (from instanceof Geo3dPointShape && to instanceof Geo3dPointShape) {
|
||||
GeoPointShape pointShape1 = ((Geo3dPointShape) from).shape;
|
||||
GeoPointShape pointShape2 = ((Geo3dPointShape) to).shape;
|
||||
return planetModel.surfaceDistance(pointShape1.getCenter(), pointShape2.getCenter()) * DistanceUtils.RADIANS_TO_DEGREES;
|
||||
}
|
||||
return distance(from, to.getX(), to.getY());
|
||||
}
|
||||
|
||||
@Override
|
||||
public double distance(Point from, double toX, double toY) {
|
||||
GeoPoint fromGeoPoint;
|
||||
if (from instanceof Geo3dPointShape) {
|
||||
fromGeoPoint = (((Geo3dPointShape) from).shape).getCenter();
|
||||
} else {
|
||||
fromGeoPoint = new GeoPoint(planetModel,
|
||||
from.getY() * DistanceUtils.DEGREES_TO_RADIANS,
|
||||
from.getX() * DistanceUtils.DEGREES_TO_RADIANS);
|
||||
}
|
||||
GeoPoint toGeoPoint = new GeoPoint(planetModel,
|
||||
toY * DistanceUtils.DEGREES_TO_RADIANS,
|
||||
toX * DistanceUtils.DEGREES_TO_RADIANS);
|
||||
return planetModel.surfaceDistance(fromGeoPoint, toGeoPoint) * DistanceUtils.RADIANS_TO_DEGREES;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean within(Point from, double toX, double toY, double distance) {
|
||||
return (distance < distance(from, toX, toY));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Point pointOnBearing(Point from, double distDEG, double bearingDEG, SpatialContext ctx, Point reuse) {
|
||||
// Algorithm using Vincenty's formulae (https://en.wikipedia.org/wiki/Vincenty%27s_formulae)
|
||||
// which takes into account that planets may not be spherical.
|
||||
//Code adaptation from http://www.movable-type.co.uk/scripts/latlong-vincenty.html
|
||||
Geo3dPointShape geoFrom = (Geo3dPointShape) from;
|
||||
GeoPoint point = (GeoPoint) geoFrom.shape;
|
||||
double lat = point.getLatitude();
|
||||
double lon = point.getLongitude();
|
||||
double dist = DistanceUtils.DEGREES_TO_RADIANS * distDEG;
|
||||
double bearing = DistanceUtils.DEGREES_TO_RADIANS * bearingDEG;
|
||||
|
||||
double sinα1 = Math.sin(bearing);
|
||||
double cosα1 = Math.cos(bearing);
|
||||
|
||||
double tanU1 = (1 - planetModel.flattening) * Math.tan(lat);
|
||||
double cosU1 = 1 / Math.sqrt((1 + tanU1 * tanU1));
|
||||
double sinU1 = tanU1 * cosU1;
|
||||
|
||||
double σ1 = Math.atan2(tanU1, cosα1);
|
||||
double sinα = cosU1 * sinα1;
|
||||
double cosSqα = 1 - sinα * sinα;
|
||||
double uSq = cosSqα * planetModel.squareRatio;// (planetModel.ab* planetModel.ab - planetModel.c*planetModel.c) / (planetModel.c*planetModel.c);
|
||||
double A = 1 + uSq / 16384 * (4096 + uSq * (-768 + uSq * (320 - 175 * uSq)));
|
||||
double B = uSq / 1024 * (256 + uSq * (-128 + uSq * (74 - 47 * uSq)));
|
||||
|
||||
double cos2σM;
|
||||
double sinσ;
|
||||
double cosσ;
|
||||
double Δσ;
|
||||
|
||||
double σ = dist / (planetModel.c * A);
|
||||
double σʹ;
|
||||
double iterations = 0;
|
||||
do {
|
||||
cos2σM = Math.cos(2 * σ1 + σ);
|
||||
sinσ = Math.sin(σ);
|
||||
cosσ = Math.cos(σ);
|
||||
Δσ = B * sinσ * (cos2σM + B / 4 * (cosσ * (-1 + 2 * cos2σM * cos2σM) -
|
||||
B / 6 * cos2σM * (-3 + 4 * sinσ * sinσ) * (-3 + 4 * cos2σM * cos2σM)));
|
||||
σʹ = σ;
|
||||
σ = dist / (planetModel.c * A) + Δσ;
|
||||
} while (Math.abs(σ - σʹ) > 1e-12 && ++iterations < 200);
|
||||
|
||||
if (iterations >= 200) {
|
||||
throw new RuntimeException("Formula failed to converge");
|
||||
}
|
||||
|
||||
double x = sinU1 * sinσ - cosU1 * cosσ * cosα1;
|
||||
double φ2 = Math.atan2(sinU1 * cosσ + cosU1 * sinσ * cosα1, (1 - planetModel.flattening) * Math.sqrt(sinα * sinα + x * x));
|
||||
double λ = Math.atan2(sinσ * sinα1, cosU1 * cosσ - sinU1 * sinσ * cosα1);
|
||||
double C = planetModel.flattening / 16 * cosSqα * (4 + planetModel.flattening * (4 - 3 * cosSqα));
|
||||
double L = λ - (1 - C) * planetModel.flattening * sinα *
|
||||
(σ + C * sinσ * (cos2σM + C * cosσ * (-1 + 2 * cos2σM * cos2σM)));
|
||||
double λ2 = (lon + L + 3 * Math.PI) % (2 * Math.PI) - Math.PI; // normalise to -180..+180
|
||||
|
||||
return ctx.getShapeFactory().pointXY(λ2 * DistanceUtils.RADIANS_TO_DEGREES,
|
||||
φ2 * DistanceUtils.RADIANS_TO_DEGREES);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Rectangle calcBoxByDistFromPt(Point from, double distDEG, SpatialContext ctx, Rectangle reuse) {
|
||||
Circle circle = ctx.getShapeFactory().circle(from, distDEG);
|
||||
return circle.getBoundingBox();
|
||||
}
|
||||
|
||||
@Override
|
||||
public double calcBoxByDistFromPt_yHorizAxisDEG(Point from, double distDEG, SpatialContext ctx) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public double area(Rectangle rect) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public double area(Circle circle) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
}
|
|
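A hedged usage sketch (not part of the patch) of driving the distance calculator above through a Geo3d-configured SpatialContext; context construction is as in the codec sketch earlier, and the coordinates and km conversion are illustrative.

import org.locationtech.spatial4j.context.SpatialContext;
import org.locationtech.spatial4j.distance.DistanceUtils;
import org.locationtech.spatial4j.shape.Point;

public class Geo3dDistanceDemo {
  /** Assumes {@code ctx} was built via Geo3dSpatialContextFactory, so its points are Geo3dPointShape. */
  static void demo(SpatialContext ctx) {
    Point boston = ctx.getShapeFactory().pointXY(-71.06, 42.36); // lon, lat in degrees
    Point london = ctx.getShapeFactory().pointXY(-0.12, 51.50);
    // ellipsoidal surface distance, returned in degrees by the calculator above
    double degrees = ctx.getDistCalc().distance(boston, london);
    double km = DistanceUtils.degrees2Dist(degrees, DistanceUtils.EARTH_MEAN_RADIUS_KM);
    // destination point 1 degree away on a 45-degree bearing (the Vincenty direct problem above)
    Point dest = ctx.getDistCalc().pointOnBearing(boston, 1.0, 45.0, ctx, null);
    System.out.println(km + " km; destination=" + dest);
  }
}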
@@ -0,0 +1,78 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.lucene.spatial.spatial4j;
|
||||
|
||||
import org.apache.lucene.spatial3d.geom.GeoPointShape;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPointShapeFactory;
|
||||
import org.locationtech.spatial4j.context.SpatialContext;
|
||||
import org.locationtech.spatial4j.distance.DistanceUtils;
|
||||
import org.locationtech.spatial4j.shape.Point;
|
||||
import org.locationtech.spatial4j.shape.Rectangle;
|
||||
import org.locationtech.spatial4j.shape.Shape;
|
||||
|
||||
/**
|
||||
* Specialization of a {@link Geo3dShape} which represents a {@link Point}.
|
||||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
public class Geo3dPointShape extends Geo3dShape<GeoPointShape> implements Point {
|
||||
|
||||
public Geo3dPointShape(final GeoPointShape shape, final SpatialContext spatialcontext) {
|
||||
super(shape, spatialcontext);
|
||||
center = this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void reset(double x, double y) {
|
||||
shape = GeoPointShapeFactory.makeGeoPointShape(shape.getPlanetModel(),
|
||||
y * DistanceUtils.DEGREES_TO_RADIANS,
|
||||
x * DistanceUtils.DEGREES_TO_RADIANS);
|
||||
center = this;
|
||||
boundingBox = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public double getX() {
|
||||
return shape.getCenter().getLongitude() * DistanceUtils.RADIANS_TO_DEGREES;
|
||||
}
|
||||
|
||||
@Override
|
||||
public double getY() {
|
||||
return shape.getCenter().getLatitude() * DistanceUtils.RADIANS_TO_DEGREES;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Rectangle getBoundingBox() {
|
||||
Rectangle bbox = this.boundingBox;//volatile read once
|
||||
if (bbox == null) {
|
||||
bbox = new Geo3dRectangleShape(shape, spatialcontext);
|
||||
this.boundingBox = bbox;
|
||||
}
|
||||
return bbox;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Shape getBuffered(double distance, SpatialContext spatialContext) {
|
||||
return spatialContext.getShapeFactory().circle(getX(), getY(), distance);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasArea() {
|
||||
return false;
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,163 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.lucene.spatial.spatial4j;
|
||||
|
||||
import org.apache.lucene.spatial3d.geom.GeoBBox;
|
||||
import org.apache.lucene.spatial3d.geom.GeoBBoxFactory;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPoint;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPointShapeFactory;
|
||||
import org.apache.lucene.spatial3d.geom.LatLonBounds;
|
||||
import org.locationtech.spatial4j.context.SpatialContext;
|
||||
import org.locationtech.spatial4j.distance.DistanceUtils;
|
||||
import org.locationtech.spatial4j.shape.Point;
|
||||
import org.locationtech.spatial4j.shape.Rectangle;
|
||||
import org.locationtech.spatial4j.shape.Shape;
|
||||
import org.locationtech.spatial4j.shape.SpatialRelation;
|
||||
|
||||
/**
|
||||
* Specialization of a {@link Geo3dShape} which represents a {@link Rectangle}.
|
||||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
public class Geo3dRectangleShape extends Geo3dShape<GeoBBox> implements Rectangle {
|
||||
|
||||
private double minX;
|
||||
private double maxX;
|
||||
private double minY;
|
||||
private double maxY;
|
||||
|
||||
public Geo3dRectangleShape(final GeoBBox shape,
|
||||
final SpatialContext spatialcontext,
|
||||
double minX,
|
||||
double maxX,
|
||||
double minY,
|
||||
double maxY) {
|
||||
super(shape, spatialcontext);
|
||||
this.minX = minX;
|
||||
this.maxX = maxX;
|
||||
this.minY = minY;
|
||||
this.maxY = maxY;
|
||||
}
|
||||
|
||||
public Geo3dRectangleShape(final GeoBBox shape, final SpatialContext spatialcontext) {
|
||||
super(shape, spatialcontext);
|
||||
setBoundsFromshape();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Set the bounds from the wrapped GeoBBox.
|
||||
*/
|
||||
private void setBoundsFromshape() {
|
||||
LatLonBounds bounds = new LatLonBounds();
|
||||
shape.getBounds(bounds);
|
||||
minX = bounds.checkNoLongitudeBound() ? -180.0 : bounds.getLeftLongitude() * DistanceUtils.RADIANS_TO_DEGREES;
|
||||
minY = bounds.checkNoBottomLatitudeBound() ? -90.0 : bounds.getMinLatitude() * DistanceUtils.RADIANS_TO_DEGREES;
|
||||
maxX = bounds.checkNoLongitudeBound() ? 180.0 : bounds.getRightLongitude() * DistanceUtils.RADIANS_TO_DEGREES;
|
||||
maxY = bounds.checkNoTopLatitudeBound() ? 90.0 : bounds.getMaxLatitude() * DistanceUtils.RADIANS_TO_DEGREES;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Point getCenter() {
|
||||
Point center = this.center;//volatile read once
|
||||
if (center == null) {
|
||||
GeoPoint point = shape.getCenter();
|
||||
center = new Geo3dPointShape(
|
||||
GeoPointShapeFactory.makeGeoPointShape(shape.getPlanetModel(),
|
||||
point.getLatitude(),
|
||||
point.getLongitude()),
|
||||
spatialcontext);
|
||||
this.center = center;
|
||||
}
|
||||
return center;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void reset(double minX, double maxX, double minY, double maxY) {
|
||||
shape = GeoBBoxFactory.makeGeoBBox(shape.getPlanetModel(),
|
||||
maxY * DistanceUtils.DEGREES_TO_RADIANS,
|
||||
minY * DistanceUtils.DEGREES_TO_RADIANS,
|
||||
minX * DistanceUtils.DEGREES_TO_RADIANS,
|
||||
maxX * DistanceUtils.DEGREES_TO_RADIANS);
|
||||
center = null;
|
||||
boundingBox = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Rectangle getBoundingBox() {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public double getWidth() {
|
||||
double result = getMaxX() - getMinX();
|
||||
if (result < 0) {
|
||||
result += 360;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public double getHeight() {
|
||||
return getMaxY() - getMinY();
|
||||
}
|
||||
|
||||
@Override
|
||||
public double getMinX() {
|
||||
return minX;
|
||||
}
|
||||
|
||||
@Override
|
||||
public double getMinY() {
|
||||
return minY;
|
||||
}
|
||||
|
||||
@Override
|
||||
public double getMaxX() {
|
||||
return maxX;
|
||||
}
|
||||
|
||||
@Override
|
||||
public double getMaxY() {
|
||||
return maxY;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean getCrossesDateLine() {
|
||||
return (getMaxX() > 0 && getMinX() < 0);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public SpatialRelation relateYRange(double minY, double maxY) {
|
||||
Rectangle r = spatialcontext.getShapeFactory().rect(-180, 180, minY, maxY);
|
||||
return relate(r);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SpatialRelation relateXRange(double minX, double maxX) {
|
||||
Rectangle r = spatialcontext.getShapeFactory().rect(minX, maxX, -90, 90);
|
||||
return relate(r);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Shape getBuffered(double distance, SpatialContext spatialContext) {
|
||||
GeoBBox bBox = shape.expand(distance * DistanceUtils.DEGREES_TO_RADIANS);
|
||||
return new Geo3dRectangleShape(bBox, spatialContext);
|
||||
}
|
||||
}
|
|
@@ -14,127 +14,110 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.lucene.spatial.spatial4j;
|
||||
|
||||
import org.apache.lucene.spatial3d.geom.GeoArea;
|
||||
import org.apache.lucene.spatial3d.geom.GeoAreaFactory;
|
||||
import org.apache.lucene.spatial3d.geom.GeoAreaShape;
|
||||
import org.apache.lucene.spatial3d.geom.GeoBBox;
|
||||
import org.apache.lucene.spatial3d.geom.GeoBBoxFactory;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPoint;
|
||||
import org.apache.lucene.spatial3d.geom.LatLonBounds;
|
||||
import org.locationtech.spatial4j.context.SpatialContext;
|
||||
import org.locationtech.spatial4j.distance.DistanceUtils;
|
||||
import org.locationtech.spatial4j.shape.Point;
|
||||
import org.locationtech.spatial4j.shape.Rectangle;
|
||||
import org.locationtech.spatial4j.shape.Shape;
|
||||
import org.locationtech.spatial4j.shape.SpatialRelation;
|
||||
import org.locationtech.spatial4j.shape.impl.RectangleImpl;
|
||||
import org.apache.lucene.spatial3d.geom.LatLonBounds;
|
||||
import org.apache.lucene.spatial3d.geom.GeoArea;
|
||||
import org.apache.lucene.spatial3d.geom.GeoAreaFactory;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPoint;
|
||||
import org.apache.lucene.spatial3d.geom.GeoShape;
|
||||
import org.apache.lucene.spatial3d.geom.PlanetModel;
|
||||
|
||||
/**
|
||||
* A Spatial4j Shape wrapping a {@link GeoShape} ("Geo3D") -- a 3D planar geometry based Spatial4j Shape implementation.
|
||||
* A Spatial4j Shape wrapping a {@link GeoAreaShape} ("Geo3D") -- a 3D planar geometry
|
||||
* based Spatial4j Shape implementation.
|
||||
* Geo3D implements shapes on the surface of a sphere or ellipsoid.
|
||||
*
|
||||
* @param <T> is the type of {@link GeoAreaShape}
|
||||
* @lucene.experimental
|
||||
*/
|
||||
public class Geo3dShape implements Shape {
|
||||
/** The required size of this adjustment depends on the actual planetary model chosen.
|
||||
* This value is big enough to account for WGS84. */
|
||||
protected static final double ROUNDOFF_ADJUSTMENT = 0.05;
|
||||
|
||||
public final SpatialContext ctx;
|
||||
public final GeoShape shape;
|
||||
public final PlanetModel planetModel;
|
||||
public class Geo3dShape<T extends GeoAreaShape> implements Shape {
|
||||
|
||||
private volatile Rectangle boundingBox = null; // lazy initialized
|
||||
protected final SpatialContext spatialcontext;
|
||||
|
||||
public Geo3dShape(final GeoShape shape, final SpatialContext ctx) {
|
||||
this(PlanetModel.SPHERE, shape, ctx);
|
||||
}
|
||||
protected T shape;
|
||||
protected volatile Rectangle boundingBox = null; // lazy initialized
|
||||
protected volatile Point center = null; // lazy initialized
|
||||
|
||||
public Geo3dShape(final PlanetModel planetModel, final GeoShape shape, final SpatialContext ctx) {
|
||||
if (!ctx.isGeo()) {
|
||||
throw new IllegalArgumentException("SpatialContext.isGeo() must be true");
|
||||
}
|
||||
this.ctx = ctx;
|
||||
this.planetModel = planetModel;
|
||||
public Geo3dShape(final T shape, final SpatialContext spatialcontext) {
|
||||
this.spatialcontext = spatialcontext;
|
||||
this.shape = shape;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SpatialContext getContext() {
|
||||
return ctx;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SpatialRelation relate(Shape other) {
|
||||
if (other instanceof Rectangle)
|
||||
return relate((Rectangle)other);
|
||||
else if (other instanceof Point)
|
||||
return relate((Point)other);
|
||||
else
|
||||
int relationship;
|
||||
if (other instanceof Geo3dShape<?>) {
|
||||
relationship = relate((Geo3dShape<?>) other);
|
||||
} else if (other instanceof Rectangle) {
|
||||
relationship = relate((Rectangle) other);
|
||||
} else if (other instanceof Point) {
|
||||
relationship = relate((Point) other);
|
||||
} else {
|
||||
throw new RuntimeException("Unimplemented shape relationship determination: " + other.getClass());
|
||||
}
|
||||
|
||||
switch (relationship) {
|
||||
case GeoArea.DISJOINT:
|
||||
return SpatialRelation.DISJOINT;
|
||||
case GeoArea.OVERLAPS:
|
||||
return (other instanceof Point ? SpatialRelation.CONTAINS : SpatialRelation.INTERSECTS);
|
||||
case GeoArea.CONTAINS:
|
||||
return (other instanceof Point ? SpatialRelation.CONTAINS : SpatialRelation.WITHIN);
|
||||
case GeoArea.WITHIN:
|
||||
return SpatialRelation.CONTAINS;
|
||||
}
|
||||
|
||||
throw new RuntimeException("Undetermined shape relationship: " + relationship);
|
||||
}
|
||||
|
||||
protected SpatialRelation relate(Rectangle r) {
|
||||
private int relate(Geo3dShape<?> s) {
|
||||
return shape.getRelationship(s.shape);
|
||||
}
|
||||
|
||||
private int relate(Rectangle r) {
|
||||
// Construct the right kind of GeoArea first
|
||||
GeoArea geoArea = GeoAreaFactory.makeGeoArea(planetModel,
|
||||
GeoArea geoArea = GeoAreaFactory.makeGeoArea(shape.getPlanetModel(),
|
||||
r.getMaxY() * DistanceUtils.DEGREES_TO_RADIANS,
|
||||
r.getMinY() * DistanceUtils.DEGREES_TO_RADIANS,
|
||||
r.getMinX() * DistanceUtils.DEGREES_TO_RADIANS,
|
||||
r.getMaxX() * DistanceUtils.DEGREES_TO_RADIANS);
|
||||
int relationship = geoArea.getRelationship(shape);
|
||||
if (relationship == GeoArea.WITHIN)
|
||||
return SpatialRelation.WITHIN;
|
||||
else if (relationship == GeoArea.CONTAINS)
|
||||
return SpatialRelation.CONTAINS;
|
||||
else if (relationship == GeoArea.OVERLAPS)
|
||||
return SpatialRelation.INTERSECTS;
|
||||
else if (relationship == GeoArea.DISJOINT)
|
||||
return SpatialRelation.DISJOINT;
|
||||
else
|
||||
throw new RuntimeException("Unknown relationship returned: "+relationship);
|
||||
|
||||
return geoArea.getRelationship(shape);
|
||||
}
|
||||
|
||||
protected SpatialRelation relate(Point p) {
|
||||
// Create a GeoPoint
|
||||
GeoPoint point = new GeoPoint(planetModel, p.getY()* DistanceUtils.DEGREES_TO_RADIANS, p.getX()* DistanceUtils.DEGREES_TO_RADIANS);
|
||||
private int relate(Point p) {
|
||||
GeoPoint point = new GeoPoint(shape.getPlanetModel(),
|
||||
p.getY() * DistanceUtils.DEGREES_TO_RADIANS,
|
||||
p.getX() * DistanceUtils.DEGREES_TO_RADIANS);
|
||||
|
||||
if (shape.isWithin(point)) {
|
||||
// Point within shape
|
||||
return SpatialRelation.CONTAINS;
|
||||
return GeoArea.WITHIN;
|
||||
}
|
||||
return SpatialRelation.DISJOINT;
|
||||
return GeoArea.DISJOINT;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
public Rectangle getBoundingBox() {
|
||||
Rectangle bbox = this.boundingBox;//volatile read once
|
||||
if (bbox == null) {
|
||||
LatLonBounds bounds = new LatLonBounds();
|
||||
shape.getBounds(bounds);
|
||||
double leftLon;
|
||||
double rightLon;
|
||||
if (bounds.checkNoLongitudeBound()) {
|
||||
leftLon = -180.0;
|
||||
rightLon = 180.0;
|
||||
} else {
|
||||
leftLon = bounds.getLeftLongitude().doubleValue() * DistanceUtils.RADIANS_TO_DEGREES;
|
||||
rightLon = bounds.getRightLongitude().doubleValue() * DistanceUtils.RADIANS_TO_DEGREES;
|
||||
}
|
||||
double minLat;
|
||||
if (bounds.checkNoBottomLatitudeBound()) {
|
||||
minLat = -90.0;
|
||||
} else {
|
||||
minLat = bounds.getMinLatitude().doubleValue() * DistanceUtils.RADIANS_TO_DEGREES;
|
||||
}
|
||||
double maxLat;
|
||||
if (bounds.checkNoTopLatitudeBound()) {
|
||||
maxLat = 90.0;
|
||||
} else {
|
||||
maxLat = bounds.getMaxLatitude().doubleValue() * DistanceUtils.RADIANS_TO_DEGREES;
|
||||
}
|
||||
bbox = new RectangleImpl(leftLon, rightLon, minLat, maxLat, ctx).getBuffered(ROUNDOFF_ADJUSTMENT, ctx);
|
||||
double leftLon = bounds.checkNoLongitudeBound() ? -Math.PI : bounds.getLeftLongitude();
|
||||
double rightLon = bounds.checkNoLongitudeBound() ? Math.PI : bounds.getRightLongitude();
|
||||
double minLat = bounds.checkNoBottomLatitudeBound() ? -Math.PI * 0.5 : bounds.getMinLatitude();
|
||||
double maxLat = bounds.checkNoTopLatitudeBound() ? Math.PI * 0.5 : bounds.getMaxLatitude();
|
||||
GeoBBox geoBBox = GeoBBoxFactory.makeGeoBBox(shape.getPlanetModel(), maxLat, minLat, leftLon, rightLon);
|
||||
bbox = new Geo3dRectangleShape(geoBBox, spatialcontext);
|
||||
this.boundingBox = bbox;
|
||||
}
|
||||
return bbox;
|
||||
|
@@ -146,17 +129,22 @@ public class Geo3dShape implements Shape {
|
|||
}
|
||||
|
||||
@Override
|
||||
public double getArea(SpatialContext ctx) {
|
||||
public double getArea(SpatialContext spatialContext) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Point getCenter() {
|
||||
throw new UnsupportedOperationException();
|
||||
Point center = this.center;//volatile read once
|
||||
if (center == null) {
|
||||
center = getBoundingBox().getCenter();
|
||||
this.center = center;
|
||||
}
|
||||
return center;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Shape getBuffered(double distance, SpatialContext ctx) {
|
||||
public Shape getBuffered(double distance, SpatialContext spatialContext) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
|
@@ -166,20 +154,20 @@ public class Geo3dShape implements Shape {
|
|||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Geo3dShape{planetmodel=" + planetModel + ", shape=" + shape + '}';
|
||||
public SpatialContext getContext() {
|
||||
return spatialcontext;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (!(other instanceof Geo3dShape))
|
||||
public boolean equals(Object o) {
|
||||
if (!(o instanceof Geo3dShape<?>))
|
||||
return false;
|
||||
Geo3dShape tr = (Geo3dShape)other;
|
||||
return tr.ctx.equals(ctx) && tr.planetModel.equals(planetModel) && tr.shape.equals(shape);
|
||||
final Geo3dShape<?> other = (Geo3dShape<?>) o;
|
||||
return (other.spatialcontext.equals(spatialcontext) && other.shape.equals(shape));
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return planetModel.hashCode() + shape.hashCode();
|
||||
return spatialcontext.hashCode() + shape.hashCode();
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -0,0 +1,396 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.lucene.spatial.spatial4j;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.lucene.spatial3d.geom.GeoBBox;
|
||||
import org.apache.lucene.spatial3d.geom.GeoBBoxFactory;
|
||||
import org.apache.lucene.spatial3d.geom.GeoCircle;
|
||||
import org.apache.lucene.spatial3d.geom.GeoCircleFactory;
|
||||
import org.apache.lucene.spatial3d.geom.GeoCompositeAreaShape;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPath;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPathFactory;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPoint;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPointShape;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPointShapeFactory;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPolygon;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPolygonFactory;
|
||||
import org.apache.lucene.spatial3d.geom.PlanetModel;
|
||||
import org.locationtech.spatial4j.context.SpatialContext;
|
||||
import org.locationtech.spatial4j.context.SpatialContextFactory;
|
||||
import org.locationtech.spatial4j.distance.DistanceUtils;
|
||||
import org.locationtech.spatial4j.exception.InvalidShapeException;
|
||||
import org.locationtech.spatial4j.shape.Circle;
|
||||
import org.locationtech.spatial4j.shape.Point;
|
||||
import org.locationtech.spatial4j.shape.Rectangle;
|
||||
import org.locationtech.spatial4j.shape.Shape;
|
||||
import org.locationtech.spatial4j.shape.ShapeCollection;
|
||||
import org.locationtech.spatial4j.shape.ShapeFactory;
|
||||
|
||||
/**
|
||||
* Geo3d implementation of {@link ShapeFactory}
|
||||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
public class Geo3dShapeFactory implements ShapeFactory {
|
||||
|
||||
private final boolean normWrapLongitude;
|
||||
private SpatialContext context;
|
||||
private PlanetModel planetModel;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public Geo3dShapeFactory(SpatialContext context, SpatialContextFactory factory) {
|
||||
this.context = context;
|
||||
this.planetModel = ((Geo3dSpatialContextFactory) factory).planetModel;
|
||||
this.normWrapLongitude = context.isGeo() && factory.normWrapLongitude;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SpatialContext getSpatialContext() {
|
||||
return context;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isNormWrapLongitude() {
|
||||
return normWrapLongitude;
|
||||
}
|
||||
|
||||
@Override
|
||||
public double normX(double x) {
|
||||
if (this.normWrapLongitude) {
|
||||
x = DistanceUtils.normLonDEG(x);
|
||||
}
|
||||
return x;
|
||||
}
|
||||
|
||||
@Override
|
||||
public double normY(double y) {
|
||||
return y;
|
||||
}
|
||||
|
||||
@Override
|
||||
public double normZ(double z) {
|
||||
return z;
|
||||
}
|
||||
|
||||
@Override
|
||||
public double normDist(double distance) {
|
||||
return distance;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void verifyX(double x) {
|
||||
Rectangle bounds = this.context.getWorldBounds();
|
||||
if (x < bounds.getMinX() || x > bounds.getMaxX()) {
|
||||
throw new InvalidShapeException("Bad X value " + x + " is not in boundary " + bounds);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void verifyY(double y) {
|
||||
Rectangle bounds = this.context.getWorldBounds();
|
||||
if (y < bounds.getMinY() || y > bounds.getMaxY()) {
|
||||
throw new InvalidShapeException("Bad Y value " + y + " is not in boundary " + bounds);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void verifyZ(double v) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public Point pointXY(double x, double y) {
|
||||
GeoPointShape point = GeoPointShapeFactory.makeGeoPointShape(planetModel,
|
||||
y * DistanceUtils.DEGREES_TO_RADIANS,
|
||||
x * DistanceUtils.DEGREES_TO_RADIANS);
|
||||
return new Geo3dPointShape(point, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Point pointXYZ(double x, double y, double z) {
|
||||
GeoPoint point = new GeoPoint(x, y, z);
|
||||
GeoPointShape pointShape = GeoPointShapeFactory.makeGeoPointShape(planetModel,
|
||||
point.getLatitude(),
|
||||
point.getLongitude());
|
||||
return new Geo3dPointShape(pointShape, context);
|
||||
//throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Rectangle rect(Point point, Point point1) {
|
||||
return rect(point.getX(), point1.getX(), point.getY(), point1.getY());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Rectangle rect(double minX, double maxX, double minY, double maxY) {
|
||||
GeoBBox bBox = GeoBBoxFactory.makeGeoBBox(planetModel,
|
||||
maxY * DistanceUtils.DEGREES_TO_RADIANS,
|
||||
minY * DistanceUtils.DEGREES_TO_RADIANS,
|
||||
minX * DistanceUtils.DEGREES_TO_RADIANS,
|
||||
maxX * DistanceUtils.DEGREES_TO_RADIANS);
|
||||
return new Geo3dRectangleShape(bBox, context, minX, maxX, minY, maxY);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Circle circle(double x, double y, double distance) {
|
||||
GeoCircle circle = GeoCircleFactory.makeGeoCircle(planetModel,
|
||||
y * DistanceUtils.DEGREES_TO_RADIANS,
|
||||
x * DistanceUtils.DEGREES_TO_RADIANS,
|
||||
distance * DistanceUtils.DEGREES_TO_RADIANS);
|
||||
return new Geo3dCircleShape(circle, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Circle circle(Point point, double distance) {
|
||||
return circle(point.getX(), point.getY(), distance);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Shape lineString(List<Point> list, double distance) {
|
||||
LineStringBuilder builder = lineString();
|
||||
for (Point point : list) {
|
||||
builder.pointXY(point.getX(), point.getY());
|
||||
}
|
||||
builder.buffer(distance);
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
public <S extends Shape> ShapeCollection<S> multiShape(List<S> list) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public LineStringBuilder lineString() {
|
||||
return new Geo3dLineStringBuilder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public PolygonBuilder polygon() {
|
||||
return new Geo3dPolygonBuilder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T extends Shape> MultiShapeBuilder<T> multiShape(Class<T> aClass) {
|
||||
return new Geo3dMultiShapeBuilder<>();
|
||||
}
|
||||
|
||||
@Override
|
||||
public MultiPointBuilder multiPoint() {
|
||||
return new Geo3dMultiPointBuilder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public MultiLineStringBuilder multiLineString() {
|
||||
return new Geo3dMultiLineBuilder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public MultiPolygonBuilder multiPolygon() {
|
||||
return new Geo3dMultiPolygonBuilder();
|
||||
}
|
||||
|
||||
/**
|
||||
* Geo3d implementation of {@link org.locationtech.spatial4j.shape.ShapeFactory.PointsBuilder} interface to
|
||||
* generate {@link GeoPoint}.
|
||||
*
|
||||
* @param <T> is normally this object
|
||||
*/
|
||||
private class Geo3dPointBuilder<T> implements PointsBuilder<T> {
|
||||
|
||||
List<GeoPoint> points = new ArrayList<>();
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public T pointXY(double x, double y) {
|
||||
GeoPoint point = new GeoPoint(planetModel, y * DistanceUtils.DEGREES_TO_RADIANS, x * DistanceUtils.DEGREES_TO_RADIANS);
|
||||
points.add(point);
|
||||
return (T) this;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public T pointXYZ(double x, double y, double z) {
|
||||
GeoPoint point = new GeoPoint(x, y, z);
|
||||
if (!points.contains(point)) {
|
||||
points.add(point);
|
||||
}
|
||||
return (T) this;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Geo3d implementation of {@link org.locationtech.spatial4j.shape.ShapeFactory.LineStringBuilder} to generate
|
||||
* line strings. Note that GeoPath needs a buffer so we set the
|
||||
* buffer to 1e-10.
|
||||
*/
|
||||
private class Geo3dLineStringBuilder extends Geo3dPointBuilder<LineStringBuilder> implements LineStringBuilder {
|
||||
|
||||
double distance = 0;
|
||||
|
||||
@Override
|
||||
public LineStringBuilder buffer(double distance) {
|
||||
this.distance = distance;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Shape build() {
|
||||
GeoPath path = GeoPathFactory.makeGeoPath(planetModel, distance, points.toArray(new GeoPoint[points.size()]));
|
||||
return new Geo3dShape<>(path, context);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Geo3d implementation of {@link org.locationtech.spatial4j.shape.ShapeFactory.PolygonBuilder} to generate
|
||||
* polygons.
|
||||
*/
|
||||
private class Geo3dPolygonBuilder extends Geo3dPointBuilder<PolygonBuilder> implements PolygonBuilder {
|
||||
|
||||
List<GeoPolygon> polyHoles;
|
||||
|
||||
@Override
|
||||
public HoleBuilder hole() {
|
||||
return new Geo3dHoleBuilder();
|
||||
}
|
||||
|
||||
class Geo3dHoleBuilder extends Geo3dPointBuilder<PolygonBuilder.HoleBuilder> implements PolygonBuilder.HoleBuilder {
|
||||
@Override
|
||||
public PolygonBuilder endHole() {
|
||||
if (polyHoles == null) {
|
||||
polyHoles = new ArrayList<>();
|
||||
}
|
||||
polyHoles.add(GeoPolygonFactory.makeGeoPolygon(planetModel, points));
|
||||
return Geo3dPolygonBuilder.this;
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public Shape build() {
|
||||
GeoPolygon polygon = GeoPolygonFactory.makeGeoPolygon(planetModel, points, polyHoles);
|
||||
return new Geo3dShape<>(polygon, context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Shape buildOrRect() {
|
||||
return build();
|
||||
}
|
||||
}
|
||||
|
||||
private class Geo3dMultiPointBuilder extends Geo3dPointBuilder<MultiPointBuilder> implements MultiPointBuilder {
|
||||
|
||||
@Override
|
||||
public Shape build() {
|
||||
GeoCompositeAreaShape areaShape = new GeoCompositeAreaShape(planetModel);
|
||||
for (GeoPoint point : points) {
|
||||
GeoPointShape pointShape = GeoPointShapeFactory.makeGeoPointShape(planetModel, point.getLatitude(), point.getLongitude());
|
||||
areaShape.addShape(pointShape);
|
||||
}
|
||||
return new Geo3dShape<>(areaShape, context);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Geo3d implementation of {@link org.locationtech.spatial4j.shape.ShapeFactory.MultiLineStringBuilder} to generate
|
||||
* multi-lines
|
||||
*/
|
||||
private class Geo3dMultiLineBuilder implements MultiLineStringBuilder {
|
||||
|
||||
List<LineStringBuilder> builders = new ArrayList<>();
|
||||
|
||||
@Override
|
||||
public LineStringBuilder lineString() {
|
||||
return new Geo3dLineStringBuilder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public MultiLineStringBuilder add(LineStringBuilder lineStringBuilder) {
|
||||
builders.add(lineStringBuilder);
|
||||
return this;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public Shape build() {
|
||||
GeoCompositeAreaShape areaShape = new GeoCompositeAreaShape(planetModel);
|
||||
for (LineStringBuilder builder : builders) {
|
||||
Geo3dShape<GeoPolygon> shape = (Geo3dShape<GeoPolygon>) builder.build();
|
||||
areaShape.addShape(shape.shape);
|
||||
}
|
||||
return new Geo3dShape<>(areaShape, context);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Geo3d implementation of {@link org.locationtech.spatial4j.shape.ShapeFactory.MultiPolygonBuilder} to generate
|
||||
* multi-polygons. We have chosen to use a composite shape but
|
||||
* it might be possible to use GeoComplexPolygon.
|
||||
*/
|
||||
private class Geo3dMultiPolygonBuilder implements MultiPolygonBuilder {
|
||||
|
||||
List<PolygonBuilder> builders = new ArrayList<>();
|
||||
|
||||
@Override
|
||||
public PolygonBuilder polygon() {
|
||||
return new Geo3dPolygonBuilder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public MultiPolygonBuilder add(PolygonBuilder polygonBuilder) {
|
||||
builders.add(polygonBuilder);
|
||||
return this;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public Shape build() {
|
||||
GeoCompositeAreaShape areaShape = new GeoCompositeAreaShape(planetModel);
|
||||
for (PolygonBuilder builder : builders) {
|
||||
Geo3dShape<GeoPolygon> shape = (Geo3dShape<GeoPolygon>) builder.build();
|
||||
areaShape.addShape(shape.shape);
|
||||
}
|
||||
return new Geo3dShape<>(areaShape, context);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Geo3d implementation of {@link org.locationtech.spatial4j.shape.ShapeFactory.MultiShapeBuilder} to generate
|
||||
* geometry collections
|
||||
*
|
||||
* @param <T> is the type of shapes.
|
||||
*/
|
||||
private class Geo3dMultiShapeBuilder<T extends Shape> implements MultiShapeBuilder<T> {
|
||||
|
||||
GeoCompositeAreaShape composite = new GeoCompositeAreaShape(planetModel);
|
||||
|
||||
@Override
|
||||
public MultiShapeBuilder<T> add(T shape) {
|
||||
Geo3dShape<?> areaShape = (Geo3dShape<?>) shape;
|
||||
composite.addShape(areaShape.shape);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Shape build() {
|
||||
return new Geo3dShape<>(composite, context);
|
||||
}
|
||||
}
|
||||
}
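// Illustrative usage sketch, not part of this patch: building geo3d-backed spatial4j
// shapes through this factory (relies on the imports at the top of this file). Coordinates
// are degrees (x = longitude, y = latitude); the circle radius and line buffer are surface
// distances in degrees, matching the DEGREES_TO_RADIANS conversions above. The class
// name below is hypothetical.
class Geo3dShapeFactoryUsageExample {
  public static void main(String[] args) {
    Geo3dSpatialContextFactory factory = new Geo3dSpatialContextFactory();
    factory.planetModel = PlanetModel.WGS84;           // public field on the factory
    SpatialContext ctx = factory.newSpatialContext();
    ShapeFactory sf = ctx.getShapeFactory();           // a Geo3dShapeFactory instance
    Shape circle = sf.circle(2.35, 48.86, 1.0);        // 1-degree-radius circle
    Shape bufferedLine = sf.lineString()
        .pointXY(30, 10).pointXY(10, 30).pointXY(40, 40)
        .buffer(0.5)                                   // GeoPath needs a buffer
        .build();
    System.out.println(circle.relate(bufferedLine));   // prints a SpatialRelation
  }
}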
|
|
@ -0,0 +1,94 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.lucene.spatial.spatial4j;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.lucene.spatial3d.geom.PlanetModel;
|
||||
import org.locationtech.spatial4j.context.SpatialContext;
|
||||
import org.locationtech.spatial4j.context.SpatialContextFactory;
|
||||
|
||||
/**
|
||||
* Geo3d implementation of {@link SpatialContextFactory}
|
||||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
public class Geo3dSpatialContextFactory extends SpatialContextFactory {
|
||||
|
||||
/**
|
||||
* The default planet model
|
||||
*/
|
||||
private static final PlanetModel DEFAULT_PLANET_MODEL = PlanetModel.SPHERE;
|
||||
|
||||
/**
|
||||
* The planet model
|
||||
*/
|
||||
public PlanetModel planetModel;
|
||||
|
||||
/**
|
||||
* Empty Constructor.
|
||||
*/
|
||||
public Geo3dSpatialContextFactory() {
|
||||
this.binaryCodecClass = Geo3dBinaryCodec.class;
|
||||
this.shapeFactoryClass = Geo3dShapeFactory.class;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SpatialContext newSpatialContext() {
|
||||
if (planetModel == null) {
|
||||
planetModel = DEFAULT_PLANET_MODEL;
|
||||
}
|
||||
if (distCalc == null) {
|
||||
this.distCalc = new Geo3dDistanceCalculator(planetModel);
|
||||
}
|
||||
return new SpatialContext(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void init(Map<String, String> args, ClassLoader classLoader) {
|
||||
initPlanetModel(args);
|
||||
super.init(args, classLoader);
|
||||
}
|
||||
|
||||
protected void initPlanetModel(Map<String, String> args) {
|
||||
String planetModel = args.get("planetModel");
|
||||
if (planetModel != null) {
|
||||
if (planetModel.equalsIgnoreCase("sphere")) {
|
||||
this.planetModel = PlanetModel.SPHERE;
|
||||
} else if (planetModel.equalsIgnoreCase("wgs84")) {
|
||||
this.planetModel = PlanetModel.WGS84;
|
||||
} else {
|
||||
throw new RuntimeException("Unknown planet model: " + planetModel);
|
||||
}
|
||||
} else {
|
||||
this.planetModel = DEFAULT_PLANET_MODEL;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void initCalculator() {
|
||||
String calcStr = this.args.get("distCalculator");
|
||||
if (calcStr == null) {
|
||||
return;
|
||||
} else if (calcStr.equals("geo3d")) {
|
||||
this.distCalc = new Geo3dDistanceCalculator(planetModel);
|
||||
} else {
|
||||
super.initCalculator(); // some other distance calculator
|
||||
}
|
||||
}
|
||||
}
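// Illustrative configuration sketch, not part of this patch: driving this factory purely
// through spatial4j's string arguments, assuming the standard
// SpatialContextFactory.makeSpatialContext(Map, ClassLoader) entry point. The
// "planetModel" and "distCalculator" keys are the ones read by initPlanetModel() and
// initCalculator() above; the class name below is hypothetical.
class Geo3dContextFromArgsExample {
  public static void main(String[] argv) {
    Map<String, String> args = new java.util.HashMap<>();
    args.put("spatialContextFactory", Geo3dSpatialContextFactory.class.getName());
    args.put("planetModel", "wgs84");        // "sphere" or "wgs84"
    args.put("distCalculator", "geo3d");     // selects Geo3dDistanceCalculator
    SpatialContext ctx = SpatialContextFactory.makeSpatialContext(
        args, Geo3dSpatialContextFactory.class.getClassLoader());
    System.out.println(ctx.isGeo());         // true for a geodetic context
  }
}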
|
|
@ -16,18 +16,18 @@
|
|||
*/
|
||||
package org.apache.lucene.spatial;
|
||||
|
||||
import org.apache.lucene.spatial.query.SpatialArgs;
|
||||
import org.apache.lucene.spatial.spatial4j.Geo3dSpatialContextFactory;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.junit.Test;
|
||||
import org.locationtech.spatial4j.context.SpatialContext;
|
||||
import org.locationtech.spatial4j.shape.Shape;
|
||||
import org.apache.lucene.spatial.query.SpatialArgs;
|
||||
import org.junit.Test;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
|
||||
public class SpatialArgsTest {
|
||||
public class SpatialArgsTest extends LuceneTestCase {
|
||||
|
||||
@Test
|
||||
public void calcDistanceFromErrPct() {
|
||||
final SpatialContext ctx = SpatialContext.GEO;
|
||||
final SpatialContext ctx = usually() ? SpatialContext.GEO : new Geo3dSpatialContextFactory().newSpatialContext();
|
||||
final double DEP = 0.5;//distErrPct
|
||||
|
||||
//the result is the diagonal distance from the center to the closest corner,
|
||||
|
|
|
@ -28,16 +28,15 @@ import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
|
|||
import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
|
||||
import org.apache.lucene.spatial.query.SpatialOperation;
|
||||
import org.apache.lucene.spatial.serialized.SerializedDVStrategy;
|
||||
import org.apache.lucene.spatial3d.geom.GeoBBoxFactory;
|
||||
import org.apache.lucene.spatial3d.geom.GeoCircleFactory;
|
||||
import org.apache.lucene.spatial3d.geom.GeoAreaShape;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPath;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPathFactory;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPoint;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPolygonFactory;
|
||||
import org.apache.lucene.spatial3d.geom.GeoShape;
|
||||
import org.apache.lucene.spatial3d.geom.PlanetModel;
|
||||
import org.apache.lucene.spatial3d.geom.RandomGeo3dShapeGenerator;
|
||||
import org.junit.Test;
|
||||
import org.locationtech.spatial4j.context.SpatialContext;
|
||||
import org.locationtech.spatial4j.shape.Point;
|
||||
import org.locationtech.spatial4j.shape.Rectangle;
|
||||
import org.locationtech.spatial4j.shape.Shape;
|
||||
|
||||
|
@ -45,11 +44,10 @@ import static org.locationtech.spatial4j.distance.DistanceUtils.DEGREES_TO_RADIA
|
|||
|
||||
public class Geo3dRptTest extends RandomSpatialOpStrategyTestCase {
|
||||
|
||||
private PlanetModel planetModel;
|
||||
private RandomGeo3dShapeGenerator shapeGenerator;
|
||||
private SpatialPrefixTree grid;
|
||||
private RecursivePrefixTreeStrategy rptStrategy;
|
||||
{
|
||||
this.ctx = SpatialContext.GEO;
|
||||
}
|
||||
|
||||
private void setupGeohashGrid() {
|
||||
this.grid = new GeohashPrefixTree(ctx, 2);//A fairly shallow grid
|
||||
|
@ -64,7 +62,12 @@ public class Geo3dRptTest extends RandomSpatialOpStrategyTestCase {
|
|||
}
|
||||
|
||||
private void setupStrategy() {
|
||||
//setup
|
||||
shapeGenerator = new RandomGeo3dShapeGenerator();
|
||||
planetModel = shapeGenerator.randomPlanetModel();
|
||||
Geo3dSpatialContextFactory factory = new Geo3dSpatialContextFactory();
|
||||
factory.planetModel = planetModel;
|
||||
ctx = factory.newSpatialContext();
|
||||
|
||||
setupGeohashGrid();
|
||||
|
||||
SerializedDVStrategy serializedDVStrategy = new SerializedDVStrategy(ctx, getClass().getSimpleName() + "_sdv");
|
||||
|
@ -76,12 +79,12 @@ public class Geo3dRptTest extends RandomSpatialOpStrategyTestCase {
|
|||
public void testFailure1() throws IOException {
|
||||
setupStrategy();
|
||||
final List<GeoPoint> points = new ArrayList<GeoPoint>();
|
||||
points.add(new GeoPoint(PlanetModel.SPHERE, 18 * DEGREES_TO_RADIANS, -27 * DEGREES_TO_RADIANS));
|
||||
points.add(new GeoPoint(PlanetModel.SPHERE, -57 * DEGREES_TO_RADIANS, 146 * DEGREES_TO_RADIANS));
|
||||
points.add(new GeoPoint(PlanetModel.SPHERE, 14 * DEGREES_TO_RADIANS, -180 * DEGREES_TO_RADIANS));
|
||||
points.add(new GeoPoint(PlanetModel.SPHERE, -15 * DEGREES_TO_RADIANS, 153 * DEGREES_TO_RADIANS));
|
||||
points.add(new GeoPoint(planetModel, 18 * DEGREES_TO_RADIANS, -27 * DEGREES_TO_RADIANS));
|
||||
points.add(new GeoPoint(planetModel, -57 * DEGREES_TO_RADIANS, 146 * DEGREES_TO_RADIANS));
|
||||
points.add(new GeoPoint(planetModel, 14 * DEGREES_TO_RADIANS, -180 * DEGREES_TO_RADIANS));
|
||||
points.add(new GeoPoint(planetModel, -15 * DEGREES_TO_RADIANS, 153 * DEGREES_TO_RADIANS));
|
||||
|
||||
final Shape triangle = new Geo3dShape(GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points),ctx);
|
||||
final Shape triangle = new Geo3dShape(GeoPolygonFactory.makeGeoPolygon(planetModel, points),ctx);
|
||||
final Rectangle rect = ctx.makeRectangle(-49, -45, 73, 86);
|
||||
testOperation(rect,SpatialOperation.Intersects,triangle, false);
|
||||
}
|
||||
|
@ -91,16 +94,16 @@ public class Geo3dRptTest extends RandomSpatialOpStrategyTestCase {
|
|||
setupStrategy();
|
||||
|
||||
final List<GeoPoint> points = new ArrayList<>();
|
||||
points.add(new GeoPoint(PlanetModel.SPHERE, 18 * DEGREES_TO_RADIANS, -27 * DEGREES_TO_RADIANS));
|
||||
points.add(new GeoPoint(PlanetModel.SPHERE, -57 * DEGREES_TO_RADIANS, 146 * DEGREES_TO_RADIANS));
|
||||
points.add(new GeoPoint(PlanetModel.SPHERE, 14 * DEGREES_TO_RADIANS, -180 * DEGREES_TO_RADIANS));
|
||||
points.add(new GeoPoint(PlanetModel.SPHERE, -15 * DEGREES_TO_RADIANS, 153 * DEGREES_TO_RADIANS));
|
||||
points.add(new GeoPoint(planetModel, 18 * DEGREES_TO_RADIANS, -27 * DEGREES_TO_RADIANS));
|
||||
points.add(new GeoPoint(planetModel, -57 * DEGREES_TO_RADIANS, 146 * DEGREES_TO_RADIANS));
|
||||
points.add(new GeoPoint(planetModel, 14 * DEGREES_TO_RADIANS, -180 * DEGREES_TO_RADIANS));
|
||||
points.add(new GeoPoint(planetModel, -15 * DEGREES_TO_RADIANS, 153 * DEGREES_TO_RADIANS));
|
||||
final GeoPoint[] pathPoints = new GeoPoint[] {
|
||||
new GeoPoint(PlanetModel.SPHERE, 55.0 * DEGREES_TO_RADIANS, -26.0 * DEGREES_TO_RADIANS),
|
||||
new GeoPoint(PlanetModel.SPHERE, -90.0 * DEGREES_TO_RADIANS, 0.0),
|
||||
new GeoPoint(PlanetModel.SPHERE, 54.0 * DEGREES_TO_RADIANS, 165.0 * DEGREES_TO_RADIANS),
|
||||
new GeoPoint(PlanetModel.SPHERE, -90.0 * DEGREES_TO_RADIANS, 0.0)};
|
||||
final GeoShape path = GeoPathFactory.makeGeoPath(PlanetModel.SPHERE, 29 * DEGREES_TO_RADIANS, pathPoints);
|
||||
new GeoPoint(planetModel, 55.0 * DEGREES_TO_RADIANS, -26.0 * DEGREES_TO_RADIANS),
|
||||
new GeoPoint(planetModel, -90.0 * DEGREES_TO_RADIANS, 0.0),
|
||||
new GeoPoint(planetModel, 54.0 * DEGREES_TO_RADIANS, 165.0 * DEGREES_TO_RADIANS),
|
||||
new GeoPoint(planetModel, -90.0 * DEGREES_TO_RADIANS, 0.0)};
|
||||
final GeoPath path = GeoPathFactory.makeGeoPath(planetModel, 29 * DEGREES_TO_RADIANS, pathPoints);
|
||||
final Shape shape = new Geo3dShape(path,ctx);
|
||||
final Rectangle rect = ctx.makeRectangle(131, 143, 39, 54);
|
||||
testOperation(rect,SpatialOperation.Intersects,shape,true);
|
||||
|
@ -114,111 +117,57 @@ public class Geo3dRptTest extends RandomSpatialOpStrategyTestCase {
|
|||
testOperationRandomShapes(SpatialOperation.Intersects);
|
||||
}
|
||||
|
||||
private Shape makeTriangle(double x1, double y1, double x2, double y2, double x3, double y3) {
|
||||
final List<GeoPoint> geoPoints = new ArrayList<>();
|
||||
geoPoints.add(new GeoPoint(PlanetModel.SPHERE, y1 * DEGREES_TO_RADIANS, x1 * DEGREES_TO_RADIANS));
|
||||
geoPoints.add(new GeoPoint(PlanetModel.SPHERE, y2 * DEGREES_TO_RADIANS, x2 * DEGREES_TO_RADIANS));
|
||||
geoPoints.add(new GeoPoint(PlanetModel.SPHERE, y3 * DEGREES_TO_RADIANS, x3 * DEGREES_TO_RADIANS));
|
||||
final GeoShape shape = GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, geoPoints);
|
||||
return new Geo3dShape(shape, ctx);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Shape randomIndexedShape() {
|
||||
return randomRectangle();
|
||||
int type = shapeGenerator.randomShapeType();
|
||||
GeoAreaShape areaShape = shapeGenerator.randomGeoAreaShape(type, planetModel);
|
||||
return new Geo3dShape<>(areaShape, ctx);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Shape randomQueryShape() {
|
||||
final int shapeType = random().nextInt(4);
|
||||
switch (shapeType) {
|
||||
case 0: {
|
||||
// Polygons
|
||||
final int vertexCount = random().nextInt(3) + 3;
|
||||
while (true) {
|
||||
final List<GeoPoint> geoPoints = new ArrayList<>();
|
||||
while (geoPoints.size() < vertexCount) {
|
||||
final Point point = randomPoint();
|
||||
final GeoPoint gPt = new GeoPoint(PlanetModel.SPHERE, point.getY() * DEGREES_TO_RADIANS, point.getX() * DEGREES_TO_RADIANS);
|
||||
geoPoints.add(gPt);
|
||||
}
|
||||
final int convexPointIndex = random().nextInt(vertexCount); //If we get this wrong, hopefully we get IllegalArgumentException
|
||||
try {
|
||||
final GeoShape shape = GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, geoPoints);
|
||||
if (shape == null) {
|
||||
continue;
|
||||
}
|
||||
return new Geo3dShape(shape, ctx);
|
||||
} catch (IllegalArgumentException e) {
|
||||
// This is what happens when we create a shape that is invalid. Although it is conceivable that there are cases where
|
||||
// the exception is thrown incorrectly, we aren't going to be able to do that in this random test.
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
case 1: {
|
||||
// Circles
|
||||
while (true) {
|
||||
final int circleRadius = random().nextInt(179) + 1;
|
||||
final Point point = randomPoint();
|
||||
try {
|
||||
final GeoShape shape = GeoCircleFactory.makeGeoCircle(PlanetModel.SPHERE, point.getY() * DEGREES_TO_RADIANS, point.getX() * DEGREES_TO_RADIANS,
|
||||
circleRadius * DEGREES_TO_RADIANS);
|
||||
return new Geo3dShape(shape, ctx);
|
||||
} catch (IllegalArgumentException e) {
|
||||
// This is what happens when we create a shape that is invalid. Although it is conceivable that there are cases where
|
||||
// the exception is thrown incorrectly, we aren't going to be able to do that in this random test.
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
case 2: {
|
||||
// Rectangles
|
||||
while (true) {
|
||||
Point ulhcPoint = randomPoint();
|
||||
Point lrhcPoint = randomPoint();
|
||||
if (ulhcPoint.getY() < lrhcPoint.getY()) {
|
||||
//swap
|
||||
Point temp = ulhcPoint;
|
||||
ulhcPoint = lrhcPoint;
|
||||
lrhcPoint = temp;
|
||||
}
|
||||
try {
|
||||
final GeoShape shape = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, ulhcPoint.getY() * DEGREES_TO_RADIANS,
|
||||
lrhcPoint.getY() * DEGREES_TO_RADIANS,
|
||||
ulhcPoint.getX() * DEGREES_TO_RADIANS,
|
||||
lrhcPoint.getX() * DEGREES_TO_RADIANS);
|
||||
//System.err.println("Trial rectangle shape: "+shape);
|
||||
return new Geo3dShape(shape, ctx);
|
||||
} catch (IllegalArgumentException e) {
|
||||
// This is what happens when we create a shape that is invalid. Although it is conceivable that there are cases where
|
||||
// the exception is thrown incorrectly, we aren't going to be able to do that in this random test.
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
case 3: {
|
||||
// Paths
|
||||
final int pointCount = random().nextInt(5) + 1;
|
||||
final double width = (random().nextInt(89)+1) * DEGREES_TO_RADIANS;
|
||||
final GeoPoint[] points = new GeoPoint[pointCount];
|
||||
while (true) {
|
||||
for (int i = 0; i < pointCount; i++) {
|
||||
final Point nextPoint = randomPoint();
|
||||
points[i] = new GeoPoint(PlanetModel.SPHERE, nextPoint.getY() * DEGREES_TO_RADIANS, nextPoint.getX() * DEGREES_TO_RADIANS);
|
||||
}
|
||||
try {
|
||||
final GeoShape path = GeoPathFactory.makeGeoPath(PlanetModel.SPHERE, width, points);
|
||||
return new Geo3dShape(path, ctx);
|
||||
} catch (IllegalArgumentException e) {
|
||||
// This is what happens when we create a shape that is invalid. Although it is conceivable that there are cases where
|
||||
// the exception is thrown incorrectly, we aren't going to be able to do that in this random test.
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
default:
|
||||
throw new IllegalStateException("Unexpected shape type");
|
||||
}
|
||||
int type = shapeGenerator.randomShapeType();
|
||||
GeoAreaShape areaShape = shapeGenerator.randomGeoAreaShape(type, planetModel);
|
||||
return new Geo3dShape<>(areaShape, ctx);
|
||||
}
|
||||
|
||||
//TODO move to a new test class?
|
||||
@Test
|
||||
public void testWKT() throws Exception {
|
||||
Geo3dSpatialContextFactory factory = new Geo3dSpatialContextFactory();
|
||||
SpatialContext ctx = factory.newSpatialContext();
|
||||
String wkt = "POLYGON ((20.0 -60.4, 20.1 -60.4, 20.1 -60.3, 20.0 -60.3,20.0 -60.4))";
|
||||
Shape s = ctx.getFormats().getWktReader().read(wkt);
|
||||
assertTrue(s instanceof Geo3dShape<?>);
|
||||
wkt = "POINT (30 10)";
|
||||
s = ctx.getFormats().getWktReader().read(wkt);
|
||||
assertTrue(s instanceof Geo3dShape<?>);
|
||||
wkt = "LINESTRING (30 10, 10 30, 40 40)";
|
||||
s = ctx.getFormats().getWktReader().read(wkt);
|
||||
assertTrue(s instanceof Geo3dShape<?>);
|
||||
wkt = "POLYGON ((35 10, 45 45, 15 40, 10 20, 35 10), (20 30, 35 35, 30 20, 20 30))";
|
||||
s = ctx.getFormats().getWktReader().read(wkt);
|
||||
assertTrue(s instanceof Geo3dShape<?>);
|
||||
wkt = "MULTIPOINT ((10 40), (40 30), (20 20), (30 10))";
|
||||
s = ctx.getFormats().getWktReader().read(wkt);
|
||||
assertTrue(s instanceof Geo3dShape<?>);
|
||||
wkt = "MULTILINESTRING ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10))";
|
||||
s = ctx.getFormats().getWktReader().read(wkt);
|
||||
assertTrue(s instanceof Geo3dShape<?>);
|
||||
wkt = "MULTIPOLYGON (((40 40, 20 45, 45 30, 40 40)), ((20 35, 10 30, 10 10, 30 5, 45 20, 20 35),(30 20, 20 15, 20 25, 30 20)))";
|
||||
s = ctx.getFormats().getWktReader().read(wkt);
|
||||
assertTrue(s instanceof Geo3dShape<?>);
|
||||
wkt = "GEOMETRYCOLLECTION(POINT(4 6),LINESTRING(4 6,7 10))";
|
||||
s = ctx.getFormats().getWktReader().read(wkt);
|
||||
assertTrue(s instanceof Geo3dShape<?>);
|
||||
wkt = "ENVELOPE(1, 2, 4, 3)";
|
||||
s = ctx.getFormats().getWktReader().read(wkt);
|
||||
assertTrue(s instanceof Geo3dShape<?>);
|
||||
wkt = "BUFFER(POINT(-10 30), 5.2)";
|
||||
s = ctx.getFormats().getWktReader().read(wkt);
|
||||
assertTrue(s instanceof Geo3dShape<?>);
|
||||
//wkt = "BUFFER(LINESTRING(1 2, 3 4), 0.5)";
|
||||
//s = ctx.getFormats().getWktReader().read(wkt);
|
||||
//assertTrue(s instanceof Geo3dShape<?>);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,6 +19,8 @@ package org.apache.lucene.spatial.spatial4j;
|
|||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.lucene.spatial3d.geom.GeoPath;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPolygon;
|
||||
import org.locationtech.spatial4j.TestLog;
|
||||
import org.locationtech.spatial4j.context.SpatialContext;
|
||||
import org.locationtech.spatial4j.distance.DistanceUtils;
|
||||
|
@ -119,9 +121,9 @@ public abstract class Geo3dShapeRectRelationTestCase extends RandomizedShapeTest
|
|||
protected Geo3dShape generateRandomShape(Point nearP) {
|
||||
final int circleRadius = 180 - random().nextInt(180);//no 0-radius
|
||||
final Point point = nearP;
|
||||
final GeoShape shape = GeoCircleFactory.makeGeoCircle(planetModel, point.getY() * DEGREES_TO_RADIANS, point.getX() * DEGREES_TO_RADIANS,
|
||||
final GeoCircle shape = GeoCircleFactory.makeGeoCircle(planetModel, point.getY() * DEGREES_TO_RADIANS, point.getX() * DEGREES_TO_RADIANS,
|
||||
circleRadius * DEGREES_TO_RADIANS);
|
||||
return new Geo3dShape(planetModel, shape, ctx);
|
||||
return new Geo3dShape(shape, ctx);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -153,11 +155,11 @@ public abstract class Geo3dShapeRectRelationTestCase extends RandomizedShapeTest
|
|||
ulhcPoint = lrhcPoint;
|
||||
lrhcPoint = temp;
|
||||
}
|
||||
final GeoShape shape = GeoBBoxFactory.makeGeoBBox(planetModel, ulhcPoint.getY() * DEGREES_TO_RADIANS,
|
||||
final GeoBBox shape = GeoBBoxFactory.makeGeoBBox(planetModel, ulhcPoint.getY() * DEGREES_TO_RADIANS,
|
||||
lrhcPoint.getY() * DEGREES_TO_RADIANS,
|
||||
ulhcPoint.getX() * DEGREES_TO_RADIANS,
|
||||
lrhcPoint.getX() * DEGREES_TO_RADIANS);
|
||||
return new Geo3dShape(planetModel, shape, ctx);
|
||||
return new Geo3dShape(shape, ctx);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -185,11 +187,11 @@ public abstract class Geo3dShapeRectRelationTestCase extends RandomizedShapeTest
|
|||
geoPoints.add(gPt);
|
||||
}
|
||||
try {
|
||||
final GeoShape shape = GeoPolygonFactory.makeGeoPolygon(planetModel, geoPoints);
|
||||
final GeoPolygon shape = GeoPolygonFactory.makeGeoPolygon(planetModel, geoPoints);
|
||||
if (shape == null) {
|
||||
continue;
|
||||
}
|
||||
return new Geo3dShape(planetModel, shape, ctx);
|
||||
return new Geo3dShape(shape, ctx);
|
||||
} catch (IllegalArgumentException e) {
|
||||
// This is what happens when we create a shape that is invalid. Although it is conceivable that there are cases where
|
||||
// the exception is thrown incorrectly, we aren't going to be able to do that in this random test.
|
||||
|
@ -231,8 +233,8 @@ public abstract class Geo3dShapeRectRelationTestCase extends RandomizedShapeTest
|
|||
}
|
||||
|
||||
try {
|
||||
final GeoShape path = GeoPathFactory.makeGeoPath(planetModel, width, points);
|
||||
return new Geo3dShape(planetModel, path, ctx);
|
||||
final GeoPath path = GeoPathFactory.makeGeoPath(planetModel, width, points);
|
||||
return new Geo3dShape(path, ctx);
|
||||
} catch (IllegalArgumentException e) {
|
||||
// This is what happens when we create a shape that is invalid. Although it is conceivable that there are cases where
|
||||
// the exception is thrown incorrectly, we aren't going to be able to do that in this random test.
|
||||
|
@ -257,6 +259,6 @@ public abstract class Geo3dShapeRectRelationTestCase extends RandomizedShapeTest
|
|||
|
||||
private Point geoPointToSpatial4jPoint(GeoPoint geoPoint) {
|
||||
return ctx.makePoint(geoPoint.getLongitude() * DistanceUtils.RADIANS_TO_DEGREES,
|
||||
geoPoint.getLongitude() * DistanceUtils.RADIANS_TO_DEGREES);
|
||||
geoPoint.getLatitude() * DistanceUtils.RADIANS_TO_DEGREES);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,21 +19,29 @@ package org.apache.lucene.spatial.spatial4j;
|
|||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.locationtech.spatial4j.shape.Rectangle;
|
||||
import org.apache.lucene.spatial3d.geom.GeoArea;
|
||||
import org.apache.lucene.spatial3d.geom.GeoBBox;
|
||||
import org.apache.lucene.spatial3d.geom.GeoBBoxFactory;
|
||||
import org.apache.lucene.spatial3d.geom.GeoCircle;
|
||||
import org.apache.lucene.spatial3d.geom.GeoCircleFactory;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPoint;
|
||||
import org.apache.lucene.spatial3d.geom.GeoPolygonFactory;
|
||||
import org.apache.lucene.spatial3d.geom.GeoShape;
|
||||
import org.apache.lucene.spatial3d.geom.PlanetModel;
|
||||
import org.junit.Test;
|
||||
import org.locationtech.spatial4j.shape.Circle;
|
||||
import org.locationtech.spatial4j.shape.Point;
|
||||
import org.locationtech.spatial4j.shape.Rectangle;
|
||||
import org.locationtech.spatial4j.shape.SpatialRelation;
|
||||
|
||||
public class Geo3dShapeSphereModelRectRelationTest extends Geo3dShapeRectRelationTestCase {
|
||||
|
||||
public Geo3dShapeSphereModelRectRelationTest() {
|
||||
super(PlanetModel.SPHERE);
|
||||
Geo3dSpatialContextFactory factory = new Geo3dSpatialContextFactory();
|
||||
factory.planetModel = PlanetModel.SPHERE;
|
||||
//factory.distCalc = new GeodesicSphereDistCalc.Haversine();
|
||||
this.ctx = factory.newSpatialContext();
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -60,13 +68,28 @@ public class Geo3dShapeSphereModelRectRelationTest extends Geo3dShapeRectRelatio
|
|||
|
||||
@Test
|
||||
public void testFailure2_LUCENE6475() {
|
||||
GeoShape geo3dCircle = GeoCircleFactory.makeGeoCircle(planetModel, 1.6282053147165243E-4 * RADIANS_PER_DEGREE,
|
||||
GeoCircle geo3dCircle = GeoCircleFactory.makeGeoCircle(planetModel, 1.6282053147165243E-4 * RADIANS_PER_DEGREE,
|
||||
-70.1600629789353 * RADIANS_PER_DEGREE, 86 * RADIANS_PER_DEGREE);
|
||||
Geo3dShape geo3dShape = new Geo3dShape(planetModel, geo3dCircle, ctx);
|
||||
Geo3dShape geo3dShape = new Geo3dShape(geo3dCircle, ctx);
|
||||
Rectangle rect = ctx.makeRectangle(-118, -114, -2.0, 32.0);
|
||||
assertTrue(geo3dShape.relate(rect).intersects());
|
||||
// thus the bounding box must intersect too
|
||||
assertTrue(geo3dShape.getBoundingBox().relate(rect).intersects());
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void pointBearingTest(){
|
||||
double radius = 136;
|
||||
double distance = 135.97;
|
||||
double bearing = 188;
|
||||
Point p = ctx.getShapeFactory().pointXY(35, 85);
|
||||
Circle circle = ctx.getShapeFactory().circle(p, radius);
|
||||
Point bPoint = ctx.getDistCalc().pointOnBearing(p, distance, bearing, ctx, (Point) null);
|
||||
|
||||
double d = ctx.getDistCalc().distance(p, bPoint);
|
||||
assertEquals(d, distance, 10e-8);
|
||||
|
||||
assertEquals(circle.relate(bPoint), SpatialRelation.CONTAINS);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -26,11 +26,18 @@ import org.apache.lucene.spatial3d.geom.GeoPath;
|
|||
import org.apache.lucene.spatial3d.geom.GeoPoint;
|
||||
import org.apache.lucene.spatial3d.geom.PlanetModel;
|
||||
import org.junit.Test;
|
||||
import org.locationtech.spatial4j.shape.Circle;
|
||||
import org.locationtech.spatial4j.shape.Point;
|
||||
import org.locationtech.spatial4j.shape.SpatialRelation;
|
||||
|
||||
public class Geo3dShapeWGS84ModelRectRelationTest extends Geo3dShapeRectRelationTestCase {
|
||||
|
||||
public Geo3dShapeWGS84ModelRectRelationTest() {
|
||||
super(PlanetModel.WGS84);
|
||||
Geo3dSpatialContextFactory factory = new Geo3dSpatialContextFactory();
|
||||
factory.planetModel = PlanetModel.WGS84;
|
||||
//factory.distCalc = new GeodesicSphereDistCalc.Haversine();
|
||||
this.ctx = factory.newSpatialContext();
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -92,4 +99,18 @@ public class Geo3dShapeWGS84ModelRectRelationTest extends Geo3dShapeRectRelation
|
|||
// (3) The point mentioned is NOT inside the path segment, either. (I think it should be...)
|
||||
}
|
||||
|
||||
@Test
|
||||
public void pointBearingTest(){
|
||||
double radius = 136;
|
||||
double distance = 135.97;
|
||||
double bearing = 188;
|
||||
Point p = ctx.getShapeFactory().pointXY(35, 85);
|
||||
Circle circle = ctx.getShapeFactory().circle(p, radius);
|
||||
Point bPoint = ctx.getDistCalc().pointOnBearing(p, distance, bearing, ctx, (Point) null);
|
||||
|
||||
double d = ctx.getDistCalc().distance(p, bPoint);
|
||||
assertEquals(d, distance, 10e-8);
|
||||
|
||||
assertEquals(circle.relate(bPoint), SpatialRelation.CONTAINS);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,35 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.lucene.spatial3d.geom;
|
||||
|
||||
/**
|
||||
* This interface describes methods that determine what the bounds are
|
||||
* for a shape.
|
||||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
public interface Bounded {
|
||||
|
||||
/**
|
||||
* Compute bounds for the shape.
|
||||
*
|
||||
* @param bounds is the input bounds object.
|
||||
* The input object will be modified.
|
||||
*/
|
||||
public void getBounds(final Bounds bounds);
|
||||
|
||||
}
|
|
@ -40,4 +40,20 @@ public class GeoCircleFactory {
|
|||
return new GeoStandardCircle(planetModel, latitude, longitude, radius);
|
||||
}
|
||||
|
||||
/** Create an exact GeoCircle given specified bounds and desired accuracy.
|
||||
* @param planetModel is the planet model.
|
||||
* @param latitude is the center latitude.
|
||||
* @param longitude is the center longitude.
|
||||
* @param radius is the radius surface distance.
|
||||
* @param accuracy is the maximum linear distance between the circle approximation and the real circle, as computed using
|
||||
* the Vincenty formula.
|
||||
* @return a GeoCircle corresponding to what was specified.
|
||||
*/
|
||||
public static GeoCircle makeExactGeoCircle(final PlanetModel planetModel, final double latitude, final double longitude, final double radius, final double accuracy) {
|
||||
if (radius < Vector.MINIMUM_ANGULAR_RESOLUTION) {
|
||||
return new GeoDegeneratePoint(planetModel, latitude, longitude);
|
||||
}
|
||||
return new GeoExactCircle(planetModel, latitude, longitude, radius, accuracy);
|
||||
}
|
||||
|
||||
}
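// Illustrative sketch, not part of this patch: creating an exact circle on the WGS84
// ellipsoid via the factory method above. The radius is an angular surface distance in
// radians and the accuracy bound is the maximum allowed linear deviation of the planar
// approximation; the class name below is hypothetical.
class GeoExactCircleExample {
  public static void main(String[] args) {
    final double lat = 40.0 * Math.PI / 180.0;
    final double lon = -74.0 * Math.PI / 180.0;
    final GeoCircle circle = GeoCircleFactory.makeExactGeoCircle(
        PlanetModel.WGS84, lat, lon, 0.05, 1e-12);
    final GeoPoint center = new GeoPoint(PlanetModel.WGS84, lat, lon);
    System.out.println(circle.isWithin(center));   // expected: true
  }
}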
|
||||
|
|
|
@ -0,0 +1,373 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.lucene.spatial3d.geom;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.List;
|
||||
import java.util.HashMap;
|
||||
import java.util.ArrayList;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
|
||||
/**
|
||||
* Circular area with a center and radius.
|
||||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
class GeoExactCircle extends GeoBaseCircle {
|
||||
/** Center of circle */
|
||||
protected final GeoPoint center;
|
||||
/** Cutoff angle of circle (not quite the same thing as radius) */
|
||||
protected final double cutoffAngle;
|
||||
/** Actual accuracy */
|
||||
protected final double actualAccuracy;
|
||||
/** Planes describing the circle */
|
||||
protected final List<SidedPlane> circlePlanes;
|
||||
/** Bounds for the planes */
|
||||
protected final Map<SidedPlane, Membership> eitherBounds;
|
||||
/** A point that is on the world and on the circle plane */
|
||||
protected final GeoPoint[] edgePoints;
|
||||
/** The set of notable points for each edge */
|
||||
protected final List<GeoPoint[]> notableEdgePoints;
|
||||
/** Notable points for a circle -- there aren't any */
|
||||
protected static final GeoPoint[] circlePoints = new GeoPoint[0];
|
||||
|
||||
/** Constructor.
|
||||
*@param planetModel is the planet model.
|
||||
*@param lat is the center latitude.
|
||||
*@param lon is the center longitude.
|
||||
*@param cutoffAngle is the surface radius for the circle.
|
||||
*@param accuracy is the allowed error value (linear distance).
|
||||
*/
|
||||
public GeoExactCircle(final PlanetModel planetModel, final double lat, final double lon, final double cutoffAngle, final double accuracy) {
|
||||
super(planetModel);
|
||||
if (lat < -Math.PI * 0.5 || lat > Math.PI * 0.5)
|
||||
throw new IllegalArgumentException("Latitude out of bounds");
|
||||
if (lon < -Math.PI || lon > Math.PI)
|
||||
throw new IllegalArgumentException("Longitude out of bounds");
|
||||
if (cutoffAngle < 0.0)
|
||||
throw new IllegalArgumentException("Cutoff angle out of bounds");
|
||||
if (cutoffAngle < Vector.MINIMUM_RESOLUTION)
|
||||
throw new IllegalArgumentException("Cutoff angle cannot be effectively zero");
|
||||
|
||||
this.center = new GeoPoint(planetModel, lat, lon);
|
||||
this.cutoffAngle = cutoffAngle;
|
||||
|
||||
if (accuracy < Vector.MINIMUM_RESOLUTION) {
|
||||
actualAccuracy = Vector.MINIMUM_RESOLUTION;
|
||||
} else {
|
||||
actualAccuracy = accuracy;
|
||||
}
|
||||
|
||||
// Since the provided cutoff angle is really a surface distance, we need to use the point-on-bearing even for spheres.
|
||||
final List<SidedPlane> circlePlanes = new ArrayList<>();
|
||||
// If it turns out that there's only one circle plane, this array will be populated but unused
|
||||
final List<GeoPoint[]> notableEdgePoints = new ArrayList<>();
|
||||
|
||||
// We construct approximation planes until we have a low enough error estimate
|
||||
final List<ApproximationSlice> slices = new ArrayList<>(100);
|
||||
// Construct four cardinal points, and then we'll build the first two planes
|
||||
final GeoPoint northPoint = planetModel.surfacePointOnBearing(center, cutoffAngle, 0.0);
|
||||
final GeoPoint southPoint = planetModel.surfacePointOnBearing(center, cutoffAngle, Math.PI);
|
||||
final GeoPoint eastPoint = planetModel.surfacePointOnBearing(center, cutoffAngle, Math.PI * 0.5);
|
||||
final GeoPoint westPoint = planetModel.surfacePointOnBearing(center, cutoffAngle, Math.PI * 1.5);
|
||||
|
||||
if (planetModel.c > planetModel.ab) {
|
||||
// z can be greater than x or y, so ellipse is longer in height than width
|
||||
slices.add(new ApproximationSlice(center, eastPoint, Math.PI * 0.5, westPoint, Math.PI * -0.5, northPoint, 0.0));
|
||||
slices.add(new ApproximationSlice(center, westPoint, Math.PI * 1.5, eastPoint, Math.PI * 0.5, southPoint, Math.PI));
|
||||
} else {
|
||||
// z will be less than x or y, so ellipse is shorter than it is tall
|
||||
slices.add(new ApproximationSlice(center, northPoint, Math.PI * 2.0, southPoint, Math.PI, eastPoint, Math.PI * 0.5));
|
||||
slices.add(new ApproximationSlice(center, southPoint, Math.PI, northPoint, 0.0, westPoint, Math.PI * 1.5));
|
||||
}
|
||||
|
||||
// Now, iterate over slices until we have converted all of them into safe SidedPlanes.
|
||||
while (slices.size() > 0) {
|
||||
// Peel off a slice from the back
|
||||
final ApproximationSlice thisSlice = slices.remove(slices.size()-1);
|
||||
// Assess it to see if it is OK as it is, or needs to be split.
|
||||
// To do this, we need to look at the part of the circle that will have the greatest error.
|
||||
// We will need to compute bearing points for these.
|
||||
final double interpPoint1Bearing = (thisSlice.point1Bearing + thisSlice.middlePointBearing) * 0.5;
|
||||
final GeoPoint interpPoint1 = planetModel.surfacePointOnBearing(center, cutoffAngle, interpPoint1Bearing);
|
||||
final double interpPoint2Bearing = (thisSlice.point2Bearing + thisSlice.middlePointBearing) * 0.5;
|
||||
final GeoPoint interpPoint2 = planetModel.surfacePointOnBearing(center, cutoffAngle, interpPoint2Bearing);
|
||||
// Is this point on the plane? (that is, is the approximation good enough?)
|
||||
if (Math.abs(thisSlice.plane.evaluate(interpPoint1)) < actualAccuracy && Math.abs(thisSlice.plane.evaluate(interpPoint2)) < actualAccuracy) {
|
||||
// Good enough; add it to the list of planes, unless it was identical to the previous plane
|
||||
if (circlePlanes.size() == 0 || !circlePlanes.get(circlePlanes.size()-1).isNumericallyIdentical(thisSlice.plane)) {
|
||||
circlePlanes.add(thisSlice.plane);
|
||||
notableEdgePoints.add(new GeoPoint[]{thisSlice.endPoint1, thisSlice.endPoint2});
|
||||
}
|
||||
} else {
|
||||
// Split the plane into two, and add it back to the end
|
||||
slices.add(new ApproximationSlice(center,
|
||||
thisSlice.endPoint1, thisSlice.point1Bearing,
|
||||
thisSlice.middlePoint, thisSlice.middlePointBearing,
|
||||
interpPoint1, interpPoint1Bearing));
|
||||
slices.add(new ApproximationSlice(center,
|
||||
thisSlice.middlePoint, thisSlice.middlePointBearing,
|
||||
thisSlice.endPoint2, thisSlice.point2Bearing,
|
||||
interpPoint2, interpPoint2Bearing));
|
||||
}
|
||||
}
|
||||
|
||||
//System.out.println("Number of planes needed: "+circlePlanes.size());
|
||||
|
||||
this.edgePoints = new GeoPoint[]{northPoint};
|
||||
this.circlePlanes = circlePlanes;
|
||||
// Compute bounds
|
||||
if (circlePlanes.size() == 1) {
|
||||
this.eitherBounds = null;
|
||||
this.notableEdgePoints = null;
|
||||
} else {
|
||||
this.notableEdgePoints = notableEdgePoints;
|
||||
this.eitherBounds = new HashMap<>(circlePlanes.size());
|
||||
for (int i = 0; i < circlePlanes.size(); i++) {
|
||||
final SidedPlane thisPlane = circlePlanes.get(i);
|
||||
final SidedPlane previousPlane = (i == 0)?circlePlanes.get(circlePlanes.size()-1):circlePlanes.get(i-1);
|
||||
final SidedPlane nextPlane = (i == circlePlanes.size()-1)?circlePlanes.get(0):circlePlanes.get(i+1);
|
||||
eitherBounds.put(thisPlane, new EitherBound(previousPlane, nextPlane));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Constructor for deserialization.
|
||||
* @param planetModel is the planet model.
|
||||
* @param inputStream is the input stream.
|
||||
*/
|
||||
public GeoExactCircle(final PlanetModel planetModel, final InputStream inputStream) throws IOException {
|
||||
this(planetModel,
|
||||
SerializableObject.readDouble(inputStream),
|
||||
SerializableObject.readDouble(inputStream),
|
||||
SerializableObject.readDouble(inputStream),
|
||||
SerializableObject.readDouble(inputStream));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(final OutputStream outputStream) throws IOException {
|
||||
SerializableObject.writeDouble(outputStream, center.getLatitude());
|
||||
SerializableObject.writeDouble(outputStream, center.getLongitude());
|
||||
SerializableObject.writeDouble(outputStream, cutoffAngle);
|
||||
SerializableObject.writeDouble(outputStream, actualAccuracy);
|
||||
}
|
||||
|
||||
@Override
|
||||
public double getRadius() {
|
||||
return cutoffAngle;
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoPoint getCenter() {
|
||||
return center;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected double distance(final DistanceStyle distanceStyle, final double x, final double y, final double z) {
|
||||
return distanceStyle.computeDistance(this.center, x, y, z);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void distanceBounds(final Bounds bounds, final DistanceStyle distanceStyle, final double distanceValue) {
|
||||
// TBD: Compute actual bounds based on distance
|
||||
getBounds(bounds);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected double outsideDistance(final DistanceStyle distanceStyle, final double x, final double y, final double z) {
|
||||
if (circlePlanes == null) {
|
||||
return 0.0;
|
||||
}
|
||||
if (circlePlanes.size() == 1) {
|
||||
return distanceStyle.computeDistance(planetModel, circlePlanes.get(0), x, y, z);
|
||||
}
|
||||
double outsideDistance = Double.POSITIVE_INFINITY;
|
||||
for (final SidedPlane plane : circlePlanes) {
|
||||
final double distance = distanceStyle.computeDistance(planetModel, plane, x, y, z, eitherBounds.get(plane));
|
||||
if (distance < outsideDistance) {
|
||||
outsideDistance = distance;
|
||||
}
|
||||
}
|
||||
return outsideDistance;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isWithin(final double x, final double y, final double z) {
|
||||
if (circlePlanes == null) {
|
||||
return true;
|
||||
}
|
||||
for (final Membership plane : circlePlanes) {
|
||||
if (!plane.isWithin(x, y, z)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoPoint[] getEdgePoints() {
|
||||
return edgePoints;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean intersects(final Plane p, final GeoPoint[] notablePoints, final Membership... bounds) {
|
||||
if (circlePlanes == null) {
|
||||
return false;
|
||||
}
|
||||
if (circlePlanes.size() == 1) {
|
||||
return circlePlanes.get(0).intersects(planetModel, p, notablePoints, circlePoints, bounds);
|
||||
}
|
||||
for (int edgeIndex = 0; edgeIndex < circlePlanes.size(); edgeIndex++) {
|
||||
final SidedPlane edge = circlePlanes.get(edgeIndex);
|
||||
final GeoPoint[] points = notableEdgePoints.get(edgeIndex);
|
||||
if (edge.intersects(planetModel, p, notablePoints, points, bounds, eitherBounds.get(edge))) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean intersects(GeoShape geoShape) {
|
||||
if (circlePlanes == null) {
|
||||
return false;
|
||||
}
|
||||
if (circlePlanes.size() == 1) {
|
||||
return geoShape.intersects(circlePlanes.get(0), circlePoints);
|
||||
}
|
||||
for (int edgeIndex = 0; edgeIndex < circlePlanes.size(); edgeIndex++) {
|
||||
final SidedPlane edge = circlePlanes.get(edgeIndex);
|
||||
final GeoPoint[] points = notableEdgePoints.get(edgeIndex);
|
||||
if (geoShape.intersects(edge, points, eitherBounds.get(edge))) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void getBounds(Bounds bounds) {
|
||||
super.getBounds(bounds);
|
||||
if (circlePlanes == null) {
|
||||
return;
|
||||
}
|
||||
bounds.addPoint(center);
|
||||
if (circlePlanes.size() == 1) {
|
||||
bounds.addPlane(planetModel, circlePlanes.get(0));
|
||||
return;
|
||||
}
|
||||
// Add bounds for all circle planes
|
||||
for (final SidedPlane plane : circlePlanes) {
|
||||
bounds.addPlane(planetModel, plane, eitherBounds.get(plane));
|
||||
// We don't bother to compute the intersection bounds since, unless the planet model is pathological, we expect planes to be intersecting at shallow
|
||||
// angles.
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (!(o instanceof GeoExactCircle))
|
||||
return false;
|
||||
GeoExactCircle other = (GeoExactCircle) o;
|
||||
return super.equals(other) && other.center.equals(center) && other.cutoffAngle == cutoffAngle && other.actualAccuracy == actualAccuracy;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = super.hashCode();
|
||||
result = 31 * result + center.hashCode();
|
||||
long temp = Double.doubleToLongBits(cutoffAngle);
|
||||
result = 31 * result + (int) (temp ^ (temp >>> 32));
|
||||
temp = Double.doubleToLongBits(actualAccuracy);
|
||||
result = 31 * result + (int) (temp ^ (temp >>> 32));
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "GeoExactCircle: {planetmodel=" + planetModel+", center=" + center + ", radius=" + cutoffAngle + "(" + cutoffAngle * 180.0 / Math.PI + "), accuracy=" + actualAccuracy + "}";
|
||||
}
|
||||
|
||||
/** A membership implementation representing edges that must apply.
|
||||
*/
|
||||
protected static class EitherBound implements Membership {
|
||||
|
||||
protected final SidedPlane sideBound1;
|
||||
protected final SidedPlane sideBound2;
|
||||
|
||||
/** Constructor.
|
||||
* @param sideBound1 is the first side bound.
|
||||
* @param sideBound2 is the second side bound.
|
||||
*/
|
||||
public EitherBound(final SidedPlane sideBound1, final SidedPlane sideBound2) {
|
||||
this.sideBound1 = sideBound1;
|
||||
this.sideBound2 = sideBound2;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isWithin(final Vector v) {
|
||||
return sideBound1.isWithin(v) && sideBound2.isWithin(v);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isWithin(final double x, final double y, final double z) {
|
||||
return sideBound1.isWithin(x,y,z) && sideBound2.isWithin(x,y,z);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "(" + sideBound1 + "," + sideBound2 + ")";
|
||||
}
|
||||
}
|
||||
|
||||
/** A temporary description of a section of circle.
|
||||
*/
|
||||
protected static class ApproximationSlice {
|
||||
public final SidedPlane plane;
|
||||
public final GeoPoint endPoint1;
|
||||
public final double point1Bearing;
|
||||
public final GeoPoint endPoint2;
|
||||
public final double point2Bearing;
|
||||
public final GeoPoint middlePoint;
|
||||
public final double middlePointBearing;
|
||||
|
||||
public ApproximationSlice(final GeoPoint center,
|
||||
final GeoPoint endPoint1, final double point1Bearing,
|
||||
final GeoPoint endPoint2, final double point2Bearing,
|
||||
final GeoPoint middlePoint, final double middlePointBearing) {
|
||||
this.endPoint1 = endPoint1;
|
||||
this.point1Bearing = point1Bearing;
|
||||
this.endPoint2 = endPoint2;
|
||||
this.point2Bearing = point2Bearing;
|
||||
this.middlePoint = middlePoint;
|
||||
this.middlePointBearing = middlePointBearing;
|
||||
// Construct the plane going through the three given points
|
||||
this.plane = SidedPlane.constructNormalizedThreePointSidedPlane(center, endPoint1, endPoint2, middlePoint);
|
||||
if (this.plane == null) {
|
||||
throw new IllegalArgumentException("Either circle is too large to fit on ellipsoid or accuracy is too high; could not construct a plane with endPoint1="+endPoint1+" bearing "+point1Bearing+", endPoint2="+endPoint2+" bearing "+point2Bearing+", middle="+middlePoint+" bearing "+middlePointBearing);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -23,7 +23,7 @@ package org.apache.lucene.spatial3d.geom;
|
|||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
public interface GeoShape extends Membership, PlanetObject {
|
||||
public interface GeoShape extends Bounded, Membership, PlanetObject {
|
||||
|
||||
/**
|
||||
* Return a sample point that is on the outside edge/boundary of the shape.
|
||||
|
@ -53,12 +53,4 @@ public interface GeoShape extends Membership, PlanetObject {
|
|||
*/
|
||||
public boolean intersects(final Plane plane, final GeoPoint[] notablePoints, final Membership... bounds);
|
||||
|
||||
/**
|
||||
* Compute bounds for the shape.
|
||||
*
|
||||
* @param bounds is the input bounds object.
|
||||
* The input object will be modified.
|
||||
*/
|
||||
public void getBounds(final Bounds bounds);
|
||||
|
||||
}
|
||||
|
|
|
@ -311,7 +311,7 @@ public class PlanetModel implements SerializableObject {
|
|||
lambdaP = lambda;
|
||||
lambda = L + (1.0 - C) * flattening * sinAlpha *
|
||||
(sigma + C * sinSigma * (cos2SigmaM + C * cosSigma * (-1.0 + 2.0 * cos2SigmaM *cos2SigmaM)));
|
||||
} while (Math.abs(lambda-lambdaP) > Vector.MINIMUM_RESOLUTION && ++iterLimit < 40);
|
||||
} while (Math.abs(lambda-lambdaP) >= Vector.MINIMUM_RESOLUTION && ++iterLimit < 100);
|
||||
|
||||
final double uSq = cosSqAlpha * this.squareRatio;
|
||||
final double A = 1.0 + uSq / 16384.0 * (4096.0 + uSq * (-768.0 + uSq * (320.0 - 175.0 * uSq)));
|
||||
|
@ -322,6 +322,65 @@ public class PlanetModel implements SerializableObject {
|
|||
return c * A * (sigma - deltaSigma);
|
||||
}
|
||||
|
||||
/** Compute new point given original point, a bearing direction, and an adjusted angle (as would be computed by
|
||||
* the surfaceDistance() method above). The original point can be anywhere on the globe. The bearing direction
|
||||
* ranges from 0 (due east at the equator) to pi/2 (due north) to pi (due west at the equator) to 3 pi/2 (due south)
|
||||
* to 2 pi.
|
||||
* @param from is the starting point.
|
||||
* @param dist is the adjusted angle.
|
||||
* @param bearing is the direction to proceed.
|
||||
* @return the new point, consistent with the bearing direction and distance.
|
||||
*/
|
||||
public GeoPoint surfacePointOnBearing(final GeoPoint from, final double dist, final double bearing) {
|
||||
// Algorithm using Vincenty's formulae (https://en.wikipedia.org/wiki/Vincenty%27s_formulae)
|
||||
// which takes into account that planets may not be spherical.
|
||||
//Code adaptation from http://www.movable-type.co.uk/scripts/latlong-vincenty.html
|
||||
|
||||
double lat = from.getLatitude();
|
||||
double lon = from.getLongitude();
|
||||
double sinα1 = Math.sin(bearing);
|
||||
double cosα1 = Math.cos(bearing);
|
||||
|
||||
double tanU1 = (1.0 - flattening) * Math.tan(lat);
|
||||
double cosU1 = 1.0 / Math.sqrt((1.0 + tanU1 * tanU1));
|
||||
double sinU1 = tanU1 * cosU1;
|
||||
|
||||
double σ1 = Math.atan2(tanU1, cosα1);
|
||||
double sinα = cosU1 * sinα1;
|
||||
double cosSqα = 1.0 - sinα * sinα;
|
||||
double uSq = cosSqα * squareRatio;
|
||||
double A = 1.0 + uSq / 16384.0 * (4096.0 + uSq * (-768.0 + uSq * (320.0 - 175.0 * uSq)));
|
||||
double B = uSq / 1024.0 * (256.0 + uSq * (-128.0 + uSq * (74.0 - 47.0 * uSq)));
|
||||
|
||||
double cos2σM;
|
||||
double sinσ;
|
||||
double cosσ;
|
||||
double Δσ;
|
||||
|
||||
double σ = dist / (c * A);
|
||||
double σʹ;
|
||||
double iterations = 0;
|
||||
do {
|
||||
cos2σM = Math.cos(2.0 * σ1 + σ);
|
||||
sinσ = Math.sin(σ);
|
||||
cosσ = Math.cos(σ);
|
||||
Δσ = B * sinσ * (cos2σM + B / 4.0 * (cosσ * (-1.0 + 2.0 * cos2σM * cos2σM) -
|
||||
B / 6.0 * cos2σM * (-3.0 + 4.0 * sinσ * sinσ) * (-3.0 + 4.0 * cos2σM * cos2σM)));
|
||||
σʹ = σ;
|
||||
σ = dist / (c * A) + Δσ;
|
||||
} while (Math.abs(σ - σʹ) >= Vector.MINIMUM_RESOLUTION && ++iterations < 100);
|
||||
|
||||
double x = sinU1 * sinσ - cosU1 * cosσ * cosα1;
|
||||
double φ2 = Math.atan2(sinU1 * cosσ + cosU1 * sinσ * cosα1, (1.0 - flattening) * Math.sqrt(sinα * sinα + x * x));
|
||||
double λ = Math.atan2(sinσ * sinα1, cosU1 * cosσ - sinU1 * sinσ * cosα1);
|
||||
double C = flattening / 16.0 * cosSqα * (4.0 + flattening * (4.0 - 3.0 * cosSqα));
|
||||
double L = λ - (1.0 - C) * flattening * sinα *
|
||||
(σ + C * sinσ * (cos2σM + C * cosσ * (-1.0 + 2.0 * cos2σM * cos2σM)));
|
||||
double λ2 = (lon + L + 3.0 * Math.PI) % (2.0 * Math.PI) - Math.PI; // normalise to -180..+180
|
||||
|
||||
return new GeoPoint(this, φ2, λ2);
|
||||
}
|
||||
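// Editor's sketch, not part of this commit: how the method above is expected to round-trip with
// surfaceDistance(), per its javadoc. The tolerance is illustrative; the real check lives in the
// GeoCircleTest.checkBearingPoint() helper added further down in this patch.
PlanetModel model = PlanetModel.WGS84;
GeoPoint start = new GeoPoint(model, 0.25 * Math.PI, 0.0);
double bearing = 0.5 * Math.PI;                 // bearing convention as documented above
double angle = 0.1;                             // an adjusted angle, as surfaceDistance() would produce
GeoPoint reached = model.surfacePointOnBearing(start, angle, bearing);
double roundTrip = model.surfaceDistance(start, reached);
// roundTrip should match angle to within the resolution used by the Vincenty iteration
assert Math.abs(roundTrip - angle) < 1e-9;      // illustrative tolerance only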
|
||||
@Override
|
||||
public boolean equals(final Object o) {
|
||||
if (!(o instanceof PlanetModel))
|
||||
|
|
|
@ -74,6 +74,7 @@ class StandardObjects {
|
|||
classRegsitry.put(StandardXYZSolid.class, 34);
|
||||
classRegsitry.put(PlanetModel.class, 35);
|
||||
classRegsitry.put(GeoDegeneratePath.class, 36);
|
||||
classRegsitry.put(GeoExactCircle.class, 37);
|
||||
|
||||
for (Class<?> clazz : classRegsitry.keySet()){
|
||||
codeRegsitry.put(classRegsitry.get(clazz), clazz);
|
||||
|
|
|
@ -16,11 +16,40 @@
|
|||
*/
|
||||
package org.apache.lucene.spatial3d.geom;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.annotations.Repeat;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.junit.Test;
|
||||
|
||||
public class GeoCircleTest extends LuceneTestCase {
|
||||
|
||||
@Test
|
||||
public void testExactCircle() {
|
||||
GeoCircle c;
|
||||
GeoPoint gp;
|
||||
|
||||
// Construct a variety of circles to see how many actual planes are involved
|
||||
c = new GeoExactCircle(PlanetModel.WGS84, 0.0, 0.0, 0.1, 1e-6);
|
||||
gp = new GeoPoint(PlanetModel.WGS84, 0.0, 0.2);
|
||||
assertTrue(!c.isWithin(gp));
|
||||
gp = new GeoPoint(PlanetModel.WGS84, 0.0, 0.0);
|
||||
assertTrue(c.isWithin(gp));
|
||||
|
||||
c = new GeoExactCircle(PlanetModel.WGS84, 0.1, 0.0, 0.1, 1e-6);
|
||||
|
||||
c = new GeoExactCircle(PlanetModel.WGS84, 0.2, 0.0, 0.1, 1e-6);
|
||||
|
||||
c = new GeoExactCircle(PlanetModel.WGS84, 0.3, 0.0, 0.1, 1e-6);
|
||||
|
||||
c = new GeoExactCircle(PlanetModel.WGS84, 0.4, 0.0, 0.1, 1e-6);
|
||||
|
||||
c = new GeoExactCircle(PlanetModel.WGS84, Math.PI * 0.5, 0.0, 0.1, 1e-6);
|
||||
gp = new GeoPoint(PlanetModel.WGS84, Math.PI * 0.5 - 0.2, 0.0);
|
||||
assertTrue(!c.isWithin(gp));
|
||||
gp = new GeoPoint(PlanetModel.WGS84, Math.PI * 0.5, 0.0);
|
||||
assertTrue(c.isWithin(gp));
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCircleDistance() {
|
||||
GeoCircle c;
|
||||
|
@ -418,5 +447,59 @@ public class GeoCircleTest extends LuceneTestCase {
|
|||
assert gc.isWithin(gp)?solid.isWithin(gp):true;
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
@Repeat(iterations = 100)
|
||||
public void RandomPointBearingWGS84Test(){
|
||||
PlanetModel planetModel = PlanetModel.WGS84;
|
||||
RandomGeo3dShapeGenerator generator = new RandomGeo3dShapeGenerator();
|
||||
GeoPoint center = generator.randomGeoPoint(planetModel);
|
||||
double radius = random().nextDouble() * Math.PI;
|
||||
checkBearingPoint(planetModel, center, radius, 0);
|
||||
checkBearingPoint(planetModel, center, radius, 0.5 * Math.PI);
|
||||
checkBearingPoint(planetModel, center, radius, Math.PI);
|
||||
checkBearingPoint(planetModel, center, radius, 1.5 * Math.PI);
|
||||
}
|
||||
|
||||
@Test
|
||||
@Repeat(iterations = 10000)
|
||||
public void RandomPointBearingCardinalTest(){
|
||||
double ab = random().nextDouble() * 0.6 + 0.9;
|
||||
double c = random().nextDouble() * 0.6 + 0.9 ;
|
||||
PlanetModel planetModel = new PlanetModel(ab, c);
|
||||
RandomGeo3dShapeGenerator generator = new RandomGeo3dShapeGenerator();
|
||||
GeoPoint center = generator.randomGeoPoint(planetModel);
|
||||
double radius = random().nextDouble() * 0.9 * Math.PI;
|
||||
checkBearingPoint(planetModel, center, radius, 0);
|
||||
checkBearingPoint(planetModel, center, radius, 0.5 * Math.PI);
|
||||
checkBearingPoint(planetModel, center, radius, Math.PI);
|
||||
checkBearingPoint(planetModel, center, radius, 1.5 * Math.PI);
|
||||
}
|
||||
|
||||
private void checkBearingPoint(PlanetModel planetModel, GeoPoint center, double radius, double bearingAngle) {
|
||||
GeoPoint point = planetModel.surfacePointOnBearing(center, radius, bearingAngle);
|
||||
double surfaceDistance = planetModel.surfaceDistance(center, point);
|
||||
assertTrue(planetModel.toString() + " " + Double.toString(surfaceDistance - radius) + " " + Double.toString(radius), surfaceDistance - radius < Vector.MINIMUM_ANGULAR_RESOLUTION);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void exactCircleLargeTest(){
|
||||
boolean success = true;
|
||||
try {
|
||||
GeoCircle circle = GeoCircleFactory.makeExactGeoCircle(new PlanetModel(0.5, 0.7), 0.25 * Math.PI, 0,0.35 * Math.PI, 1e-12);
|
||||
} catch (IllegalArgumentException e) {
|
||||
success = false;
|
||||
}
|
||||
assertTrue(success);
|
||||
success = false;
|
||||
try {
|
||||
GeoCircle circle = GeoCircleFactory.makeExactGeoCircle(PlanetModel.WGS84, 0.25 * Math.PI, 0,0.9996 * Math.PI, 1e-12);
|
||||
} catch (IllegalArgumentException e) {
|
||||
success = true;
|
||||
}
|
||||
assertTrue(success);
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -55,6 +55,7 @@ public class RandomGeo3dShapeGenerator extends LuceneTestCase {
|
|||
final protected static int COLLECTION = 8;
|
||||
final protected static int POINT = 9;
|
||||
final protected static int LINE = 10;
|
||||
final protected static int EXACT_CIRCLE = 11;
|
||||
|
||||
/* Helper shapes for generating constraints which are just three-sided polygons */
|
||||
final protected static int CONVEX_SIMPLE_POLYGON = 500;
|
||||
|
@ -87,7 +88,7 @@ public class RandomGeo3dShapeGenerator extends LuceneTestCase {
|
|||
* @return a randomly generated shape code
|
||||
*/
|
||||
public int randomShapeType(){
|
||||
return random().nextInt(11);
|
||||
return random().nextInt(12);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -100,7 +101,7 @@ public class RandomGeo3dShapeGenerator extends LuceneTestCase {
|
|||
* @return a randomly generated polygon code
|
||||
*/
|
||||
public int randomGeoAreaShapeType(){
|
||||
return random().nextInt(11);
|
||||
return random().nextInt(12);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -284,6 +285,9 @@ public class RandomGeo3dShapeGenerator extends LuceneTestCase {
|
|||
case CONCAVE_SIMPLE_POLYGON: {
|
||||
return concaveSimplePolygon(planetModel, constraints);
|
||||
}
|
||||
case EXACT_CIRCLE: {
|
||||
return exactCircle(planetModel, constraints);
|
||||
}
|
||||
default:
|
||||
throw new IllegalStateException("Unexpected shape type");
|
||||
}
|
||||
|
@ -350,6 +354,38 @@ public class RandomGeo3dShapeGenerator extends LuceneTestCase {
|
|||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Method that returns a randomly generated GeoCircle under the given constraints. Returns
|
||||
* NULL if it cannot build the GeoCircle under the given constraints.
|
||||
*
|
||||
* @param planetModel The planet model.
|
||||
* @param constraints The given constraints.
|
||||
* @return The random generated GeoCircle.
|
||||
*/
|
||||
private GeoCircle exactCircle(PlanetModel planetModel , Constraints constraints) {
|
||||
int iterations=0;
|
||||
while (iterations < MAX_SHAPE_ITERATIONS) {
|
||||
iterations++;
|
||||
final GeoPoint center = randomGeoPoint(planetModel, constraints);
|
||||
if (center == null){
|
||||
continue;
|
||||
}
|
||||
final double radius = randomCutoffAngle();
|
||||
final int pow = random().nextInt(10) +3;
|
||||
final double accuracy = random().nextDouble() * Math.pow(10, (-1) * pow);
|
||||
try {
|
||||
GeoCircle circle = GeoCircleFactory.makeExactGeoCircle(planetModel, center.getLatitude(), center.getLongitude(), radius, accuracy);
|
||||
if (!constraints.valid(circle)) {
|
||||
continue;
|
||||
}
|
||||
return circle;
|
||||
} catch (IllegalArgumentException e) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Method that returns a randomly generated GeoBBox under the given constraints. Returns
|
||||
* NULL if it cannot build the GeoBBox under the given constraints.
|
||||
|
|
|
@ -307,6 +307,11 @@ public class GeoTestUtil {
|
|||
return nextBoxInternal(nextLatitude(), nextLatitude(), nextLongitude(), nextLongitude(), true);
|
||||
}
|
||||
|
||||
/** returns next pseudorandom box: does not cross the 180th meridian */
|
||||
public static Rectangle nextBoxNotCrossingDateline() {
|
||||
return nextBoxInternal(nextLatitude(), nextLatitude(), nextLongitude(), nextLongitude(), false);
|
||||
}
|
||||
|
||||
/** Makes an n-gon, centered at the provided lat/lon, and each vertex approximately
|
||||
* distanceMeters away from the center.
|
||||
*
|
||||
|
|
|
@ -141,12 +141,12 @@ public abstract class BaseRangeFieldQueryTestCase extends LuceneTestCase {
|
|||
if (x == m) {
|
||||
int d = (int)Math.floor(m/2);
|
||||
// current could be multivalue but old may not be, so use first box
|
||||
if (even == 0) {
|
||||
if (even == 0) { // even is min
|
||||
ranges[id][0].setMin(d, ranges[oldID][0].getMin(d));
|
||||
if (VERBOSE) {
|
||||
System.out.println(" id=" + id + " box=" + ranges[id] + " (same min[" + d + "] as doc=" + oldID + ")");
|
||||
}
|
||||
} else {
|
||||
} else { // odd is max
|
||||
ranges[id][0].setMax(d, ranges[oldID][0].getMax(d));
|
||||
if (VERBOSE) {
|
||||
System.out.println(" id=" + id + " box=" + ranges[id] + " (same max[" + d + "] as doc=" + oldID + ")");
|
||||
|
@ -184,7 +184,7 @@ public abstract class BaseRangeFieldQueryTestCase extends LuceneTestCase {
|
|||
doc.add(new NumericDocValuesField("id", id));
|
||||
if (ranges[id][0].isMissing == false) {
|
||||
for (int n=0; n<ranges[id].length; ++n) {
|
||||
doc.add(newRangeField(ranges[id][n]));
|
||||
addRange(doc, ranges[id][n]);
|
||||
}
|
||||
}
|
||||
w.addDocument(doc);
|
||||
|
@ -293,6 +293,10 @@ public abstract class BaseRangeFieldQueryTestCase extends LuceneTestCase {
|
|||
IOUtils.close(r, dir);
|
||||
}
|
||||
|
||||
protected void addRange(Document doc, Range box) {
|
||||
doc.add(newRangeField(box));
|
||||
}
|
||||
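// Editor's sketch, not part of this commit: the new protected addRange(...) hook above lets a
// subclass index additional per-range fields. The subclass name and the extra StoredField are
// hypothetical; the other abstract methods of BaseRangeFieldQueryTestCase are omitted.
public class MyRangeFieldTest extends BaseRangeFieldQueryTestCase {
  @Override
  protected void addRange(Document doc, Range box) {
    super.addRange(doc, box);                               // still index the range field itself
    doc.add(new StoredField("rangeDebug", box.toString())); // illustrative extra field
  }
  // ... remaining abstract methods omitted ...
}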
|
||||
protected boolean expectedResult(Range queryRange, Range[] range, Range.QueryType queryType) {
|
||||
for (int i=0; i<range.length; ++i) {
|
||||
if (expectedBBoxQueryResult(queryRange, range[i], queryType) == true) {
|
||||
|
|
|
@ -101,6 +101,7 @@
|
|||
<pathelement location="${memory.jar}"/>
|
||||
<pathelement location="${misc.jar}"/>
|
||||
<pathelement location="${spatial-extras.jar}"/>
|
||||
<pathelement location="${spatial3d.jar}"/>
|
||||
<pathelement location="${expressions.jar}"/>
|
||||
<pathelement location="${suggest.jar}"/>
|
||||
<pathelement location="${grouping.jar}"/>
|
||||
|
@ -170,7 +171,7 @@
|
|||
|
||||
<target name="prep-lucene-jars"
|
||||
depends="jar-lucene-core, jar-backward-codecs, jar-analyzers-phonetic, jar-analyzers-kuromoji, jar-codecs,jar-expressions, jar-suggest, jar-highlighter, jar-memory,
|
||||
jar-misc, jar-spatial-extras, jar-grouping, jar-queries, jar-queryparser, jar-join, jar-sandbox, jar-classification">
|
||||
jar-misc, jar-spatial-extras, jar-spatial3d, jar-grouping, jar-queries, jar-queryparser, jar-join, jar-sandbox, jar-classification">
|
||||
<property name="solr.deps.compiled" value="true"/>
|
||||
</target>
|
||||
|
||||
|
@ -492,6 +493,12 @@
|
|||
</ant>
|
||||
</target>
|
||||
|
||||
<target name="-compile-test-lucene-backward-codecs">
|
||||
<ant dir="${common.dir}/backward-codecs" target="compile-test" inheritAll="false">
|
||||
<propertyset refid="uptodate.and.compiled.properties"/>
|
||||
</ant>
|
||||
</target>
|
||||
|
||||
<!-- Solr contrib targets -->
|
||||
<target name="-compile-analysis-extras">
|
||||
<ant dir="${common-solr.dir}/contrib/analysis-extras" target="compile" inheritAll="false">
|
||||
|
|
|
@ -31,7 +31,7 @@
|
|||
|
||||
<target name="compile-core" depends="compile-solrj,common-solr.compile-core"/>
|
||||
|
||||
<target name="compile-test" depends="jar-analyzers-icu,-compile-test-lucene-queryparser,-compile-analysis-extras,common-solr.compile-test"/>
|
||||
<target name="compile-test" depends="jar-analyzers-icu,-compile-test-lucene-queryparser,-compile-test-lucene-backward-codecs,-compile-analysis-extras,common-solr.compile-test"/>
|
||||
|
||||
<path id="test.classpath">
|
||||
<path refid="solr.test.base.classpath"/>
|
||||
|
@ -39,6 +39,7 @@
|
|||
<pathelement location="${analyzers-icu.jar}"/>
|
||||
<pathelement location="${common-solr.dir}/build/contrib/solr-analysis-extras/classes/java"/>
|
||||
<pathelement location="${common.dir}/build/queryparser/classes/test"/>
|
||||
<pathelement location="${common.dir}/build/backward-codecs/classes/test"/>
|
||||
<fileset dir="${common-solr.dir}/contrib/analysis-extras/lib" includes="icu4j*.jar"/>
|
||||
</path>
|
||||
|
||||
|
|
|
@ -26,6 +26,7 @@ import java.util.Map;
|
|||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.apache.solr.common.cloud.DistributedQueue;
|
||||
import org.apache.solr.cloud.OverseerCollectionMessageHandler.Cmd;
|
||||
import org.apache.solr.cloud.overseer.OverseerAction;
|
||||
import org.apache.solr.common.SolrException;
|
||||
|
|
|
@ -115,6 +115,7 @@ class ShardLeaderElectionContextBase extends ElectionContext {
|
|||
protected String shardId;
|
||||
protected String collection;
|
||||
protected LeaderElector leaderElector;
|
||||
protected ZkStateReader zkStateReader;
|
||||
private Integer leaderZkNodeParentVersion;
|
||||
|
||||
// Prevents a race between cancelling and becoming leader.
|
||||
|
@ -128,6 +129,7 @@ class ShardLeaderElectionContextBase extends ElectionContext {
|
|||
collection, shardId), props, zkStateReader.getZkClient());
|
||||
this.leaderElector = leaderElector;
|
||||
this.zkClient = zkStateReader.getZkClient();
|
||||
this.zkStateReader = zkStateReader;
|
||||
this.shardId = shardId;
|
||||
this.collection = collection;
|
||||
}
|
||||
|
@ -216,14 +218,24 @@ class ShardLeaderElectionContextBase extends ElectionContext {
|
|||
}
|
||||
|
||||
assert shardId != null;
|
||||
ZkNodeProps m = ZkNodeProps.fromKeyVals(Overseer.QUEUE_OPERATION,
|
||||
OverseerAction.LEADER.toLower(), ZkStateReader.SHARD_ID_PROP, shardId,
|
||||
ZkStateReader.COLLECTION_PROP, collection, ZkStateReader.BASE_URL_PROP,
|
||||
leaderProps.getProperties().get(ZkStateReader.BASE_URL_PROP),
|
||||
ZkStateReader.CORE_NAME_PROP,
|
||||
leaderProps.getProperties().get(ZkStateReader.CORE_NAME_PROP),
|
||||
ZkStateReader.STATE_PROP, Replica.State.ACTIVE.toString());
|
||||
Overseer.getStateUpdateQueue(zkClient).offer(Utils.toJSON(m));
|
||||
boolean isAlreadyLeader = false;
|
||||
if (zkStateReader.getClusterState() != null &&
|
||||
zkStateReader.getClusterState().getCollection(collection).getSlice(shardId).getReplicas().size() < 2) {
|
||||
Replica leader = zkStateReader.getLeader(collection, shardId);
|
||||
if (leader != null
|
||||
&& leader.getBaseUrl().equals(leaderProps.get(ZkStateReader.BASE_URL_PROP))
|
||||
&& leader.getCoreName().equals(leaderProps.get(ZkStateReader.CORE_NAME_PROP))) {
|
||||
isAlreadyLeader = true;
|
||||
}
|
||||
}
|
||||
if (!isAlreadyLeader) {
|
||||
ZkNodeProps m = ZkNodeProps.fromKeyVals(Overseer.QUEUE_OPERATION, OverseerAction.LEADER.toLower(),
|
||||
ZkStateReader.SHARD_ID_PROP, shardId,
|
||||
ZkStateReader.COLLECTION_PROP, collection,
|
||||
ZkStateReader.BASE_URL_PROP, leaderProps.get(ZkStateReader.BASE_URL_PROP),
|
||||
ZkStateReader.CORE_NAME_PROP, leaderProps.get(ZkStateReader.CORE_NAME_PROP));
|
||||
Overseer.getStateUpdateQueue(zkClient).offer(Utils.toJSON(m));
|
||||
}
|
||||
}
|
||||
|
||||
public LeaderElector getLeaderElector() {
|
||||
|
@ -309,10 +321,12 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
|
|||
int leaderVoteWait = cc.getZkController().getLeaderVoteWait();
|
||||
|
||||
log.debug("Running the leader process for shard={} and weAreReplacement={} and leaderVoteWait={}", shardId, weAreReplacement, leaderVoteWait);
|
||||
// clear the leader in clusterstate
|
||||
ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.LEADER.toLower(),
|
||||
ZkStateReader.SHARD_ID_PROP, shardId, ZkStateReader.COLLECTION_PROP, collection);
|
||||
Overseer.getStateUpdateQueue(zkClient).offer(Utils.toJSON(m));
|
||||
if (zkController.getClusterState().getCollection(collection).getSlice(shardId).getReplicas().size() > 1) {
|
||||
// Clear the leader in clusterstate. We only need to worry about this if there is actually more than one replica.
|
||||
ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.LEADER.toLower(),
|
||||
ZkStateReader.SHARD_ID_PROP, shardId, ZkStateReader.COLLECTION_PROP, collection);
|
||||
Overseer.getStateUpdateQueue(zkClient).offer(Utils.toJSON(m));
|
||||
}
|
||||
|
||||
boolean allReplicasInLine = false;
|
||||
if (!weAreReplacement) {
|
||||
|
|
|
@ -81,10 +81,10 @@ public class Overseer implements Closeable {
|
|||
private final SolrZkClient zkClient;
|
||||
private final String myId;
|
||||
//queue where everybody can throw tasks
|
||||
private final DistributedQueue stateUpdateQueue;
|
||||
private final ZkDistributedQueue stateUpdateQueue;
|
||||
//Internal queue where overseer stores events that have not yet been published into cloudstate
|
||||
//If Overseer dies while extracting the main queue a new overseer will start from this queue
|
||||
private final DistributedQueue workQueue;
|
||||
private final ZkDistributedQueue workQueue;
|
||||
// Internal map which holds the information about running tasks.
|
||||
private final DistributedMap runningMap;
|
||||
// Internal map which holds the information about successfully completed tasks.
|
||||
|
@ -609,9 +609,9 @@ public class Overseer implements Closeable {
|
|||
* This method will create the /overseer znode in ZooKeeper if it does not exist already.
|
||||
*
|
||||
* @param zkClient the {@link SolrZkClient} to be used for reading/writing to the queue
|
||||
* @return a {@link DistributedQueue} object
|
||||
* @return a {@link ZkDistributedQueue} object
|
||||
*/
|
||||
public static DistributedQueue getStateUpdateQueue(final SolrZkClient zkClient) {
|
||||
public static ZkDistributedQueue getStateUpdateQueue(final SolrZkClient zkClient) {
|
||||
return getStateUpdateQueue(zkClient, new Stats());
|
||||
}
|
||||
|
||||
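// Editor's sketch, not part of this commit: typical use of the queue returned above, mirroring
// the callers elsewhere in this patch (e.g. ShardLeaderElectionContextBase). offer() may throw
// KeeperException/InterruptedException, which callers in this patch propagate or handle.
ZkDistributedQueue queue = Overseer.getStateUpdateQueue(zkClient);
ZkNodeProps m = ZkNodeProps.fromKeyVals(Overseer.QUEUE_OPERATION, OverseerAction.LEADER.toLower(),
    ZkStateReader.SHARD_ID_PROP, shardId,
    ZkStateReader.COLLECTION_PROP, collection);
queue.offer(Utils.toJSON(m));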
|
@ -622,11 +622,11 @@ public class Overseer implements Closeable {
|
|||
*
|
||||
* @param zkClient the {@link SolrZkClient} to be used for reading/writing to the queue
|
||||
* @param zkStats a {@link Overseer.Stats} object which tracks statistics for all zookeeper operations performed by this queue
|
||||
* @return a {@link DistributedQueue} object
|
||||
* @return a {@link ZkDistributedQueue} object
|
||||
*/
|
||||
static DistributedQueue getStateUpdateQueue(final SolrZkClient zkClient, Stats zkStats) {
|
||||
static ZkDistributedQueue getStateUpdateQueue(final SolrZkClient zkClient, Stats zkStats) {
|
||||
createOverseerNode(zkClient);
|
||||
return new DistributedQueue(zkClient, "/overseer/queue", zkStats);
|
||||
return new ZkDistributedQueue(zkClient, "/overseer/queue", zkStats);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -642,11 +642,11 @@ public class Overseer implements Closeable {
|
|||
*
|
||||
* @param zkClient the {@link SolrZkClient} to be used for reading/writing to the queue
|
||||
* @param zkStats a {@link Overseer.Stats} object which tracks statistics for all zookeeper operations performed by this queue
|
||||
* @return a {@link DistributedQueue} object
|
||||
* @return a {@link ZkDistributedQueue} object
|
||||
*/
|
||||
static DistributedQueue getInternalWorkQueue(final SolrZkClient zkClient, Stats zkStats) {
|
||||
static ZkDistributedQueue getInternalWorkQueue(final SolrZkClient zkClient, Stats zkStats) {
|
||||
createOverseerNode(zkClient);
|
||||
return new DistributedQueue(zkClient, "/overseer/queue-work", zkStats);
|
||||
return new ZkDistributedQueue(zkClient, "/overseer/queue-work", zkStats);
|
||||
}
|
||||
|
||||
/* Internal map for failed tasks, not to be used outside of the Overseer */
|
||||
|
@ -680,7 +680,7 @@ public class Overseer implements Closeable {
|
|||
* see {@link org.apache.solr.common.params.CollectionParams.CollectionAction#OVERSEERSTATUS}.
|
||||
*
|
||||
* @param zkClient the {@link SolrZkClient} to be used for reading/writing to the queue
|
||||
* @return a {@link DistributedQueue} object
|
||||
* @return a {@link ZkDistributedQueue} object
|
||||
*/
|
||||
static OverseerTaskQueue getCollectionQueue(final SolrZkClient zkClient) {
|
||||
return getCollectionQueue(zkClient, new Stats());
|
||||
|
@ -698,7 +698,7 @@ public class Overseer implements Closeable {
|
|||
* see {@link org.apache.solr.common.params.CollectionParams.CollectionAction#OVERSEERSTATUS}.
|
||||
*
|
||||
* @param zkClient the {@link SolrZkClient} to be used for reading/writing to the queue
|
||||
* @return a {@link DistributedQueue} object
|
||||
* @return a {@link ZkDistributedQueue} object
|
||||
*/
|
||||
static OverseerTaskQueue getCollectionQueue(final SolrZkClient zkClient, Stats zkStats) {
|
||||
createOverseerNode(zkClient);
|
||||
|
@ -718,7 +718,7 @@ public class Overseer implements Closeable {
|
|||
* see {@link org.apache.solr.common.params.CollectionParams.CollectionAction#OVERSEERSTATUS}.
|
||||
*
|
||||
* @param zkClient the {@link SolrZkClient} to be used for reading/writing to the queue
|
||||
* @return a {@link DistributedQueue} object
|
||||
* @return a {@link ZkDistributedQueue} object
|
||||
*/
|
||||
static OverseerTaskQueue getConfigSetQueue(final SolrZkClient zkClient) {
|
||||
return getConfigSetQueue(zkClient, new Stats());
|
||||
|
@ -741,7 +741,7 @@ public class Overseer implements Closeable {
|
|||
* {@link OverseerConfigSetMessageHandler}.
|
||||
*
|
||||
* @param zkClient the {@link SolrZkClient} to be used for reading/writing to the queue
|
||||
* @return a {@link DistributedQueue} object
|
||||
* @return a {@link ZkDistributedQueue} object
|
||||
*/
|
||||
static OverseerTaskQueue getConfigSetQueue(final SolrZkClient zkClient, Stats zkStats) {
|
||||
// For now, we use the same queue as the collection queue, but ensure
|
||||
|
|
|
@ -35,11 +35,11 @@ import org.slf4j.Logger;
|
|||
import org.slf4j.LoggerFactory;
|
||||
|
||||
/**
|
||||
* A {@link DistributedQueue} augmented with helper methods specific to the overseer task queues.
|
||||
* A {@link ZkDistributedQueue} augmented with helper methods specific to the overseer task queues.
|
||||
* Methods specific to this subclass ignore superclass internal state and hit ZK directly.
|
||||
* This is inefficient! But the API on this class is kind of muddy..
|
||||
*/
|
||||
public class OverseerTaskQueue extends DistributedQueue {
|
||||
public class OverseerTaskQueue extends ZkDistributedQueue {
|
||||
private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
|
||||
|
||||
private static final String RESPONSE_PREFIX = "qnr-" ;
|
||||
|
|
|
@ -214,6 +214,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
|
|||
|
||||
ModifiableSolrParams solrParams = new ModifiableSolrParams();
|
||||
solrParams.set(ReplicationHandler.MASTER_URL, leaderUrl);
|
||||
solrParams.set(ReplicationHandler.SKIP_COMMIT_ON_MASTER_VERSION_ZERO, replicaType == Replica.Type.TLOG);
|
||||
|
||||
if (isClosed()) return; // we check closed on return
|
||||
boolean success = replicationHandler.doFetch(solrParams, false).getSuccessful();
|
||||
|
|
|
@ -26,6 +26,7 @@ import org.apache.solr.common.util.NamedList;
|
|||
import org.apache.solr.core.CoreContainer;
|
||||
import org.apache.solr.core.SolrConfig;
|
||||
import org.apache.solr.core.SolrCore;
|
||||
import org.apache.solr.handler.IndexFetcher;
|
||||
import org.apache.solr.handler.ReplicationHandler;
|
||||
import org.apache.solr.request.LocalSolrQueryRequest;
|
||||
import org.apache.solr.request.SolrQueryRequest;
|
||||
|
@ -74,6 +75,7 @@ public class ReplicateFromLeader {
|
|||
|
||||
NamedList<Object> slaveConfig = new NamedList<>();
|
||||
slaveConfig.add("fetchFromLeader", Boolean.TRUE);
|
||||
slaveConfig.add(ReplicationHandler.SKIP_COMMIT_ON_MASTER_VERSION_ZERO, switchTransactionLog);
|
||||
slaveConfig.add("pollInterval", pollIntervalStr);
|
||||
NamedList<Object> replicationConfig = new NamedList<>();
|
||||
replicationConfig.add("slave", slaveConfig);
|
||||
|
@ -85,10 +87,11 @@ public class ReplicateFromLeader {
|
|||
|
||||
replicationProcess = new ReplicationHandler();
|
||||
if (switchTransactionLog) {
|
||||
replicationProcess.setPollListener((solrCore, pollSuccess) -> {
|
||||
if (pollSuccess) {
|
||||
replicationProcess.setPollListener((solrCore, fetchResult) -> {
|
||||
if (fetchResult == IndexFetcher.IndexFetchResult.INDEX_FETCH_SUCCESS) {
|
||||
String commitVersion = getCommitVersion(core);
|
||||
if (commitVersion == null) return;
|
||||
if (Long.parseLong(commitVersion) == lastVersion) return;
|
||||
UpdateLog updateLog = solrCore.getUpdateHandler().getUpdateLog();
|
||||
SolrQueryRequest req = new LocalSolrQueryRequest(core,
|
||||
new ModifiableSolrParams());
|
||||
|
|
|
@ -138,7 +138,7 @@ public class ZkController {
|
|||
|
||||
private final boolean SKIP_AUTO_RECOVERY = Boolean.getBoolean("solrcloud.skip.autorecovery");
|
||||
|
||||
private final DistributedQueue overseerJobQueue;
|
||||
private final ZkDistributedQueue overseerJobQueue;
|
||||
private final OverseerTaskQueue overseerCollectionQueue;
|
||||
private final OverseerTaskQueue overseerConfigSetQueue;
|
||||
|
||||
|
@ -1053,7 +1053,7 @@ public class ZkController {
|
|||
if (isTlogReplicaAndNotLeader) {
|
||||
String commitVersion = ReplicateFromLeader.getCommitVersion(core);
|
||||
if (commitVersion != null) {
|
||||
ulog.copyOverOldUpdates(Long.parseLong(commitVersion), true);
|
||||
ulog.copyOverOldUpdates(Long.parseLong(commitVersion));
|
||||
}
|
||||
}
|
||||
// we will call register again after zk expiration and on reload
|
||||
|
@ -1803,7 +1803,7 @@ public class ZkController {
|
|||
}
|
||||
}
|
||||
|
||||
public DistributedQueue getOverseerJobQueue() {
|
||||
public ZkDistributedQueue getOverseerJobQueue() {
|
||||
return overseerJobQueue;
|
||||
}
|
||||
|
||||
|
|
|
@ -30,6 +30,7 @@ import java.util.function.Predicate;
|
|||
import com.codahale.metrics.Timer;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import com.google.common.base.Preconditions;
|
||||
import org.apache.solr.common.cloud.DistributedQueue;
|
||||
import org.apache.solr.common.SolrException;
|
||||
import org.apache.solr.common.SolrException.ErrorCode;
|
||||
import org.apache.solr.common.cloud.SolrZkClient;
|
||||
|
@ -47,7 +48,7 @@ import org.slf4j.LoggerFactory;
|
|||
* multiple-producer: if there are multiple consumers on the same ZK queue,
|
||||
* the results should be correct but inefficient
|
||||
*/
|
||||
public class DistributedQueue {
|
||||
public class ZkDistributedQueue implements DistributedQueue {
|
||||
private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
|
||||
|
||||
static final String PREFIX = "qn-";
|
||||
|
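// Editor's sketch, not part of this commit: ZkDistributedQueue now implements the
// org.apache.solr.common.cloud.DistributedQueue interface, so producers and consumers can be
// written against the interface. The methods used here (offer/peek/poll) are the ones gaining
// @Override in the hunks below; the zkClient and payload variables are assumed to exist.
DistributedQueue queue = new ZkDistributedQueue(zkClient, "/overseer/queue");
queue.offer(payload);               // producer: payload is a byte[] message
byte[] head = queue.peek(true);     // consumer: block until an element is available
byte[] taken = queue.poll();        // remove and return the head, or null if the queue is empty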
@ -92,11 +93,11 @@ public class DistributedQueue {
|
|||
|
||||
private int watcherCount = 0;
|
||||
|
||||
public DistributedQueue(SolrZkClient zookeeper, String dir) {
|
||||
public ZkDistributedQueue(SolrZkClient zookeeper, String dir) {
|
||||
this(zookeeper, dir, new Overseer.Stats());
|
||||
}
|
||||
|
||||
public DistributedQueue(SolrZkClient zookeeper, String dir, Overseer.Stats stats) {
|
||||
public ZkDistributedQueue(SolrZkClient zookeeper, String dir, Overseer.Stats stats) {
|
||||
this.dir = dir;
|
||||
|
||||
ZkCmdExecutor cmdExecutor = new ZkCmdExecutor(zookeeper.getZkClientTimeout());
|
||||
|
@ -119,6 +120,7 @@ public class DistributedQueue {
|
|||
*
|
||||
* @return data at the first element of the queue, or null.
|
||||
*/
|
||||
@Override
|
||||
public byte[] peek() throws KeeperException, InterruptedException {
|
||||
Timer.Context time = stats.time(dir + "_peek");
|
||||
try {
|
||||
|
@ -135,6 +137,7 @@ public class DistributedQueue {
|
|||
* @param block if true, blocks until an element enters the queue
|
||||
* @return data at the first element of the queue, or null.
|
||||
*/
|
||||
@Override
|
||||
public byte[] peek(boolean block) throws KeeperException, InterruptedException {
|
||||
return block ? peek(Long.MAX_VALUE) : peek();
|
||||
}
|
||||
|
@ -146,6 +149,7 @@ public class DistributedQueue {
|
|||
* @param wait max wait time in ms.
|
||||
* @return data at the first element of the queue, or null.
|
||||
*/
|
||||
@Override
|
||||
public byte[] peek(long wait) throws KeeperException, InterruptedException {
|
||||
Preconditions.checkArgument(wait > 0);
|
||||
Timer.Context time;
|
||||
|
@ -177,6 +181,7 @@ public class DistributedQueue {
|
|||
*
|
||||
* @return Head of the queue or null.
|
||||
*/
|
||||
@Override
|
||||
public byte[] poll() throws KeeperException, InterruptedException {
|
||||
Timer.Context time = stats.time(dir + "_poll");
|
||||
try {
|
||||
|
@ -191,6 +196,7 @@ public class DistributedQueue {
|
|||
*
|
||||
* @return The former head of the queue
|
||||
*/
|
||||
@Override
|
||||
public byte[] remove() throws NoSuchElementException, KeeperException, InterruptedException {
|
||||
Timer.Context time = stats.time(dir + "_remove");
|
||||
try {
|
||||
|
@ -209,6 +215,7 @@ public class DistributedQueue {
|
|||
*
|
||||
* @return The former head of the queue
|
||||
*/
|
||||
@Override
|
||||
public byte[] take() throws KeeperException, InterruptedException {
|
||||
// Same as for element. Should refactor this.
|
||||
Timer.Context timer = stats.time(dir + "_take");
|
||||
|
@ -231,6 +238,7 @@ public class DistributedQueue {
|
|||
* Inserts data into queue. If there are no other queue consumers, the offered element
|
||||
* will be immediately visible when this method returns.
|
||||
*/
|
||||
@Override
|
||||
public void offer(byte[] data) throws KeeperException, InterruptedException {
|
||||
Timer.Context time = stats.time(dir + "_offer");
|
||||
try {
|
||||
|
@ -323,10 +331,10 @@ public class DistributedQueue {
|
|||
/**
|
||||
* Return the currently-known set of elements, using child names from memory. If no children are found, or no
|
||||
* children pass {@code acceptFilter}, waits up to {@code waitMillis} for at least one child to become available.
|
||||
* <p/>
|
||||
* Package-private to support {@link OverseerTaskQueue} specifically.
|
||||
*/
|
||||
Collection<Pair<String, byte[]>> peekElements(int max, long waitMillis, Predicate<String> acceptFilter) throws KeeperException, InterruptedException {
|
||||
@Override
|
||||
public Collection<Pair<String, byte[]>> peekElements(int max, long waitMillis, Predicate<String> acceptFilter) throws KeeperException, InterruptedException {
|
||||
List<String> foundChildren = new ArrayList<>();
|
||||
long waitNanos = TimeUnit.MILLISECONDS.toNanos(waitMillis);
|
||||
boolean first = true;
|
|
@ -643,9 +643,10 @@ public class CoreContainer {
|
|||
if (zkSys.getZkController() != null) {
|
||||
zkSys.getZkController().throwErrorIfReplicaReplaced(cd);
|
||||
}
|
||||
|
||||
solrCores.waitAddPendingCoreOps(cd.getName());
|
||||
core = createFromDescriptor(cd, false, false);
|
||||
} finally {
|
||||
solrCores.removeFromPendingOps(cd.getName());
|
||||
if (asyncSolrCoreLoad) {
|
||||
solrCores.markCoreAsNotLoading(cd);
|
||||
}
|
||||
|
@ -941,7 +942,13 @@ public class CoreContainer {
|
|||
// first and clean it up if there's an error.
|
||||
coresLocator.create(this, cd);
|
||||
|
||||
SolrCore core = createFromDescriptor(cd, true, newCollection);
|
||||
SolrCore core = null;
|
||||
try {
|
||||
solrCores.waitAddPendingCoreOps(cd.getName());
|
||||
core = createFromDescriptor(cd, true, newCollection);
|
||||
} finally {
|
||||
solrCores.removeFromPendingOps(cd.getName());
|
||||
}
|
||||
|
||||
return core;
|
||||
} catch (Exception ex) {
|
||||
|
@ -976,7 +983,6 @@ public class CoreContainer {
|
|||
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
|
||||
"Error CREATEing SolrCore '" + coreName + "': " + ex.getMessage() + rootMsg, ex);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -985,6 +991,26 @@ public class CoreContainer {
|
|||
* @param dcore a core descriptor
|
||||
* @param publishState publish core state to the cluster if true
|
||||
*
|
||||
* WARNING: Any call to this method should be surrounded by a try/finally block
|
||||
* that calls solrCores.waitAddPendingCoreOps(...) and solrCores.removeFromPendingOps(...)
|
||||
*
|
||||
* <pre>
|
||||
* <code>
|
||||
* try {
|
||||
* solrCores.waitAddPendingCoreOps(dcore.getName());
|
||||
* createFromDescriptor(...);
|
||||
* } finally {
|
||||
* solrCores.removeFromPendingOps(dcore.getName());
|
||||
* }
|
||||
* </code>
|
||||
* </pre>
|
||||
*
|
||||
* Trying to put the waitAddPending... in this method results in Bad Things Happening due to race conditions.
|
||||
* getCore() depends on getting the core returned _if_ it's in the pending list due to some other thread opening it.
|
||||
* If the core is not in the pending list and not loaded, then getCore() calls this method. Any caller that
* first checked whether the core was loaded _or_ in pending ops and then, based on that result, called
* createFromDescriptor would introduce a race condition; see getCore() for where it would be a problem.
|
||||
*
|
||||
* @return the newly created core
|
||||
*/
|
||||
private SolrCore createFromDescriptor(CoreDescriptor dcore, boolean publishState, boolean newCollection) {
|
||||
|
@ -1279,7 +1305,12 @@ public class CoreContainer {
|
|||
} else {
|
||||
CoreLoadFailure clf = coreInitFailures.get(name);
|
||||
if (clf != null) {
|
||||
createFromDescriptor(clf.cd, true, false);
|
||||
try {
|
||||
solrCores.waitAddPendingCoreOps(clf.cd.getName());
|
||||
createFromDescriptor(clf.cd, true, false);
|
||||
} finally {
|
||||
solrCores.removeFromPendingOps(clf.cd.getName());
|
||||
}
|
||||
} else {
|
||||
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "No such core: " + name );
|
||||
}
|
||||
|
@ -1452,7 +1483,8 @@ public class CoreContainer {
|
|||
// TestLazyCores
|
||||
if (desc == null || zkSys.getZkController() != null) return null;
|
||||
|
||||
// This will put an entry in pending core ops if the core isn't loaded
|
||||
// This will put an entry in pending core ops if the core isn't loaded. Here's where moving the
|
||||
// waitAddPendingCoreOps to createFromDescriptor would introduce a race condition.
|
||||
core = solrCores.waitAddPendingCoreOps(name);
|
||||
|
||||
if (isShutDown) return null; // We're quitting, so stop. This needs to be after the wait above since we may come off
|
||||
|
|
|
@ -26,10 +26,12 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.CancellationException;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.RejectedExecutionException;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.locks.Lock;
|
||||
|
||||
import org.apache.solr.client.solrj.SolrRequest;
|
||||
|
@ -617,7 +619,7 @@ public class CdcrRequestHandler extends RequestHandlerBase implements SolrCoreAw
|
|||
rsp.add(CdcrParams.ERRORS, hosts);
|
||||
}
|
||||
|
||||
private void handleBootstrapAction(SolrQueryRequest req, SolrQueryResponse rsp) throws IOException, SolrServerException {
|
||||
private void handleBootstrapAction(SolrQueryRequest req, SolrQueryResponse rsp) throws IOException, InterruptedException, SolrServerException {
|
||||
String collectionName = core.getCoreDescriptor().getCloudDescriptor().getCollectionName();
|
||||
String shard = core.getCoreDescriptor().getCloudDescriptor().getShardId();
|
||||
if (!leaderStateManager.amILeader()) {
|
||||
|
@ -625,6 +627,7 @@ public class CdcrRequestHandler extends RequestHandlerBase implements SolrCoreAw
|
|||
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Action " + CdcrParams.CdcrAction.BOOTSTRAP +
|
||||
" sent to non-leader replica");
|
||||
}
|
||||
CountDownLatch latch = new CountDownLatch(1); // latch to make sure BOOTSTRAP_STATUS gives correct response
|
||||
|
||||
Runnable runnable = () -> {
|
||||
Lock recoveryLock = req.getCore().getSolrCoreState().getRecoveryLock();
|
||||
|
@ -635,6 +638,7 @@ public class CdcrRequestHandler extends RequestHandlerBase implements SolrCoreAw
|
|||
handleCancelBootstrap(req, rsp);
|
||||
} else if (leaderStateManager.amILeader()) {
|
||||
coreState.setCdcrBootstrapRunning(true);
|
||||
latch.countDown(); // free the latch as current bootstrap is executing
|
||||
//running.set(true);
|
||||
String masterUrl = req.getParams().get(ReplicationHandler.MASTER_URL);
|
||||
BootstrapCallable bootstrapCallable = new BootstrapCallable(masterUrl, core);
|
||||
|
@ -657,6 +661,8 @@ public class CdcrRequestHandler extends RequestHandlerBase implements SolrCoreAw
|
|||
if (locked) {
|
||||
coreState.setCdcrBootstrapRunning(false);
|
||||
recoveryLock.unlock();
|
||||
} else {
|
||||
latch.countDown(); // free the latch as current bootstrap is executing
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -664,6 +670,7 @@ public class CdcrRequestHandler extends RequestHandlerBase implements SolrCoreAw
|
|||
try {
|
||||
core.getCoreContainer().getUpdateShardHandler().getUpdateExecutor().submit(runnable);
|
||||
rsp.add(RESPONSE_STATUS, "submitted");
|
||||
latch.await(10000, TimeUnit.MILLISECONDS); // put the latch for current bootstrap command
|
||||
} catch (RejectedExecutionException ree) {
|
||||
// no problem, we're probably shutting down
|
||||
rsp.add(RESPONSE_STATUS, "failed");
|
||||
|
|
|
@ -163,6 +163,8 @@ public class IndexFetcher {
|
|||
|
||||
private Integer soTimeout;
|
||||
|
||||
private boolean skipCommitOnMasterVersionZero;
|
||||
|
||||
private static final String INTERRUPT_RESPONSE_MESSAGE = "Interrupted while waiting for modify lock";
|
||||
|
||||
public static class IndexFetchResult {
|
||||
|
@ -226,6 +228,10 @@ public class IndexFetcher {
|
|||
if (fetchFromLeader != null && fetchFromLeader instanceof Boolean) {
|
||||
this.fetchFromLeader = (boolean) fetchFromLeader;
|
||||
}
|
||||
Object skipCommitOnMasterVersionZero = initArgs.get(SKIP_COMMIT_ON_MASTER_VERSION_ZERO);
|
||||
if (skipCommitOnMasterVersionZero != null && skipCommitOnMasterVersionZero instanceof Boolean) {
|
||||
this.skipCommitOnMasterVersionZero = (boolean) skipCommitOnMasterVersionZero;
|
||||
}
|
||||
String masterUrl = (String) initArgs.get(MASTER_URL);
|
||||
if (masterUrl == null && !this.fetchFromLeader)
|
||||
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
|
||||
|
@ -428,7 +434,7 @@ public class IndexFetcher {
|
|||
LOG.info("Slave's version: " + IndexDeletionPolicyWrapper.getCommitTimestamp(commit));
|
||||
|
||||
if (latestVersion == 0L) {
|
||||
if (forceReplication && commit.getGeneration() != 0) {
|
||||
if (commit.getGeneration() != 0) {
|
||||
// since we won't get the files for an empty index,
|
||||
// we just clear ours and commit
|
||||
LOG.info("New index in Master. Deleting mine...");
|
||||
|
@ -438,8 +444,12 @@ public class IndexFetcher {
|
|||
} finally {
|
||||
iw.decref();
|
||||
}
|
||||
SolrQueryRequest req = new LocalSolrQueryRequest(solrCore, new ModifiableSolrParams());
|
||||
solrCore.getUpdateHandler().commit(new CommitUpdateCommand(req, false));
|
||||
if (skipCommitOnMasterVersionZero) {
|
||||
openNewSearcherAndUpdateCommitPoint();
|
||||
} else {
|
||||
SolrQueryRequest req = new LocalSolrQueryRequest(solrCore, new ModifiableSolrParams());
|
||||
solrCore.getUpdateHandler().commit(new CommitUpdateCommand(req, false));
|
||||
}
|
||||
}
|
||||
|
||||
//there is nothing to be replicated
|
||||
|
|
|
@ -68,6 +68,7 @@ import org.apache.lucene.store.Directory;
|
|||
import org.apache.lucene.store.IOContext;
|
||||
import org.apache.lucene.store.IndexInput;
|
||||
import org.apache.lucene.store.RateLimiter;
|
||||
import org.apache.lucene.util.Version;
|
||||
import org.apache.solr.common.SolrException;
|
||||
import org.apache.solr.common.SolrException.ErrorCode;
|
||||
import org.apache.solr.common.params.CommonParams;
|
||||
|
@ -81,6 +82,7 @@ import org.apache.solr.common.util.SimpleOrderedMap;
|
|||
import org.apache.solr.common.util.StrUtils;
|
||||
import org.apache.solr.common.util.SuppressForbidden;
|
||||
import org.apache.solr.core.CloseHook;
|
||||
import static org.apache.solr.core.Config.assertWarnOrFail;
|
||||
import org.apache.solr.core.CoreContainer;
|
||||
import org.apache.solr.core.DirectoryFactory.DirContext;
|
||||
import org.apache.solr.core.IndexDeletionPolicyWrapper;
|
||||
|
@ -218,7 +220,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
|
|||
|
||||
private PollListener pollListener;
|
||||
public interface PollListener {
|
||||
void onComplete(SolrCore solrCore, boolean pollSuccess) throws IOException;
|
||||
void onComplete(SolrCore solrCore, IndexFetchResult fetchResult) throws IOException;
|
||||
}
|
||||
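// Editor's sketch, not part of this commit: with the signature change above, a listener receives
// the whole IndexFetchResult instead of a boolean, mirroring the lambda in ReplicateFromLeader
// earlier in this patch. replicationHandler is assumed to be an existing ReplicationHandler.
replicationHandler.setPollListener((solrCore, fetchResult) -> {
  if (fetchResult == IndexFetcher.IndexFetchResult.INDEX_FETCH_SUCCESS) {
    // react to a successful fetch; other outcomes can now be distinguished as well
  }
});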
|
||||
/**
|
||||
|
@ -1180,8 +1182,8 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
|
|||
try {
|
||||
LOG.debug("Polling for index modifications");
|
||||
markScheduledExecutionStart();
|
||||
boolean pollSuccess = doFetch(null, false).getSuccessful();
|
||||
if (pollListener != null) pollListener.onComplete(core, pollSuccess);
|
||||
IndexFetchResult fetchResult = doFetch(null, false);
|
||||
if (pollListener != null) pollListener.onComplete(core, fetchResult);
|
||||
} catch (Exception e) {
|
||||
LOG.error("Exception in fetching index", e);
|
||||
}
|
||||
|
@ -1201,6 +1203,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
|
|||
public void inform(SolrCore core) {
|
||||
this.core = core;
|
||||
registerCloseHook();
|
||||
Long deprecatedReserveCommitDuration = null;
|
||||
Object nbtk = initArgs.get(NUMBER_BACKUPS_TO_KEEP_INIT_PARAM);
|
||||
if(nbtk!=null) {
|
||||
numberBackupsToKeep = Integer.parseInt(nbtk.toString());
|
||||
|
@ -1316,10 +1319,26 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
|
|||
String reserve = (String) master.get(RESERVE);
|
||||
if (reserve != null && !reserve.trim().equals("")) {
|
||||
reserveCommitDuration = readIntervalMs(reserve);
|
||||
deprecatedReserveCommitDuration = reserveCommitDuration;
|
||||
// remove this error check & backcompat logic when Version.LUCENE_7_1_0 is removed
|
||||
assertWarnOrFail(
|
||||
"Beginning with Solr 7.1, master."+RESERVE + " is deprecated and should now be configured directly on the ReplicationHandler.",
|
||||
(null == reserve),
|
||||
core.getSolrConfig().luceneMatchVersion.onOrAfter(Version.LUCENE_7_1_0));
|
||||
}
|
||||
LOG.info("Commits will be reserved for " + reserveCommitDuration);
|
||||
isMaster = true;
|
||||
}
|
||||
|
||||
{
|
||||
final String reserve = (String) initArgs.get(RESERVE);
|
||||
if (reserve != null && !reserve.trim().equals("")) {
|
||||
reserveCommitDuration = readIntervalMs(reserve);
|
||||
if (deprecatedReserveCommitDuration != null) {
|
||||
throw new IllegalArgumentException("'master."+RESERVE+"' and '"+RESERVE+"' are mutually exclusive.");
|
||||
}
|
||||
}
|
||||
}
|
||||
LOG.info("Commits will be reserved for " + reserveCommitDuration + "ms.");
|
||||
}
|
||||
|
||||
// check master or slave is enabled
|
||||
|
@ -1735,6 +1754,10 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
|
|||
|
||||
public static final String FETCH_FROM_LEADER = "fetchFromLeader";
|
||||
|
||||
// in case of TLOG replica, if masterVersion = zero, don't do commit
|
||||
// otherwise updates from the current tlog won't be copied over properly to the new tlog, leading to data loss
|
||||
public static final String SKIP_COMMIT_ON_MASTER_VERSION_ZERO = "skipCommitOnMasterVersionZero";
|
||||
|
||||
public static final String STATUS = "status";
|
||||
|
||||
public static final String COMMAND = "command";
|
||||
|
|
|
@ -297,12 +297,21 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
|
|||
.withFunctionName("sumDifference", SumDifferenceEvaluator.class)
|
||||
.withFunctionName("meanDifference", MeanDifferenceEvaluator.class)
|
||||
.withFunctionName("primes", PrimesEvaluator.class)
|
||||
.withFunctionName("factorial", FactorialEvaluator.class)
|
||||
.withFunctionName("movingMedian", MovingMedianEvaluator.class)
|
||||
.withFunctionName("monteCarlo", MonteCarloEvaluator.class)
|
||||
.withFunctionName("constantDistribution", ConstantDistributionEvaluator.class)
|
||||
.withFunctionName("weibullDistribution", WeibullDistributionEvaluator.class)
|
||||
.withFunctionName("mean", MeanEvaluator.class)
|
||||
.withFunctionName("mode", ModeEvaluator.class)
|
||||
.withFunctionName("logNormalDistribution", LogNormalDistributionEvaluator.class)
|
||||
.withFunctionName("zipFDistribution", ZipFDistributionEvaluator.class)
|
||||
.withFunctionName("gammaDistribution", GammaDistributionEvaluator.class)
|
||||
.withFunctionName("betaDistribution", BetaDistributionEvaluator.class)
|
||||
|
||||
// Boolean Stream Evaluators
|
||||
|
||||
|
||||
|
||||
.withFunctionName("and", AndEvaluator.class)
|
||||
.withFunctionName("and", AndEvaluator.class)
|
||||
.withFunctionName("eor", ExclusiveOrEvaluator.class)
|
||||
.withFunctionName("eq", EqualToEvaluator.class)
|
||||
.withFunctionName("gt", GreaterThanEvaluator.class)
|
||||
|
|
|
@ -20,7 +20,6 @@ package org.apache.solr.handler.admin;
|
|||
import java.lang.invoke.MethodHandles;
|
||||
import java.util.Objects;
|
||||
|
||||
import org.apache.lucene.search.MatchAllDocsQuery;
|
||||
import org.apache.solr.cloud.CloudDescriptor;
|
||||
import org.apache.solr.common.SolrException;
|
||||
import org.apache.solr.common.cloud.ClusterState;
|
||||
|
@ -29,15 +28,10 @@ import org.apache.solr.common.cloud.Replica;
|
|||
import org.apache.solr.common.cloud.Slice;
|
||||
import org.apache.solr.common.cloud.ZkStateReader;
|
||||
import org.apache.solr.common.params.CoreAdminParams;
|
||||
import org.apache.solr.common.params.ModifiableSolrParams;
|
||||
import org.apache.solr.common.params.SolrParams;
|
||||
import org.apache.solr.core.CoreContainer;
|
||||
import org.apache.solr.core.SolrCore;
|
||||
import org.apache.solr.handler.admin.CoreAdminHandler.CallInfo;
|
||||
import org.apache.solr.request.LocalSolrQueryRequest;
|
||||
import org.apache.solr.search.SolrIndexSearcher;
|
||||
import org.apache.solr.update.CommitUpdateCommand;
|
||||
import org.apache.solr.util.RefCounted;
|
||||
import org.apache.solr.util.TestInjection;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
@ -177,33 +171,6 @@ class PrepRecoveryOp implements CoreAdminHandler.CoreAdminOp {
|
|||
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
|
||||
"Solr is shutting down");
|
||||
}
|
||||
|
||||
// solrcloud_debug
|
||||
if (log.isDebugEnabled() && core != null) {
|
||||
try {
|
||||
LocalSolrQueryRequest r = new LocalSolrQueryRequest(core,
|
||||
new ModifiableSolrParams());
|
||||
CommitUpdateCommand commitCmd = new CommitUpdateCommand(r, false);
|
||||
commitCmd.softCommit = true;
|
||||
core.getUpdateHandler().commit(commitCmd);
|
||||
RefCounted<SolrIndexSearcher> searchHolder = core
|
||||
.getNewestSearcher(false);
|
||||
SolrIndexSearcher searcher = searchHolder.get();
|
||||
try {
|
||||
log.debug(core.getCoreContainer()
|
||||
.getZkController().getNodeName()
|
||||
+ " to replicate "
|
||||
+ searcher.search(new MatchAllDocsQuery(), 1).totalHits
|
||||
+ " gen:"
|
||||
+ core.getDeletionPolicy().getLatestCommit().getGeneration()
|
||||
+ " data:" + core.getDataDir());
|
||||
} finally {
|
||||
searchHolder.decref();
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.debug("Error in solrcloud_debug block", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
Thread.sleep(1000);
|
||||
}
|
||||
|
|
|
@ -310,15 +310,19 @@ public class UnifiedSolrHighlighter extends SolrHighlighter implements PluginInf
|
|||
String type = params.getFieldParam(field, HighlightParams.BS_TYPE);
|
||||
if (fragsize == 0 || "WHOLE".equals(type)) { // 0 is special value; no fragmenting
|
||||
return new WholeBreakIterator();
|
||||
} else if ("SEPARATOR".equals(type)) {
|
||||
char customSep = parseBiSepChar(params.getFieldParam(field, HighlightParams.BS_SEP));
|
||||
return new CustomSeparatorBreakIterator(customSep);
|
||||
}
|
||||
String language = params.getFieldParam(field, HighlightParams.BS_LANGUAGE);
|
||||
String country = params.getFieldParam(field, HighlightParams.BS_COUNTRY);
|
||||
String variant = params.getFieldParam(field, HighlightParams.BS_VARIANT);
|
||||
Locale locale = parseLocale(language, country, variant);
|
||||
BreakIterator baseBI = parseBreakIterator(type, locale);
|
||||
|
||||
BreakIterator baseBI;
|
||||
if ("SEPARATOR".equals(type)) {
|
||||
char customSep = parseBiSepChar(params.getFieldParam(field, HighlightParams.BS_SEP));
|
||||
baseBI = new CustomSeparatorBreakIterator(customSep);
|
||||
} else {
|
||||
String language = params.getFieldParam(field, HighlightParams.BS_LANGUAGE);
|
||||
String country = params.getFieldParam(field, HighlightParams.BS_COUNTRY);
|
||||
String variant = params.getFieldParam(field, HighlightParams.BS_VARIANT);
|
||||
Locale locale = parseLocale(language, country, variant);
|
||||
baseBI = parseBreakIterator(type, locale);
|
||||
}
|
||||
|
||||
if (fragsize <= 1) { // no real minimum size
|
||||
return baseBI;
|
||||
|
|
|
@ -69,12 +69,16 @@ public final class SlowCompositeReaderWrapper extends LeafReader {
|
|||
if (reader.leaves().isEmpty()) {
|
||||
metaData = new LeafMetaData(Version.LATEST.major, Version.LATEST, null);
|
||||
} else {
|
||||
Version minVersion = reader.leaves().stream()
|
||||
.map(LeafReaderContext::reader)
|
||||
.map(LeafReader::getMetaData)
|
||||
.map(LeafMetaData::getMinVersion)
|
||||
.reduce((v1, v2) -> v1 == null ? null : v2 == null ? null : v2.onOrAfter(v1) ? v1 : v2)
|
||||
.get();
|
||||
Version minVersion = Version.LATEST;
|
||||
for (LeafReaderContext leafReaderContext : reader.leaves()) {
|
||||
Version leafVersion = leafReaderContext.reader().getMetaData().getMinVersion();
|
||||
if (leafVersion == null) {
|
||||
minVersion = null;
|
||||
break;
|
||||
} else if (minVersion.onOrAfter(leafVersion)) {
|
||||
minVersion = leafVersion;
|
||||
}
|
||||
}
|
||||
metaData = new LeafMetaData(reader.leaves().get(0).reader().getMetaData().getCreatedVersionMajor(), minVersion, null);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1105,7 +1105,7 @@ public class SolrMetricManager {
|
|||
SolrMetricReporter reporter = loadReporter(registryName, cc.getResourceLoader(), info, null);
|
||||
((SolrClusterReporter)reporter).setCoreContainer(cc);
|
||||
} catch (Exception e) {
|
||||
log.warn("Could not load node reporter, pluginInfo=" + info, e);
|
||||
log.warn("Could not load cluster reporter, pluginInfo=" + info, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -46,6 +46,7 @@ import org.apache.lucene.spatial.SpatialStrategy;
|
|||
import org.apache.lucene.spatial.query.SpatialArgs;
|
||||
import org.apache.lucene.spatial.query.SpatialArgsParser;
|
||||
import org.apache.lucene.spatial.query.SpatialOperation;
|
||||
import org.apache.lucene.spatial.spatial4j.Geo3dSpatialContextFactory;
|
||||
import org.apache.solr.common.SolrException;
|
||||
import org.apache.solr.common.params.SolrParams;
|
||||
import org.apache.solr.response.TextResponseWriter;
|
||||
|
@ -131,6 +132,10 @@ public abstract class AbstractSpatialFieldType<T extends SpatialStrategy> extend
|
|||
argEntry.setValue("org.locationtech.spatial4j.context.jts.JtsSpatialContextFactory");
|
||||
continue;
|
||||
}
|
||||
if (argEntry.getKey().equals(CTX_PARAM) && argEntry.getValue().equals("Geo3D")) {
|
||||
argEntry.setValue(Geo3dSpatialContextFactory.class.getName());
|
||||
continue;
|
||||
}
|
||||
// Warn about using old Spatial4j class names
|
||||
if (argEntry.getValue().contains(OLD_SPATIAL4J_PREFIX)) {
|
||||
log.warn("Replace '" + OLD_SPATIAL4J_PREFIX + "' with '" + NEW_SPATIAL4J_PREFIX + "' in your schema.");
|
||||
|
|
|
@ -54,7 +54,6 @@ import org.apache.solr.uninverting.UninvertingReader.Type;
|
|||
* @see ExternalFileFieldReloader
|
||||
*/
|
||||
public class ExternalFileField extends FieldType implements SchemaAware {
|
||||
private FieldType ftype;
|
||||
private String keyFieldName;
|
||||
private IndexSchema schema;
|
||||
private float defVal;
|
||||
|
|
|
@ -244,6 +244,7 @@ public class UpdateLog implements PluginInfoInitialized, SolrMetricProducer {
|
|||
protected Gauge<Integer> bufferedOpsGauge;
|
||||
protected Meter applyingBufferedOpsMeter;
|
||||
protected Meter replayOpsMeter;
|
||||
protected Meter copyOverOldUpdatesMeter;
|
||||
|
||||
public static class LogPtr {
|
||||
final long pointer;
|
||||
|
@ -435,6 +436,7 @@ public class UpdateLog implements PluginInfoInitialized, SolrMetricProducer {
|
|||
manager.registerGauge(null, registry, () -> getTotalLogsSize(), true, "bytes", scope, "replay", "remaining");
|
||||
applyingBufferedOpsMeter = manager.meter(null, registry, "ops", scope, "applyingBuffered");
|
||||
replayOpsMeter = manager.meter(null, registry, "ops", scope, "replay");
|
||||
copyOverOldUpdatesMeter = manager.meter(null, registry, "ops", scope, "copyOverOldUpdates");
|
||||
manager.registerGauge(null, registry, () -> state.getValue(), true, "state", scope);
|
||||
}
|
||||
|
||||
|
@ -1158,12 +1160,12 @@ public class UpdateLog implements PluginInfoInitialized, SolrMetricProducer {
|
|||
|
||||
protected void copyAndSwitchToNewTlog(CommitUpdateCommand cuc) {
|
||||
synchronized (this) {
|
||||
if (tlog == null && prevTlog == null && prevMapLog2 == null && logs.isEmpty()) {
|
||||
if (tlog == null) {
|
||||
return;
|
||||
}
|
||||
preCommit(cuc);
|
||||
try {
|
||||
copyOverOldUpdates(cuc.getVersion(), false);
|
||||
copyOverOldUpdates(cuc.getVersion());
|
||||
} finally {
|
||||
postCommit(cuc);
|
||||
}
|
||||
|
@ -1173,9 +1175,8 @@ public class UpdateLog implements PluginInfoInitialized, SolrMetricProducer {
|
|||
/**
|
||||
* Copy over updates from prevTlog or last tlog (in tlog folder) to a new tlog
|
||||
* @param commitVersion any updates that have version larger than the commitVersion will be copied over
|
||||
* @param omitCommitted if a tlog is already committed then don't read it
|
||||
*/
|
||||
public void copyOverOldUpdates(long commitVersion, boolean omitCommitted) {
|
||||
public void copyOverOldUpdates(long commitVersion) {
|
||||
TransactionLog oldTlog = prevTlog;
|
||||
if (oldTlog == null && !logs.isEmpty()) {
|
||||
oldTlog = logs.getFirst();
|
||||
|
@ -1185,11 +1186,12 @@ public class UpdateLog implements PluginInfoInitialized, SolrMetricProducer {
|
|||
}
|
||||
|
||||
try {
|
||||
if (omitCommitted && oldTlog.endsWithCommit()) return;
|
||||
if (oldTlog.endsWithCommit()) return;
|
||||
} catch (IOException e) {
|
||||
log.warn("Exception reading log", e);
|
||||
return;
|
||||
}
|
||||
copyOverOldUpdatesMeter.mark();
|
||||
|
||||
SolrQueryRequest req = new LocalSolrQueryRequest(uhandler.core,
|
||||
new ModifiableSolrParams());
|
||||
|
|
|
@@ -36,7 +36,7 @@ public class ClassificationUpdateProcessorParams {

private int minDf; // knn specific - the minimum Document Frequency for considering a term

private int k; // knn specific - thw window of top results to evaluate, when assigning the class
private int k; // knn specific - the window of top results to evaluate, when assigning the class

public String[] getInputFieldNames() {
return inputFieldNames;
@@ -41,12 +41,12 @@ import org.apache.lucene.util.CharsRefBuilder;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.SolrRequest.METHOD;
import org.apache.solr.common.cloud.DistributedQueue;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.GenericSolrRequest;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.client.solrj.response.SimpleSolrResponse;
import org.apache.solr.cloud.CloudDescriptor;
import org.apache.solr.cloud.DistributedQueue;
import org.apache.solr.cloud.Overseer;
import org.apache.solr.cloud.ZkController;
import org.apache.solr.cloud.overseer.OverseerAction;
@@ -82,6 +82,7 @@ public abstract class UpdateRequestProcessor implements Closeable {

@Override
public final void close() throws IOException {
@SuppressWarnings("resource")
UpdateRequestProcessor p = this;
while (p != null) {
try {
@@ -56,6 +56,9 @@

<fieldType name="srptgeom" class="solr.RptWithGeometrySpatialField"/>

<fieldType name="srptgeom_geo3d" class="solr.RptWithGeometrySpatialField"
spatialContextFactory="Geo3D" planetModel="wgs84"/><!-- or sphere -->

<fieldType name="bbox" class="solr.BBoxField"
numberType="tdoubleDV" distanceUnits="degrees" storeSubFields="false"/>
@@ -75,6 +78,7 @@
<field name="stqpt_geohash" type="stqpt_geohash" multiValued="true"/>
<field name="pointvector" type="pointvector"/>
<field name="srptgeom" type="srptgeom"/>
<field name="srptgeom_geo3d" type="srptgeom_geo3d"/>
<field name="bbox" type="bbox"/>
<field name="pbbox" type="pbbox"/>
<field name="bbox_ndv" type="bbox_ndv"/>
@@ -32,5 +32,11 @@
initialSize="0"
autowarmCount="100%"
regenerator="solr.NoOpRegenerator"/>
<cache name="perSegSpatialFieldCache_srptgeom_geo3d"
class="solr.LRUCache"
size="3"
initialSize="0"
autowarmCount="100%"
regenerator="solr.NoOpRegenerator"/>
</query>
</config>
@@ -0,0 +1,22 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements.  See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License.  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<schema name="minimal" version="1.1">
<fieldType name="string" class="solr.StrField"/>
<field name="id" type="string" indexed="true" stored="true" required="true" multiValued="false" />
<uniquekey>id</uniquekey>
</schema>
@ -0,0 +1,43 @@
|
|||
<?xml version="1.0" ?>
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
|
||||
<!-- Minimal solrconfig.xml with /select, /admin and /update only -->
|
||||
|
||||
<config>
|
||||
<dataDir>${solr.data.dir:}</dataDir>
|
||||
|
||||
<directoryFactory name="DirectoryFactory" class="solr.StandardDirectoryFactory"/>
|
||||
|
||||
<schemaFactory class="ClassicIndexSchemaFactory"/>
|
||||
|
||||
<luceneMatchVersion>${tests.luceneMatchVersion:LATEST}</luceneMatchVersion>
|
||||
|
||||
<updateHandler class="solr.DirectUpdateHandler2">
|
||||
<commitWithin>
|
||||
<softCommit>${solr.commitwithin.softcommit:true}</softCommit>
|
||||
</commitWithin>
|
||||
</updateHandler>
|
||||
|
||||
<requestHandler name="/select" class="solr.SearchHandler">
|
||||
<lst name="defaults">
|
||||
<str name="echoParams">explicit</str>
|
||||
<str name="indent">true</str>
|
||||
</lst>
|
||||
</requestHandler>
|
||||
</config>
|
|
@ -0,0 +1,96 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.solr.backcompat;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStreamWriter;
|
||||
import java.io.Writer;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Properties;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.lucene.index.TestBackwardsCompatibility;
|
||||
import org.apache.lucene.util.TestUtil;
|
||||
import org.apache.solr.SolrTestCaseJ4;
|
||||
import org.apache.solr.common.params.CommonParams;
|
||||
import org.apache.solr.util.TestHarness;
|
||||
import org.junit.Test;
|
||||
|
||||
/** Verify we can read/write previous versions' Lucene indexes. */
|
||||
public class TestLuceneIndexBackCompat extends SolrTestCaseJ4 {
|
||||
private static final String[] oldNames = TestBackwardsCompatibility.getOldNames();
|
||||
private static final String[] oldSingleSegmentNames = TestBackwardsCompatibility.getOldSingleSegmentNames();
|
||||
|
||||
@Test
|
||||
public void testOldIndexes() throws Exception {
|
||||
List<String> names = new ArrayList<>(oldNames.length + oldSingleSegmentNames.length);
|
||||
names.addAll(Arrays.asList(oldNames));
|
||||
names.addAll(Arrays.asList(oldSingleSegmentNames));
|
||||
for (String name : names) {
|
||||
setupCore(name);
|
||||
|
||||
assertQ(req("q", "*:*", "rows", "0"), "//result[@numFound='35']");
|
||||
|
||||
assertU(adoc("id", "id_123456789"));
|
||||
assertU(commit());
|
||||
|
||||
deleteCore();
|
||||
}
|
||||
}
|
||||
|
||||
private void setupCore(String coreName) throws Exception {
|
||||
if (h != null) {
|
||||
h.close();
|
||||
}
|
||||
Path solrHome = createTempDir(coreName).toAbsolutePath();
|
||||
Files.createDirectories(solrHome);
|
||||
Path coreDir = solrHome.resolve(coreName);
|
||||
Path confDir = coreDir.resolve("conf");
|
||||
Files.createDirectories(confDir);
|
||||
Path dataDir = coreDir.resolve("data");
|
||||
Path indexDir = dataDir.resolve("index");
|
||||
Files.createDirectories(indexDir);
|
||||
|
||||
Files.copy(getFile("solr/solr.xml").toPath(), solrHome.resolve("solr.xml"));
|
||||
FileUtils.copyDirectory(configset("backcompat").toFile(), confDir.toFile());
|
||||
|
||||
try (Writer writer = new OutputStreamWriter(Files.newOutputStream(coreDir.resolve("core.properties")), StandardCharsets.UTF_8)) {
|
||||
Properties coreProps = new Properties();
|
||||
coreProps.put("name", coreName);
|
||||
coreProps.store(writer, null);
|
||||
}
|
||||
|
||||
InputStream resource = TestBackwardsCompatibility.class.getResourceAsStream("index." + coreName + ".zip");
|
||||
assertNotNull("Index name " + coreName + " not found", resource);
|
||||
TestUtil.unzip(resource, indexDir);
|
||||
|
||||
configString = "solrconfig.xml";
|
||||
schemaString = "schema.xml";
|
||||
testSolrHome = solrHome;
|
||||
System.setProperty("solr.solr.home", solrHome.toString());
|
||||
ignoreException("ignore_exception");
|
||||
solrConfig = TestHarness.createConfig(testSolrHome, coreName, getSolrConfigFile());
|
||||
h = new TestHarness(coreName, dataDir.toString(), solrConfig, getSchemaFile());
|
||||
lrf = h.getRequestFactory("",0,20, CommonParams.VERSION,"2.2");
|
||||
}
|
||||
}
|
|
@@ -21,6 +21,7 @@ import java.util.HashMap;
import java.util.Map;

import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.cloud.DistributedQueue;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.CoreStatus;
@@ -33,7 +34,6 @@ import org.apache.solr.common.cloud.ZkNodeProps;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.util.Utils;
import org.apache.solr.util.FileUtils;
import org.apache.zookeeper.KeeperException;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -83,7 +83,7 @@ public class DeleteShardTest extends SolrCloudTestCase {
}

protected void setSliceState(String collection, String slice, State state) throws SolrServerException, IOException,
KeeperException, InterruptedException {
Exception {

CloudSolrClient client = cluster.getSolrClient();
@@ -25,6 +25,7 @@ import java.util.concurrent.TimeoutException;
import java.util.function.Predicate;

import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.cloud.DistributedQueue;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.SolrjNamedThreadFactory;
@@ -95,7 +96,7 @@ public class DistributedQueueTest extends SolrTestCaseJ4 {
String dqZNode = "/distqueue/test";
byte[] data = "hello world".getBytes(UTF8);

DistributedQueue consumer = makeDistributedQueue(dqZNode);
ZkDistributedQueue consumer = makeDistributedQueue(dqZNode);
DistributedQueue producer = makeDistributedQueue(dqZNode);
DistributedQueue producer2 = makeDistributedQueue(dqZNode);
@@ -124,7 +125,7 @@ public class DistributedQueueTest extends SolrTestCaseJ4 {
String dqZNode = "/distqueue/test";
String testData = "hello world";

DistributedQueue dq = makeDistributedQueue(dqZNode);
ZkDistributedQueue dq = makeDistributedQueue(dqZNode);

assertNull(dq.peek());
Future<String> future = executor.submit(() -> new String(dq.peek(true), UTF8));
@@ -171,7 +172,7 @@ public class DistributedQueueTest extends SolrTestCaseJ4 {
@Test
public void testLeakChildWatcher() throws Exception {
String dqZNode = "/distqueue/test";
DistributedQueue dq = makeDistributedQueue(dqZNode);
ZkDistributedQueue dq = makeDistributedQueue(dqZNode);
assertTrue(dq.peekElements(1, 1, s1 -> true).isEmpty());
assertEquals(1, dq.watcherCount());
assertFalse(dq.isDirty());
@@ -207,7 +208,7 @@ public class DistributedQueueTest extends SolrTestCaseJ4 {
@Test
public void testLocallyOffer() throws Exception {
String dqZNode = "/distqueue/test";
DistributedQueue dq = makeDistributedQueue(dqZNode);
ZkDistributedQueue dq = makeDistributedQueue(dqZNode);
dq.peekElements(1, 1, s -> true);
for (int i = 0; i < 100; i++) {
byte[] data = String.valueOf(i).getBytes(UTF8);
@@ -224,7 +225,7 @@ public class DistributedQueueTest extends SolrTestCaseJ4 {
String dqZNode = "/distqueue/test";
byte[] data = "hello world".getBytes(UTF8);

DistributedQueue dq = makeDistributedQueue(dqZNode);
ZkDistributedQueue dq = makeDistributedQueue(dqZNode);

// Populate with data.
dq.offer(data);
@@ -280,8 +281,8 @@ public class DistributedQueueTest extends SolrTestCaseJ4 {
assertFalse(sessionId == zkClient.getSolrZooKeeper().getSessionId());
}

protected DistributedQueue makeDistributedQueue(String dqZNode) throws Exception {
return new DistributedQueue(zkClient, setupNewDistributedQueueZNode(dqZNode));
protected ZkDistributedQueue makeDistributedQueue(String dqZNode) throws Exception {
return new ZkDistributedQueue(zkClient, setupNewDistributedQueueZNode(dqZNode));
}

private static class QueueChangerThread extends Thread {
@@ -204,7 +204,7 @@ public class ForceLeaderTest extends HttpPartitionTest {
}

protected void unsetLeader(String collection, String slice) throws Exception {
DistributedQueue inQueue = Overseer.getStateUpdateQueue(cloudClient.getZkStateReader().getZkClient());
ZkDistributedQueue inQueue = Overseer.getStateUpdateQueue(cloudClient.getZkStateReader().getZkClient());
ZkStateReader zkStateReader = cloudClient.getZkStateReader();

ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.LEADER.toLower(),
@@ -232,7 +232,7 @@ public class ForceLeaderTest extends HttpPartitionTest {

protected void setReplicaState(String collection, String slice, Replica replica, Replica.State state) throws SolrServerException, IOException,
KeeperException, InterruptedException {
DistributedQueue inQueue = Overseer.getStateUpdateQueue(cloudClient.getZkStateReader().getZkClient());
ZkDistributedQueue inQueue = Overseer.getStateUpdateQueue(cloudClient.getZkStateReader().getZkClient());
ZkStateReader zkStateReader = cloudClient.getZkStateReader();

String baseUrl = zkStateReader.getBaseUrlForNodeName(replica.getNodeName());
@@ -23,6 +23,7 @@ import java.util.Random;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.cloud.DistributedQueue;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.CollectionAdminRequest.Create;
@@ -68,7 +69,7 @@ public class MultiThreadedOCPTest extends AbstractFullDistribZkTestBase {

private void testFillWorkQueue() throws Exception {
try (SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)))) {
DistributedQueue distributedQueue = new DistributedQueue(cloudClient.getZkStateReader().getZkClient(),
DistributedQueue distributedQueue = new ZkDistributedQueue(cloudClient.getZkStateReader().getZkClient(),
"/overseer/collection-queue-work", new Overseer.Stats());
//fill the work queue with blocked tasks by adding more than the no:of parallel tasks
for (int i = 0; i < MAX_PARALLEL_TASKS+5; i++) {
@@ -149,7 +150,7 @@ public class MultiThreadedOCPTest extends AbstractFullDistribZkTestBase {

private void testTaskExclusivity() throws Exception, SolrServerException {

DistributedQueue distributedQueue = new DistributedQueue(cloudClient.getZkStateReader().getZkClient(),
DistributedQueue distributedQueue = new ZkDistributedQueue(cloudClient.getZkStateReader().getZkClient(),
"/overseer/collection-queue-work", new Overseer.Stats());
try (SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)))) {
@@ -140,7 +140,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
ZkStateReader.REPLICATION_FACTOR, "1",
ZkStateReader.NUM_SHARDS_PROP, numShards+"",
"createNodeSet", "");
DistributedQueue q = Overseer.getStateUpdateQueue(zkClient);
ZkDistributedQueue q = Overseer.getStateUpdateQueue(zkClient);
q.offer(Utils.toJSON(m));

}
@@ -157,7 +157,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
ZkStateReader.CORE_NAME_PROP, coreName,
ZkStateReader.CORE_NODE_NAME_PROP, coreNodeName,
ZkStateReader.COLLECTION_PROP, collection);
DistributedQueue q = Overseer.getStateUpdateQueue(zkClient);
ZkDistributedQueue q = Overseer.getStateUpdateQueue(zkClient);
q.offer(Utils.toJSON(m));
return null;
} else {
@@ -170,7 +170,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
ZkStateReader.SHARD_ID_PROP, shard,
ZkStateReader.NUM_SHARDS_PROP, Integer.toString(numShards),
ZkStateReader.BASE_URL_PROP, "http://" + nodeName + "/solr/");
DistributedQueue q = Overseer.getStateUpdateQueue(zkClient);
ZkDistributedQueue q = Overseer.getStateUpdateQueue(zkClient);
q.offer(Utils.toJSON(m));
}
@@ -291,7 +291,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
ZkStateReader.REPLICATION_FACTOR, "1",
ZkStateReader.NUM_SHARDS_PROP, "3",
"createNodeSet", "");
DistributedQueue q = Overseer.getStateUpdateQueue(zkClient);
ZkDistributedQueue q = Overseer.getStateUpdateQueue(zkClient);
q.offer(Utils.toJSON(m));

for (int i = 0; i < numShards; i++) {
@@ -430,7 +430,7 @@ public class OverseerTest extends SolrTestCaseJ4 {

overseerClient = electNewOverseer(server.getZkAddress());

DistributedQueue q = Overseer.getStateUpdateQueue(zkClient);
ZkDistributedQueue q = Overseer.getStateUpdateQueue(zkClient);

ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(),
"name", COLLECTION,
@@ -849,7 +849,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
ZkStateReader.REPLICATION_FACTOR, "1",
ZkStateReader.MAX_SHARDS_PER_NODE, "1"
);
DistributedQueue q = Overseer.getStateUpdateQueue(controllerClient);
ZkDistributedQueue q = Overseer.getStateUpdateQueue(controllerClient);
q.offer(Utils.toJSON(m));
controllerClient.makePath("/collections/perf" + i, true);
}
@@ -864,7 +864,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
ZkStateReader.NUM_SHARDS_PROP, "1",
ZkStateReader.BASE_URL_PROP, "http://" + "node1"
+ "/solr/");
DistributedQueue q = Overseer.getStateUpdateQueue(controllerClient);
ZkDistributedQueue q = Overseer.getStateUpdateQueue(controllerClient);
q.offer(Utils.toJSON(m));
if (j >= MAX_COLLECTIONS - 1) j = 0;
if (k >= MAX_CORES - 1) k = 0;
@@ -881,7 +881,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
ZkStateReader.NUM_SHARDS_PROP, "1",
ZkStateReader.BASE_URL_PROP, "http://" + "node1"
+ "/solr/");
DistributedQueue q = Overseer.getStateUpdateQueue(controllerClient);
ZkDistributedQueue q = Overseer.getStateUpdateQueue(controllerClient);
q.offer(Utils.toJSON(m));

Timer t = new Timer();
@@ -971,7 +971,7 @@ public class OverseerTest extends SolrTestCaseJ4 {
reader = new ZkStateReader(zkClient);
reader.createClusterStateWatchersAndUpdate();
//prepopulate work queue with some items to emulate previous overseer died before persisting state
DistributedQueue queue = Overseer.getInternalWorkQueue(zkClient, new Overseer.Stats());
ZkDistributedQueue queue = Overseer.getInternalWorkQueue(zkClient, new Overseer.Stats());

ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(),
"name", COLLECTION,
@@ -1053,7 +1053,7 @@ public class OverseerTest extends SolrTestCaseJ4 {

overseerClient = electNewOverseer(server.getZkAddress());

DistributedQueue q = Overseer.getStateUpdateQueue(zkClient);
ZkDistributedQueue q = Overseer.getStateUpdateQueue(zkClient);


ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, CollectionParams.CollectionAction.CREATE.toLower(),
@@ -1229,7 +1229,7 @@ public class OverseerTest extends SolrTestCaseJ4 {

overseerClient = electNewOverseer(server.getZkAddress());

DistributedQueue q = Overseer.getStateUpdateQueue(zkClient);
ZkDistributedQueue q = Overseer.getStateUpdateQueue(zkClient);

// create collection
{
@@ -88,7 +88,7 @@ public class TestCloudJSONFacetJoinDomain extends SolrCloudTestCase {
@BeforeClass
private static void createMiniSolrCloudCluster() throws Exception {
// sanity check constants
assertTrue("bad test constants: must have UNIQUE_FIELD_VALS < FACET_LIMIT since refinement not currently supported",
assertTrue("bad test constants: must have UNIQUE_FIELD_VALS < FACET_LIMIT to get accurate counts without refinements",
UNIQUE_FIELD_VALS < FACET_LIMIT);
assertTrue("bad test constants: some suffixes will never be tested",
(STR_FIELD_SUFFIXES.length < MAX_FIELD_NUM) && (INT_FIELD_SUFFIXES.length < MAX_FIELD_NUM));
@@ -135,6 +135,12 @@ public class TestCloudJSONFacetJoinDomain extends SolrCloudTestCase {
}
}
CLOUD_CLIENT.add(doc);
if (random().nextInt(100) < 1) {
CLOUD_CLIENT.commit(); // commit 1% of the time to create new segments
}
if (random().nextInt(100) < 5) {
CLOUD_CLIENT.add(doc); // duplicate the doc 5% of the time to create deleted docs
}
}
CLOUD_CLIENT.commit();
}
@@ -299,7 +305,7 @@ public class TestCloudJSONFacetJoinDomain extends SolrCloudTestCase {

public void testRandom() throws Exception {

final int numIters = atLeast(3);
final int numIters = atLeast(10);
for (int iter = 0; iter < numIters; iter++) {
assertFacetCountsAreCorrect(TermFacet.buildRandomFacets(), buildRandomQuery());
}
@@ -449,8 +455,45 @@ public class TestCloudJSONFacetJoinDomain extends SolrCloudTestCase {
* recursively generates the <code>json.facet</code> param value to use for testing this facet
*/
private CharSequence toJSONFacetParamValue() {
// NOTE: since refinement isn't supported, we have to use the max cardinality of the field as limit
StringBuilder sb = new StringBuilder("{ type:terms, field:" + field + ", limit: " + FACET_LIMIT);
int limit = random().nextInt(FACET_LIMIT*2);
String limitStr = ", limit:" + limit;
if (limit >= FACET_LIMIT && random().nextBoolean()) {
limitStr = ", limit:-1"; // unlimited
} else if (limit == 10 && random().nextBoolean()) {
limitStr=""; // don't specify limit since it's the default
}

int overrequest = -1;
switch(random().nextInt(10)) {
case 0:
case 1:
case 2:
case 3:
overrequest = 0; // 40% of the time, no overrequest to better stress refinement
break;
case 4:
case 5:
overrequest = random().nextInt(FACET_LIMIT);
break;
case 6:
overrequest = random().nextInt(Integer.MAX_VALUE);
break;
default: break;
}
String overrequestStr = overrequest==-1 ? "" : ", overrequest:"+overrequest;

boolean refine = (overrequest >= 0 && (long)limit + overrequest < FACET_LIMIT)
|| (overrequest < 0 && limit < FACET_LIMIT) // don't assume how much overrequest we do by default, just check the limit
|| random().nextInt(10)==0; // once in a while, turn on refinement even when it isn't needed.

// refine = false; // NOTE: Uncomment this line to see if refinement testing is adequate (should get fails occasionally)
String refineStr=", refine:" + refine;
if (!refine) {
// if refine==false, don't specify it sometimes (it's the default)
if (random().nextBoolean()) refineStr="";
}

StringBuilder sb = new StringBuilder("{ type:terms, field:" + field + limitStr + overrequestStr + refineStr);
if (! subFacets.isEmpty()) {
sb.append(", facet:");
sb.append(toJSONFacetParamValue(subFacets));
@@ -179,7 +179,7 @@ public class TestRandomRequestDistribution extends AbstractFullDistribZkTestBase
ZkStateReader.STATE_PROP, Replica.State.DOWN.toString());

log.info("Forcing {} to go into 'down' state", notLeader.getStr(ZkStateReader.CORE_NAME_PROP));
DistributedQueue q = Overseer.getStateUpdateQueue(cloudClient.getZkStateReader().getZkClient());
ZkDistributedQueue q = Overseer.getStateUpdateQueue(cloudClient.getZkStateReader().getZkClient());
q.offer(Utils.toJSON(m));

verifyReplicaStatus(cloudClient.getZkStateReader(), "football", "shard1", notLeader.getName(), Replica.State.DOWN);
@ -0,0 +1,84 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.solr.cloud;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.solr.client.solrj.SolrServerException;
|
||||
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
|
||||
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
|
||||
import org.apache.solr.client.solrj.response.CollectionAdminResponse;
|
||||
import org.junit.BeforeClass;
|
||||
|
||||
public class TestShardsWithSingleReplica extends SolrCloudTestCase {
|
||||
|
||||
@BeforeClass
|
||||
public static void setupCluster() throws Exception {
|
||||
System.setProperty("solr.directoryFactory", "solr.StandardDirectoryFactory");
|
||||
System.setProperty("solr.ulog.numRecordsToKeep", "1000");
|
||||
|
||||
configureCluster(3)
|
||||
.addConfig("config", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf"))
|
||||
.configure();
|
||||
}
|
||||
|
||||
public void testSkipLeaderOperations() throws Exception {
|
||||
String overseerLeader = getOverseerLeader();
|
||||
List<JettySolrRunner> notOverseerNodes = cluster.getJettySolrRunners()
|
||||
.stream()
|
||||
.filter(solrRunner -> !solrRunner.getNodeName().equals(overseerLeader))
|
||||
.collect(Collectors.toList());
|
||||
String collection = "collection1";
|
||||
CollectionAdminRequest
|
||||
.createCollection(collection, 2, 1)
|
||||
.setCreateNodeSet(notOverseerNodes
|
||||
.stream()
|
||||
.map(JettySolrRunner::getNodeName)
|
||||
.collect(Collectors.joining(","))
|
||||
)
|
||||
.process(cluster.getSolrClient());
|
||||
|
||||
for (JettySolrRunner solrRunner : notOverseerNodes) {
|
||||
cluster.stopJettySolrRunner(solrRunner);
|
||||
}
|
||||
waitForState("Expected empty liveNodes", collection,
|
||||
(liveNodes, collectionState) -> liveNodes.size() == 1);
|
||||
|
||||
CollectionAdminResponse resp = CollectionAdminRequest.getOverseerStatus().process(cluster.getSolrClient());
|
||||
for (JettySolrRunner solrRunner : notOverseerNodes) {
|
||||
cluster.startJettySolrRunner(solrRunner);
|
||||
}
|
||||
|
||||
waitForState("Expected 2x1 for collection: " + collection, collection,
|
||||
clusterShape(2, 1));
|
||||
CollectionAdminResponse resp2 = CollectionAdminRequest.getOverseerStatus().process(cluster.getSolrClient());
|
||||
assertEquals(getNumLeaderOpeations(resp), getNumLeaderOpeations(resp2));
|
||||
}
|
||||
|
||||
private int getNumLeaderOpeations(CollectionAdminResponse resp) {
|
||||
return (int) resp.getResponse().findRecursive("overseer_operations", "leader", "requests");
|
||||
}
|
||||
|
||||
private String getOverseerLeader() throws IOException, SolrServerException {
|
||||
CollectionAdminResponse resp = CollectionAdminRequest.getOverseerStatus().process(cluster.getSolrClient());
|
||||
return (String) resp.getResponse().get("leader");
|
||||
}
|
||||
|
||||
}
|
|
@@ -22,6 +22,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
@@ -29,6 +30,8 @@ import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import com.carrotsearch.randomizedtesting.annotations.Repeat;
import com.codahale.metrics.Meter;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
@@ -60,8 +63,6 @@ import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrCore;
import org.apache.solr.update.DirectUpdateHandler2;
import org.apache.solr.update.SolrIndexWriter;
import org.apache.solr.update.UpdateHandler;
import org.apache.solr.update.UpdateLog;
import org.apache.solr.util.RefCounted;
import org.apache.solr.util.TestInjection;
import org.apache.solr.util.TimeOut;
@@ -71,8 +72,6 @@ import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.carrotsearch.randomizedtesting.annotations.Repeat;

@Slow
public class TestTlogReplica extends SolrCloudTestCase {
@@ -464,16 +463,13 @@ public class TestTlogReplica extends SolrCloudTestCase {
.process(cloudClient, collectionName);

{
UpdateHandler updateHandler = getSolrCore(true).get(0).getUpdateHandler();
RefCounted<IndexWriter> iwRef = updateHandler.getSolrCoreState().getIndexWriter(null);
assertTrue("IndexWriter at leader must see updates ", iwRef.get().hasUncommittedChanges());
iwRef.decref();
long docsPending = (long) getSolrCore(true).get(0).getMetricRegistry().getGauges().get("UPDATE.updateHandler.docsPending").getValue();
assertEquals(4, docsPending);
}

for (SolrCore solrCore : getSolrCore(false)) {
RefCounted<IndexWriter> iwRef = solrCore.getUpdateHandler().getSolrCoreState().getIndexWriter(null);
assertFalse("IndexWriter at replicas must not see updates ", iwRef.get().hasUncommittedChanges());
iwRef.decref();
long docsPending = (long) solrCore.getMetricRegistry().getGauges().get("UPDATE.updateHandler.docsPending").getValue();
assertEquals(0, docsPending);
}

checkRTG(1, 4, cluster.getJettySolrRunners());
@@ -486,16 +482,12 @@ public class TestTlogReplica extends SolrCloudTestCase {
// The DBQ is not processed at replicas, so we still can get doc2 and other docs by RTG
checkRTG(2,4, getSolrRunner(false));

Map<SolrCore, Long> timeCopyOverPerCores = getTimesCopyOverOldUpdates(getSolrCore(false));
new UpdateRequest()
.commit(cloudClient, collectionName);

waitForNumDocsInAllActiveReplicas(2);

// Update log roll over
for (SolrCore solrCore : getSolrCore(false)) {
UpdateLog updateLog = solrCore.getUpdateHandler().getUpdateLog();
assertFalse(updateLog.hasUncommittedChanges());
}
assertCopyOverOldUpdates(1, timeCopyOverPerCores);

// UpdateLog copy over old updates
for (int i = 15; i <= 150; i++) {
@@ -506,6 +498,7 @@ public class TestTlogReplica extends SolrCloudTestCase {
}
checkRTG(120,150, cluster.getJettySolrRunners());
waitForReplicasCatchUp(20);
assertCopyOverOldUpdates(2, timeCopyOverPerCores);
}

@SuppressWarnings("unchecked")
@@ -535,7 +528,7 @@ public class TestTlogReplica extends SolrCloudTestCase {
waitForState("Replica didn't recover", collectionName, activeReplicaCount(0,2,0));
// We skip peerSync, so replica will always trigger commit on leader
// We query only the non-leader replicas, since we haven't opened a new searcher on the leader yet
waitForNumDocsInAllReplicas(4, getNonLeaderReplias(collectionName), 0);// Should be immediate
waitForNumDocsInAllReplicas(4, getNonLeaderReplias(collectionName), 10); //timeout for stale collection state

// If I add the doc immediately, the leader fails to communicate with the follower with broken pipe.
// Options are, wait or retry...
@@ -556,8 +549,8 @@ public class TestTlogReplica extends SolrCloudTestCase {
DirectUpdateHandler2.commitOnClose = true;
ChaosMonkey.start(solrRunner);
waitForState("Replica didn't recover", collectionName, activeReplicaCount(0,2,0));
waitForNumDocsInAllReplicas(5, getNonLeaderReplias(collectionName), 10); //timeout for stale collection state
checkRTG(3,7, cluster.getJettySolrRunners());
waitForNumDocsInAllReplicas(5, getNonLeaderReplias(collectionName), 0);// Should be immediate
cluster.getSolrClient().commit(collectionName);

// Test replica recovery apply buffer updates
@@ -910,4 +903,23 @@ public class TestTlogReplica extends SolrCloudTestCase {
fail("Some replicas are not in sync with leader");

}

private void assertCopyOverOldUpdates(long delta, Map<SolrCore, Long> timesPerCore) {
for (SolrCore core : timesPerCore.keySet()) {
assertEquals(timesPerCore.get(core) + delta, getTimesCopyOverOldUpdates(core));
}
}

private Map<SolrCore, Long> getTimesCopyOverOldUpdates(List<SolrCore> cores) {
Map<SolrCore, Long> timesPerCore = new HashMap<>();
for (SolrCore core : cores) {
long times = getTimesCopyOverOldUpdates(core);
timesPerCore.put(core, times);
}
return timesPerCore;
}

private long getTimesCopyOverOldUpdates(SolrCore core) {
return ((Meter)core.getMetricRegistry().getMetrics().get("TLOG.copyOverOldUpdates.ops")).getCount();
}
}
@@ -256,6 +256,9 @@ public class TestUnifiedSolrHighlighter extends SolrTestCaseJ4 {
req("q", "text:document", "sort", "id asc", "hl", "true", "hl.bs.type", "SEPARATOR","hl.bs.separator","#","hl.fragsize", "-1"),
"//lst[@name='highlighting']/lst[@name='104']/arr[@name='text']/str='While the other <em>document</em> contains the same #'");

assertQ("CUSTOM breakiterator with fragsize 70",
req("q", "text:document", "sort", "id asc", "hl", "true", "hl.bs.type", "SEPARATOR","hl.bs.separator","#","hl.fragsize", "70"),
"//lst[@name='highlighting']/lst[@name='103']/arr[@name='text']/str='This <em>document</em> contains # special characters, while the other <em>document</em> contains the same #'");
}

public void testFragsize() {
@@ -76,16 +76,16 @@ public class SpatialRPTFieldTypeTest extends AbstractBadConfigTestBase {
assertU(commit());
String q;

q = "geo:{!geofilt score=distance filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}";
q = "geo:{!geofilt score=distance filter=false sfield=geo pt="+QUERY_COORDINATES+" d=180}";
assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_DEGREES+"']");

q = "geo:{!geofilt score=degrees filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}";
q = "geo:{!geofilt score=degrees filter=false sfield=geo pt="+QUERY_COORDINATES+" d=180}";
assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_DEGREES+"']");

q = "geo:{!geofilt score=kilometers filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}";
q = "geo:{!geofilt score=kilometers filter=false sfield=geo pt="+QUERY_COORDINATES+" d=180}";
assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_KILOMETERS+"']");

q = "geo:{!geofilt score=miles filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}";
q = "geo:{!geofilt score=miles filter=false sfield=geo pt="+QUERY_COORDINATES+" d=180}";
assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_MILES+"']");
}
@@ -264,6 +264,10 @@ public class SpatialRPTFieldTypeTest extends AbstractBadConfigTestBase {
if(format!=null) {
rptMap.put("format", format);
}
if (random().nextBoolean()) {
// use Geo3D sometimes
rptMap.put("spatialContextFactory", "Geo3D");
}
fieldType.init(oldSchema, rptMap);
fieldType.setTypeName("location_rpt");
SchemaField newField = new SchemaField("geo", fieldType, SchemaField.STORED | SchemaField.INDEXED, null);
@@ -103,7 +103,22 @@ public class TestSolr4Spatial2 extends SolrTestCaseJ4 {

@Test
public void testRptWithGeometryField() throws Exception {
String fieldName = "srptgeom"; //note: fails with "srpt_geohash" because it's not as precise
testRptWithGeometryField("srptgeom");//note: fails with "srpt_geohash" because it's not as precise
}

@Test
public void testRptWithGeometryGeo3dField() throws Exception {
String fieldName = "srptgeom_geo3d";
testRptWithGeometryField(fieldName);

// show off that Geo3D supports polygons
String polygonWKT = "POLYGON((-11 12, 10.5 12, -11 11, -11 12))"; //right-angle triangle
assertJQ(req(
"q", "{!cache=false field f=" + fieldName + "}Intersects(" + polygonWKT + ")",
"sort", "id asc"), "/response/numFound==2");
}

private void testRptWithGeometryField(String fieldName) throws Exception {
assertU(adoc("id", "0", fieldName, "ENVELOPE(-10, 20, 15, 10)"));
assertU(adoc("id", "1", fieldName, "BUFFER(POINT(-10 15), 5)"));//circle at top-left corner
assertU(optimize());// one segment.
@@ -118,7 +133,7 @@ public class TestSolr4Spatial2 extends SolrTestCaseJ4 {

// The tricky thing is verifying the cache works correctly...

MetricsMap cacheMetrics = (MetricsMap) h.getCore().getCoreMetricManager().getRegistry().getMetrics().get("CACHE.searcher.perSegSpatialFieldCache_srptgeom");
MetricsMap cacheMetrics = (MetricsMap) h.getCore().getCoreMetricManager().getRegistry().getMetrics().get("CACHE.searcher.perSegSpatialFieldCache_" + fieldName);
assertEquals("1", cacheMetrics.getValue().get("cumulative_inserts").toString());
assertEquals("0", cacheMetrics.getValue().get("cumulative_hits").toString());
@@ -140,7 +155,7 @@ public class TestSolr4Spatial2 extends SolrTestCaseJ4 {
assertJQ(sameReq, "/response/numFound==1", "/response/docs/[0]/id=='1'");

// When there are new segments, we accumulate another hit. This tests the cache was not blown away on commit.
// Checking equality for the first reader's cache key indicates wether the cache should still be valid.
// Checking equality for the first reader's cache key indicates whether the cache should still be valid.
Object leafKey2 = getFirstLeafReaderKey();
assertEquals(leafKey1.equals(leafKey2) ? "2" : "1", cacheMetrics.getValue().get("cumulative_hits").toString());
@@ -1 +0,0 @@
a7068544963ed46839c8352eddd87271fa93b967