mirror of https://github.com/apache/lucene.git

commit c04f0cb603
Merge remote-tracking branch 'origin/master'

@@ -110,6 +110,11 @@ Changes in Runtime Behavior
  and codec components are no longer allowed to use this extension
  (Robert Muir, Mike McCandless)

* LUCENE-6835: Directory.listAll now returns entries in sorted order,
  to not leak platform-specific behavior, and "retrying file deletion"
  is now the responsibility of Directory.deleteFile, not the caller.
  (Robert Muir, Mike McCandless)
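As an editor's illustrative aside (not part of this commit or of CHANGES.txt), the sketch below shows what this behavior change means for calling code, assuming a Lucene version that already contains LUCENE-6835; the index path argument and the commented-out file name are placeholders:

    import java.io.IOException;
    import java.nio.file.Paths;
    import java.util.Arrays;

    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.FSDirectory;

    public class ListAllSortedExample {
      public static void main(String[] args) throws IOException {
        try (Directory dir = FSDirectory.open(Paths.get(args[0]))) {
          String[] files = dir.listAll();
          // After LUCENE-6835 the result is already sorted, so callers no longer
          // need to sort it themselves to get platform-independent ordering:
          String[] sorted = files.clone();
          Arrays.sort(sorted);
          assert Arrays.equals(files, sorted);

          // Retrying a delete that fails because the file is still held open
          // (typically on Windows) is now the Directory's job, so the old
          // caller-side "retry later" bookkeeping can be dropped:
          // dir.deleteFile("someFile"); // hypothetical file name
        }
      }
    }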

Tests

* LUCENE-7009: Add expectThrows utility to LuceneTestCase. This uses a lambda
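The entry above is cut off at the hunk boundary. Purely as an illustration (assuming the expectThrows signature LuceneTestCase ended up with, expectThrows(Class<T>, ThrowingRunnable), and a hypothetical checkPositive method under test), a test can verify an expected exception without try/catch/fail boilerplate:

    import org.apache.lucene.util.LuceneTestCase;

    public class TestExpectThrowsExample extends LuceneTestCase {
      public void testRejectsNegative() {
        // expectThrows runs the lambda, asserts the given exception type was
        // thrown, and returns it so the message can be inspected:
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
          checkPositive(-1); // hypothetical method under test
        });
        assertTrue(e.getMessage().contains("must be positive"));
      }

      private static void checkPositive(int value) {
        if (value < 0) {
          throw new IllegalArgumentException("value must be positive: " + value);
        }
      }
    }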
@@ -181,6 +186,10 @@ API Changes

Optimizations

* LUCENE-6930: Decouple GeoPointField from NumericType by using a custom
  and efficient GeoPointTokenStream and TermEnum designed for GeoPoint prefix
  terms. (Nick Knize)

* LUCENE-6951: Improve GeoPointInPolygonQuery using point orientation based
  line crossing algorithm, and adding result for multi-value docs when least
  1 point satisfies polygon criteria. (Nick Knize)

@@ -28,6 +28,7 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;

/** Tests that > 64k affixes actually works and doesnt overflow some internal int */
public class Test64kAffixes extends LuceneTestCase {
@@ -54,9 +55,6 @@ public class Test64kAffixes extends LuceneTestCase {
dictWriter.close();

try (InputStream affStream = Files.newInputStream(affix); InputStream dictStream = Files.newInputStream(dict); Directory tempDir2 = newDirectory()) {
if (tempDir2 instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper) tempDir2).setEnableVirusScanner(false);
}
Dictionary dictionary = new Dictionary(tempDir2, "dictionary", affStream, dictStream);
Stemmer stemmer = new Stemmer(dictionary);
// drinks should still stem to drink

@@ -215,10 +215,6 @@ public class TestAllDictionaries extends LuceneTestCase {
}

private Directory getDirectory() {
Directory dir = newDirectory();
if (dir instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper) dir).setEnableVirusScanner(false);
}
return dir;
return newDirectory();
}
}

@@ -189,9 +189,6 @@ public class TestAllDictionaries2 extends LuceneTestCase {
try (InputStream dictionary = Files.newInputStream(dicEntry);
InputStream affix = Files.newInputStream(affEntry);
Directory tempDir = newDirectory()) {
if (tempDir instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper) tempDir).setEnableVirusScanner(false);
}
Dictionary dic = new Dictionary(tempDir, "dictionary", affix, dictionary);
System.out.println(tests[i] + "\t" + RamUsageTester.humanSizeOf(dic) + "\t(" +
"words=" + RamUsageTester.humanSizeOf(dic.words) + ", " +
@@ -226,9 +223,6 @@ public class TestAllDictionaries2 extends LuceneTestCase {
try (InputStream dictionary = Files.newInputStream(dicEntry);
InputStream affix = Files.newInputStream(affEntry);
Directory tempDir = newDirectory()) {
if (tempDir instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper) tempDir).setEnableVirusScanner(false);
}
new Dictionary(tempDir, "dictionary", affix, dictionary);
}
}

@@ -260,10 +260,6 @@ public class TestDictionary extends LuceneTestCase {
}

private Directory getDirectory() {
Directory dir = newDirectory();
if (dir instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper) dir).setEnableVirusScanner(false);
}
return dir;
return newDirectory();
}
}

@@ -129,10 +129,6 @@ public class TestHunspellStemFilter extends BaseTokenStreamTestCase {
}

private static Directory getDirectory() {
Directory dir = newDirectory();
if (dir instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper) dir).setEnableVirusScanner(false);
}
return dir;
return newDirectory();
}
}

@@ -57,25 +57,21 @@ public class TestFilesystemResourceLoader extends LuceneTestCase {

public void testBaseDir() throws Exception {
final Path base = createTempDir("fsResourceLoaderBase");
Writer os = Files.newBufferedWriter(base.resolve("template.txt"), StandardCharsets.UTF_8);
try {
Writer os = Files.newBufferedWriter(base.resolve("template.txt"), StandardCharsets.UTF_8);
try {
os.write("foobar\n");
} finally {
IOUtils.closeWhileHandlingException(os);
}

ResourceLoader rl = new FilesystemResourceLoader(base);
assertEquals("foobar", WordlistLoader.getLines(rl.openResource("template.txt"), StandardCharsets.UTF_8).get(0));
// Same with full path name:
String fullPath = base.resolve("template.txt").toAbsolutePath().toString();
assertEquals("foobar",
WordlistLoader.getLines(rl.openResource(fullPath), StandardCharsets.UTF_8).get(0));
assertClasspathDelegation(rl);
assertNotFound(rl);
os.write("foobar\n");
} finally {
IOUtils.rm(base);
IOUtils.closeWhileHandlingException(os);
}

ResourceLoader rl = new FilesystemResourceLoader(base);
assertEquals("foobar", WordlistLoader.getLines(rl.openResource("template.txt"), StandardCharsets.UTF_8).get(0));
// Same with full path name:
String fullPath = base.resolve("template.txt").toAbsolutePath().toString();
assertEquals("foobar",
WordlistLoader.getLines(rl.openResource(fullPath), StandardCharsets.UTF_8).get(0));
assertClasspathDelegation(rl);
assertNotFound(rl);
}

public void testDelegation() throws Exception {

@@ -60,7 +60,6 @@ import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.NIOFSDirectory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.SimpleFSDirectory;
@@ -584,7 +583,6 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
checker.close();

dir.close();
IOUtils.rm(oldIndexDir);
}
}

@@ -1238,12 +1236,6 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
System.out.println("testUpgradeOldSingleSegmentIndexWithAdditions: index=" +name);
}
Directory dir = newDirectory(oldIndexDirs.get(name));
if (dir instanceof MockDirectoryWrapper) {
// we need to ensure we delete old commits for this test,
// otherwise IndexUpgrader gets angry
((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
}

assertEquals("Original index must be single segment", 1, getNumberOfSegments(dir));

// create a bunch of dummy segments

@@ -439,8 +439,6 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
IndexReader ir = DirectoryReader.open(benchmark.getRunData().getDirectory());
assertEquals(numLines + " lines were created but " + ir.numDocs() + " docs are in the index", numLines, ir.numDocs());
ir.close();

Files.delete(lineFile);
}

/**

@@ -30,8 +30,6 @@ import java.nio.file.Path;

import org.apache.commons.compress.compressors.CompressorStreamFactory;
import org.apache.lucene.benchmark.BenchmarkTestCase;
import org.apache.lucene.util.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

@@ -135,16 +133,6 @@ public class StreamUtilsTest extends BenchmarkTestCase {
@Before
public void setUp() throws Exception {
super.setUp();
testDir = getWorkDir().resolve("ContentSourceTest");
IOUtils.rm(testDir);
Files.createDirectory(testDir);
testDir = createTempDir("ContentSourceTest");
}

@Override
@After
public void tearDown() throws Exception {
IOUtils.rm(testDir);
super.tearDown();
}

}

@@ -17,6 +17,13 @@
package org.apache.lucene.codecs.lucene50;


import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexFileNames;
@@ -29,13 +36,6 @@ import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.Lock;
import org.apache.lucene.util.IOUtils;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/**
 * Class for accessing a compound stream.
 * This class implements a directory, but is limited to only read operations.

@@ -79,11 +79,6 @@ import org.apache.lucene.util.InfoStream;

final class IndexFileDeleter implements Closeable {

/* Files that we tried to delete but failed (likely
 * because they are open and we are running on Windows),
 * so we will retry them again later: */
private final Set<String> deletable = new HashSet<>();

/* Reference count for all files in the index.
 * Counts how many existing commits reference a file.
 **/
@@ -220,6 +215,7 @@ final class IndexFileDeleter implements Closeable {
// Now delete anything with ref count at 0. These are
// presumably abandoned files eg due to crash of
// IndexWriter.
Set<String> toDelete = new HashSet<>();
for(Map.Entry<String, RefCount> entry : refCounts.entrySet() ) {
RefCount rc = entry.getValue();
final String fileName = entry.getKey();
@@ -231,10 +227,12 @@ final class IndexFileDeleter implements Closeable {
if (infoStream.isEnabled("IFD")) {
infoStream.message("IFD", "init: removing unreferenced file \"" + fileName + "\"");
}
deleteFile(fileName);
toDelete.add(fileName);
}
}

deleteFiles(toDelete);

// Finally, give policy a chance to remove things on
// startup:
policy.onInit(commits);
@@ -425,7 +423,7 @@ final class IndexFileDeleter implements Closeable {
 */
void refresh() throws IOException {
assert locked();
deletable.clear();
Set<String> toDelete = new HashSet<>();

String[] files = directory.listAll();

@@ -445,15 +443,15 @@ final class IndexFileDeleter implements Closeable {
if (infoStream.isEnabled("IFD")) {
infoStream.message("IFD", "refresh: removing newly created unreferenced file \"" + fileName + "\"");
}
deletable.add(fileName);
toDelete.add(fileName);
}
}

deletePendingFiles();
deleteFiles(toDelete);
}

@Override
public void close() {
public void close() throws IOException {
// DecRef old files from the last checkpoint, if any:
assert locked();

@@ -464,8 +462,6 @@ final class IndexFileDeleter implements Closeable {
lastFiles.clear();
}
}

deletePendingFiles();
}

/**
@@ -489,39 +485,6 @@ final class IndexFileDeleter implements Closeable {
}
}

public void deletePendingFiles() {
assert locked();

// Clone the set because it will change as we iterate:
List<String> toDelete = new ArrayList<>(deletable);

// First pass: delete any segments_N files. We do these first to be certain stale commit points are removed
// before we remove any files they reference. If any delete of segments_N fails, we leave all other files
// undeleted so index is never in a corrupt state:
for (String fileName : toDelete) {
RefCount rc = refCounts.get(fileName);
if (rc != null && rc.count > 0) {
// LUCENE-5904: should never happen! This means we are about to pending-delete a referenced index file
throw new IllegalStateException("file \"" + fileName + "\" is in pending delete set but has non-zero refCount=" + rc.count);
} else if (fileName.startsWith(IndexFileNames.SEGMENTS)) {
if (deleteFile(fileName) == false) {
if (infoStream.isEnabled("IFD")) {
infoStream.message("IFD", "failed to remove commit point \"" + fileName + "\"; skipping deletion of all other pending files");
}
return;
}
}
}

// Only delete other files if we were able to remove the segments_N files; this way we never
// leave a corrupt commit in the index even in the presense of virus checkers:
for(String fileName : toDelete) {
if (fileName.startsWith(IndexFileNames.SEGMENTS) == false) {
deleteFile(fileName);
}
}
}

/**
 * For definition of "check point" see IndexWriter comments:
 * "Clarification: Check Points (and commits)".
@@ -610,12 +573,15 @@ final class IndexFileDeleter implements Closeable {
}

/** Decrefs all provided files, even on exception; throws first exception hit, if any. */
void decRef(Collection<String> files) {
void decRef(Collection<String> files) throws IOException {
assert locked();
Set<String> toDelete = new HashSet<>();
Throwable firstThrowable = null;
for(final String file : files) {
try {
decRef(file);
if (decRef(file)) {
toDelete.add(file);
}
} catch (Throwable t) {
if (firstThrowable == null) {
// Save first exception and throw it in the end, but be sure to finish decRef all files
@@ -625,7 +591,7 @@ final class IndexFileDeleter implements Closeable {
}

try {
deletePendingFiles();
deleteFiles(toDelete);
} catch (Throwable t) {
if (firstThrowable == null) {
// Save first exception and throw it in the end, but be sure to finish decRef all files
@@ -634,27 +600,31 @@ final class IndexFileDeleter implements Closeable {
}

// NOTE: does nothing if firstThrowable is null
IOUtils.reThrowUnchecked(firstThrowable);
IOUtils.reThrow(firstThrowable);
}

/** Decrefs all provided files, ignoring any exceptions hit; call this if
 * you are already handling an exception. */
void decRefWhileHandlingException(Collection<String> files) {
assert locked();
Set<String> toDelete = new HashSet<>();
for(final String file : files) {
try {
decRef(file);
if (decRef(file)) {
toDelete.add(file);
}
} catch (Throwable t) {
}
}

try {
deletePendingFiles();
deleteFiles(toDelete);
} catch (Throwable t) {
}
}

private void decRef(String fileName) {
/** Returns true if the file should now be deleted. */
private boolean decRef(String fileName) {
assert locked();
RefCount rc = getRefCount(fileName);
if (infoStream.isEnabled("IFD")) {
@@ -662,14 +632,13 @@ final class IndexFileDeleter implements Closeable {
infoStream.message("IFD", " DecRef \"" + fileName + "\": pre-decr count is " + rc.count);
}
}
if (0 == rc.DecRef()) {
if (rc.DecRef() == 0) {
// This file is no longer referenced by any past
// commit points nor by the in-memory SegmentInfos:
try {
deletable.add(fileName);
} finally {
refCounts.remove(fileName);
}
refCounts.remove(fileName);
return true;
} else {
return false;
}
}

@@ -692,8 +661,6 @@ final class IndexFileDeleter implements Closeable {
RefCount rc;
if (!refCounts.containsKey(fileName)) {
rc = new RefCount(fileName);
// We should never incRef a file we are already wanting to delete:
assert deletable.contains(fileName) == false: "file \"" + fileName + "\" cannot be incRef'd: it's already pending delete";
refCounts.put(fileName, rc);
} else {
rc = refCounts.get(fileName);
@@ -705,6 +672,7 @@ final class IndexFileDeleter implements Closeable {
 * (have not yet been incref'd). */
void deleteNewFiles(Collection<String> files) throws IOException {
assert locked();
Set<String> toDelete = new HashSet<>();
for (final String fileName: files) {
// NOTE: it's very unusual yet possible for the
// refCount to be present and 0: it can happen if you
@@ -716,45 +684,36 @@ final class IndexFileDeleter implements Closeable {
if (infoStream.isEnabled("IFD")) {
infoStream.message("IFD", "will delete new file \"" + fileName + "\"");
}
deletable.add(fileName);
toDelete.add(fileName);
}
}

deletePendingFiles();
deleteFiles(toDelete);
}

/** Returns true if the delete succeeded. Otherwise, the fileName is
 * added to the deletable set so we will retry the delete later, and
 * we return false. */
private boolean deleteFile(String fileName) {
private void deleteFiles(Collection<String> names) throws IOException {
assert locked();
ensureOpen();
try {
if (infoStream.isEnabled("IFD")) {
infoStream.message("IFD", "delete \"" + fileName + "\"");

if (infoStream.isEnabled("IFD")) {
infoStream.message("IFD", "delete \"" + names + "\"");
}

for(String name : names) {
try {
directory.deleteFile(name);
} catch (NoSuchFileException | FileNotFoundException e) {
// IndexWriter should only ask us to delete files it knows it wrote, so if we hit this, something is wrong!

if (Constants.WINDOWS) {
// TODO: can we remove this OS-specific hacky logic? If windows deleteFile is buggy, we should instead contain this workaround in
// a WindowsFSDirectory ...
// LUCENE-6684: we suppress this assert for Windows, since a file could be in a confusing "pending delete" state, and falsely
// return NSFE/FNFE
} else {
throw e;
}
}
directory.deleteFile(fileName);
deletable.remove(fileName);
return true;
} catch (IOException e) { // if delete fails

// IndexWriter should only ask us to delete files it knows it wrote, so if we hit this, something is wrong!
// LUCENE-6684: we suppress this assert for Windows, since a file could be in a confusing "pending delete" state:
assert Constants.WINDOWS || e instanceof NoSuchFileException == false: "hit unexpected NoSuchFileException: file=" + fileName;
assert Constants.WINDOWS || e instanceof FileNotFoundException == false: "hit unexpected FileNotFoundException: file=" + fileName;

// Some operating systems (e.g. Windows) don't
// permit a file to be deleted while it is opened
// for read (e.g. by another process or thread). So
// we assume that when a delete fails it is because
// the file is open in another process, and queue
// the file for subsequent deletion.

if (infoStream.isEnabled("IFD")) {
infoStream.message("IFD", "unable to remove file \"" + fileName + "\": " + e.toString() + "; Will re-try later.");
}
deletable.add(fileName);
return false;
}
}

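To summarize the shape these IndexFileDeleter hunks converge on, here is a simplified, hypothetical sketch by the editor (not the actual class, and not claiming its locking, assertion, or exception-ordering behavior): dropping a reference no longer deletes a file immediately; instead callers collect every name whose count reached zero and hand the whole batch to one delete pass, which is what lets the Directory own the retry behavior. It assumes incRef was called for every name that is later decRef'd.

    import java.io.IOException;
    import java.util.Collection;
    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    import org.apache.lucene.store.Directory;

    // Hypothetical, simplified ref-counting deleter illustrating the batched-delete pattern:
    class SimpleRefCountingDeleter {
      private final Directory directory;
      private final Map<String, Integer> refCounts = new HashMap<>();

      SimpleRefCountingDeleter(Directory directory) {
        this.directory = directory;
      }

      void incRef(String fileName) {
        refCounts.merge(fileName, 1, Integer::sum);
      }

      /** Decrements all counts, then deletes every file that dropped to zero in one batch. */
      void decRef(Collection<String> fileNames) throws IOException {
        Set<String> toDelete = new HashSet<>();
        for (String fileName : fileNames) {
          int count = refCounts.merge(fileName, -1, Integer::sum);
          if (count == 0) {
            refCounts.remove(fileName);
            toDelete.add(fileName); // deletable now, but not deleted yet
          }
        }
        for (String fileName : toDelete) {
          directory.deleteFile(fileName); // the Directory queues/retries failed deletes itself
        }
      }
    }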
@@ -51,6 +51,7 @@ import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.FilterDirectory;
import org.apache.lucene.store.FlushInfo;
import org.apache.lucene.store.IOContext;
@@ -753,6 +754,10 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
 * IO error
 */
public IndexWriter(Directory d, IndexWriterConfig conf) throws IOException {
if (d instanceof FSDirectory && ((FSDirectory) d).checkPendingDeletions()) {
throw new IllegalArgumentException("Directory " + d + " still has pending deleted files; cannot initialize IndexWriter");
}

conf.setIndexWriter(this); // prevent reuse by other instances
config = conf;
infoStream = config.getInfoStream();
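For context, a rough caller-side sketch of what this new constructor check implies (illustrative only; it assumes the FSDirectory.checkPendingDeletions() method introduced in this change and an otherwise ordinary writer setup; the index path argument is a placeholder):

    import java.io.IOException;
    import java.nio.file.Paths;

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.store.FSDirectory;

    public class OpenWriterExample {
      public static void main(String[] args) throws IOException {
        try (FSDirectory dir = FSDirectory.open(Paths.get(args[0]))) {
          // If earlier deletes are still pending (typically because another
          // process on Windows holds the files open), IndexWriter now refuses
          // to start; checking first lets the caller fail with a clearer error
          // or retry after the pending deletes have gone through:
          if (dir.checkPendingDeletions()) {
            throw new IllegalStateException("index directory still has pending file deletions: " + args[0]);
          }
          try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
            writer.commit();
          }
        }
      }
    }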
@@ -3569,8 +3574,6 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
}
}

deleter.deletePendingFiles();

if (infoStream.isEnabled("IW")) {
infoStream.message("IW", "after commitMerge: " + segString());
}
@@ -4615,15 +4618,12 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
 * be deleted the next time commit() is called.
 */
public synchronized void deleteUnusedFiles() throws IOException {
// TODO: should we remove this method now that it's the Directory's job to retry deletions? Except, for the super expert IDP use case
// it's still needed?
ensureOpen(false);
deleter.deletePendingFiles();
deleter.revisitPolicy();
}

private synchronized void deletePendingFiles() throws IOException {
deleter.deletePendingFiles();
}

/**
 * NOTE: this method creates a compound file for all files returned by
 * info.files(). While, generally, this may include separate norms and

@@ -21,8 +21,8 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map.Entry;
import java.util.Map;
import java.util.Map.Entry;

import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;

@@ -43,7 +43,7 @@ import org.apache.lucene.util.IOUtils;
public abstract class Directory implements Closeable {

/**
 * Returns an array of strings, one for each entry in the directory.
 * Returns an array of strings, one for each entry in the directory, in sorted (UTF16, java's String.compare) order.
 *
 * @throws IOException in case of IO error
 */
@@ -67,7 +67,6 @@ public abstract class Directory implements Closeable {
 */
public abstract long fileLength(String name) throws IOException;


/** Creates a new, empty file in the directory with the given name.
    Returns a stream writing this file. */
public abstract IndexOutput createOutput(String name, IOContext context) throws IOException;

@ -17,20 +17,28 @@
|
|||
package org.apache.lucene.store;
|
||||
|
||||
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FilterOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.nio.channels.ClosedChannelException; // javadoc @link
|
||||
import java.nio.file.DirectoryStream;
|
||||
import java.nio.file.FileAlreadyExistsException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.NoSuchFileException;
|
||||
import java.nio.file.OpenOption;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.StandardCopyOption;
|
||||
import java.nio.file.StandardOpenOption;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
import org.apache.lucene.index.IndexFileNames;
|
||||
|
@ -120,6 +128,12 @@ public abstract class FSDirectory extends BaseDirectory {
|
|||
|
||||
protected final Path directory; // The underlying filesystem directory
|
||||
|
||||
/** Maps files that we are trying to delete (or we tried already but failed)
|
||||
* before attempting to delete that key. */
|
||||
private final Set<String> pendingDeletes = Collections.newSetFromMap(new ConcurrentHashMap<String,Boolean>());
|
||||
|
||||
private final AtomicInteger opsSinceLastDelete = new AtomicInteger();
|
||||
|
||||
/** Used to generate temp file names in {@link #createTempOutput}. */
|
||||
private final AtomicLong nextTempFileCounter = new AtomicLong();
|
||||
|
||||
|
@ -188,63 +202,81 @@ public abstract class FSDirectory extends BaseDirectory {
|
|||
}
|
||||
}
|
||||
|
||||
/** Lists all files (including subdirectories) in the
|
||||
* directory.
|
||||
/** Lists all files (including subdirectories) in the directory.
|
||||
*
|
||||
* @throws IOException if there was an I/O error during listing */
|
||||
public static String[] listAll(Path dir) throws IOException {
|
||||
return listAll(dir, null);
|
||||
}
|
||||
|
||||
private static String[] listAll(Path dir, Set<String> skipNames) throws IOException {
|
||||
List<String> entries = new ArrayList<>();
|
||||
|
||||
try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir)) {
|
||||
for (Path path : stream) {
|
||||
entries.add(path.getFileName().toString());
|
||||
String name = path.getFileName().toString();
|
||||
if (skipNames != null && skipNames.contains(name) == false) {
|
||||
entries.add(name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return entries.toArray(new String[entries.size()]);
|
||||
String[] array = entries.toArray(new String[entries.size()]);
|
||||
// Directory.listAll javadocs state that we sort the results here, so we don't let filesystem
|
||||
// specifics leak out of this abstraction:
|
||||
Arrays.sort(array);
|
||||
return array;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String[] listAll() throws IOException {
|
||||
ensureOpen();
|
||||
return listAll(directory);
|
||||
return listAll(directory, pendingDeletes);
|
||||
}
|
||||
|
||||
/** Returns the length in bytes of a file in the directory. */
|
||||
@Override
|
||||
public long fileLength(String name) throws IOException {
|
||||
ensureOpen();
|
||||
if (pendingDeletes.contains(name)) {
|
||||
throw new NoSuchFileException("file \"" + name + "\" is pending delete");
|
||||
}
|
||||
return Files.size(directory.resolve(name));
|
||||
}
|
||||
|
||||
/** Removes an existing file in the directory. */
|
||||
@Override
|
||||
public void deleteFile(String name) throws IOException {
|
||||
ensureOpen();
|
||||
Files.delete(directory.resolve(name));
|
||||
}
|
||||
|
||||
/** Creates an IndexOutput for the file with the given name. */
|
||||
@Override
|
||||
public IndexOutput createOutput(String name, IOContext context) throws IOException {
|
||||
ensureOpen();
|
||||
|
||||
// If this file was pending delete, we are now bringing it back to life:
|
||||
pendingDeletes.remove(name);
|
||||
maybeDeletePendingFiles();
|
||||
return new FSIndexOutput(name);
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexOutput createTempOutput(String prefix, String suffix, IOContext context) throws IOException {
|
||||
ensureOpen();
|
||||
maybeDeletePendingFiles();
|
||||
while (true) {
|
||||
try {
|
||||
String name = IndexFileNames.segmentFileName(prefix, suffix + "_" + Long.toString(nextTempFileCounter.getAndIncrement(), Character.MAX_RADIX), "tmp");
|
||||
if (pendingDeletes.contains(name)) {
|
||||
continue;
|
||||
}
|
||||
return new FSIndexOutput(name,
|
||||
StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.CREATE_NEW);
|
||||
} catch (FileAlreadyExistsException faee) {
|
||||
// Retry with next random name
|
||||
// Retry with next incremented name
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected void ensureCanRead(String name) throws IOException {
|
||||
if (pendingDeletes.contains(name)) {
|
||||
throw new NoSuchFileException("file \"" + name + "\" is pending delete and cannot be opened for read");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void sync(Collection<String> names) throws IOException {
|
||||
ensureOpen();
|
||||
|
@ -252,21 +284,27 @@ public abstract class FSDirectory extends BaseDirectory {
|
|||
for (String name : names) {
|
||||
fsync(name);
|
||||
}
|
||||
maybeDeletePendingFiles();
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void renameFile(String source, String dest) throws IOException {
|
||||
ensureOpen();
|
||||
if (pendingDeletes.contains(source)) {
|
||||
throw new NoSuchFileException("file \"" + source + "\" is pending delete and cannot be moved");
|
||||
}
|
||||
pendingDeletes.remove(dest);
|
||||
Files.move(directory.resolve(source), directory.resolve(dest), StandardCopyOption.ATOMIC_MOVE);
|
||||
// TODO: should we move directory fsync to a separate 'syncMetadata' method?
|
||||
// for example, to improve listCommits(), IndexFileDeleter could also call that after deleting segments_Ns
|
||||
IOUtils.fsync(directory, true);
|
||||
maybeDeletePendingFiles();
|
||||
}
|
||||
|
||||
/** Closes the store to future operations. */
|
||||
@Override
|
||||
public synchronized void close() {
|
||||
public synchronized void close() throws IOException {
|
||||
isOpen = false;
|
||||
deletePendingFiles();
|
||||
}
|
||||
|
||||
/** @return the underlying filesystem directory */
|
||||
|
@ -275,12 +313,79 @@ public abstract class FSDirectory extends BaseDirectory {
|
|||
return directory;
|
||||
}
|
||||
|
||||
/** For debug output. */
|
||||
@Override
|
||||
public String toString() {
|
||||
return this.getClass().getSimpleName() + "@" + directory + " lockFactory=" + lockFactory;
|
||||
}
|
||||
|
||||
protected void fsync(String name) throws IOException {
|
||||
IOUtils.fsync(directory.resolve(name), false);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteFile(String name) throws IOException {
|
||||
if (pendingDeletes.contains(name)) {
|
||||
throw new NoSuchFileException("file \"" + name + "\" is already pending delete");
|
||||
}
|
||||
privateDeleteFile(name);
|
||||
maybeDeletePendingFiles();
|
||||
}
|
||||
|
||||
/** Tries to delete any pending deleted files, and returns true if
|
||||
* there are still files that could not be deleted. */
|
||||
public boolean checkPendingDeletions() throws IOException {
|
||||
deletePendingFiles();
|
||||
return pendingDeletes.isEmpty() == false;
|
||||
}
|
||||
|
||||
/** Try to delete any pending files that we had previously tried to delete but failed
|
||||
* because we are on Windows and the files were still held open. */
|
||||
public synchronized void deletePendingFiles() throws IOException {
|
||||
if (pendingDeletes.isEmpty() == false) {
|
||||
|
||||
// TODO: we could fix IndexInputs from FSDirectory subclasses to call this when they are closed?
|
||||
|
||||
// Clone the set since we mutate it in privateDeleteFile:
|
||||
for(String name : new HashSet<>(pendingDeletes)) {
|
||||
privateDeleteFile(name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void maybeDeletePendingFiles() throws IOException {
|
||||
if (pendingDeletes.isEmpty() == false) {
|
||||
// This is a silly heuristic to try to avoid O(N^2), where N = number of files pending deletion, behaviour on Windows:
|
||||
int count = opsSinceLastDelete.incrementAndGet();
|
||||
if (count >= pendingDeletes.size()) {
|
||||
opsSinceLastDelete.addAndGet(-count);
|
||||
deletePendingFiles();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void privateDeleteFile(String name) throws IOException {
|
||||
try {
|
||||
Files.delete(directory.resolve(name));
|
||||
pendingDeletes.remove(name);
|
||||
} catch (NoSuchFileException | FileNotFoundException e) {
|
||||
// We were asked to delete a non-existent file:
|
||||
pendingDeletes.remove(name);
|
||||
throw e;
|
||||
} catch (IOException ioe) {
|
||||
// On windows, a file delete can fail because there's still an open
|
||||
// file handle against it. We record this in pendingDeletes and
|
||||
// try again later.
|
||||
|
||||
// TODO: this is hacky/lenient (we don't know which IOException this is), and
|
||||
// it should only happen on filesystems that can do this, so really we should
|
||||
// move this logic to WindowsDirectory or something
|
||||
|
||||
// TODO: can/should we do if (Constants.WINDOWS) here, else throw the exc?
|
||||
// but what about a Linux box with a CIFS mount?
|
||||
pendingDeletes.add(name);
|
||||
}
|
||||
}
|
||||
|
||||
final class FSIndexOutput extends OutputStreamIndexOutput {
|
||||
/**
|
||||
* The maximum chunk size is 8192 bytes, because file channel mallocs
|
||||
|
@ -307,8 +412,4 @@ public abstract class FSDirectory extends BaseDirectory {
|
|||
}, CHUNK_SIZE);
|
||||
}
|
||||
}
|
||||
|
||||
protected void fsync(String name) throws IOException {
|
||||
IOUtils.fsync(directory.resolve(name), false);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,6 +21,7 @@ import java.io.IOException;
|
|||
import java.nio.file.AtomicMoveNotSupportedException;
|
||||
import java.nio.file.NoSuchFileException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
|
@ -118,7 +119,9 @@ public class FileSwitchDirectory extends Directory {
|
|||
if (exc != null && files.isEmpty()) {
|
||||
throw exc;
|
||||
}
|
||||
return files.toArray(new String[files.size()]);
|
||||
String[] result = files.toArray(new String[files.size()]);
|
||||
Arrays.sort(result);
|
||||
return result;
|
||||
}
|
||||
|
||||
/** Utility method to return a file's extension. */
|
||||
|
@ -141,7 +144,11 @@ public class FileSwitchDirectory extends Directory {
|
|||
|
||||
@Override
|
||||
public void deleteFile(String name) throws IOException {
|
||||
getDirectory(name).deleteFile(name);
|
||||
if (getDirectory(name) == primaryDir) {
|
||||
primaryDir.deleteFile(name);
|
||||
} else {
|
||||
secondaryDir.deleteFile(name);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -234,6 +234,7 @@ public class MMapDirectory extends FSDirectory {
|
|||
@Override
|
||||
public IndexInput openInput(String name, IOContext context) throws IOException {
|
||||
ensureOpen();
|
||||
ensureCanRead(name);
|
||||
Path path = directory.resolve(name);
|
||||
try (FileChannel c = FileChannel.open(path, StandardOpenOption.READ)) {
|
||||
final String resourceDescription = "MMapIndexInput(path=\"" + path.toString() + "\")";
|
||||
|
|
|
@ -73,10 +73,10 @@ public class NIOFSDirectory extends FSDirectory {
|
|||
this(path, FSLockFactory.getDefault());
|
||||
}
|
||||
|
||||
/** Creates an IndexInput for the file with the given name. */
|
||||
@Override
|
||||
public IndexInput openInput(String name, IOContext context) throws IOException {
|
||||
ensureOpen();
|
||||
ensureCanRead(name);
|
||||
Path path = getDirectory().resolve(name);
|
||||
FileChannel fc = FileChannel.open(path, StandardOpenOption.READ);
|
||||
return new NIOFSIndexInput("NIOFSIndexInput(path=\"" + path + "\")", fc, context);
|
||||
|
|
|
@ -20,12 +20,10 @@ package org.apache.lucene.store;
|
|||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.NoSuchFileException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.apache.lucene.store.RAMDirectory; // javadocs
|
||||
|
@ -108,7 +106,9 @@ public class NRTCachingDirectory extends FilterDirectory implements Accountable
|
|||
"cache=" + Arrays.toString(cache.listAll()) + ",delegate=" + Arrays.toString(in.listAll()));
|
||||
}
|
||||
}
|
||||
return files.toArray(new String[files.size()]);
|
||||
String[] result = files.toArray(new String[files.size()]);
|
||||
Arrays.sort(result);
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -21,6 +21,7 @@ import java.io.FileNotFoundException;
|
|||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
@ -119,8 +120,12 @@ public class RAMDirectory extends BaseDirectory implements Accountable {
|
|||
// concurrently
|
||||
Set<String> fileNames = fileMap.keySet();
|
||||
List<String> names = new ArrayList<>(fileNames.size());
|
||||
for (String name : fileNames) names.add(name);
|
||||
return names.toArray(new String[names.size()]);
|
||||
for (String name : fileNames) {
|
||||
names.add(name);
|
||||
}
|
||||
String[] namesArray = names.toArray(new String[names.size()]);
|
||||
Arrays.sort(namesArray);
|
||||
return namesArray;
|
||||
}
|
||||
|
||||
public final boolean fileNameExists(String name) {
|
||||
|
|
|
@ -72,6 +72,7 @@ public class SimpleFSDirectory extends FSDirectory {
|
|||
@Override
|
||||
public IndexInput openInput(String name, IOContext context) throws IOException {
|
||||
ensureOpen();
|
||||
ensureCanRead(name);
|
||||
Path path = directory.resolve(name);
|
||||
SeekableByteChannel channel = Files.newByteChannel(path, StandardOpenOption.READ);
|
||||
return new SimpleFSIndexInput("SimpleFSIndexInput(path=\"" + path + "\")", channel, context);
|
||||
|
|
|
@ -191,7 +191,7 @@ public final class IOUtils {
|
|||
* Note that the files should not be null.
|
||||
*/
|
||||
public static void deleteFilesIgnoringExceptions(Directory dir, Collection<String> files) {
|
||||
for (String name : files) {
|
||||
for(String name : files) {
|
||||
try {
|
||||
dir.deleteFile(name);
|
||||
} catch (Throwable ignored) {
|
||||
|
@ -212,11 +212,11 @@ public final class IOUtils {
|
|||
* completes normally if there were no exceptions.
|
||||
*
|
||||
* @param dir Directory to delete files from
|
||||
* @param files file names to delete
|
||||
* @param names file names to delete
|
||||
*/
|
||||
public static void deleteFiles(Directory dir, Collection<String> files) throws IOException {
|
||||
public static void deleteFiles(Directory dir, Collection<String> names) throws IOException {
|
||||
Throwable th = null;
|
||||
for (String name : files) {
|
||||
for (String name : names) {
|
||||
if (name != null) {
|
||||
try {
|
||||
dir.deleteFile(name);
|
||||
|
|
|
@ -19,7 +19,6 @@ package org.apache.lucene.util.bkd;
|
|||
|
||||
import java.io.IOException;
|
||||
|
||||
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.store.IOContext;
|
||||
import org.apache.lucene.store.IndexOutput;
|
||||
|
|
|
@ -1103,7 +1103,6 @@ public class TestAddIndexes extends LuceneTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
// LUCENE-2790: tests that the non CFS files were deleted by addIndexes
|
||||
public void testNonCFSLeftovers() throws Exception {
|
||||
Directory[] dirs = new Directory[2];
|
||||
|
@ -1121,7 +1120,6 @@ public class TestAddIndexes extends LuceneTestCase {
|
|||
DirectoryReader[] readers = new DirectoryReader[] { DirectoryReader.open(dirs[0]), DirectoryReader.open(dirs[1]) };
|
||||
|
||||
MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory());
|
||||
dir.setEnableVirusScanner(false); // we check for specific list of files
|
||||
IndexWriterConfig conf = new IndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy(true));
|
||||
MergePolicy lmp = conf.getMergePolicy();
|
||||
// Force creation of CFS:
|
||||
|
|
|
@ -42,11 +42,6 @@ public class TestAllFilesCheckIndexHeader extends LuceneTestCase {
|
|||
public void test() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
// otherwise we can have unref'd files left in the index that won't be visited when opening a reader and lead to scary looking false failures:
|
||||
((MockDirectoryWrapper) dir).setEnableVirusScanner(false);
|
||||
}
|
||||
|
||||
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
|
||||
conf.setCodec(TestUtil.getDefaultCodec());
|
||||
|
||||
|
|
|
@ -42,11 +42,6 @@ public class TestAllFilesDetectTruncation extends LuceneTestCase {
|
|||
public void test() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
// otherwise we can have unref'd files left in the index that won't be visited when opening a reader and lead to scary looking false failures:
|
||||
((MockDirectoryWrapper) dir).setEnableVirusScanner(false);
|
||||
}
|
||||
|
||||
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
|
||||
conf.setCodec(TestUtil.getDefaultCodec());
|
||||
|
||||
|
|
|
@ -180,6 +180,5 @@ public class TestAtomicUpdate extends LuceneTestCase {
|
|||
directory = newFSDirectory(dirPath);
|
||||
runTest(directory);
|
||||
directory.close();
|
||||
IOUtils.rm(dirPath);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1094,10 +1094,6 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
|
|||
|
||||
public void testDeleteUnusedUpdatesFiles() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
// test explicitly needs files to always be actually deleted
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
|
||||
}
|
||||
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
|
||||
IndexWriter writer = new IndexWriter(dir, conf);
|
||||
|
||||
|
|
|
@ -17,8 +17,6 @@
|
|||
package org.apache.lucene.index;
|
||||
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.document.IntPoint;
|
||||
import org.apache.lucene.document.NumericDocValuesField;
|
||||
|
@ -47,14 +45,8 @@ public class TestCodecHoldsOpenFiles extends LuceneTestCase {
|
|||
w.commit();
|
||||
w.close();
|
||||
|
||||
for(String fileName : d.listAll()) {
|
||||
try {
|
||||
d.deleteFile(fileName);
|
||||
// may succeed, e.g. if the file is completely read into RAM.
|
||||
} catch (IOException ioe) {
|
||||
// ignore: this means codec (correctly) is holding
|
||||
// the file open
|
||||
}
|
||||
for (String name : d.listAll()) {
|
||||
d.deleteFile(name);
|
||||
}
|
||||
|
||||
for(LeafReaderContext cxt : r.leaves()) {
|
||||
|
|
|
@ -184,10 +184,6 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
|
|||
|
||||
public void testNoExtraFiles() throws IOException {
|
||||
Directory directory = newDirectory();
|
||||
if (directory instanceof MockDirectoryWrapper) {
|
||||
// test uses IW unref'ed helper which is unaware of retries
|
||||
((MockDirectoryWrapper)directory).setEnableVirusScanner(false);
|
||||
}
|
||||
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random()))
|
||||
.setMaxBufferedDocs(2));
|
||||
|
||||
|
|
|
@ -17,6 +17,14 @@
|
|||
package org.apache.lucene.index;
|
||||
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.apache.lucene.analysis.MockAnalyzer;
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.document.Field;
|
||||
|
@ -26,24 +34,14 @@ import org.apache.lucene.search.Query;
|
|||
import org.apache.lucene.search.ScoreDoc;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.store.MockDirectoryWrapper;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.apache.lucene.util.TestUtil;
|
||||
import org.apache.lucene.util.Version;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
/*
|
||||
Verify we can read the pre-2.1 file format, do searches
|
||||
against it, and add documents to it.
|
||||
*/
|
||||
|
||||
public class TestDeletionPolicy extends LuceneTestCase {
|
||||
|
||||
private void verifyCommitOrder(List<? extends IndexCommit> commits) {
|
||||
|
@ -223,10 +221,6 @@ public class TestDeletionPolicy extends LuceneTestCase {
|
|||
final double SECONDS = 2.0;
|
||||
|
||||
Directory dir = newDirectory();
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
// test manually deletes files
|
||||
((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
|
||||
}
|
||||
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()))
|
||||
.setIndexDeletionPolicy(new ExpirationTimeDeletionPolicy(dir, SECONDS));
|
||||
MergePolicy mp = conf.getMergePolicy();
|
||||
|
@ -319,10 +313,6 @@ public class TestDeletionPolicy extends LuceneTestCase {
|
|||
boolean useCompoundFile = (pass % 2) != 0;
|
||||
|
||||
Directory dir = newDirectory();
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
// test manually deletes files
|
||||
((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
|
||||
}
|
||||
|
||||
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()))
|
||||
.setIndexDeletionPolicy(new KeepAllDeletionPolicy(dir))
|
||||
|
@ -570,10 +560,6 @@ public class TestDeletionPolicy extends LuceneTestCase {
|
|||
boolean useCompoundFile = (pass % 2) != 0;
|
||||
|
||||
Directory dir = newDirectory();
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
// test manually deletes files
|
||||
((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
|
||||
}
|
||||
|
||||
KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
|
||||
for(int j=0;j<N+1;j++) {
|
||||
|
@ -634,10 +620,6 @@ public class TestDeletionPolicy extends LuceneTestCase {
|
|||
boolean useCompoundFile = (pass % 2) != 0;
|
||||
|
||||
Directory dir = newDirectory();
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
// test manually deletes files
|
||||
((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
|
||||
}
|
||||
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()))
|
||||
.setOpenMode(OpenMode.CREATE)
|
||||
.setIndexDeletionPolicy(new KeepLastNDeletionPolicy(N))
|
||||
|
|
|
@ -384,7 +384,7 @@ public class TestDemoParallelLeafReader extends LuceneTestCase {
|
|||
|
||||
final Directory dir = openDirectory(leafIndex);
|
||||
|
||||
if (Files.exists(leafIndex.resolve("done")) == false) {
|
||||
if (slowFileExists(dir, "done") == false) {
|
||||
if (DEBUG) System.out.println(Thread.currentThread().getName() + ": TEST: build segment index for " + leaf + " " + segIDGen + " (source: " + info.getDiagnostics().get("source") + ") dir=" + leafIndex);
|
||||
|
||||
if (dir.listAll().length != 0) {
|
||||
|
@ -893,7 +893,8 @@ public class TestDemoParallelLeafReader extends LuceneTestCase {
|
|||
AtomicLong currentSchemaGen = new AtomicLong();
|
||||
|
||||
// TODO: separate refresh thread, search threads, indexing threads
|
||||
ReindexingReader reindexer = getReindexerNewDVFields(createTempDir(), currentSchemaGen);
|
||||
Path root = createTempDir();
|
||||
ReindexingReader reindexer = getReindexerNewDVFields(root, currentSchemaGen);
|
||||
reindexer.commit();
|
||||
|
||||
Document doc = new Document();
|
||||
|
@ -1149,7 +1150,8 @@ public class TestDemoParallelLeafReader extends LuceneTestCase {
|
|||
}
|
||||
|
||||
public void testBasic() throws Exception {
|
||||
ReindexingReader reindexer = getReindexer(createTempDir());
|
||||
Path tempPath = createTempDir();
|
||||
ReindexingReader reindexer = getReindexer(tempPath);
|
||||
|
||||
// Start with initial empty commit:
|
||||
reindexer.commit();
|
||||
|
|
|
@ -162,174 +162,174 @@ public class TestDirectoryReader extends LuceneTestCase {
|
|||
* @throws Exception on error
|
||||
*/
|
||||
public void testGetFieldNames() throws Exception {
|
||||
Directory d = newDirectory();
|
||||
// set up writer
|
||||
IndexWriter writer = new IndexWriter(
|
||||
d,
|
||||
newIndexWriterConfig(new MockAnalyzer(random()))
|
||||
);
|
||||
Directory d = newDirectory();
|
||||
// set up writer
|
||||
IndexWriter writer = new IndexWriter(
|
||||
d,
|
||||
newIndexWriterConfig(new MockAnalyzer(random()))
|
||||
);
|
||||
|
||||
Document doc = new Document();
|
||||
Document doc = new Document();
|
||||
|
||||
FieldType customType3 = new FieldType();
|
||||
customType3.setStored(true);
|
||||
FieldType customType3 = new FieldType();
|
||||
customType3.setStored(true);
|
||||
|
||||
doc.add(new StringField("keyword", "test1", Field.Store.YES));
|
||||
doc.add(new TextField("text", "test1", Field.Store.YES));
|
||||
doc.add(new Field("unindexed", "test1", customType3));
|
||||
doc.add(new TextField("unstored","test1", Field.Store.NO));
|
||||
writer.addDocument(doc);
|
||||
|
||||
writer.close();
|
||||
// set up reader
|
||||
DirectoryReader reader = DirectoryReader.open(d);
|
||||
FieldInfos fieldInfos = MultiFields.getMergedFieldInfos(reader);
|
||||
assertNotNull(fieldInfos.fieldInfo("keyword"));
|
||||
assertNotNull(fieldInfos.fieldInfo("text"));
|
||||
assertNotNull(fieldInfos.fieldInfo("unindexed"));
|
||||
assertNotNull(fieldInfos.fieldInfo("unstored"));
|
||||
reader.close();
|
||||
// add more documents
|
||||
writer = new IndexWriter(
|
||||
d,
|
||||
newIndexWriterConfig(new MockAnalyzer(random()))
|
||||
.setOpenMode(OpenMode.APPEND)
|
||||
.setMergePolicy(newLogMergePolicy())
|
||||
);
|
||||
// want to get some more segments here
|
||||
int mergeFactor = ((LogMergePolicy) writer.getConfig().getMergePolicy()).getMergeFactor();
|
||||
for (int i = 0; i < 5*mergeFactor; i++) {
|
||||
doc = new Document();
|
||||
doc.add(new StringField("keyword", "test1", Field.Store.YES));
|
||||
doc.add(new TextField("text", "test1", Field.Store.YES));
|
||||
doc.add(new Field("unindexed", "test1", customType3));
|
||||
doc.add(new TextField("unstored","test1", Field.Store.NO));
|
||||
writer.addDocument(doc);
|
||||
}
|
||||
// new fields are in some different segments (we hope)
|
||||
for (int i = 0; i < 5*mergeFactor; i++) {
|
||||
doc = new Document();
|
||||
doc.add(new StringField("keyword2", "test1", Field.Store.YES));
|
||||
doc.add(new TextField("text2", "test1", Field.Store.YES));
|
||||
doc.add(new Field("unindexed2", "test1", customType3));
|
||||
doc.add(new TextField("unstored2","test1", Field.Store.NO));
|
||||
writer.addDocument(doc);
|
||||
}
|
||||
// new termvector fields
|
||||
|
||||
writer.close();
|
||||
// set up reader
|
||||
DirectoryReader reader = DirectoryReader.open(d);
|
||||
FieldInfos fieldInfos = MultiFields.getMergedFieldInfos(reader);
|
||||
assertNotNull(fieldInfos.fieldInfo("keyword"));
|
||||
assertNotNull(fieldInfos.fieldInfo("text"));
|
||||
assertNotNull(fieldInfos.fieldInfo("unindexed"));
|
||||
assertNotNull(fieldInfos.fieldInfo("unstored"));
|
||||
reader.close();
|
||||
// add more documents
|
||||
writer = new IndexWriter(
|
||||
d,
|
||||
newIndexWriterConfig(new MockAnalyzer(random()))
|
||||
.setOpenMode(OpenMode.APPEND)
|
||||
.setMergePolicy(newLogMergePolicy())
|
||||
);
|
||||
// want to get some more segments here
|
||||
int mergeFactor = ((LogMergePolicy) writer.getConfig().getMergePolicy()).getMergeFactor();
|
||||
for (int i = 0; i < 5*mergeFactor; i++) {
|
||||
doc = new Document();
|
||||
doc.add(new StringField("keyword", "test1", Field.Store.YES));
|
||||
doc.add(new TextField("text", "test1", Field.Store.YES));
|
||||
doc.add(new Field("unindexed", "test1", customType3));
|
||||
doc.add(new TextField("unstored","test1", Field.Store.NO));
|
||||
writer.addDocument(doc);
|
||||
}
|
||||
// new fields are in some different segments (we hope)
|
||||
for (int i = 0; i < 5*mergeFactor; i++) {
|
||||
doc = new Document();
|
||||
doc.add(new StringField("keyword2", "test1", Field.Store.YES));
|
||||
doc.add(new TextField("text2", "test1", Field.Store.YES));
|
||||
doc.add(new Field("unindexed2", "test1", customType3));
|
||||
doc.add(new TextField("unstored2","test1", Field.Store.NO));
|
||||
writer.addDocument(doc);
|
||||
}
|
||||
// new termvector fields
|
||||
|
||||
FieldType customType5 = new FieldType(TextField.TYPE_STORED);
|
||||
customType5.setStoreTermVectors(true);
|
||||
FieldType customType6 = new FieldType(TextField.TYPE_STORED);
|
||||
customType6.setStoreTermVectors(true);
|
||||
customType6.setStoreTermVectorOffsets(true);
|
||||
FieldType customType7 = new FieldType(TextField.TYPE_STORED);
|
||||
customType7.setStoreTermVectors(true);
|
||||
customType7.setStoreTermVectorPositions(true);
|
||||
FieldType customType8 = new FieldType(TextField.TYPE_STORED);
|
||||
customType8.setStoreTermVectors(true);
|
||||
customType8.setStoreTermVectorOffsets(true);
|
||||
customType8.setStoreTermVectorPositions(true);
|
||||
FieldType customType5 = new FieldType(TextField.TYPE_STORED);
|
||||
customType5.setStoreTermVectors(true);
|
||||
FieldType customType6 = new FieldType(TextField.TYPE_STORED);
|
||||
customType6.setStoreTermVectors(true);
|
||||
customType6.setStoreTermVectorOffsets(true);
|
||||
FieldType customType7 = new FieldType(TextField.TYPE_STORED);
|
||||
customType7.setStoreTermVectors(true);
|
||||
customType7.setStoreTermVectorPositions(true);
|
||||
FieldType customType8 = new FieldType(TextField.TYPE_STORED);
|
||||
customType8.setStoreTermVectors(true);
|
||||
customType8.setStoreTermVectorOffsets(true);
|
||||
customType8.setStoreTermVectorPositions(true);
|
||||
|
||||
for (int i = 0; i < 5*mergeFactor; i++) {
|
||||
doc = new Document();
|
||||
doc.add(new TextField("tvnot", "tvnot", Field.Store.YES));
|
||||
doc.add(new Field("termvector", "termvector", customType5));
|
||||
doc.add(new Field("tvoffset", "tvoffset", customType6));
|
||||
doc.add(new Field("tvposition", "tvposition", customType7));
|
||||
doc.add(new Field("tvpositionoffset", "tvpositionoffset", customType8));
|
||||
writer.addDocument(doc);
|
||||
for (int i = 0; i < 5*mergeFactor; i++) {
|
||||
doc = new Document();
|
||||
doc.add(new TextField("tvnot", "tvnot", Field.Store.YES));
|
||||
doc.add(new Field("termvector", "termvector", customType5));
|
||||
doc.add(new Field("tvoffset", "tvoffset", customType6));
|
||||
doc.add(new Field("tvposition", "tvposition", customType7));
|
||||
doc.add(new Field("tvpositionoffset", "tvpositionoffset", customType8));
|
||||
writer.addDocument(doc);
|
||||
}
|
||||
|
||||
writer.close();
|
||||
|
||||
// verify fields again
|
||||
reader = DirectoryReader.open(d);
|
||||
fieldInfos = MultiFields.getMergedFieldInfos(reader);
|
||||
|
||||
Collection<String> allFieldNames = new HashSet<>();
|
||||
Collection<String> indexedFieldNames = new HashSet<>();
|
||||
Collection<String> notIndexedFieldNames = new HashSet<>();
|
||||
Collection<String> tvFieldNames = new HashSet<>();
|
||||
|
||||
for(FieldInfo fieldInfo : fieldInfos) {
|
||||
final String name = fieldInfo.name;
|
||||
allFieldNames.add(name);
|
||||
if (fieldInfo.getIndexOptions() != IndexOptions.NONE) {
|
||||
indexedFieldNames.add(name);
|
||||
} else {
|
||||
notIndexedFieldNames.add(name);
|
||||
}
|
||||
|
||||
writer.close();
|
||||
|
||||
// verify fields again
|
||||
reader = DirectoryReader.open(d);
|
||||
fieldInfos = MultiFields.getMergedFieldInfos(reader);
|
||||
|
||||
Collection<String> allFieldNames = new HashSet<>();
|
||||
Collection<String> indexedFieldNames = new HashSet<>();
|
||||
Collection<String> notIndexedFieldNames = new HashSet<>();
|
||||
Collection<String> tvFieldNames = new HashSet<>();
|
||||
|
||||
for(FieldInfo fieldInfo : fieldInfos) {
|
||||
final String name = fieldInfo.name;
|
||||
allFieldNames.add(name);
|
||||
if (fieldInfo.getIndexOptions() != IndexOptions.NONE) {
|
||||
indexedFieldNames.add(name);
|
||||
} else {
|
||||
notIndexedFieldNames.add(name);
|
||||
}
|
||||
if (fieldInfo.hasVectors()) {
|
||||
tvFieldNames.add(name);
|
||||
}
|
||||
}
|
||||
|
||||
assertTrue(allFieldNames.contains("keyword"));
|
||||
assertTrue(allFieldNames.contains("text"));
|
||||
assertTrue(allFieldNames.contains("unindexed"));
|
||||
assertTrue(allFieldNames.contains("unstored"));
|
||||
assertTrue(allFieldNames.contains("keyword2"));
|
||||
assertTrue(allFieldNames.contains("text2"));
|
||||
assertTrue(allFieldNames.contains("unindexed2"));
|
||||
assertTrue(allFieldNames.contains("unstored2"));
|
||||
assertTrue(allFieldNames.contains("tvnot"));
|
||||
assertTrue(allFieldNames.contains("termvector"));
|
||||
assertTrue(allFieldNames.contains("tvposition"));
|
||||
assertTrue(allFieldNames.contains("tvoffset"));
|
||||
assertTrue(allFieldNames.contains("tvpositionoffset"));
|
||||
|
||||
// verify that only indexed fields were returned
|
||||
assertEquals(11, indexedFieldNames.size()); // 6 original + the 5 termvector fields
|
||||
assertTrue(indexedFieldNames.contains("keyword"));
|
||||
assertTrue(indexedFieldNames.contains("text"));
|
||||
assertTrue(indexedFieldNames.contains("unstored"));
|
||||
assertTrue(indexedFieldNames.contains("keyword2"));
|
||||
assertTrue(indexedFieldNames.contains("text2"));
|
||||
assertTrue(indexedFieldNames.contains("unstored2"));
|
||||
assertTrue(indexedFieldNames.contains("tvnot"));
|
||||
assertTrue(indexedFieldNames.contains("termvector"));
|
||||
assertTrue(indexedFieldNames.contains("tvposition"));
|
||||
assertTrue(indexedFieldNames.contains("tvoffset"));
|
||||
assertTrue(indexedFieldNames.contains("tvpositionoffset"));
|
||||
|
||||
// verify that only unindexed fields were returned
|
||||
assertEquals(2, notIndexedFieldNames.size()); // the following fields
|
||||
assertTrue(notIndexedFieldNames.contains("unindexed"));
|
||||
assertTrue(notIndexedFieldNames.contains("unindexed2"));
|
||||
|
||||
// verify index term vector fields
|
||||
assertEquals(tvFieldNames.toString(), 4, tvFieldNames.size()); // 4 field has term vector only
|
||||
assertTrue(tvFieldNames.contains("termvector"));
|
||||
|
||||
reader.close();
|
||||
d.close();
|
||||
}
|
||||
|
||||
public void testTermVectors() throws Exception {
|
||||
Directory d = newDirectory();
|
||||
// set up writer
|
||||
IndexWriter writer = new IndexWriter(
|
||||
d,
|
||||
newIndexWriterConfig(new MockAnalyzer(random()))
|
||||
.setMergePolicy(newLogMergePolicy())
|
||||
);
|
||||
// want to get some more segments here
|
||||
// new termvector fields
|
||||
int mergeFactor = ((LogMergePolicy) writer.getConfig().getMergePolicy()).getMergeFactor();
|
||||
FieldType customType5 = new FieldType(TextField.TYPE_STORED);
|
||||
customType5.setStoreTermVectors(true);
|
||||
FieldType customType6 = new FieldType(TextField.TYPE_STORED);
|
||||
customType6.setStoreTermVectors(true);
|
||||
customType6.setStoreTermVectorOffsets(true);
|
||||
FieldType customType7 = new FieldType(TextField.TYPE_STORED);
|
||||
customType7.setStoreTermVectors(true);
|
||||
customType7.setStoreTermVectorPositions(true);
|
||||
FieldType customType8 = new FieldType(TextField.TYPE_STORED);
|
||||
customType8.setStoreTermVectors(true);
|
||||
customType8.setStoreTermVectorOffsets(true);
|
||||
customType8.setStoreTermVectorPositions(true);
|
||||
for (int i = 0; i < 5 * mergeFactor; i++) {
|
||||
Document doc = new Document();
|
||||
doc.add(new TextField("tvnot", "one two two three three three", Field.Store.YES));
|
||||
doc.add(new Field("termvector", "one two two three three three", customType5));
|
||||
doc.add(new Field("tvoffset", "one two two three three three", customType6));
|
||||
|
@ -337,30 +337,29 @@ public void testTermVectors() throws Exception {
|
|||
doc.add(new Field("tvpositionoffset", "one two two three three three", customType8));
|
||||
|
||||
writer.addDocument(doc);
|
||||
}
|
||||
writer.close();
|
||||
d.close();
|
||||
}
|
||||
|
||||
void assertTermDocsCount(String msg,
|
||||
IndexReader reader,
|
||||
Term term,
|
||||
int expected)
|
||||
throws IOException {
|
||||
PostingsEnum tdocs = TestUtil.docs(random(), reader,
|
||||
term.field(),
|
||||
new BytesRef(term.text()),
|
||||
null,
|
||||
0);
|
||||
int count = 0;
|
||||
if (tdocs != null) {
|
||||
while(tdocs.nextDoc()!= DocIdSetIterator.NO_MORE_DOCS) {
|
||||
count++;
|
||||
}
|
||||
}
|
||||
assertEquals(msg + ", count mismatch", expected, count);
|
||||
}
|
||||
|
||||
public void testBinaryFields() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
|
@ -436,35 +435,36 @@ void assertTermDocsCount(String msg,
|
|||
rmDir(fileDirName);
|
||||
}*/
|
||||
|
||||
public void testFilesOpenClose() throws IOException {
|
||||
// Create initial data set
|
||||
Path dirFile = createTempDir("TestIndexReader.testFilesOpenClose");
|
||||
Directory dir = newFSDirectory(dirFile);
|
||||
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
|
||||
addDoc(writer, "test");
|
||||
writer.close();
|
||||
dir.close();
|
||||
|
||||
// Try to erase the data - this ensures that the writer closed all files
|
||||
IOUtils.rm(dirFile);
|
||||
dir = newFSDirectory(dirFile);
|
||||
|
||||
// Now create the data set again, just as before
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
|
||||
.setOpenMode(OpenMode.CREATE));
|
||||
addDoc(writer, "test");
|
||||
writer.close();
|
||||
dir.close();
|
||||
|
||||
// Now open existing directory and test that reader closes all files
|
||||
dir = newFSDirectory(dirFile);
|
||||
DirectoryReader reader1 = DirectoryReader.open(dir);
|
||||
reader1.close();
|
||||
dir.close();
|
||||
|
||||
// The following will fail if reader did not close
|
||||
// all files
|
||||
IOUtils.rm(dirFile);
|
||||
}
|
||||
|
||||
public void testOpenReaderAfterDelete() throws IOException {
|
||||
|
@ -717,7 +717,6 @@ public void testFilesOpenClose() throws IOException {
|
|||
// good exception
|
||||
public void testNoDir() throws Throwable {
|
||||
Path tempDir = createTempDir("doesnotexist");
|
||||
IOUtils.rm(tempDir);
|
||||
Directory dir = newFSDirectory(tempDir);
|
||||
try {
|
||||
DirectoryReader.open(dir);
|
||||
|
@ -1053,7 +1052,6 @@ public void testFilesOpenClose() throws IOException {
|
|||
|
||||
public void testIndexExistsOnNonExistentDirectory() throws Exception {
|
||||
Path tempDir = createTempDir("testIndexExistsOnNonExistentDirectory");
|
||||
IOUtils.rm(tempDir);
|
||||
Directory dir = newFSDirectory(tempDir);
|
||||
assertFalse(DirectoryReader.indexExists(dir));
|
||||
dir.close();
|
||||
|
|
|
@ -45,8 +45,6 @@ import org.apache.lucene.store.RAMDirectory;
|
|||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.apache.lucene.util.TestUtil;
|
||||
|
||||
|
||||
|
||||
public class TestDirectoryReaderReopen extends LuceneTestCase {
|
||||
|
||||
public void testReopen() throws Exception {
|
||||
|
@ -625,10 +623,6 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
|
|||
|
||||
public void testOverDecRefDuringReopen() throws Exception {
|
||||
MockDirectoryWrapper dir = newMockDirectory();
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
// ensure we produce enough of our exceptions
|
||||
dir.setEnableVirusScanner(false);
|
||||
}
|
||||
|
||||
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
|
||||
iwc.setCodec(TestUtil.getDefaultCodec());
|
||||
|
@ -762,8 +756,8 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
|
|||
DirectoryReader r = DirectoryReader.open(dir);
|
||||
|
||||
// Blow away the index:
|
||||
for(String fileName : dir.listAll()) {
|
||||
dir.deleteFile(fileName);
|
||||
for(String name : dir.listAll()) {
|
||||
dir.deleteFile(name);
|
||||
}
|
||||
|
||||
w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));
|
||||
|
|
|
@ -120,8 +120,6 @@ public class TestDoc extends LuceneTestCase {
|
|||
// We create unreferenced files (we don't even write
|
||||
// a segments file):
|
||||
((MockDirectoryWrapper) directory).setAssertNoUnrefencedFilesOnClose(false);
|
||||
// this test itself deletes files (has no retry mechanism)
|
||||
((MockDirectoryWrapper) directory).setEnableVirusScanner(false);
|
||||
}
|
||||
|
||||
IndexWriter writer = new IndexWriter(
|
||||
|
@ -164,8 +162,6 @@ public class TestDoc extends LuceneTestCase {
|
|||
// We create unreferenced files (we don't even write
|
||||
// a segments file):
|
||||
((MockDirectoryWrapper) directory).setAssertNoUnrefencedFilesOnClose(false);
|
||||
// this test itself deletes files (has no retry mechanism)
|
||||
((MockDirectoryWrapper) directory).setEnableVirusScanner(false);
|
||||
}
|
||||
|
||||
writer = new IndexWriter(
|
||||
|
@ -237,8 +233,8 @@ public class TestDoc extends LuceneTestCase {
|
|||
Collection<String> filesToDelete = si.files();
|
||||
codec.compoundFormat().write(dir, si, context);
|
||||
si.setUseCompoundFile(true);
|
||||
for (final String fileToDelete : filesToDelete) {
|
||||
si1.info.dir.deleteFile(fileToDelete);
|
||||
for(String name : filesToDelete) {
|
||||
si1.info.dir.deleteFile(name);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -188,42 +188,37 @@ public class TestFieldsReader extends LuceneTestCase {
|
|||
public void testExceptions() throws Throwable {
|
||||
Path indexDir = createTempDir("testfieldswriterexceptions");
|
||||
|
||||
try {
|
||||
Directory fsDir = newFSDirectory(indexDir);
|
||||
FaultyFSDirectory dir = new FaultyFSDirectory(fsDir);
|
||||
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()))
|
||||
.setOpenMode(OpenMode.CREATE);
|
||||
IndexWriter writer = new IndexWriter(dir, iwc);
|
||||
for(int i=0;i<2;i++)
|
||||
writer.addDocument(testDoc);
|
||||
writer.forceMerge(1);
|
||||
writer.close();
|
||||
Directory fsDir = newFSDirectory(indexDir);
|
||||
FaultyFSDirectory dir = new FaultyFSDirectory(fsDir);
|
||||
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()))
|
||||
.setOpenMode(OpenMode.CREATE);
|
||||
IndexWriter writer = new IndexWriter(dir, iwc);
|
||||
for(int i=0;i<2;i++)
|
||||
writer.addDocument(testDoc);
|
||||
writer.forceMerge(1);
|
||||
writer.close();
|
||||
|
||||
IndexReader reader = DirectoryReader.open(dir);
|
||||
dir.startFailing();
|
||||
IndexReader reader = DirectoryReader.open(dir);
|
||||
dir.startFailing();
|
||||
|
||||
boolean exc = false;
|
||||
boolean exc = false;
|
||||
|
||||
for(int i=0;i<2;i++) {
|
||||
try {
|
||||
reader.document(i);
|
||||
} catch (IOException ioe) {
|
||||
// expected
|
||||
exc = true;
|
||||
}
|
||||
try {
|
||||
reader.document(i);
|
||||
} catch (IOException ioe) {
|
||||
// expected
|
||||
exc = true;
|
||||
}
|
||||
for(int i=0;i<2;i++) {
|
||||
try {
|
||||
reader.document(i);
|
||||
} catch (IOException ioe) {
|
||||
// expected
|
||||
exc = true;
|
||||
}
|
||||
try {
|
||||
reader.document(i);
|
||||
} catch (IOException ioe) {
|
||||
// expected
|
||||
exc = true;
|
||||
}
|
||||
assertTrue(exc);
|
||||
reader.close();
|
||||
dir.close();
|
||||
} finally {
|
||||
IOUtils.rm(indexDir);
|
||||
}
|
||||
|
||||
assertTrue(exc);
|
||||
reader.close();
|
||||
dir.close();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,6 +18,9 @@ package org.apache.lucene.index;
|
|||
|
||||
|
||||
import java.io.*;
|
||||
import java.net.URI;
|
||||
import java.nio.file.FileSystem;
|
||||
import java.nio.file.Path;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
|
||||
|
@ -26,6 +29,8 @@ import org.apache.lucene.codecs.simpletext.SimpleTextCodec;
|
|||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
|
||||
import org.apache.lucene.mockfile.FilterPath;
|
||||
import org.apache.lucene.mockfile.VirusCheckingFS;
|
||||
import org.apache.lucene.store.AlreadyClosedException;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.store.IOContext;
|
||||
|
@ -34,6 +39,7 @@ import org.apache.lucene.store.IndexOutput;
|
|||
import org.apache.lucene.store.MockDirectoryWrapper;
|
||||
import org.apache.lucene.util.InfoStream;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.apache.lucene.util.TestUtil;
|
||||
|
||||
/*
|
||||
Verify we can read the pre-2.1 file format, do searches
|
||||
|
@ -46,8 +52,6 @@ public class TestIndexFileDeleter extends LuceneTestCase {
|
|||
Directory dir = newDirectory();
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
((MockDirectoryWrapper)dir).setPreventDoubleWrite(false);
|
||||
// ensure we actually delete files
|
||||
((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
|
||||
}
|
||||
|
||||
MergePolicy mergePolicy = newLogMergePolicy(true, 10);
|
||||
|
@ -220,9 +224,9 @@ public class TestIndexFileDeleter extends LuceneTestCase {
|
|||
}
|
||||
|
||||
public void testVirusScannerDoesntCorruptIndex() throws IOException {
|
||||
MockDirectoryWrapper dir = newMockDirectory();
|
||||
dir.setPreventDoubleWrite(false); // we arent trying to test this
|
||||
dir.setEnableVirusScanner(false); // we have our own to make test reproduce always
|
||||
Path path = createTempDir();
|
||||
Directory dir = newFSDirectory(addVirusChecker(path));
|
||||
TestUtil.disableVirusChecker(dir);
|
||||
|
||||
// add empty commit
|
||||
new IndexWriter(dir, new IndexWriterConfig(null)).close();
|
||||
|
@ -230,25 +234,12 @@ public class TestIndexFileDeleter extends LuceneTestCase {
|
|||
dir.createOutput("_0.si", IOContext.DEFAULT).close();
|
||||
|
||||
// start virus scanner
|
||||
final AtomicBoolean stopScanning = new AtomicBoolean();
|
||||
dir.failOn(new MockDirectoryWrapper.Failure() {
|
||||
@Override
|
||||
public void eval(MockDirectoryWrapper dir) throws IOException {
|
||||
if (stopScanning.get()) {
|
||||
return;
|
||||
}
|
||||
for (StackTraceElement f : new Exception().getStackTrace()) {
|
||||
if ("deleteFile".equals(f.getMethodName())) {
|
||||
throw new IOException("temporarily cannot delete file");
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
TestUtil.enableVirusChecker(dir);
|
||||
|
||||
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(null));
|
||||
iw.addDocument(new Document());
|
||||
// stop virus scanner
|
||||
stopScanning.set(true);
|
||||
TestUtil.disableVirusChecker(dir);
|
||||
iw.commit();
|
||||
iw.close();
|
||||
dir.close();
|
||||
|
|
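The hunk above replaces MockDirectoryWrapper's in-process virus-scanner simulation (the Failure that intercepts deleteFile) with a filesystem-level checker toggled through TestUtil. A minimal usage sketch, assuming addVirusChecker(Path), TestUtil.enableVirusChecker(Directory) and TestUtil.disableVirusChecker(Directory) behave as the replaced lines suggest; this is illustrative only, not code from the commit:

// Illustrative sketch; the helper names are assumptions taken from the hunk above.
import java.nio.file.Path;

import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;

public class VirusCheckerUsageSketch extends LuceneTestCase {
  public void testDeletesWithVirusChecker() throws Exception {
    Path path = createTempDir();
    Directory dir = newFSDirectory(addVirusChecker(path)); // wrap the path so file deletion may be refused
    TestUtil.disableVirusChecker(dir);                      // keep deletions reliable while seeding the index
    new IndexWriter(dir, new IndexWriterConfig(null)).close();

    TestUtil.enableVirusChecker(dir);                       // from here on, deleteFile may transiently fail
    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(null));
    iw.addDocument(new Document());

    TestUtil.disableVirusChecker(dir);                      // let pending deletes succeed before committing
    iw.commit();
    iw.close();
    dir.close();
  }
}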
File diff suppressed because it is too large
|
@ -94,10 +94,6 @@ public class TestIndexWriterCommit extends LuceneTestCase {
|
|||
*/
|
||||
public void testCommitOnCloseAbort() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
// test uses IW unref'ed check which is unaware of retries
|
||||
((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
|
||||
}
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
|
||||
.setMaxBufferedDocs(10));
|
||||
for (int i = 0; i < 14; i++) {
|
||||
|
@ -189,11 +185,6 @@ public class TestIndexWriterCommit extends LuceneTestCase {
|
|||
final String contentFormat = TestUtil.getPostingsFormat("content");
|
||||
assumeFalse("This test cannot run with Memory codec", idFormat.equals("Memory") || contentFormat.equals("Memory"));
|
||||
MockDirectoryWrapper dir = newMockDirectory();
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
// the virus scanner can use up too much disk space :)
|
||||
// an alternative is to expose MDW.triedToDelete and discount it
|
||||
dir.setEnableVirusScanner(false);
|
||||
}
|
||||
Analyzer analyzer;
|
||||
if (random().nextBoolean()) {
|
||||
// no payloads
|
||||
|
@ -279,10 +270,6 @@ public class TestIndexWriterCommit extends LuceneTestCase {
|
|||
if (dir instanceof MockDirectoryWrapper) {
|
||||
((MockDirectoryWrapper)dir).setPreventDoubleWrite(false);
|
||||
}
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
// test uses IW unref'ed check which is unaware of retries
|
||||
((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
|
||||
}
|
||||
IndexWriter writer = new IndexWriter(
|
||||
dir,
|
||||
newIndexWriterConfig(new MockAnalyzer(random()))
|
||||
|
@ -599,14 +586,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
|
|||
IndexReader reader2 = DirectoryReader.open(dir);
|
||||
assertEquals(0, reader2.numDocs());
|
||||
|
||||
// We need to let IW delete the partial segments_N that was written in prepareCommit, else we get a false fail below:
|
||||
if (mockDir != null) {
|
||||
mockDir.setEnableVirusScanner(false);
|
||||
}
|
||||
writer.rollback();
|
||||
if (mockDir != null) {
|
||||
mockDir.setEnableVirusScanner(true);
|
||||
}
|
||||
|
||||
IndexReader reader3 = DirectoryReader.openIfChanged(reader);
|
||||
assertNull(reader3);
|
||||
|
|
|
@ -498,10 +498,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
|
|||
|
||||
// First build up a starting index:
|
||||
MockDirectoryWrapper startDir = newMockDirectory();
|
||||
// TODO: find the resource leak that only occurs sometimes here.
|
||||
startDir.setNoDeleteOpenFile(false);
|
||||
// test uses IW unref'ed helper which is unaware of retries
|
||||
startDir.setEnableVirusScanner(false);
|
||||
|
||||
IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
|
||||
for (int i = 0; i < 157; i++) {
|
||||
Document d = new Document();
|
||||
|
@ -527,8 +524,6 @@ public class TestIndexWriterDelete extends LuceneTestCase {
|
|||
MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), TestUtil.ramCopyOf(startDir));
|
||||
dir.setPreventDoubleWrite(false);
|
||||
dir.setAllowRandomFileNotFoundException(false);
|
||||
// test uses IW unref'ed helper which is unaware of retries
|
||||
dir.setEnableVirusScanner(false);
|
||||
IndexWriter modifier = new IndexWriter(dir,
|
||||
newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
|
||||
.setMaxBufferedDocs(1000)
|
||||
|
@ -913,8 +908,6 @@ public class TestIndexWriterDelete extends LuceneTestCase {
|
|||
String[] text = { "Amsterdam", "Venice" };
|
||||
|
||||
MockDirectoryWrapper dir = newMockDirectory();
|
||||
// test uses IW unref'ed helper which is unaware of retries
|
||||
dir.setEnableVirusScanner(false);
|
||||
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
|
||||
modifier.commit();
|
||||
dir.failOn(failure.reset());
|
||||
|
|
|
@ -27,7 +27,7 @@ public class TestIndexWriterDeleteByQuery extends LuceneTestCase {
|
|||
|
||||
// LUCENE-6379
|
||||
public void testDeleteMatchAllDocsQuery() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
Directory dir = newMaybeVirusCheckingDirectory();
|
||||
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
|
||||
Document doc = new Document();
|
||||
// Norms are disabled:
|
||||
|
|
|
@ -57,14 +57,14 @@ import org.apache.lucene.store.Directory;
|
|||
import org.apache.lucene.store.IOContext;
|
||||
import org.apache.lucene.store.IndexInput;
|
||||
import org.apache.lucene.store.IndexOutput;
|
||||
import org.apache.lucene.store.MockDirectoryWrapper.FakeIOException;
|
||||
import org.apache.lucene.store.MockDirectoryWrapper;
|
||||
import org.apache.lucene.store.MockDirectoryWrapper.FakeIOException;
|
||||
import org.apache.lucene.store.RAMDirectory;
|
||||
import org.apache.lucene.util.Bits;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.InfoStream;
|
||||
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
|
||||
import org.apache.lucene.util.TestUtil;
|
||||
|
||||
@SuppressCodecs("SimpleText") // too slow here
|
||||
|
@ -950,7 +950,6 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
|
|||
for (FailOnlyInCommit failure : failures) {
|
||||
MockDirectoryWrapper dir = newMockDirectory();
|
||||
dir.setFailOnCreateOutput(false);
|
||||
dir.setEnableVirusScanner(false); // we check for specific list of files
|
||||
int fileCount = dir.listAll().length;
|
||||
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
|
||||
Document doc = new Document();
|
||||
|
@ -965,7 +964,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
|
|||
} catch (RuntimeException re) {
|
||||
// Expected
|
||||
}
|
||||
assertTrue(failure.failOnCommit && failure.failOnDeleteFile);
|
||||
assertTrue("failOnCommit=" + failure.failOnCommit + " failOnDeleteFile=" + failure.failOnDeleteFile, failure.failOnCommit && failure.failOnDeleteFile);
|
||||
w.rollback();
|
||||
String files[] = dir.listAll();
|
||||
assertTrue(files.length == fileCount || (files.length == fileCount+1 && Arrays.asList(files).contains(IndexWriter.WRITE_LOCK_NAME)));
|
||||
|
@ -1178,10 +1177,6 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
|
|||
public void testSimulatedCorruptIndex1() throws IOException {
|
||||
BaseDirectoryWrapper dir = newDirectory();
|
||||
dir.setCheckIndexOnClose(false); // we are corrupting it!
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
// we want to ensure our corruption always succeeds!
|
||||
((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
|
||||
}
|
||||
|
||||
IndexWriter writer = null;
|
||||
|
||||
|
@ -1230,10 +1225,6 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
|
|||
public void testSimulatedCorruptIndex2() throws IOException {
|
||||
BaseDirectoryWrapper dir = newDirectory();
|
||||
dir.setCheckIndexOnClose(false); // we are corrupting it!
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
// we want to ensure our corruption always succeeds!
|
||||
((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
|
||||
}
|
||||
IndexWriter writer = null;
|
||||
|
||||
writer = new IndexWriter(
|
||||
|
|
|
@ -126,7 +126,6 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
|
|||
public void testForceMergeTempSpaceUsage() throws IOException {
|
||||
|
||||
final MockDirectoryWrapper dir = newMockDirectory();
|
||||
dir.setEnableVirusScanner(false);
|
||||
// don't use MockAnalyzer, variable length payloads can cause merge to make things bigger,
|
||||
// since things are optimized for fixed length case. this is a problem for MemoryPF's encoding.
|
||||
// (it might have other problems too)
|
||||
|
|
|
@ -17,6 +17,10 @@
|
|||
package org.apache.lucene.index;
|
||||
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.store.AlreadyClosedException;
|
||||
|
@ -24,10 +28,7 @@ import org.apache.lucene.store.Directory;
|
|||
import org.apache.lucene.store.MockDirectoryWrapper;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import org.apache.lucene.util.TestUtil;
|
||||
|
||||
public class TestIndexWriterFromReader extends LuceneTestCase {
|
||||
|
||||
|
@ -111,10 +112,6 @@ public class TestIndexWriterFromReader extends LuceneTestCase {
|
|||
// Pull NRT reader after writer has committed and then indexed another doc:
|
||||
public void testAfterCommitThenIndex() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
// We only hit exc if stale segments file was deleted:
|
||||
((MockDirectoryWrapper) dir).setEnableVirusScanner(false);
|
||||
}
|
||||
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
|
||||
w.addDocument(new Document());
|
||||
w.commit();
|
||||
|
@ -140,10 +137,6 @@ public class TestIndexWriterFromReader extends LuceneTestCase {
|
|||
// NRT rollback: pull NRT reader after writer has committed and then before indexing another doc
|
||||
public void testNRTRollback() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
// We only hit exc if stale segments file was deleted:
|
||||
((MockDirectoryWrapper) dir).setEnableVirusScanner(false);
|
||||
}
|
||||
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
|
||||
w.addDocument(new Document());
|
||||
w.commit();
|
||||
|
|
|
@ -64,7 +64,6 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
|
|||
System.out.println("TEST: cycle: diskFree=" + diskFree);
|
||||
}
|
||||
MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory());
|
||||
dir.setEnableVirusScanner(false); // currently uses the IW unreferenced files method, unaware of retries
|
||||
dir.setMaxSizeInBytes(diskFree);
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
|
||||
MergeScheduler ms = writer.getConfig().getMergeScheduler();
|
||||
|
|
|
@ -26,10 +26,12 @@ import org.apache.lucene.store.Directory;
|
|||
import org.apache.lucene.store.IOContext;
|
||||
import org.apache.lucene.store.MockDirectoryWrapper;
|
||||
import org.apache.lucene.util.LineFileDocs;
|
||||
import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.apache.lucene.util.PrintStreamInfoStream;
|
||||
import org.apache.lucene.util.TestUtil;
|
||||
|
||||
@SuppressFileSystems("WindowsFS")
|
||||
public class TestIndexWriterOutOfFileDescriptors extends LuceneTestCase {
|
||||
public void test() throws Exception {
|
||||
MockDirectoryWrapper dir = newMockFSDirectory(createTempDir("TestIndexWriterOutOfFileDescriptors"));
|
||||
|
|
|
@ -40,12 +40,6 @@ public class TestNRTReaderCleanup extends LuceneTestCase {
|
|||
|
||||
MockDirectoryWrapper dir = newMockDirectory();
|
||||
|
||||
// don't act like windows either, or the test won't simulate the condition
|
||||
dir.setEnableVirusScanner(false);
|
||||
|
||||
// Allow deletion of still open files:
|
||||
dir.setNoDeleteOpenFile(false);
|
||||
|
||||
// Allow writing to same file more than once:
|
||||
dir.setPreventDoubleWrite(false);
|
||||
|
||||
|
@ -66,8 +60,8 @@ public class TestNRTReaderCleanup extends LuceneTestCase {
|
|||
w.close();
|
||||
|
||||
// Blow away index and make a new writer:
|
||||
for(String fileName : dir.listAll()) {
|
||||
dir.deleteFile(fileName);
|
||||
for(String name : dir.listAll()) {
|
||||
dir.deleteFile(name);
|
||||
}
|
||||
|
||||
w = new RandomIndexWriter(random(), dir);
|
||||
|
|
|
@ -39,12 +39,6 @@ public class TestNeverDelete extends LuceneTestCase {
|
|||
final Path tmpDir = createTempDir("TestNeverDelete");
|
||||
final BaseDirectoryWrapper d = newFSDirectory(tmpDir);
|
||||
|
||||
// We want to "see" files removed if Lucene removed
|
||||
// them. This is still worth running on Windows since
|
||||
// some files the IR opens and closes.
|
||||
if (d instanceof MockDirectoryWrapper) {
|
||||
((MockDirectoryWrapper)d).setNoDeleteOpenFile(false);
|
||||
}
|
||||
final RandomIndexWriter w = new RandomIndexWriter(random(),
|
||||
d,
|
||||
newIndexWriterConfig(new MockAnalyzer(random()))
|
||||
|
@ -107,7 +101,5 @@ public class TestNeverDelete extends LuceneTestCase {
|
|||
}
|
||||
w.close();
|
||||
d.close();
|
||||
|
||||
IOUtils.rm(tmpDir);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1182,10 +1182,6 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
|
|||
@Test
|
||||
public void testDeleteUnusedUpdatesFiles() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
// test explicitly needs files to always be actually deleted
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
|
||||
}
|
||||
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
|
||||
IndexWriter writer = new IndexWriter(dir, conf);
|
||||
|
||||
|
|
|
@ -189,10 +189,7 @@ public class TestOmitPositions extends LuceneTestCase {
|
|||
// Verifies no *.prx exists when all fields omit term positions:
|
||||
public void testNoPrxFile() throws Throwable {
|
||||
Directory ram = newDirectory();
|
||||
if (ram instanceof MockDirectoryWrapper) {
|
||||
// we verify some files get deleted
|
||||
((MockDirectoryWrapper)ram).setEnableVirusScanner(false);
|
||||
}
|
||||
|
||||
Analyzer analyzer = new MockAnalyzer(random());
|
||||
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(analyzer)
|
||||
.setMaxBufferedDocs(3)
|
||||
|
|
|
@ -40,6 +40,7 @@ import org.apache.lucene.store.Directory;
|
|||
import org.apache.lucene.store.MockDirectoryWrapper;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.apache.lucene.util.TestUtil;
|
||||
|
||||
|
||||
public class TestOmitTf extends LuceneTestCase {
|
||||
|
@ -219,10 +220,6 @@ public class TestOmitTf extends LuceneTestCase {
|
|||
// Verifies no *.prx exists when all fields omit term freq:
|
||||
public void testNoPrxFile() throws Throwable {
|
||||
Directory ram = newDirectory();
|
||||
if (ram instanceof MockDirectoryWrapper) {
|
||||
// we verify some files get deleted
|
||||
((MockDirectoryWrapper)ram).setEnableVirusScanner(false);
|
||||
}
|
||||
Analyzer analyzer = new MockAnalyzer(random());
|
||||
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(analyzer)
|
||||
.setMaxBufferedDocs(3)
|
||||
|
|
|
@ -22,6 +22,7 @@ import org.apache.lucene.document.Document;
|
|||
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.store.MockDirectoryWrapper;
|
||||
import org.apache.lucene.util.TestUtil;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
@ -49,7 +50,6 @@ public class TestPersistentSnapshotDeletionPolicy extends TestSnapshotDeletionPo
|
|||
public void testExistingSnapshots() throws Exception {
|
||||
int numSnapshots = 3;
|
||||
MockDirectoryWrapper dir = newMockDirectory();
|
||||
dir.setEnableVirusScanner(false); // test relies on files actually being deleted
|
||||
IndexWriter writer = new IndexWriter(dir, getConfig(random(), getDeletionPolicy(dir)));
|
||||
PersistentSnapshotDeletionPolicy psdp = (PersistentSnapshotDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
|
||||
assertNull(psdp.getLastSaveFile());
|
||||
|
|
|
@ -39,10 +39,6 @@ public class TestRollingUpdates extends LuceneTestCase {
|
|||
public void testRollingUpdates() throws Exception {
|
||||
Random random = new Random(random().nextLong());
|
||||
final BaseDirectoryWrapper dir = newDirectory();
|
||||
// test checks for no unref'ed files with the IW helper method, which isn't aware of "tried to delete files"
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
|
||||
}
|
||||
|
||||
final LineFileDocs docs = new LineFileDocs(random, true);
|
||||
|
||||
|
|
|
@ -32,6 +32,7 @@ import org.apache.lucene.store.Directory;
|
|||
import org.apache.lucene.store.IndexInput;
|
||||
import org.apache.lucene.store.MockDirectoryWrapper;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.apache.lucene.util.TestUtil;
|
||||
import org.apache.lucene.util.ThreadInterruptedException;
|
||||
import org.junit.Test;
|
||||
|
||||
|
@ -103,10 +104,6 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase {
|
|||
}
|
||||
|
||||
private void runTest(Random random, Directory dir) throws Exception {
|
||||
// we use the IW unref'ed files check which is unaware of retries:
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
|
||||
}
|
||||
// Run for ~1 seconds
|
||||
final long stopTime = System.currentTimeMillis() + 1000;
|
||||
|
||||
|
@ -257,10 +254,6 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase {
|
|||
|
||||
// Create 3 snapshots: snapshot0, snapshot1, snapshot2
|
||||
Directory dir = newDirectory();
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
// we verify some files get deleted
|
||||
((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
|
||||
}
|
||||
IndexWriter writer = new IndexWriter(dir, getConfig(random(), getDeletionPolicy()));
|
||||
SnapshotDeletionPolicy sdp = (SnapshotDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
|
||||
prepareIndexAndSnapshots(sdp, writer, numSnapshots);
|
||||
|
@ -285,10 +278,7 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase {
|
|||
@Test
|
||||
public void testMultiThreadedSnapshotting() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
// test relies on files actually being deleted
|
||||
((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
|
||||
}
|
||||
|
||||
final IndexWriter writer = new IndexWriter(dir, getConfig(random(), getDeletionPolicy()));
|
||||
final SnapshotDeletionPolicy sdp = (SnapshotDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
|
||||
|
||||
|
@ -364,10 +354,6 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase {
|
|||
@Test
|
||||
public void testReleaseSnapshot() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
// we rely upon existence of files
|
||||
((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
|
||||
}
|
||||
IndexWriter writer = new IndexWriter(dir, getConfig(random(), getDeletionPolicy()));
|
||||
SnapshotDeletionPolicy sdp = (SnapshotDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
|
||||
prepareIndexAndSnapshots(sdp, writer, 1);
|
||||
|
@ -417,10 +403,6 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase {
|
|||
// Tests the behavior of SDP when commits that are given at ctor are missing
|
||||
// on onInit().
|
||||
Directory dir = newDirectory();
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
// we rely upon existence of files
|
||||
((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
|
||||
}
|
||||
IndexWriter writer = new IndexWriter(dir, getConfig(random(), getDeletionPolicy()));
|
||||
SnapshotDeletionPolicy sdp = (SnapshotDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
|
||||
writer.addDocument(new Document());
|
||||
|
|
|
@ -164,7 +164,7 @@ public class TestStressIndexing extends LuceneTestCase {
|
|||
FSDirectory.
|
||||
*/
|
||||
public void testStressIndexAndSearching() throws Exception {
|
||||
Directory directory = newDirectory();
|
||||
Directory directory = newMaybeVirusCheckingDirectory();
|
||||
if (directory instanceof MockDirectoryWrapper) {
|
||||
((MockDirectoryWrapper) directory).setAssertNoUnrefencedFilesOnClose(true);
|
||||
}
|
||||
|
|
|
@ -50,7 +50,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
|
|||
static int seed=0;
|
||||
|
||||
public void testRandomIWReader() throws Throwable {
|
||||
Directory dir = newDirectory();
|
||||
Directory dir = newMaybeVirusCheckingDirectory();
|
||||
|
||||
// TODO: verify equals using IW.getReader
|
||||
DocsAndWriter dw = indexRandomIWReader(5, 3, 100, dir);
|
||||
|
@ -63,8 +63,8 @@ public class TestStressIndexing2 extends LuceneTestCase {
|
|||
}
|
||||
|
||||
public void testRandom() throws Throwable {
|
||||
Directory dir1 = newDirectory();
|
||||
Directory dir2 = newDirectory();
|
||||
Directory dir1 = newMaybeVirusCheckingDirectory();
|
||||
Directory dir2 = newMaybeVirusCheckingDirectory();
|
||||
// mergeFactor=2; maxBufferedDocs=2; Map docs = indexRandom(1, 3, 2, dir1);
|
||||
boolean doReaderPooling = random().nextBoolean();
|
||||
Map<String,Document> docs = indexRandom(5, 3, 100, dir1, doReaderPooling);
|
||||
|
|
|
@ -104,7 +104,7 @@ public class TestStressNRT extends LuceneTestCase {
|
|||
|
||||
List<Thread> threads = new ArrayList<>();
|
||||
|
||||
Directory dir = newDirectory();
|
||||
Directory dir = newMaybeVirusCheckingDirectory();
|
||||
|
||||
final RandomIndexWriter writer = new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random())));
|
||||
writer.setDoRandomForceMergeAssert(false);
|
||||
|
|
|
@ -43,15 +43,6 @@ public class TestSwappedIndexFiles extends LuceneTestCase {
|
|||
Directory dir1 = newDirectory();
|
||||
Directory dir2 = newDirectory();
|
||||
|
||||
if (dir1 instanceof MockDirectoryWrapper) {
|
||||
// otherwise we can have unref'd files left in the index that won't be visited when opening a reader and lead to scary looking false failures:
|
||||
((MockDirectoryWrapper) dir1).setEnableVirusScanner(false);
|
||||
}
|
||||
if (dir2 instanceof MockDirectoryWrapper) {
|
||||
// otherwise we can have unref'd files left in the index that won't be visited when opening a reader and lead to scary looking false failures:
|
||||
((MockDirectoryWrapper) dir2).setEnableVirusScanner(false);
|
||||
}
|
||||
|
||||
// Disable CFS 80% of the time so we can truncate individual files, but the other 20% of the time we test truncation of .cfs/.cfe too:
|
||||
boolean useCFS = random().nextInt(5) == 1;
|
||||
|
||||
|
|
|
@ -175,9 +175,9 @@ public class TestPointQueries extends LuceneTestCase {
|
|||
iwc.setCodec(getCodec());
|
||||
Directory dir;
|
||||
if (values.length > 100000) {
|
||||
dir = noVirusChecker(newFSDirectory(createTempDir("TestRangeTree")));
|
||||
dir = newMaybeVirusCheckingFSDirectory(createTempDir("TestRangeTree"));
|
||||
} else {
|
||||
dir = getDirectory();
|
||||
dir = newMaybeVirusCheckingDirectory();
|
||||
}
|
||||
|
||||
int missingPct = random().nextInt(100);
|
||||
|
@ -439,9 +439,9 @@ public class TestPointQueries extends LuceneTestCase {
|
|||
|
||||
Directory dir;
|
||||
if (docValues.length > 100000) {
|
||||
dir = noVirusChecker(newFSDirectory(createTempDir("TestPointRangeQuery")));
|
||||
dir = newFSDirectory(createTempDir("TestPointQueries"));
|
||||
} else {
|
||||
dir = getDirectory();
|
||||
dir = newDirectory();
|
||||
}
|
||||
|
||||
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
|
||||
|
@ -721,7 +721,7 @@ public class TestPointQueries extends LuceneTestCase {
|
|||
}
|
||||
|
||||
public void testMinMaxLong() throws Exception {
|
||||
Directory dir = getDirectory();
|
||||
Directory dir = newDirectory();
|
||||
IndexWriterConfig iwc = newIndexWriterConfig();
|
||||
iwc.setCodec(getCodec());
|
||||
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
|
||||
|
@ -760,7 +760,7 @@ public class TestPointQueries extends LuceneTestCase {
|
|||
}
|
||||
|
||||
public void testBasicSortedSet() throws Exception {
|
||||
Directory dir = getDirectory();
|
||||
Directory dir = newDirectory();
|
||||
IndexWriterConfig iwc = newIndexWriterConfig();
|
||||
iwc.setCodec(getCodec());
|
||||
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
|
||||
|
@ -825,7 +825,7 @@ public class TestPointQueries extends LuceneTestCase {
|
|||
}
|
||||
|
||||
public void testLongMinMaxNumeric() throws Exception {
|
||||
Directory dir = getDirectory();
|
||||
Directory dir = newDirectory();
|
||||
IndexWriterConfig iwc = newIndexWriterConfig();
|
||||
iwc.setCodec(getCodec());
|
||||
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
|
||||
|
@ -851,7 +851,7 @@ public class TestPointQueries extends LuceneTestCase {
|
|||
}
|
||||
|
||||
public void testLongMinMaxSortedSet() throws Exception {
|
||||
Directory dir = getDirectory();
|
||||
Directory dir = newDirectory();
|
||||
IndexWriterConfig iwc = newIndexWriterConfig();
|
||||
iwc.setCodec(getCodec());
|
||||
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
|
||||
|
@ -878,7 +878,7 @@ public class TestPointQueries extends LuceneTestCase {
|
|||
}
|
||||
|
||||
public void testSortedSetNoOrdsMatch() throws Exception {
|
||||
Directory dir = getDirectory();
|
||||
Directory dir = newDirectory();
|
||||
IndexWriterConfig iwc = newIndexWriterConfig();
|
||||
iwc.setCodec(getCodec());
|
||||
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
|
||||
|
@ -900,7 +900,7 @@ public class TestPointQueries extends LuceneTestCase {
|
|||
}
|
||||
|
||||
public void testNumericNoValuesMatch() throws Exception {
|
||||
Directory dir = getDirectory();
|
||||
Directory dir = newDirectory();
|
||||
IndexWriterConfig iwc = newIndexWriterConfig();
|
||||
iwc.setCodec(getCodec());
|
||||
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
|
||||
|
@ -920,7 +920,7 @@ public class TestPointQueries extends LuceneTestCase {
|
|||
}
|
||||
|
||||
public void testNoDocs() throws Exception {
|
||||
Directory dir = getDirectory();
|
||||
Directory dir = newDirectory();
|
||||
IndexWriterConfig iwc = newIndexWriterConfig();
|
||||
iwc.setCodec(getCodec());
|
||||
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
|
||||
|
@ -935,7 +935,7 @@ public class TestPointQueries extends LuceneTestCase {
|
|||
}
|
||||
|
||||
public void testWrongNumDims() throws Exception {
|
||||
Directory dir = getDirectory();
|
||||
Directory dir = newDirectory();
|
||||
IndexWriterConfig iwc = newIndexWriterConfig();
|
||||
iwc.setCodec(getCodec());
|
||||
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
|
||||
|
@ -958,7 +958,7 @@ public class TestPointQueries extends LuceneTestCase {
|
|||
}
|
||||
|
||||
public void testWrongNumBytes() throws Exception {
|
||||
Directory dir = getDirectory();
|
||||
Directory dir = newDirectory();
|
||||
IndexWriterConfig iwc = newIndexWriterConfig();
|
||||
iwc.setCodec(getCodec());
|
||||
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
|
||||
|
@ -982,7 +982,7 @@ public class TestPointQueries extends LuceneTestCase {
|
|||
}
|
||||
|
||||
public void testAllPointDocsWereDeletedAndThenMergedAgain() throws Exception {
|
||||
Directory dir = getDirectory();
|
||||
Directory dir = newDirectory();
|
||||
IndexWriterConfig iwc = newIndexWriterConfig();
|
||||
iwc.setCodec(getCodec());
|
||||
IndexWriter w = new IndexWriter(dir, iwc);
|
||||
|
@ -1018,17 +1018,6 @@ public class TestPointQueries extends LuceneTestCase {
|
|||
IOUtils.close(w, dir);
|
||||
}
|
||||
|
||||
private static Directory noVirusChecker(Directory dir) {
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
((MockDirectoryWrapper) dir).setEnableVirusScanner(false);
|
||||
}
|
||||
return dir;
|
||||
}
|
||||
|
||||
private static Directory getDirectory() {
|
||||
return noVirusChecker(newDirectory());
|
||||
}
|
||||
|
||||
private static Codec getCodec() {
|
||||
if (Codec.getDefault().getName().equals("Lucene60")) {
|
||||
int maxPointsInLeafNode = TestUtil.nextInt(random(), 16, 2048);
|
||||
|
@ -1059,7 +1048,7 @@ public class TestPointQueries extends LuceneTestCase {
|
|||
}
|
||||
|
||||
public void testExactPointQuery() throws Exception {
|
||||
Directory dir = getDirectory();
|
||||
Directory dir = newDirectory();
|
||||
IndexWriterConfig iwc = newIndexWriterConfig();
|
||||
iwc.setCodec(getCodec());
|
||||
IndexWriter w = new IndexWriter(dir, iwc);
|
||||
|
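Several hunks in this commit delete the same boilerplate: casting the result of newDirectory() to MockDirectoryWrapper and calling setEnableVirusScanner(false). For contrast with the newDirectory()/newMaybeVirusCheckingDirectory() calls that replace it, here is a self-contained sketch of that removed helper pattern, reconstructed from the deleted lines (illustrative only; this API no longer exists after the commit):

// Sketch of the per-test helper pattern removed throughout this commit.
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;

final class LegacyNoVirusChecker {
  private LegacyNoVirusChecker() {}

  // Mirrors the deleted noVirusChecker(...) helpers: only MockDirectoryWrapper
  // exposed setEnableVirusScanner, so any other Directory passes through unchanged.
  static Directory noVirusChecker(Directory dir) {
    if (dir instanceof MockDirectoryWrapper) {
      ((MockDirectoryWrapper) dir).setEnableVirusScanner(false);
    }
    return dir;
  }
}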
|
|
@ -211,58 +211,54 @@ public class TestBufferedIndexInput extends LuceneTestCase {
|
|||
public void testSetBufferSize() throws IOException {
|
||||
Path indexDir = createTempDir("testSetBufferSize");
|
||||
MockFSDirectory dir = new MockFSDirectory(indexDir, random());
|
||||
try {
|
||||
IndexWriter writer = new IndexWriter(
|
||||
dir,
|
||||
new IndexWriterConfig(new MockAnalyzer(random())).
|
||||
setOpenMode(OpenMode.CREATE).
|
||||
setMergePolicy(newLogMergePolicy(false))
|
||||
);
|
||||
for(int i=0;i<37;i++) {
|
||||
Document doc = new Document();
|
||||
doc.add(newTextField("content", "aaa bbb ccc ddd" + i, Field.Store.YES));
|
||||
doc.add(newTextField("id", "" + i, Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
}
|
||||
|
||||
dir.allIndexInputs.clear();
|
||||
|
||||
IndexReader reader = DirectoryReader.open(writer);
|
||||
Term aaa = new Term("content", "aaa");
|
||||
Term bbb = new Term("content", "bbb");
|
||||
|
||||
reader.close();
|
||||
|
||||
dir.tweakBufferSizes();
|
||||
writer.deleteDocuments(new Term("id", "0"));
|
||||
reader = DirectoryReader.open(writer);
|
||||
IndexSearcher searcher = newSearcher(reader);
|
||||
ScoreDoc[] hits = searcher.search(new TermQuery(bbb), 1000).scoreDocs;
|
||||
dir.tweakBufferSizes();
|
||||
assertEquals(36, hits.length);
|
||||
|
||||
reader.close();
|
||||
|
||||
dir.tweakBufferSizes();
|
||||
writer.deleteDocuments(new Term("id", "4"));
|
||||
reader = DirectoryReader.open(writer);
|
||||
searcher = newSearcher(reader);
|
||||
|
||||
hits = searcher.search(new TermQuery(bbb), 1000).scoreDocs;
|
||||
dir.tweakBufferSizes();
|
||||
assertEquals(35, hits.length);
|
||||
dir.tweakBufferSizes();
|
||||
hits = searcher.search(new TermQuery(new Term("id", "33")), 1000).scoreDocs;
|
||||
dir.tweakBufferSizes();
|
||||
assertEquals(1, hits.length);
|
||||
hits = searcher.search(new TermQuery(aaa), 1000).scoreDocs;
|
||||
dir.tweakBufferSizes();
|
||||
assertEquals(35, hits.length);
|
||||
writer.close();
|
||||
reader.close();
|
||||
} finally {
|
||||
IOUtils.rm(indexDir);
|
||||
IndexWriter writer = new IndexWriter(
|
||||
dir,
|
||||
new IndexWriterConfig(new MockAnalyzer(random())).
|
||||
setOpenMode(OpenMode.CREATE).
|
||||
setMergePolicy(newLogMergePolicy(false))
|
||||
);
|
||||
for(int i=0;i<37;i++) {
|
||||
Document doc = new Document();
|
||||
doc.add(newTextField("content", "aaa bbb ccc ddd" + i, Field.Store.YES));
|
||||
doc.add(newTextField("id", "" + i, Field.Store.YES));
|
||||
writer.addDocument(doc);
|
||||
}
|
||||
|
||||
dir.allIndexInputs.clear();
|
||||
|
||||
IndexReader reader = DirectoryReader.open(writer);
|
||||
Term aaa = new Term("content", "aaa");
|
||||
Term bbb = new Term("content", "bbb");
|
||||
|
||||
reader.close();
|
||||
|
||||
dir.tweakBufferSizes();
|
||||
writer.deleteDocuments(new Term("id", "0"));
|
||||
reader = DirectoryReader.open(writer);
|
||||
IndexSearcher searcher = newSearcher(reader);
|
||||
ScoreDoc[] hits = searcher.search(new TermQuery(bbb), 1000).scoreDocs;
|
||||
dir.tweakBufferSizes();
|
||||
assertEquals(36, hits.length);
|
||||
|
||||
reader.close();
|
||||
|
||||
dir.tweakBufferSizes();
|
||||
writer.deleteDocuments(new Term("id", "4"));
|
||||
reader = DirectoryReader.open(writer);
|
||||
searcher = newSearcher(reader);
|
||||
|
||||
hits = searcher.search(new TermQuery(bbb), 1000).scoreDocs;
|
||||
dir.tweakBufferSizes();
|
||||
assertEquals(35, hits.length);
|
||||
dir.tweakBufferSizes();
|
||||
hits = searcher.search(new TermQuery(new Term("id", "33")), 1000).scoreDocs;
|
||||
dir.tweakBufferSizes();
|
||||
assertEquals(1, hits.length);
|
||||
hits = searcher.search(new TermQuery(aaa), 1000).scoreDocs;
|
||||
dir.tweakBufferSizes();
|
||||
assertEquals(35, hits.length);
|
||||
writer.close();
|
||||
reader.close();
|
||||
}
|
||||
|
||||
private static class MockFSDirectory extends FilterDirectory {
|
||||
|
|
|
@ -23,8 +23,8 @@ import java.nio.file.Path;
|
|||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.apache.lucene.util.TestUtil;
|
||||
|
||||
public class TestDirectory extends LuceneTestCase {
|
||||
|
||||
|
@ -110,23 +110,17 @@ public class TestDirectory extends LuceneTestCase {
|
|||
dir.close();
|
||||
assertFalse(dir.isOpen);
|
||||
}
|
||||
|
||||
IOUtils.rm(path);
|
||||
}
|
||||
|
||||
// LUCENE-1468
|
||||
@SuppressWarnings("resource")
|
||||
public void testCopySubdir() throws Throwable {
|
||||
Path path = createTempDir("testsubdir");
|
||||
try {
|
||||
Files.createDirectory(path.resolve("subdir"));
|
||||
FSDirectory fsDir = new SimpleFSDirectory(path);
|
||||
RAMDirectory ramDir = new RAMDirectory(fsDir, newIOContext(random()));
|
||||
List<String> files = Arrays.asList(ramDir.listAll());
|
||||
assertFalse(files.contains("subdir"));
|
||||
} finally {
|
||||
IOUtils.rm(path);
|
||||
}
|
||||
Files.createDirectory(path.resolve("subdir"));
|
||||
FSDirectory fsDir = new SimpleFSDirectory(path);
|
||||
RAMDirectory ramDir = new RAMDirectory(fsDir, newIOContext(random()));
|
||||
List<String> files = Arrays.asList(ramDir.listAll());
|
||||
assertFalse(files.contains("subdir"));
|
||||
}
|
||||
|
||||
// LUCENE-1468
|
||||
|
@ -145,7 +139,6 @@ public class TestDirectory extends LuceneTestCase {
|
|||
}
|
||||
} finally {
|
||||
fsDir.close();
|
||||
IOUtils.rm(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -100,7 +100,6 @@ public class TestFileSwitchDirectory extends BaseDirectoryTestCase {
|
|||
public void testNoDir() throws Throwable {
|
||||
Path primDir = createTempDir("foo");
|
||||
Path secondDir = createTempDir("bar");
|
||||
IOUtils.rm(primDir, secondDir);
|
||||
Directory dir = newFSSwitchDirectory(primDir, secondDir, Collections.<String>emptySet());
|
||||
try {
|
||||
DirectoryReader.open(dir);
|
||||
|
|
|
@ -22,6 +22,7 @@ import java.nio.file.Files;
|
|||
import java.nio.file.Path;
|
||||
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.apache.lucene.util.TestUtil;
|
||||
|
||||
/** Simple tests for NativeFSLockFactory */
|
||||
public class TestNativeFSLockFactory extends BaseLockFactoryTestCase {
|
||||
|
@ -79,19 +80,14 @@ public class TestNativeFSLockFactory extends BaseLockFactoryTestCase {
|
|||
|
||||
/** delete the lockfile and test ensureValid fails */
|
||||
public void testDeleteLockFile() throws IOException {
|
||||
Directory dir = getDirectory(createTempDir());
|
||||
try {
|
||||
try (Directory dir = getDirectory(createTempDir())) {
|
||||
assumeFalse("we must be able to delete an open file", TestUtil.hasWindowsFS(dir));
|
||||
|
||||
Lock lock = dir.obtainLock("test.lock");
|
||||
lock.ensureValid();
|
||||
|
||||
try {
|
||||
dir.deleteFile("test.lock");
|
||||
} catch (Exception e) {
|
||||
// we can't delete a file for some reason, just clean up and assume the test.
|
||||
IOUtils.closeWhileHandlingException(lock);
|
||||
assumeNoException("test requires the ability to delete a locked file", e);
|
||||
}
|
||||
|
||||
|
||||
dir.deleteFile("test.lock");
|
||||
|
||||
try {
|
||||
lock.ensureValid();
|
||||
fail("no exception");
|
||||
|
@ -100,9 +96,6 @@ public class TestNativeFSLockFactory extends BaseLockFactoryTestCase {
|
|||
} finally {
|
||||
IOUtils.closeWhileHandlingException(lock);
|
||||
}
|
||||
} finally {
|
||||
// Do this in finally clause in case the assumeNoException is false:
|
||||
dir.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -83,7 +83,6 @@ public class TestRAMDirectory extends BaseDirectoryTestCase {
|
|||
assertFalse(files.contains("subdir"));
|
||||
} finally {
|
||||
IOUtils.close(fsDir);
|
||||
IOUtils.rm(path);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -21,6 +21,7 @@ import java.io.IOException;
|
|||
import java.nio.file.Path;
|
||||
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.apache.lucene.util.TestUtil;
|
||||
|
||||
/** Simple tests for SimpleFSLockFactory */
|
||||
public class TestSimpleFSLockFactory extends BaseLockFactoryTestCase {
|
||||
|
|
|
@ -38,6 +38,7 @@ import java.util.UUID;
|
|||
import org.apache.lucene.mockfile.FilterFileSystem;
|
||||
import org.apache.lucene.mockfile.FilterFileSystemProvider;
|
||||
import org.apache.lucene.mockfile.FilterPath;
|
||||
import org.apache.lucene.util.LuceneTestCase.SuppressFileSystems;
|
||||
|
||||
/** Simple test methods for IOUtils */
|
||||
public class TestIOUtils extends LuceneTestCase {
|
||||
|
|
|
@ -54,30 +54,14 @@ public class TestOfflineSorter extends LuceneTestCase {
|
|||
super.tearDown();
|
||||
}
|
||||
|
||||
private static Directory newDirectoryNoVirusScanner() {
|
||||
Directory dir = newDirectory();
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
((MockDirectoryWrapper) dir).setEnableVirusScanner(false);
|
||||
}
|
||||
return dir;
|
||||
}
|
||||
|
||||
private static Directory newFSDirectoryNoVirusScanner() {
|
||||
Directory dir = newFSDirectory(createTempDir());
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
((MockDirectoryWrapper) dir).setEnableVirusScanner(false);
|
||||
}
|
||||
return dir;
|
||||
}
|
||||
|
||||
public void testEmpty() throws Exception {
|
||||
try (Directory dir = newDirectoryNoVirusScanner()) {
|
||||
try (Directory dir = newDirectory()) {
|
||||
checkSort(dir, new OfflineSorter(dir, "foo"), new byte [][] {});
|
||||
}
|
||||
}
|
||||
|
||||
public void testSingleLine() throws Exception {
|
||||
try (Directory dir = newDirectoryNoVirusScanner()) {
|
||||
try (Directory dir = newDirectory()) {
|
||||
checkSort(dir, new OfflineSorter(dir, "foo"), new byte [][] {
|
||||
"Single line only.".getBytes(StandardCharsets.UTF_8)
|
||||
});
|
||||
|
@ -86,7 +70,7 @@ public class TestOfflineSorter extends LuceneTestCase {
|
|||
|
||||
public void testIntermediateMerges() throws Exception {
|
||||
// Sort 20 mb worth of data with 1mb buffer, binary merging.
|
||||
try (Directory dir = newDirectoryNoVirusScanner()) {
|
||||
try (Directory dir = newDirectory()) {
|
||||
SortInfo info = checkSort(dir, new OfflineSorter(dir, "foo", OfflineSorter.DEFAULT_COMPARATOR, BufferSize.megabytes(1), 2),
|
||||
generateRandom((int)OfflineSorter.MB * 20));
|
||||
assertTrue(info.mergeRounds > 10);
|
||||
|
@ -95,7 +79,7 @@ public class TestOfflineSorter extends LuceneTestCase {
|
|||
|
||||
public void testSmallRandom() throws Exception {
|
||||
// Sort 20 mb worth of data with 1mb buffer.
|
||||
try (Directory dir = newDirectoryNoVirusScanner()) {
|
||||
try (Directory dir = newDirectory()) {
|
||||
SortInfo sortInfo = checkSort(dir, new OfflineSorter(dir, "foo", OfflineSorter.DEFAULT_COMPARATOR, BufferSize.megabytes(1), OfflineSorter.MAX_TEMPFILES),
|
||||
generateRandom((int)OfflineSorter.MB * 20));
|
||||
assertEquals(1, sortInfo.mergeRounds);
|
||||
|
@ -105,7 +89,7 @@ public class TestOfflineSorter extends LuceneTestCase {
|
|||
@Nightly
|
||||
public void testLargerRandom() throws Exception {
|
||||
// Sort 100MB worth of data with 16mb buffer.
|
||||
try (Directory dir = newFSDirectoryNoVirusScanner()) {
|
||||
try (Directory dir = newFSDirectory(createTempDir())) {
|
||||
checkSort(dir, new OfflineSorter(dir, "foo", OfflineSorter.DEFAULT_COMPARATOR, BufferSize.megabytes(16), OfflineSorter.MAX_TEMPFILES),
|
||||
generateRandom((int)OfflineSorter.MB * 100));
|
||||
}
|
||||
|
@ -223,7 +207,7 @@ public class TestOfflineSorter extends LuceneTestCase {
|
|||
Thread[] threads = new Thread[TestUtil.nextInt(random(), 4, 10)];
|
||||
final AtomicBoolean failed = new AtomicBoolean();
|
||||
final int iters = atLeast(1000);
|
||||
try (Directory dir = newDirectoryNoVirusScanner()) {
|
||||
try (Directory dir = newDirectory()) {
|
||||
for(int i=0;i<threads.length;i++) {
|
||||
final int threadID = i;
|
||||
threads[i] = new Thread() {
|
||||
|
|
|
@ -394,9 +394,6 @@ public class TestBKD extends LuceneTestCase {
|
|||
try {
|
||||
dir.setRandomIOExceptionRate(0.05);
|
||||
dir.setRandomIOExceptionRateOnOpen(0.05);
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
dir.setEnableVirusScanner(false);
|
||||
}
|
||||
verify(dir, docValues, null, numDims, numBytesPerDim, 50, maxMBHeap);
|
||||
} catch (IllegalArgumentException iae) {
|
||||
// This just means we got a too-small maxMB for the maxPointsInLeafNode; just retry w/ more heap
|
||||
|
@ -848,9 +845,6 @@ public class TestBKD extends LuceneTestCase {
|
|||
} else {
|
||||
dir = newDirectory();
|
||||
}
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
((MockDirectoryWrapper) dir).setEnableVirusScanner(false);
|
||||
}
|
||||
return dir;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -31,6 +31,7 @@ import org.apache.lucene.util.LuceneTestCase;
|
|||
import org.apache.lucene.util.TimeUnits;
|
||||
import org.apache.lucene.util.packed.PackedInts;
|
||||
import org.junit.Ignore;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
|
||||
|
||||
@Ignore("Requires tons of heap to run (30 GB hits OOME but 35 GB passes after ~4.5 hours)")
|
||||
|
|
|
@ -68,8 +68,8 @@ import org.apache.lucene.util.LuceneTestCase.Slow;
|
|||
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.apache.lucene.util.TestUtil;
|
||||
import org.apache.lucene.util.automaton.CompiledAutomaton;
|
||||
import org.apache.lucene.util.automaton.Automaton;
|
||||
import org.apache.lucene.util.automaton.CompiledAutomaton;
|
||||
import org.apache.lucene.util.automaton.RegExp;
|
||||
import org.apache.lucene.util.fst.BytesRefFSTEnum.InputOutput;
|
||||
import org.apache.lucene.util.fst.FST.Arc;
|
||||
|
@ -93,7 +93,6 @@ public class TestFSTs extends LuceneTestCase {
|
|||
super.setUp();
|
||||
dir = newMockDirectory();
|
||||
dir.setPreventDoubleWrite(false);
|
||||
dir.setEnableVirusScanner(false);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
package org.apache.lucene.facet.taxonomy.directory;
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
|
@ -14,7 +16,20 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.lucene.facet.taxonomy.directory;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashSet;
|
||||
import java.util.Random;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
import org.apache.lucene.facet.FacetTestCase;
|
||||
import org.apache.lucene.facet.taxonomy.FacetLabel;
|
||||
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter.DiskOrdinalMap;
|
||||
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter.MemoryOrdinalMap;
|
||||
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter.OrdinalMap;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.apache.lucene.util.TestUtil;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashSet;
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
/*
|
||||
+ * Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
|
@ -14,6 +14,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.lucene.facet.taxonomy.writercache;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
|
@ -28,7 +29,6 @@ import java.util.Random;
|
|||
|
||||
import org.apache.lucene.facet.FacetTestCase;
|
||||
import org.apache.lucene.facet.taxonomy.FacetLabel;
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestCompactLabelToOrdinal extends FacetTestCase {
|
||||
|
|
|
@ -65,6 +65,7 @@ public class RAFDirectory extends FSDirectory {
|
|||
@Override
|
||||
public IndexInput openInput(String name, IOContext context) throws IOException {
|
||||
ensureOpen();
|
||||
ensureCanRead(name);
|
||||
final File path = directory.resolve(name).toFile();
|
||||
RandomAccessFile raf = new RandomAccessFile(path, "r");
|
||||
return new RAFIndexInput("SimpleFSIndexInput(path=\"" + path.getPath() + "\")", raf, context);
|
||||
|
|
|
@ -45,7 +45,6 @@ public class TestFSTsMisc extends LuceneTestCase {
|
|||
super.setUp();
|
||||
dir = newMockDirectory();
|
||||
dir.setPreventDoubleWrite(false);
|
||||
dir.setEnableVirusScanner(false);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -41,7 +41,6 @@ import org.apache.lucene.index.CheckIndex;
|
|||
import org.apache.lucene.index.DirectoryReader;
|
||||
import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.index.IndexWriterConfig;
|
||||
import org.apache.lucene.index.SegmentInfos;
|
||||
import org.apache.lucene.index.SnapshotDeletionPolicy;
|
||||
import org.apache.lucene.replicator.IndexAndTaxonomyRevision.SnapshotDirectoryTaxonomyWriter;
|
||||
import org.apache.lucene.replicator.ReplicationClient.ReplicationHandler;
|
||||
|
@ -423,22 +422,6 @@ public class IndexAndTaxonomyReplicationClientTest extends ReplicatorTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
if (indexStatus == null || indexStatus.clean == false) {
|
||||
|
||||
// Because segments file for taxo index is replicated after
|
||||
// main index's segments file, if there's an error while replicating
|
||||
// main index's segments file and if virus checker prevents
|
||||
// deletion of taxo index's segments file, it can look like corruption.
|
||||
// But it should be "false" meaning if we remove the latest segments
|
||||
// file then the index is intact. It's like pulling a hideous
|
||||
// looking rock out of the ground, but then you pull the cruft
|
||||
// off the outside of it and discover it's actually a beautiful
|
||||
// diamond:
|
||||
String segmentsFileName = SegmentInfos.getLastCommitSegmentsFileName(handlerTaxoDir);
|
||||
assertTrue(handlerTaxoDir.didTryToDelete(segmentsFileName));
|
||||
handlerTaxoDir.getDelegate().deleteFile(segmentsFileName);
|
||||
TestUtil.checkIndex(handlerTaxoDir.getDelegate());
|
||||
}
|
||||
} catch (IOException e) {
|
||||
failed.set(true);
|
||||
throw new RuntimeException(e);
|
||||
|
|
|
@ -76,10 +76,6 @@ public class IndexAndTaxonomyRevisionTest extends ReplicatorTestCase {
|
|||
|
||||
Directory taxoDir = newDirectory();
|
||||
SnapshotDirectoryTaxonomyWriter taxoWriter = new SnapshotDirectoryTaxonomyWriter(taxoDir);
|
||||
// we look to see that certain files are deleted:
|
||||
if (indexDir instanceof MockDirectoryWrapper) {
|
||||
((MockDirectoryWrapper)indexDir).setEnableVirusScanner(false);
|
||||
}
|
||||
try {
|
||||
indexWriter.addDocument(newDocument(taxoWriter));
|
||||
indexWriter.commit();
|
||||
|
@ -100,10 +96,6 @@ public class IndexAndTaxonomyRevisionTest extends ReplicatorTestCase {
|
|||
indexWriter.close();
|
||||
} finally {
|
||||
IOUtils.close(indexWriter, taxoWriter, taxoDir, indexDir);
|
||||
if (indexDir instanceof MockDirectoryWrapper) {
|
||||
// set back to on for other tests
|
||||
((MockDirectoryWrapper)indexDir).setEnableVirusScanner(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -72,10 +72,6 @@ public class IndexRevisionTest extends ReplicatorTestCase {
|
|||
@Test
|
||||
public void testRevisionRelease() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
// we look to see that certain files are deleted:
|
||||
if (dir instanceof MockDirectoryWrapper) {
|
||||
((MockDirectoryWrapper)dir).setEnableVirusScanner(false);
|
||||
}
|
||||
IndexWriterConfig conf = new IndexWriterConfig(null);
|
||||
conf.setIndexDeletionPolicy(new SnapshotDeletionPolicy(conf.getIndexDeletionPolicy()));
|
||||
IndexWriter writer = new IndexWriter(dir, conf);
|
||||
|
|
|
@ -118,57 +118,35 @@ public class LocalReplicatorTest extends ReplicatorTestCase {
|
|||
|
||||
@Test
|
||||
public void testPublishSameRevision() throws IOException {
|
||||
// we look to see that certain files are deleted:
|
||||
if (sourceDir instanceof MockDirectoryWrapper) {
|
||||
((MockDirectoryWrapper)sourceDir).setEnableVirusScanner(false);
|
||||
}
|
||||
try {
|
||||
Revision rev = createRevision(1);
|
||||
replicator.publish(rev);
|
||||
SessionToken res = replicator.checkForUpdate(null);
|
||||
assertNotNull(res);
|
||||
assertEquals(rev.getVersion(), res.version);
|
||||
replicator.release(res.id);
|
||||
replicator.publish(new IndexRevision(sourceWriter));
|
||||
res = replicator.checkForUpdate(res.version);
|
||||
assertNull(res);
|
||||
Revision rev = createRevision(1);
|
||||
replicator.publish(rev);
|
||||
SessionToken res = replicator.checkForUpdate(null);
|
||||
assertNotNull(res);
|
||||
assertEquals(rev.getVersion(), res.version);
|
||||
replicator.release(res.id);
|
||||
replicator.publish(new IndexRevision(sourceWriter));
|
||||
res = replicator.checkForUpdate(res.version);
|
||||
assertNull(res);
|
||||
|
||||
// now make sure that publishing same revision doesn't leave revisions
|
||||
// "locked", i.e. that replicator releases revisions even when they are not
|
||||
// kept
|
||||
replicator.publish(createRevision(2));
|
||||
assertEquals(1, DirectoryReader.listCommits(sourceDir).size());
|
||||
} finally {
|
||||
if (sourceDir instanceof MockDirectoryWrapper) {
|
||||
// set back to on for other tests
|
||||
((MockDirectoryWrapper)sourceDir).setEnableVirusScanner(true);
|
||||
}
|
||||
}
|
||||
// now make sure that publishing same revision doesn't leave revisions
|
||||
// "locked", i.e. that replicator releases revisions even when they are not
|
||||
// kept
|
||||
replicator.publish(createRevision(2));
|
||||
assertEquals(1, DirectoryReader.listCommits(sourceDir).size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPublishOlderRev() throws IOException {
|
||||
// we look to see that certain files are deleted:
|
||||
if (sourceDir instanceof MockDirectoryWrapper) {
|
||||
((MockDirectoryWrapper)sourceDir).setEnableVirusScanner(false);
|
||||
}
|
||||
replicator.publish(createRevision(1));
|
||||
Revision old = new IndexRevision(sourceWriter);
|
||||
replicator.publish(createRevision(2));
|
||||
try {
|
||||
replicator.publish(createRevision(1));
|
||||
Revision old = new IndexRevision(sourceWriter);
|
||||
replicator.publish(createRevision(2));
|
||||
try {
|
||||
replicator.publish(old);
|
||||
fail("should have failed to publish an older revision");
|
||||
} catch (IllegalArgumentException e) {
|
||||
// expected
|
||||
}
|
||||
assertEquals(1, DirectoryReader.listCommits(sourceDir).size());
|
||||
} finally {
|
||||
if (sourceDir instanceof MockDirectoryWrapper) {
|
||||
// set back to on for other tests
|
||||
((MockDirectoryWrapper)sourceDir).setEnableVirusScanner(true);
|
||||
}
|
||||
replicator.publish(old);
|
||||
fail("should have failed to publish an older revision");
|
||||
} catch (IllegalArgumentException e) {
|
||||
// expected
|
||||
}
|
||||
assertEquals(1, DirectoryReader.listCommits(sourceDir).size());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -209,24 +187,12 @@ public class LocalReplicatorTest extends ReplicatorTestCase {
|
|||
|
||||
@Test
|
||||
public void testRevisionRelease() throws Exception {
|
||||
// we look to see that certain files are deleted:
|
||||
if (sourceDir instanceof MockDirectoryWrapper) {
|
||||
((MockDirectoryWrapper)sourceDir).setEnableVirusScanner(false);
|
||||
}
|
||||
|
||||
try {
|
||||
replicator.publish(createRevision(1));
|
||||
assertTrue(slowFileExists(sourceDir, IndexFileNames.SEGMENTS + "_1"));
|
||||
replicator.publish(createRevision(2));
|
||||
// now the files of revision 1 can be deleted
|
||||
assertTrue(slowFileExists(sourceDir, IndexFileNames.SEGMENTS + "_2"));
|
||||
assertFalse("segments_1 should not be found in index directory after revision is released", slowFileExists(sourceDir, IndexFileNames.SEGMENTS + "_1"));
|
||||
} finally {
|
||||
if (sourceDir instanceof MockDirectoryWrapper) {
|
||||
// set back to on for other tests
|
||||
((MockDirectoryWrapper)sourceDir).setEnableVirusScanner(true);
|
||||
}
|
||||
}
|
||||
replicator.publish(createRevision(1));
|
||||
assertTrue(slowFileExists(sourceDir, IndexFileNames.SEGMENTS + "_1"));
|
||||
replicator.publish(createRevision(2));
|
||||
// now the files of revision 1 can be deleted
|
||||
assertTrue(slowFileExists(sourceDir, IndexFileNames.SEGMENTS + "_2"));
|
||||
assertFalse("segments_1 should not be found in index directory after revision is released", slowFileExists(sourceDir, IndexFileNames.SEGMENTS + "_1"));
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -39,12 +39,7 @@ import org.eclipse.jetty.server.Server;
|
|||
import org.eclipse.jetty.servlet.ServletHandler;
|
||||
import org.eclipse.jetty.servlet.ServletHolder;
|
||||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.RuleChain;
|
||||
import org.junit.rules.TestRule;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;
|
||||
|
||||
public class HttpReplicatorTest extends ReplicatorTestCase {
|
||||
private Path clientWorkDir;
|
||||
|
|
|
@ -20,7 +20,9 @@ import org.apache.lucene.document.Field;
|
|||
import org.apache.lucene.document.FieldType;
|
||||
import org.apache.lucene.index.DocValuesType;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.spatial.util.GeoUtils;
|
||||
import org.apache.lucene.analysis.Analyzer;
|
||||
import org.apache.lucene.analysis.TokenStream;
|
||||
import org.apache.lucene.spatial.util.GeoEncodingUtils;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
|
@ -47,39 +49,83 @@ public final class GeoPointField extends Field {
|
|||
public static final int PRECISION_STEP = 9;
|
||||
|
||||
/**
|
||||
* Type for an GeoPointField that is not stored:
|
||||
* <b>Expert:</b> Optional flag to select term encoding for GeoPointField types
|
||||
*/
|
||||
public enum TermEncoding {
|
||||
/**
|
||||
* encodes prefix terms only resulting in a small index and faster queries - use with
|
||||
* {@code GeoPointTokenStream}
|
||||
*/
|
||||
PREFIX,
|
||||
/**
|
||||
* @deprecated encodes prefix and full resolution terms - use with
|
||||
* {@link org.apache.lucene.analysis.LegacyNumericTokenStream}
|
||||
*/
|
||||
@Deprecated
|
||||
NUMERIC
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Type for a GeoPointField that is not stored:
|
||||
* normalization factors, frequencies, and positions are omitted.
|
||||
*/
|
||||
public static final FieldType TYPE_NOT_STORED = new FieldType();
|
||||
@Deprecated
|
||||
public static final FieldType NUMERIC_TYPE_NOT_STORED = new FieldType();
|
||||
static {
|
||||
TYPE_NOT_STORED.setTokenized(false);
|
||||
TYPE_NOT_STORED.setOmitNorms(true);
|
||||
TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS);
|
||||
TYPE_NOT_STORED.setDocValuesType(DocValuesType.SORTED_NUMERIC);
|
||||
TYPE_NOT_STORED.setNumericType(FieldType.LegacyNumericType.LONG);
|
||||
TYPE_NOT_STORED.setNumericPrecisionStep(PRECISION_STEP);
|
||||
TYPE_NOT_STORED.freeze();
|
||||
NUMERIC_TYPE_NOT_STORED.setTokenized(false);
|
||||
NUMERIC_TYPE_NOT_STORED.setOmitNorms(true);
|
||||
NUMERIC_TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS);
|
||||
NUMERIC_TYPE_NOT_STORED.setDocValuesType(DocValuesType.SORTED_NUMERIC);
|
||||
NUMERIC_TYPE_NOT_STORED.setNumericType(FieldType.LegacyNumericType.LONG);
|
||||
NUMERIC_TYPE_NOT_STORED.setNumericPrecisionStep(PRECISION_STEP);
|
||||
NUMERIC_TYPE_NOT_STORED.freeze();
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Type for a stored GeoPointField:
|
||||
* normalization factors, frequencies, and positions are omitted.
|
||||
*/
|
||||
@Deprecated
|
||||
public static final FieldType NUMERIC_TYPE_STORED = new FieldType();
|
||||
static {
|
||||
NUMERIC_TYPE_STORED.setTokenized(false);
|
||||
NUMERIC_TYPE_STORED.setOmitNorms(true);
|
||||
NUMERIC_TYPE_STORED.setIndexOptions(IndexOptions.DOCS);
|
||||
NUMERIC_TYPE_STORED.setDocValuesType(DocValuesType.SORTED_NUMERIC);
|
||||
NUMERIC_TYPE_STORED.setNumericType(FieldType.LegacyNumericType.LONG);
|
||||
NUMERIC_TYPE_STORED.setNumericPrecisionStep(PRECISION_STEP);
|
||||
NUMERIC_TYPE_STORED.setStored(true);
|
||||
NUMERIC_TYPE_STORED.freeze();
|
||||
}
|
||||
|
||||
/**
|
||||
* Type for a GeoPointField that is not stored:
|
||||
* normalization factors, frequencies, and positions are omitted.
|
||||
*/
|
||||
public static final FieldType PREFIX_TYPE_NOT_STORED = new FieldType();
|
||||
static {
|
||||
PREFIX_TYPE_NOT_STORED.setTokenized(false);
|
||||
PREFIX_TYPE_NOT_STORED.setOmitNorms(true);
|
||||
PREFIX_TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS);
|
||||
PREFIX_TYPE_NOT_STORED.setDocValuesType(DocValuesType.SORTED_NUMERIC);
|
||||
PREFIX_TYPE_NOT_STORED.freeze();
|
||||
}
|
||||
|
||||
/**
|
||||
* Type for a stored GeoPointField:
|
||||
* normalization factors, frequencies, and positions are omitted.
|
||||
*/
|
||||
public static final FieldType TYPE_STORED = new FieldType();
|
||||
public static final FieldType PREFIX_TYPE_STORED = new FieldType();
|
||||
static {
|
||||
TYPE_STORED.setTokenized(false);
|
||||
TYPE_STORED.setOmitNorms(true);
|
||||
TYPE_STORED.setIndexOptions(IndexOptions.DOCS);
|
||||
TYPE_STORED.setDocValuesType(DocValuesType.SORTED_NUMERIC);
|
||||
TYPE_STORED.setNumericType(FieldType.LegacyNumericType.LONG);
|
||||
TYPE_STORED.setNumericPrecisionStep(PRECISION_STEP);
|
||||
TYPE_STORED.setStored(true);
|
||||
TYPE_STORED.freeze();
|
||||
PREFIX_TYPE_STORED.setTokenized(false);
|
||||
PREFIX_TYPE_STORED.setOmitNorms(true);
|
||||
PREFIX_TYPE_STORED.setIndexOptions(IndexOptions.DOCS);
|
||||
PREFIX_TYPE_STORED.setDocValuesType(DocValuesType.SORTED_NUMERIC);
|
||||
PREFIX_TYPE_STORED.setStored(true);
|
||||
PREFIX_TYPE_STORED.freeze();
|
||||
}
|
||||
|
||||
/** Creates a stored or un-stored GeoPointField with the provided value
|
||||
* and default <code>precisionStep</code> set to 64 to avoid wasteful
|
||||
* indexing of lower precision terms.
|
||||
/** Creates a stored or un-stored GeoPointField
|
||||
* @param name field name
|
||||
* @param lon longitude double value [-180.0 : 180.0]
|
||||
* @param lat latitude double value [-90.0 : 90.0]
|
||||
|
@ -87,8 +133,20 @@ public final class GeoPointField extends Field {
|
|||
* @throws IllegalArgumentException if the field name is null.
|
||||
*/
|
||||
public GeoPointField(String name, double lon, double lat, Store stored) {
|
||||
super(name, stored == Store.YES ? TYPE_STORED : TYPE_NOT_STORED);
|
||||
fieldsData = GeoUtils.mortonHash(lon, lat);
|
||||
this(name, lon, lat, getFieldType(stored));
|
||||
}
|
||||
|
||||
/** Creates a stored or un-stored GeoPointField using the specified {@link TermEncoding} method
|
||||
* @param name field name
|
||||
* @param lon longitude double value [-180.0 : 180.0]
|
||||
* @param lat latitude double value [-90.0 : 90.0]
|
||||
* @param termEncoding encoding type to use ({@link TermEncoding#NUMERIC} Terms, or {@link TermEncoding#PREFIX} only Terms)
|
||||
* @param stored Store.YES if the content should also be stored
|
||||
* @throws IllegalArgumentException if the field name is null.
|
||||
*/
|
||||
@Deprecated
|
||||
public GeoPointField(String name, double lon, double lat, TermEncoding termEncoding, Store stored) {
|
||||
this(name, lon, lat, getFieldType(termEncoding, stored));
|
||||
}
|
||||
|
||||
/** Expert: allows you to customize the {@link
|
||||
|
@ -103,23 +161,79 @@ public final class GeoPointField extends Field {
|
|||
*/
|
||||
public GeoPointField(String name, double lon, double lat, FieldType type) {
|
||||
super(name, type);
|
||||
if (type.numericType() != FieldType.LegacyNumericType.LONG) {
|
||||
throw new IllegalArgumentException("type.numericType() must be LONG but got " + type.numericType());
|
||||
|
||||
// field must be indexed
|
||||
// todo does it make sense here to provide the ability to store a GeoPointField but not index?
|
||||
if (type.indexOptions() == IndexOptions.NONE && type.stored() == false) {
|
||||
throw new IllegalArgumentException("type.indexOptions() is set to NONE but type.stored() is false");
|
||||
} else if (type.indexOptions() == IndexOptions.DOCS) {
|
||||
if (type.docValuesType() != DocValuesType.SORTED_NUMERIC) {
|
||||
throw new IllegalArgumentException("type.docValuesType() must be SORTED_NUMERIC but got " + type.docValuesType());
|
||||
}
|
||||
if (type.numericType() != null) {
|
||||
// make sure numericType is a LONG
|
||||
if (type.numericType() != FieldType.LegacyNumericType.LONG) {
|
||||
throw new IllegalArgumentException("type.numericType() must be LONG but got " + type.numericType());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
throw new IllegalArgumentException("type.indexOptions() must be one of NONE or DOCS but got " + type.indexOptions());
|
||||
}
|
||||
if (type.docValuesType() != DocValuesType.SORTED_NUMERIC) {
|
||||
throw new IllegalArgumentException("type.docValuesType() must be SORTED_NUMERIC but got " + type.docValuesType());
|
||||
|
||||
// set field data
|
||||
fieldsData = GeoEncodingUtils.mortonHash(lon, lat);
|
||||
}
|
||||
|
||||
private static FieldType getFieldType(Store stored) {
|
||||
return getFieldType(TermEncoding.PREFIX, stored);
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated
|
||||
* Static helper method for returning a valid FieldType based on termEncoding and stored options
|
||||
*/
|
||||
@Deprecated
|
||||
private static FieldType getFieldType(TermEncoding termEncoding, Store stored) {
|
||||
if (stored == Store.YES) {
|
||||
return termEncoding == TermEncoding.PREFIX ? PREFIX_TYPE_STORED : NUMERIC_TYPE_STORED;
|
||||
} else if (stored == Store.NO) {
|
||||
return termEncoding == TermEncoding.PREFIX ? PREFIX_TYPE_NOT_STORED : NUMERIC_TYPE_NOT_STORED;
|
||||
} else {
|
||||
throw new IllegalArgumentException("stored option must be NO or YES but got " + stored);
|
||||
}
|
||||
fieldsData = GeoUtils.mortonHash(lon, lat);
|
||||
}
|
||||
|
||||
@Override
|
||||
public TokenStream tokenStream(Analyzer analyzer, TokenStream reuse) {
|
||||
if (fieldType().indexOptions() == IndexOptions.NONE) {
|
||||
// not indexed
|
||||
return null;
|
||||
}
|
||||
|
||||
// if numericType is set
|
||||
if (type.numericType() != null) {
|
||||
// return numeric encoding
|
||||
return super.tokenStream(analyzer, reuse);
|
||||
}
|
||||
|
||||
if (reuse instanceof GeoPointTokenStream == false) {
|
||||
reuse = new GeoPointTokenStream();
|
||||
}
|
||||
|
||||
final GeoPointTokenStream gpts = (GeoPointTokenStream)reuse;
|
||||
gpts.setGeoCode(((Number) fieldsData).longValue());
|
||||
|
||||
return reuse;
|
||||
}
|
||||
|
||||
/** access longitude value */
|
||||
public double getLon() {
|
||||
return GeoUtils.mortonUnhashLon((long) fieldsData);
|
||||
return GeoEncodingUtils.mortonUnhashLon((long) fieldsData);
|
||||
}
|
||||
|
||||
/** access latitude value */
|
||||
public double getLat() {
|
||||
return GeoUtils.mortonUnhashLat((long) fieldsData);
|
||||
return GeoEncodingUtils.mortonUnhashLat((long) fieldsData);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -128,9 +242,9 @@ public final class GeoPointField extends Field {
|
|||
return null;
|
||||
}
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append(GeoUtils.mortonUnhashLon((long) fieldsData));
|
||||
sb.append(GeoEncodingUtils.mortonUnhashLon((long) fieldsData));
|
||||
sb.append(',');
|
||||
sb.append(GeoUtils.mortonUnhashLat((long) fieldsData));
|
||||
sb.append(GeoEncodingUtils.mortonUnhashLat((long) fieldsData));
|
||||
return sb.toString();
|
||||
}
|
||||
}
|
||||
|
|
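To summarize the field types introduced above: the plain Store-based constructor now routes through getFieldType(TermEncoding.PREFIX, stored), so prefix term encoding is the default, while the NUMERIC_* field types and the TermEncoding-taking constructor remain only for the deprecated numeric encoding. A small sketch of the three public constructors, assuming the lucene-core Document API; the field name and coordinates are illustrative:

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field.Store;
    import org.apache.lucene.spatial.document.GeoPointField;
    import org.apache.lucene.spatial.document.GeoPointField.TermEncoding;

    public class GeoPointFieldTypesDemo {
      public static void main(String[] args) {
        Document doc = new Document();

        // default constructor: prefix-encoded terms (TermEncoding.PREFIX)
        doc.add(new GeoPointField("location", -122.41, 37.77, Store.YES));

        // equivalent explicit field type: stored, prefix terms, sorted-numeric doc values
        doc.add(new GeoPointField("locationPrefix", -122.41, 37.77, GeoPointField.PREFIX_TYPE_STORED));

        // deprecated numeric encoding, kept for back compat with LegacyNumericTokenStream-style terms
        doc.add(new GeoPointField("locationNumeric", -122.41, 37.77, TermEncoding.NUMERIC, Store.NO));
      }
    }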
|
@ -0,0 +1,233 @@
|
|||
package org.apache.lucene.spatial.document;
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
import org.apache.lucene.analysis.TokenStream;
|
||||
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
|
||||
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
|
||||
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
|
||||
import org.apache.lucene.util.Attribute;
|
||||
import org.apache.lucene.util.AttributeFactory;
|
||||
import org.apache.lucene.util.AttributeImpl;
|
||||
import org.apache.lucene.util.AttributeReflector;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.BytesRefBuilder;
|
||||
import org.apache.lucene.spatial.util.GeoEncodingUtils;
|
||||
|
||||
import static org.apache.lucene.spatial.document.GeoPointField.PRECISION_STEP;
|
||||
|
||||
/**
|
||||
* <b>Expert:</b> This class provides a {@link TokenStream} used by {@link GeoPointField}
|
||||
* for encoding {@link GeoPointField.TermEncoding#PREFIX} only GeoPointTerms.
|
||||
*
|
||||
* <p><i>NOTE: This is used as the default encoding unless
|
||||
* {@code GeoPointField.setNumericType(FieldType.LegacyNumericType.LONG)} is set</i></p>
|
||||
*
|
||||
* This class is similar to {@link org.apache.lucene.analysis.LegacyNumericTokenStream} but encodes terms up to
|
||||
* a maximum of {@link #MAX_SHIFT} using a fixed precision step defined by
|
||||
* {@link GeoPointField#PRECISION_STEP}. This yields a total of 4 terms per GeoPoint
|
||||
* each consisting of 5 bytes (4 prefix bytes + 1 precision byte).
|
||||
*
|
||||
* <p>For best performance use the provided {@link GeoPointField#PREFIX_TYPE_NOT_STORED} or
|
||||
* {@link GeoPointField#PREFIX_TYPE_STORED}</p>
|
||||
*
|
||||
* <p>If prefix terms are used then the default GeoPoint query constructors may be used, but if
|
||||
* {@link org.apache.lucene.analysis.LegacyNumericTokenStream} is used, then be sure to pass
|
||||
* {@link GeoPointField.TermEncoding#NUMERIC} to all GeoPointQuery constructors</p>
|
||||
*
|
||||
* Here's an example usage:
|
||||
*
|
||||
* <pre class="prettyprint">
|
||||
* // using prefix terms
|
||||
* GeoPointField geoPointField = new GeoPointField(fieldName1, lon, lat, GeoPointField.PREFIX_TYPE_NOT_STORED);
|
||||
* document.add(geoPointField);
|
||||
*
|
||||
* // query by bounding box (default uses TermEncoding.PREFIX)
|
||||
* Query q = new GeoPointInBBoxQuery(fieldName1, minLon, minLat, maxLon, maxLat);
|
||||
*
|
||||
* // using numeric terms
|
||||
* geoPointField = new GeoPointField(fieldName2, lon, lat, GeoPointField.NUMERIC_TYPE_NOT_STORED);
|
||||
* document.add(geoPointField);
|
||||
*
|
||||
* // query by distance (requires TermEncoding.NUMERIC)
|
||||
* q = new GeoPointDistanceQuery(fieldName2, TermEncoding.NUMERIC, centerLon, centerLat, radiusMeters);
|
||||
* </pre>
|
||||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
final class GeoPointTokenStream extends TokenStream {
|
||||
private static final int MAX_SHIFT = PRECISION_STEP * 4;
|
||||
|
||||
private final GeoPointTermAttribute geoPointTermAtt = addAttribute(GeoPointTermAttribute.class);
|
||||
private final PositionIncrementAttribute posIncrAtt = addAttribute(PositionIncrementAttribute.class);
|
||||
|
||||
private boolean isInit = false;
|
||||
|
||||
/**
|
||||
* Expert: Creates a token stream for geo point fields with the specified
|
||||
* <code>precisionStep</code> using the given
|
||||
* {@link org.apache.lucene.util.AttributeFactory}.
|
||||
* The stream is not yet initialized,
|
||||
* before use, set a value with the setGeoCode method.
|
||||
*/
|
||||
public GeoPointTokenStream() {
|
||||
super(new GeoPointAttributeFactory(AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY));
|
||||
assert PRECISION_STEP > 0;
|
||||
}
|
||||
|
||||
public GeoPointTokenStream setGeoCode(final long geoCode) {
|
||||
geoPointTermAtt.init(geoCode, MAX_SHIFT-PRECISION_STEP);
|
||||
isInit = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void reset() {
|
||||
if (isInit == false) {
|
||||
throw new IllegalStateException("call setGeoCode() before usage");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean incrementToken() {
|
||||
if (isInit == false) {
|
||||
throw new IllegalStateException("call setGeoCode() before usage");
|
||||
}
|
||||
|
||||
// this will only clear all other attributes in this TokenStream
|
||||
clearAttributes();
|
||||
|
||||
final int shift = geoPointTermAtt.incShift();
|
||||
posIncrAtt.setPositionIncrement((shift == MAX_SHIFT) ? 1 : 0);
|
||||
return (shift < 63);
|
||||
}
|
||||
|
||||
/**
|
||||
* Tracks shift values during encoding
|
||||
*/
|
||||
public interface GeoPointTermAttribute extends Attribute {
|
||||
/** Returns current shift value, undefined before first token */
|
||||
int getShift();
|
||||
|
||||
/** <em>Don't call this method!</em>
|
||||
* @lucene.internal */
|
||||
void init(long value, int shift);
|
||||
|
||||
/** <em>Don't call this method!</em>
|
||||
* @lucene.internal */
|
||||
int incShift();
|
||||
}
|
||||
|
||||
// just a wrapper to prevent adding CTA
|
||||
private static final class GeoPointAttributeFactory extends AttributeFactory {
|
||||
private final AttributeFactory delegate;
|
||||
|
||||
GeoPointAttributeFactory(AttributeFactory delegate) {
|
||||
this.delegate = delegate;
|
||||
}
|
||||
|
||||
@Override
|
||||
public AttributeImpl createAttributeInstance(Class<? extends Attribute> attClass) {
|
||||
if (CharTermAttribute.class.isAssignableFrom(attClass)) {
|
||||
throw new IllegalArgumentException("GeoPointTokenStream does not support CharTermAttribute.");
|
||||
}
|
||||
return delegate.createAttributeInstance(attClass);
|
||||
}
|
||||
}
|
||||
|
||||
public static final class GeoPointTermAttributeImpl extends AttributeImpl implements GeoPointTermAttribute,TermToBytesRefAttribute {
|
||||
private long value = 0L;
|
||||
private int shift = 0;
|
||||
private BytesRefBuilder bytes = new BytesRefBuilder();
|
||||
|
||||
public GeoPointTermAttributeImpl() {
|
||||
this.shift = MAX_SHIFT-PRECISION_STEP;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesRef getBytesRef() {
|
||||
GeoEncodingUtils.geoCodedToPrefixCoded(value, shift, bytes);
|
||||
return bytes.get();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void init(long value, int shift) {
|
||||
this.value = value;
|
||||
this.shift = shift;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getShift() { return shift; }
|
||||
|
||||
@Override
|
||||
public int incShift() {
|
||||
return (shift += PRECISION_STEP);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clear() {
|
||||
// this attribute has no contents to clear!
|
||||
// we keep it untouched as it's fully controlled by outer class.
|
||||
}
|
||||
|
||||
@Override
|
||||
public void reflectWith(AttributeReflector reflector) {
|
||||
reflector.reflect(TermToBytesRefAttribute.class, "bytes", getBytesRef());
|
||||
reflector.reflect(GeoPointTermAttribute.class, "shift", shift);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void copyTo(AttributeImpl target) {
|
||||
final GeoPointTermAttribute a = (GeoPointTermAttribute) target;
|
||||
a.init(value, shift);
|
||||
}
|
||||
|
||||
@Override
|
||||
public GeoPointTermAttributeImpl clone() {
|
||||
GeoPointTermAttributeImpl t = (GeoPointTermAttributeImpl)super.clone();
|
||||
// Do a deep clone
|
||||
t.bytes = new BytesRefBuilder();
|
||||
t.bytes.copyBytes(getBytesRef());
|
||||
return t;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(shift, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) return true;
|
||||
if (obj == null) return false;
|
||||
if (getClass() != obj.getClass()) return false;
|
||||
GeoPointTermAttributeImpl other = (GeoPointTermAttributeImpl) obj;
|
||||
if (shift != other.shift) return false;
|
||||
if (value != other.value) return false;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
/** override toString because it can throw cryptic "illegal shift value": */
|
||||
@Override
|
||||
public String toString() {
|
||||
return getClass().getSimpleName() + "(precisionStep=" + PRECISION_STEP + " shift=" + geoPointTermAtt.getShift() + ")";
|
||||
}
|
||||
}
|
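As a rough illustration of how the stream above is consumed: GeoPointField.tokenStream() hands a GeoPointTokenStream to the indexing chain, which pulls one prefix-coded term per incrementToken() through the TermToBytesRefAttribute. Because the class is package-private, the sketch below would have to live in org.apache.lucene.spatial.document and is for illustration only; the coordinates are made up:

    package org.apache.lucene.spatial.document;

    import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
    import org.apache.lucene.spatial.util.GeoEncodingUtils;
    import org.apache.lucene.util.BytesRef;

    public class GeoPointTokenStreamDemo {
      public static void main(String[] args) throws Exception {
        long geoCode = GeoEncodingUtils.mortonHash(-122.41, 37.77);
        try (GeoPointTokenStream stream = new GeoPointTokenStream().setGeoCode(geoCode)) {
          TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class);
          stream.reset();
          while (stream.incrementToken()) {
            // each term is a prefix-coded value at an increasing shift (coarser and coarser cells)
            BytesRef term = termAtt.getBytesRef();
            System.out.println(term);
          }
          stream.end();
        }
      }
    }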
|
@ -20,6 +20,7 @@ import org.apache.lucene.index.IndexReader;
|
|||
import org.apache.lucene.search.BooleanClause;
|
||||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.spatial.document.GeoPointField.TermEncoding;
|
||||
import org.apache.lucene.spatial.util.GeoDistanceUtils;
|
||||
import org.apache.lucene.spatial.util.GeoRect;
|
||||
import org.apache.lucene.spatial.util.GeoUtils;
|
||||
|
@ -32,7 +33,7 @@ import org.apache.lucene.spatial.util.GeoUtils;
|
|||
* passing this initial filter are then passed to a secondary {@code postFilter} method that verifies whether the
|
||||
* decoded lat/lon point falls within the specified query distance (see {@link org.apache.lucene.util.SloppyMath#haversin}).
|
||||
* All morton value comparisons are subject to the same precision tolerance defined in
|
||||
* {@value org.apache.lucene.spatial.util.GeoUtils#TOLERANCE} and distance comparisons are subject to the accuracy of the
|
||||
* {@value org.apache.lucene.spatial.util.GeoEncodingUtils#TOLERANCE} and distance comparisons are subject to the accuracy of the
|
||||
* haversine formula (from R.W. Sinnott, "Virtues of the Haversine", Sky and Telescope, vol. 68, no. 2, 1984, p. 159)
|
||||
*
|
||||
* <p>Note: This query currently uses haversine which is a sloppy distance calculation (see above reference). For large
|
||||
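A short usage sketch for the constructors in the next hunk; the field name and the assumption that an index already exists in the Directory are illustrative. The encoding-less constructor defaults to TermEncoding.PREFIX, and the haversine post-filter described above only runs on candidates that survive the prefix-term pass:

    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TopDocs;
    import org.apache.lucene.spatial.search.GeoPointDistanceQuery;
    import org.apache.lucene.store.Directory;

    public class DistanceQueryDemo {
      static TopDocs within(Directory dir, double lon, double lat, double radiusMeters) throws Exception {
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
          IndexSearcher searcher = new IndexSearcher(reader);
          // prefix-encoded fields (the default); for the deprecated numeric encoding pass
          // GeoPointField.TermEncoding.NUMERIC as the second argument instead
          Query q = new GeoPointDistanceQuery("location", lon, lat, radiusMeters);
          return searcher.search(q, 10);
        }
      }
    }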
|
@ -53,14 +54,20 @@ public class GeoPointDistanceQuery extends GeoPointInBBoxQuery {
|
|||
* distance (in meters) from a given point
|
||||
**/
|
||||
public GeoPointDistanceQuery(final String field, final double centerLon, final double centerLat, final double radiusMeters) {
|
||||
this(field, GeoUtils.circleToBBox(centerLon, centerLat, radiusMeters), centerLon, centerLat, radiusMeters);
|
||||
this(field, TermEncoding.PREFIX, centerLon, centerLat, radiusMeters);
|
||||
}
|
||||
|
||||
private GeoPointDistanceQuery(final String field, GeoRect bbox, final double centerLon,
|
||||
public GeoPointDistanceQuery(final String field, final TermEncoding termEncoding, final double centerLon, final double centerLat, final double radiusMeters) {
|
||||
this(field, termEncoding, GeoUtils.circleToBBox(centerLon, centerLat, radiusMeters), centerLon, centerLat, radiusMeters);
|
||||
}
|
||||
|
||||
private GeoPointDistanceQuery(final String field, final TermEncoding termEncoding, final GeoRect bbox, final double centerLon,
|
||||
final double centerLat, final double radiusMeters) {
|
||||
super(field, bbox.minLon, bbox.minLat, bbox.maxLon, bbox.maxLat);
|
||||
super(field, termEncoding, bbox.minLon, bbox.minLat, bbox.maxLon, bbox.maxLat);
|
||||
{
|
||||
// check longitudinal overlap (limits radius)
|
||||
// check longitudinal overlap (restrict distance to maximum longitudinal radius)
|
||||
// todo this restriction technically shouldn't be needed,
|
||||
// its only purpose is to ensure the bounding box doesn't self overlap.
|
||||
final double maxRadius = GeoDistanceUtils.maxRadialDistanceMeters(centerLon, centerLat);
|
||||
if (radiusMeters > maxRadius) {
|
||||
throw new IllegalArgumentException("radiusMeters " + radiusMeters + " exceeds maxRadius [" + maxRadius
|
||||
|
@ -97,7 +104,7 @@ public class GeoPointDistanceQuery extends GeoPointInBBoxQuery {
|
|||
// unwrap left
|
||||
unwrappedLon += -360.0D;
|
||||
}
|
||||
GeoPointDistanceQueryImpl left = new GeoPointDistanceQueryImpl(field, this, unwrappedLon,
|
||||
GeoPointDistanceQueryImpl left = new GeoPointDistanceQueryImpl(field, termEncoding, this, unwrappedLon,
|
||||
new GeoRect(GeoUtils.MIN_LON_INCL, maxLon, minLat, maxLat));
|
||||
bqb.add(new BooleanClause(left, BooleanClause.Occur.SHOULD));
|
||||
|
||||
|
@ -105,13 +112,13 @@ public class GeoPointDistanceQuery extends GeoPointInBBoxQuery {
|
|||
// unwrap right
|
||||
unwrappedLon += 360.0D;
|
||||
}
|
||||
GeoPointDistanceQueryImpl right = new GeoPointDistanceQueryImpl(field, this, unwrappedLon,
|
||||
GeoPointDistanceQueryImpl right = new GeoPointDistanceQueryImpl(field, termEncoding, this, unwrappedLon,
|
||||
new GeoRect(minLon, GeoUtils.MAX_LON_INCL, minLat, maxLat));
|
||||
bqb.add(new BooleanClause(right, BooleanClause.Occur.SHOULD));
|
||||
|
||||
return bqb.build();
|
||||
}
|
||||
return new GeoPointDistanceQueryImpl(field, this, centerLon,
|
||||
return new GeoPointDistanceQueryImpl(field, termEncoding, this, centerLon,
|
||||
new GeoRect(this.minLon, this.maxLon, this.minLat, this.maxLat));
|
||||
}
|
||||
|
||||
|
|
|
@ -16,13 +16,8 @@
|
|||
*/
|
||||
package org.apache.lucene.spatial.search;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.lucene.index.Terms;
|
||||
import org.apache.lucene.index.TermsEnum;
|
||||
import org.apache.lucene.search.MultiTermQuery;
|
||||
import org.apache.lucene.util.AttributeSource;
|
||||
import org.apache.lucene.spatial.document.GeoPointField;
|
||||
import org.apache.lucene.spatial.document.GeoPointField.TermEncoding;
|
||||
import org.apache.lucene.spatial.util.GeoRect;
|
||||
import org.apache.lucene.spatial.util.GeoRelationUtils;
|
||||
import org.apache.lucene.util.SloppyMath;
|
||||
|
@ -32,65 +27,46 @@ import org.apache.lucene.util.SloppyMath;
|
|||
* @lucene.experimental
|
||||
*/
|
||||
final class GeoPointDistanceQueryImpl extends GeoPointInBBoxQueryImpl {
|
||||
private final GeoPointDistanceQuery query;
|
||||
private final GeoPointDistanceQuery distanceQuery;
|
||||
private final double centerLon;
|
||||
|
||||
GeoPointDistanceQueryImpl(final String field, final GeoPointDistanceQuery q, final double centerLonUnwrapped,
|
||||
final GeoRect bbox) {
|
||||
super(field, bbox.minLon, bbox.minLat, bbox.maxLon, bbox.maxLat);
|
||||
query = q;
|
||||
GeoPointDistanceQueryImpl(final String field, final TermEncoding termEncoding, final GeoPointDistanceQuery q,
|
||||
final double centerLonUnwrapped, final GeoRect bbox) {
|
||||
super(field, termEncoding, bbox.minLon, bbox.minLat, bbox.maxLon, bbox.maxLat);
|
||||
distanceQuery = q;
|
||||
centerLon = centerLonUnwrapped;
|
||||
}
|
||||
|
||||
@Override @SuppressWarnings("unchecked")
|
||||
protected TermsEnum getTermsEnum(final Terms terms, AttributeSource atts) throws IOException {
|
||||
return new GeoPointRadiusTermsEnum(terms.iterator(), this.minLon, this.minLat, this.maxLon, this.maxLat);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setRewriteMethod(MultiTermQuery.RewriteMethod method) {
|
||||
throw new UnsupportedOperationException("cannot change rewrite method");
|
||||
}
|
||||
|
||||
private final class GeoPointRadiusTermsEnum extends GeoPointTermsEnum {
|
||||
GeoPointRadiusTermsEnum(final TermsEnum tenum, final double minLon, final double minLat,
|
||||
final double maxLon, final double maxLat) {
|
||||
super(tenum, minLon, minLat, maxLon, maxLat);
|
||||
}
|
||||
@Override
|
||||
protected CellComparator newCellComparator() {
|
||||
return new GeoPointRadiusCellComparator(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Computes the maximum shift for the given pointDistanceQuery. This prevents unnecessary depth traversal
|
||||
* given the size of the distance query.
|
||||
*/
|
||||
@Override
|
||||
protected short computeMaxShift() {
|
||||
final short shiftFactor;
|
||||
|
||||
if (query.radiusMeters > 1000000) {
|
||||
shiftFactor = 5;
|
||||
} else {
|
||||
shiftFactor = 4;
|
||||
}
|
||||
|
||||
return (short)(GeoPointField.PRECISION_STEP * shiftFactor);
|
||||
private final class GeoPointRadiusCellComparator extends CellComparator {
|
||||
GeoPointRadiusCellComparator(GeoPointDistanceQueryImpl query) {
|
||||
super(query);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean cellCrosses(final double minLon, final double minLat, final double maxLon, final double maxLat) {
|
||||
return GeoRelationUtils.rectCrossesCircle(minLon, minLat, maxLon, maxLat,
|
||||
centerLon, query.centerLat, query.radiusMeters, true);
|
||||
centerLon, distanceQuery.centerLat, distanceQuery.radiusMeters, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean cellWithin(final double minLon, final double minLat, final double maxLon, final double maxLat) {
|
||||
return GeoRelationUtils.rectWithinCircle(minLon, minLat, maxLon, maxLat,
|
||||
centerLon, query.centerLat, query.radiusMeters, true);
|
||||
centerLon, distanceQuery.centerLat, distanceQuery.radiusMeters, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean cellIntersectsShape(final double minLon, final double minLat, final double maxLon, final double maxLat) {
|
||||
return (cellContains(minLon, minLat, maxLon, maxLat)
|
||||
|| cellWithin(minLon, minLat, maxLon, maxLat) || cellCrosses(minLon, minLat, maxLon, maxLat));
|
||||
return cellCrosses(minLon, minLat, maxLon, maxLat);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -101,7 +77,7 @@ final class GeoPointDistanceQueryImpl extends GeoPointInBBoxQueryImpl {
|
|||
*/
|
||||
@Override
|
||||
protected boolean postFilter(final double lon, final double lat) {
|
||||
return (SloppyMath.haversin(query.centerLat, centerLon, lat, lon) * 1000.0 <= query.radiusMeters);
|
||||
return (SloppyMath.haversin(distanceQuery.centerLat, centerLon, lat, lon) * 1000.0 <= distanceQuery.radiusMeters);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -113,7 +89,7 @@ final class GeoPointDistanceQueryImpl extends GeoPointInBBoxQueryImpl {
|
|||
|
||||
GeoPointDistanceQueryImpl that = (GeoPointDistanceQueryImpl) o;
|
||||
|
||||
if (!query.equals(that.query)) return false;
|
||||
if (!distanceQuery.equals(that.distanceQuery)) return false;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
@ -121,11 +97,11 @@ final class GeoPointDistanceQueryImpl extends GeoPointInBBoxQueryImpl {
|
|||
@Override
|
||||
public int hashCode() {
|
||||
int result = super.hashCode();
|
||||
result = 31 * result + query.hashCode();
|
||||
result = 31 * result + distanceQuery.hashCode();
|
||||
return result;
|
||||
}
|
||||
|
||||
public double getRadiusMeters() {
|
||||
return query.getRadiusMeters();
|
||||
return distanceQuery.getRadiusMeters();
|
||||
}
|
||||
}
|
||||
|
|
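To make computeMaxShift above concrete: with PRECISION_STEP = 9, a radius above 1,000,000 meters yields a maximum shift of 45 (9 * 5) and anything smaller yields 36 (9 * 4), so, as far as the comment explains it, very large circles are allowed to settle on coarser cells instead of recursing further. A standalone restatement of that arithmetic:

    final class MaxShiftMath {
      // mirrors GeoPointDistanceQueryImpl.computeMaxShift for PRECISION_STEP = 9
      static short maxShiftFor(double radiusMeters) {
        final int shiftFactor = radiusMeters > 1000000 ? 5 : 4;
        return (short) (9 * shiftFactor);   // 45 for radii over 1,000 km, 36 otherwise
      }
    }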
|
@ -20,6 +20,7 @@ import org.apache.lucene.index.IndexReader;
|
|||
import org.apache.lucene.search.BooleanClause;
|
||||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.spatial.document.GeoPointField.TermEncoding;
|
||||
|
||||
/** Implements a point distance range query on a GeoPoint field. This is based on
|
||||
* {@code org.apache.lucene.spatial.search.GeoPointDistanceQuery} and is implemented using a
|
||||
|
@ -36,8 +37,13 @@ public final class GeoPointDistanceRangeQuery extends GeoPointDistanceQuery {
|
|||
* distance (in meters) range from a given point
|
||||
*/
|
||||
public GeoPointDistanceRangeQuery(final String field, final double centerLon, final double centerLat,
|
||||
final double minRadiusMeters, final double maxRadiusMeters) {
|
||||
this(field, TermEncoding.PREFIX, centerLon, centerLat, minRadiusMeters, maxRadiusMeters);
|
||||
}
|
||||
|
||||
public GeoPointDistanceRangeQuery(final String field, final TermEncoding termEncoding, final double centerLon, final double centerLat,
|
||||
final double minRadiusMeters, final double maxRadius) {
|
||||
super(field, centerLon, centerLat, maxRadius);
|
||||
super(field, termEncoding, centerLon, centerLat, maxRadius);
|
||||
this.minRadiusMeters = minRadiusMeters;
|
||||
}
|
||||
|
||||
|
@ -52,7 +58,7 @@ public final class GeoPointDistanceRangeQuery extends GeoPointDistanceQuery {
|
|||
BooleanQuery.Builder bqb = new BooleanQuery.Builder();
|
||||
|
||||
// create a new exclusion query
|
||||
GeoPointDistanceQuery exclude = new GeoPointDistanceQuery(field, centerLon, centerLat, minRadiusMeters);
|
||||
GeoPointDistanceQuery exclude = new GeoPointDistanceQuery(field, termEncoding, centerLon, centerLat, minRadiusMeters);
|
||||
// full map search
|
||||
// if (radiusMeters >= GeoProjectionUtils.SEMIMINOR_AXIS) {
|
||||
// bqb.add(new BooleanClause(new GeoPointInBBoxQuery(this.field, -180.0, -90.0, 180.0, 90.0), BooleanClause.Occur.MUST));
|
||||
|
|
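The rewrite of the range query is only partly visible in this hunk, but its shape is an annulus built by boolean composition: everything within the outer radius minus everything within the inner exclusion query created above. A hedged sketch of that composition; the MUST/MUST_NOT pairing is inferred from the exclusion query rather than shown verbatim in the diff:

    import org.apache.lucene.search.BooleanClause;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.spatial.document.GeoPointField.TermEncoding;
    import org.apache.lucene.spatial.search.GeoPointDistanceQuery;

    public class DistanceRangeSketch {
      // inferred structure of the range rewrite: outer circle minus inner circle
      static Query annulus(String field, TermEncoding enc, double lon, double lat,
                           double minRadiusMeters, double maxRadiusMeters) {
        BooleanQuery.Builder bqb = new BooleanQuery.Builder();
        bqb.add(new BooleanClause(
            new GeoPointDistanceQuery(field, enc, lon, lat, maxRadiusMeters), BooleanClause.Occur.MUST));
        bqb.add(new BooleanClause(
            new GeoPointDistanceQuery(field, enc, lon, lat, minRadiusMeters), BooleanClause.Occur.MUST_NOT));
        return bqb.build();
      }
    }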
|
@ -22,6 +22,7 @@ import org.apache.lucene.search.BooleanQuery;
|
|||
import org.apache.lucene.search.FieldValueQuery;
|
||||
import org.apache.lucene.search.LegacyNumericRangeQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.spatial.document.GeoPointField.TermEncoding;
|
||||
import org.apache.lucene.spatial.util.GeoUtils;
|
||||
|
||||
/** Implements a simple bounding box query on a GeoPoint field. This is inspired by
|
||||
|
@ -31,7 +32,7 @@ import org.apache.lucene.spatial.util.GeoUtils;
|
|||
* passing this initial filter are passed to a final check that verifies whether
|
||||
* the decoded lat/lon falls within (or on the boundary) of the query bounding box.
|
||||
* The value comparisons are subject to a precision tolerance defined in
|
||||
* {@value org.apache.lucene.spatial.util.GeoUtils#TOLERANCE}
|
||||
* {@value org.apache.lucene.spatial.util.GeoEncodingUtils#TOLERANCE}
|
||||
*
|
||||
* NOTES:
|
||||
* 1. All latitude/longitude values must be in decimal degrees.
|
||||
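A small usage sketch for the constructors in the hunk below; the field name and coordinates are illustrative and given in decimal degrees, as the notes above require. When maxLon is less than minLon the rewrite shown further down splits the box into two SHOULD clauses across the dateline:

    import org.apache.lucene.search.Query;
    import org.apache.lucene.spatial.document.GeoPointField.TermEncoding;
    import org.apache.lucene.spatial.search.GeoPointInBBoxQuery;

    public class BBoxQueryDemo {
      public static void main(String[] args) {
        // plain bounding box over a prefix-encoded field: minLon, minLat, maxLon, maxLat
        Query box = new GeoPointInBBoxQuery("location", -123.0, 37.0, -121.0, 39.0);

        // a box crossing the dateline (maxLon < minLon); rewrite() turns this into two SHOULD clauses
        Query dateline = new GeoPointInBBoxQuery("location", 170.0, -10.0, -170.0, 10.0);

        // fields indexed with the deprecated numeric encoding need the explicit TermEncoding
        Query numeric = new GeoPointInBBoxQuery("location", TermEncoding.NUMERIC, -123.0, 37.0, -121.0, 39.0);
        System.out.println(box + "\n" + dateline + "\n" + numeric);
      }
    }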
|
@ -49,17 +50,23 @@ public class GeoPointInBBoxQuery extends Query {
|
|||
protected final double minLat;
|
||||
protected final double maxLon;
|
||||
protected final double maxLat;
|
||||
protected final TermEncoding termEncoding;
|
||||
|
||||
/**
|
||||
* Constructs a query for all {@link org.apache.lucene.spatial.document.GeoPointField} types that fall within a
|
||||
* defined bounding box
|
||||
*/
|
||||
public GeoPointInBBoxQuery(final String field, final double minLon, final double minLat, final double maxLon, final double maxLat) {
|
||||
this(field, TermEncoding.PREFIX, minLon, minLat, maxLon, maxLat);
|
||||
}
|
||||
|
||||
public GeoPointInBBoxQuery(final String field, final TermEncoding termEncoding, final double minLon, final double minLat, final double maxLon, final double maxLat) {
|
||||
this.field = field;
|
||||
this.minLon = minLon;
|
||||
this.minLat = minLat;
|
||||
this.maxLon = maxLon;
|
||||
this.maxLat = maxLat;
|
||||
this.termEncoding = termEncoding;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -74,13 +81,13 @@ public class GeoPointInBBoxQuery extends Query {
|
|||
if (maxLon < minLon) {
|
||||
BooleanQuery.Builder bqb = new BooleanQuery.Builder();
|
||||
|
||||
GeoPointInBBoxQueryImpl left = new GeoPointInBBoxQueryImpl(field, -180.0D, minLat, maxLon, maxLat);
|
||||
GeoPointInBBoxQueryImpl left = new GeoPointInBBoxQueryImpl(field, termEncoding, -180.0D, minLat, maxLon, maxLat);
|
||||
bqb.add(new BooleanClause(left, BooleanClause.Occur.SHOULD));
|
||||
GeoPointInBBoxQueryImpl right = new GeoPointInBBoxQueryImpl(field, minLon, minLat, 180.0D, maxLat);
|
||||
GeoPointInBBoxQueryImpl right = new GeoPointInBBoxQueryImpl(field, termEncoding, minLon, minLat, 180.0D, maxLat);
|
||||
bqb.add(new BooleanClause(right, BooleanClause.Occur.SHOULD));
|
||||
return bqb.build();
|
||||
}
|
||||
return new GeoPointInBBoxQueryImpl(field, minLon, minLat, maxLon, maxLat);
|
||||
return new GeoPointInBBoxQueryImpl(field, termEncoding, minLon, minLat, maxLon, maxLat);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -16,21 +16,17 @@
|
|||
*/
|
||||
package org.apache.lucene.spatial.search;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.lucene.index.Terms;
|
||||
import org.apache.lucene.index.TermsEnum;
|
||||
import org.apache.lucene.search.MultiTermQuery;
|
||||
import org.apache.lucene.util.AttributeSource;
|
||||
import org.apache.lucene.util.SloppyMath;
|
||||
import org.apache.lucene.spatial.document.GeoPointField;
|
||||
import org.apache.lucene.spatial.document.GeoPointField.TermEncoding;
|
||||
import org.apache.lucene.spatial.util.GeoRelationUtils;
|
||||
|
||||
/** Package private implementation for the public facing GeoPointInBBoxQuery delegate class.
|
||||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
class GeoPointInBBoxQueryImpl extends GeoPointTermQuery {
|
||||
class GeoPointInBBoxQueryImpl extends GeoPointMultiTermQuery {
|
||||
/**
|
||||
* Constructs a new GeoBBoxQuery that will match encoded GeoPoint terms that fall within or on the boundary
|
||||
* of the bounding box defined by the input parameters
|
||||
|
@ -40,13 +36,8 @@ class GeoPointInBBoxQueryImpl extends GeoPointTermQuery {
|
|||
* @param maxLon upper longitude (x) value of the bounding box
|
||||
* @param maxLat upper latitude (y) value of the bounding box
|
||||
*/
|
||||
GeoPointInBBoxQueryImpl(final String field, final double minLon, final double minLat, final double maxLon, final double maxLat) {
|
||||
super(field, minLon, minLat, maxLon, maxLat);
|
||||
}
|
||||
|
||||
@Override @SuppressWarnings("unchecked")
|
||||
protected TermsEnum getTermsEnum(final Terms terms, AttributeSource atts) throws IOException {
|
||||
return new GeoPointInBBoxTermsEnum(terms.iterator(), minLon, minLat, maxLon, maxLat);
|
||||
GeoPointInBBoxQueryImpl(final String field, final TermEncoding termEncoding, final double minLon, final double minLat, final double maxLon, final double maxLat) {
|
||||
super(field, termEncoding, minLon, minLat, maxLon, maxLat);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -54,27 +45,31 @@ class GeoPointInBBoxQueryImpl extends GeoPointTermQuery {
|
|||
throw new UnsupportedOperationException("cannot change rewrite method");
|
||||
}
|
||||
|
||||
protected class GeoPointInBBoxTermsEnum extends GeoPointTermsEnum {
|
||||
protected GeoPointInBBoxTermsEnum(final TermsEnum tenum, final double minLon, final double minLat,
|
||||
final double maxLon, final double maxLat) {
|
||||
super(tenum, minLon, minLat, maxLon, maxLat);
|
||||
@Override
|
||||
protected short computeMaxShift() {
|
||||
final short shiftFactor;
|
||||
|
||||
// compute diagonal radius
|
||||
double midLon = (minLon + maxLon) * 0.5;
|
||||
double midLat = (minLat + maxLat) * 0.5;
|
||||
|
||||
if (SloppyMath.haversin(minLat, minLon, midLat, midLon)*1000 > 1000000) {
|
||||
shiftFactor = 5;
|
||||
} else {
|
||||
shiftFactor = 4;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected short computeMaxShift() {
|
||||
final short shiftFactor;
|
||||
return (short)(GeoPointField.PRECISION_STEP * shiftFactor);
|
||||
}
|
||||
|
||||
// compute diagonal radius
|
||||
double midLon = (minLon + maxLon) * 0.5;
|
||||
double midLat = (minLat + maxLat) * 0.5;
|
||||
@Override
|
||||
protected CellComparator newCellComparator() {
|
||||
return new GeoPointInBBoxCellComparator(this);
|
||||
}
|
||||
|
||||
if (SloppyMath.haversin(minLat, minLon, midLat, midLon)*1000 > 1000000) {
|
||||
shiftFactor = 5;
|
||||
} else {
|
||||
shiftFactor = 4;
|
||||
}
|
||||
|
||||
return (short)(GeoPointField.PRECISION_STEP * shiftFactor);
|
||||
private final class GeoPointInBBoxCellComparator extends CellComparator {
|
||||
GeoPointInBBoxCellComparator(GeoPointMultiTermQuery query) {
|
||||
super(query);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -82,16 +77,16 @@ class GeoPointInBBoxQueryImpl extends GeoPointTermQuery {
|
|||
*/
|
||||
@Override
|
||||
protected boolean cellCrosses(final double minLon, final double minLat, final double maxLon, final double maxLat) {
|
||||
return GeoRelationUtils.rectCrosses(minLon, minLat, maxLon, maxLat, this.minLon, this.minLat, this.maxLon, this.maxLat);
|
||||
}
|
||||
return GeoRelationUtils.rectCrosses(minLon, minLat, maxLon, maxLat, GeoPointInBBoxQueryImpl.this.minLon,
|
||||
GeoPointInBBoxQueryImpl.this.minLat, GeoPointInBBoxQueryImpl.this.maxLon, GeoPointInBBoxQueryImpl.this.maxLat);
}
|
||||
|
||||
/**
|
||||
* Determine whether quad-cell is within the shape
|
||||
*/
|
||||
@Override
|
||||
protected boolean cellWithin(final double minLon, final double minLat, final double maxLon, final double maxLat) {
|
||||
return GeoRelationUtils.rectWithin(minLon, minLat, maxLon, maxLat, this.minLon, this.minLat, this.maxLon, this.maxLat);
|
||||
}
|
||||
return GeoRelationUtils.rectWithin(minLon, minLat, maxLon, maxLat, GeoPointInBBoxQueryImpl.this.minLon,
|
||||
GeoPointInBBoxQueryImpl.this.minLat, GeoPointInBBoxQueryImpl.this.maxLon, GeoPointInBBoxQueryImpl.this.maxLat);
}
|
||||
|
||||
@Override
|
||||
protected boolean cellIntersectsShape(final double minLon, final double minLat, final double maxLon, final double maxLat) {
|
||||
|
|
|
@ -16,14 +16,13 @@
|
|||
*/
|
||||
package org.apache.lucene.spatial.search;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.apache.lucene.index.Terms;
|
||||
import org.apache.lucene.index.TermsEnum;
|
||||
import org.apache.lucene.util.AttributeSource;
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.spatial.document.GeoPointField.TermEncoding;
|
||||
import org.apache.lucene.spatial.util.GeoEncodingUtils;
|
||||
import org.apache.lucene.spatial.util.GeoRect;
|
||||
import org.apache.lucene.spatial.util.GeoRelationUtils;
|
||||
import org.apache.lucene.spatial.util.GeoUtils;
|
||||
|
||||
/** Implements a simple point in polygon query on a GeoPoint field. This is based on
|
||||
|
@ -34,7 +33,7 @@ import org.apache.lucene.spatial.util.GeoUtils;
|
|||
* to a secondary filter that verifies whether the decoded lat/lon point falls within
|
||||
* (or on the boundary) of the bounding box query. Finally, the remaining candidate
|
||||
* term is passed to the final point in polygon check. All value comparisons are subject
|
||||
* to the same precision tolerance defined in {@value org.apache.lucene.spatial.util.GeoUtils#TOLERANCE}
|
||||
* to the same precision tolerance defined in {@value GeoEncodingUtils#TOLERANCE}
|
||||
*
|
||||
* <p>NOTES:
|
||||
* 1. The polygon coordinates need to be in either clockwise or counter-clockwise order.
|
||||
|
@ -45,23 +44,27 @@ import org.apache.lucene.spatial.util.GeoUtils;
|
|||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
public final class GeoPointInPolygonQuery extends GeoPointInBBoxQueryImpl {
|
||||
public final class GeoPointInPolygonQuery extends GeoPointInBBoxQuery {
|
||||
// polygon position arrays - this avoids the use of any objects or
|
||||
// or geo library dependencies
|
||||
private final double[] x;
|
||||
private final double[] y;
|
||||
protected final double[] x;
|
||||
protected final double[] y;
|
||||
|
||||
public GeoPointInPolygonQuery(final String field, final double[] polyLons, final double[] polyLats) {
|
||||
this(field, TermEncoding.PREFIX, GeoUtils.polyToBBox(polyLons, polyLats), polyLons, polyLats);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new GeoPolygonQuery that will match encoded {@link org.apache.lucene.spatial.document.GeoPointField} terms
|
||||
* that fall within or on the boundary of the polygon defined by the input parameters.
|
||||
*/
|
||||
public GeoPointInPolygonQuery(final String field, final double[] polyLons, final double[] polyLats) {
|
||||
this(field, GeoUtils.polyToBBox(polyLons, polyLats), polyLons, polyLats);
|
||||
public GeoPointInPolygonQuery(final String field, final TermEncoding termEncoding, final double[] polyLons, final double[] polyLats) {
|
||||
this(field, termEncoding, GeoUtils.polyToBBox(polyLons, polyLats), polyLons, polyLats);
|
||||
}
|
||||
|
||||
/** Common constructor, used only internally. */
|
||||
private GeoPointInPolygonQuery(final String field, GeoRect bbox, final double[] polyLons, final double[] polyLats) {
|
||||
super(field, bbox.minLon, bbox.minLat, bbox.maxLon, bbox.maxLat);
|
||||
private GeoPointInPolygonQuery(final String field, TermEncoding termEncoding, GeoRect bbox, final double[] polyLons, final double[] polyLats) {
|
||||
super(field, termEncoding, bbox.minLon, bbox.minLat, bbox.maxLon, bbox.maxLat);
|
||||
if (polyLats.length != polyLons.length) {
|
||||
throw new IllegalArgumentException("polyLats and polyLons must be equal length");
|
||||
}
|
||||
|
@ -79,15 +82,10 @@ public final class GeoPointInPolygonQuery extends GeoPointInBBoxQueryImpl {
|
|||
this.y = polyLats;
|
||||
}
|
||||
|
||||
@Override @SuppressWarnings("unchecked")
|
||||
protected TermsEnum getTermsEnum(final Terms terms, AttributeSource atts) throws IOException {
|
||||
return new GeoPolygonTermsEnum(terms.iterator(), this.minLon, this.minLat, this.maxLon, this.maxLat);
|
||||
}
|
||||
|
||||
/** throw exception if trying to change rewrite method */
|
||||
@Override
|
||||
public void setRewriteMethod(RewriteMethod method) {
|
||||
throw new UnsupportedOperationException("cannot change rewrite method");
|
||||
public Query rewrite(IndexReader reader) {
|
||||
return new GeoPointInPolygonQueryImpl(field, termEncoding, this, this.minLon, this.minLat, this.maxLon, this.maxLat);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -128,56 +126,15 @@ public final class GeoPointInPolygonQuery extends GeoPointInBBoxQueryImpl {
|
|||
sb.append(" Points: ");
|
||||
for (int i=0; i<x.length; ++i) {
|
||||
sb.append("[")
|
||||
.append(x[i])
|
||||
.append(", ")
|
||||
.append(y[i])
|
||||
.append("] ");
|
||||
.append(x[i])
|
||||
.append(", ")
|
||||
.append(y[i])
|
||||
.append("] ");
|
||||
}
|
||||
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Custom {@link org.apache.lucene.index.TermsEnum} that computes morton hash ranges based on the defined edges of
|
||||
* the provided polygon.
|
||||
*/
|
||||
private final class GeoPolygonTermsEnum extends GeoPointTermsEnum {
|
||||
GeoPolygonTermsEnum(final TermsEnum tenum, final double minLon, final double minLat,
|
||||
final double maxLon, final double maxLat) {
|
||||
super(tenum, minLon, minLat, maxLon, maxLat);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean cellCrosses(final double minLon, final double minLat, final double maxLon, final double maxLat) {
|
||||
return GeoRelationUtils.rectCrossesPolyApprox(minLon, minLat, maxLon, maxLat, x, y, GeoPointInPolygonQuery.this.minLon,
|
||||
GeoPointInPolygonQuery.this.minLat, GeoPointInPolygonQuery.this.maxLon, GeoPointInPolygonQuery.this.maxLat);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean cellWithin(final double minLon, final double minLat, final double maxLon, final double maxLat) {
|
||||
return GeoRelationUtils.rectWithinPolyApprox(minLon, minLat, maxLon, maxLat, x, y, GeoPointInPolygonQuery.this.minLon,
|
||||
GeoPointInPolygonQuery.this.minLat, GeoPointInPolygonQuery.this.maxLon, GeoPointInPolygonQuery.this.maxLat);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean cellIntersectsShape(final double minLon, final double minLat, final double maxLon, final double maxLat) {
|
||||
return cellContains(minLon, minLat, maxLon, maxLat) || cellWithin(minLon, minLat, maxLon, maxLat)
|
||||
|| cellCrosses(minLon, minLat, maxLon, maxLat);
|
||||
}
|
||||
|
||||
/**
|
||||
* The two-phase query approach. The parent
|
||||
* {@link GeoPointTermsEnum#accept} method is called to match
|
||||
* encoded terms that fall within the bounding box of the polygon. Those documents that pass the initial
|
||||
* bounding box filter are then compared to the provided polygon using the
|
||||
* {@link org.apache.lucene.spatial.util.GeoRelationUtils#pointInPolygon} method.
|
||||
*/
|
||||
@Override
|
||||
protected boolean postFilter(final double lon, final double lat) {
|
||||
return GeoRelationUtils.pointInPolygon(x, y, lat, lon);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* API utility method for returning the array of longitudinal values for this GeoPolygon
|
||||
* The returned array is not a copy so do not change it!
|
||||
|
|
|
@@ -0,0 +1,103 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.spatial.search;

import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.spatial.document.GeoPointField.TermEncoding;
import org.apache.lucene.spatial.util.GeoRelationUtils;

/** Package private implementation for the public facing GeoPointInPolygonQuery delegate class.
 *
 * @lucene.experimental
 */
final class GeoPointInPolygonQueryImpl extends GeoPointInBBoxQueryImpl {
  private final GeoPointInPolygonQuery polygonQuery;

  GeoPointInPolygonQueryImpl(final String field, final TermEncoding termEncoding, final GeoPointInPolygonQuery q,
                             final double minLon, final double minLat, final double maxLon, final double maxLat) {
    super(field, termEncoding, minLon, minLat, maxLon, maxLat);
    polygonQuery = q;
  }

  @Override
  public void setRewriteMethod(MultiTermQuery.RewriteMethod method) {
    throw new UnsupportedOperationException("cannot change rewrite method");
  }

  @Override
  protected CellComparator newCellComparator() {
    return new GeoPolygonCellComparator(this);
  }

  /**
   * Custom {@code org.apache.lucene.spatial.search.GeoPointMultiTermQuery.CellComparator} that computes morton hash
   * ranges based on the defined edges of the provided polygon.
   */
  private final class GeoPolygonCellComparator extends CellComparator {
    GeoPolygonCellComparator(GeoPointMultiTermQuery query) {
      super(query);
    }

    @Override
    protected boolean cellCrosses(final double minLon, final double minLat, final double maxLon, final double maxLat) {
      return GeoRelationUtils.rectCrossesPolyApprox(minLon, minLat, maxLon, maxLat, polygonQuery.x, polygonQuery.y,
          polygonQuery.minLon, polygonQuery.minLat, polygonQuery.maxLon, polygonQuery.maxLat);
    }

    @Override
    protected boolean cellWithin(final double minLon, final double minLat, final double maxLon, final double maxLat) {
      return GeoRelationUtils.rectWithinPolyApprox(minLon, minLat, maxLon, maxLat, polygonQuery.x, polygonQuery.y,
          polygonQuery.minLon, polygonQuery.minLat, polygonQuery.maxLon, polygonQuery.maxLat);
    }

    @Override
    protected boolean cellIntersectsShape(final double minLon, final double minLat, final double maxLon, final double maxLat) {
      return cellContains(minLon, minLat, maxLon, maxLat) || cellWithin(minLon, minLat, maxLon, maxLat)
          || cellCrosses(minLon, minLat, maxLon, maxLat);
    }

    /**
     * The two-phase query approach. The parent
     * {@link org.apache.lucene.spatial.search.GeoPointTermsEnum#accept} method is called to match
     * encoded terms that fall within the bounding box of the polygon. Those documents that pass the initial
     * bounding box filter are then compared to the provided polygon using the
     * {@link org.apache.lucene.spatial.util.GeoRelationUtils#pointInPolygon} method.
     */
    @Override
    protected boolean postFilter(final double lon, final double lat) {
      return GeoRelationUtils.pointInPolygon(polygonQuery.x, polygonQuery.y, lat, lon);
    }
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    if (!super.equals(o)) return false;

    GeoPointInPolygonQueryImpl that = (GeoPointInPolygonQueryImpl) o;

    return !(polygonQuery != null ? !polygonQuery.equals(that.polygonQuery) : that.polygonQuery != null);
  }

  @Override
  public int hashCode() {
    int result = super.hashCode();
    result = 31 * result + (polygonQuery != null ? polygonQuery.hashCode() : 0);
    return result;
  }
}
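For context, a hedged usage sketch of the public delegate query; only the GeoPointInPolygonQuery constructor shown earlier in this patch is taken as given, while the field name, coordinates, and searcher setup are assumptions for illustration:

import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.search.GeoPointInPolygonQuery;

// given an IndexSearcher "searcher" over an index whose documents carry a
// GeoPointField named "location" (both assumed for this sketch):
double[] polyLons = new double[] {-74.01, -73.99, -74.00, -74.01};   // closed ring: first point repeated last
double[] polyLats = new double[] { 40.73,  40.73,  40.75,  40.73};
Query q = new GeoPointInPolygonQuery("location", polyLons, polyLats); // PREFIX term encoding by default
int hits = searcher.count(q);  // bbox prefix match first, then per-document point-in-polygon post filter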
@@ -0,0 +1,166 @@
package org.apache.lucene.spatial.search;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.spatial.document.GeoPointField;
import org.apache.lucene.spatial.document.GeoPointField.TermEncoding;
import org.apache.lucene.spatial.util.GeoEncodingUtils;
import org.apache.lucene.spatial.util.GeoRelationUtils;
import org.apache.lucene.spatial.util.GeoUtils;
import org.apache.lucene.util.SloppyMath;

/**
 * TermQuery for GeoPointField for overriding {@link org.apache.lucene.search.MultiTermQuery} methods specific to
 * Geospatial operations
 *
 * @lucene.experimental
 */
abstract class GeoPointMultiTermQuery extends MultiTermQuery {
  // simple bounding box optimization - no objects used to avoid dependencies
  protected final double minLon;
  protected final double minLat;
  protected final double maxLon;
  protected final double maxLat;
  protected final short maxShift;
  protected final TermEncoding termEncoding;
  protected final CellComparator cellComparator;

  /**
   * Constructs a query matching terms that cannot be represented with a single
   * Term.
   */
  public GeoPointMultiTermQuery(String field, final TermEncoding termEncoding, final double minLon, final double minLat, final double maxLon, final double maxLat) {
    super(field);

    if (GeoUtils.isValidLon(minLon) == false) {
      throw new IllegalArgumentException("invalid minLon " + minLon);
    }
    if (GeoUtils.isValidLon(maxLon) == false) {
      throw new IllegalArgumentException("invalid maxLon " + maxLon);
    }
    if (GeoUtils.isValidLat(minLat) == false) {
      throw new IllegalArgumentException("invalid minLat " + minLat);
    }
    if (GeoUtils.isValidLat(maxLat) == false) {
      throw new IllegalArgumentException("invalid maxLat " + maxLat);
    }

    final long minHash = GeoEncodingUtils.mortonHash(minLon, minLat);
    final long maxHash = GeoEncodingUtils.mortonHash(maxLon, maxLat);
    this.minLon = GeoEncodingUtils.mortonUnhashLon(minHash);
    this.minLat = GeoEncodingUtils.mortonUnhashLat(minHash);
    this.maxLon = GeoEncodingUtils.mortonUnhashLon(maxHash);
    this.maxLat = GeoEncodingUtils.mortonUnhashLat(maxHash);

    this.maxShift = computeMaxShift();
    this.termEncoding = termEncoding;
    this.cellComparator = newCellComparator();

    this.rewriteMethod = GEO_CONSTANT_SCORE_REWRITE;
  }

  public static final RewriteMethod GEO_CONSTANT_SCORE_REWRITE = new RewriteMethod() {
    @Override
    public Query rewrite(IndexReader reader, MultiTermQuery query) {
      return new GeoPointTermQueryConstantScoreWrapper<>((GeoPointMultiTermQuery)query);
    }
  };

  @Override @SuppressWarnings("unchecked")
  protected TermsEnum getTermsEnum(final Terms terms, AttributeSource atts) throws IOException {
    return GeoPointTermsEnum.newInstance(terms.iterator(), this);
  }

  /**
   * Computes the maximum shift based on the diagonal distance of the bounding box
   */
  protected short computeMaxShift() {
    // in this case a factor of 4 brings the detail level to ~0.002/0.001 degrees lon/lat respectively (or ~222m/111m)
    final short shiftFactor;

    // compute diagonal distance
    double midLon = (minLon + maxLon) * 0.5;
    double midLat = (minLat + maxLat) * 0.5;

    if (SloppyMath.haversin(minLat, minLon, midLat, midLon)*1000 > 1000000) {
      shiftFactor = 5;
    } else {
      shiftFactor = 4;
    }

    return (short)(GeoPointField.PRECISION_STEP * shiftFactor);
  }

  /**
   * Abstract method to construct the class that handles all geo point relations
   * (e.g., GeoPointInPolygon)
   */
  abstract protected CellComparator newCellComparator();

  /**
   * Base class for all geo point relation comparators
   */
  static abstract class CellComparator {
    protected final GeoPointMultiTermQuery geoPointQuery;

    CellComparator(GeoPointMultiTermQuery query) {
      this.geoPointQuery = query;
    }

    /**
     * Primary driver for cells intersecting shape boundaries
     */
    protected boolean cellIntersectsMBR(final double minLon, final double minLat, final double maxLon, final double maxLat) {
      return GeoRelationUtils.rectIntersects(minLon, minLat, maxLon, maxLat, geoPointQuery.minLon, geoPointQuery.minLat,
          geoPointQuery.maxLon, geoPointQuery.maxLat);
    }

    /**
     * Return whether quad-cell contains the bounding box of this shape
     */
    protected boolean cellContains(final double minLon, final double minLat, final double maxLon, final double maxLat) {
      return GeoRelationUtils.rectWithin(geoPointQuery.minLon, geoPointQuery.minLat, geoPointQuery.maxLon,
          geoPointQuery.maxLat, minLon, minLat, maxLon, maxLat);
    }

    /**
     * Determine whether the quad-cell crosses the shape
     */
    abstract protected boolean cellCrosses(final double minLon, final double minLat, final double maxLon, final double maxLat);

    /**
     * Determine whether quad-cell is within the shape
     */
    abstract protected boolean cellWithin(final double minLon, final double minLat, final double maxLon, final double maxLat);

    /**
     * Default shape is a rectangle, so this returns the same as {@code cellIntersectsMBR}
     */
    abstract protected boolean cellIntersectsShape(final double minLon, final double minLat, final double maxLon, final double maxLat);

    abstract protected boolean postFilter(final double lon, final double lat);
  }
}
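One detail worth noting in the constructor above: the bounding box is stored after a mortonHash/unhash round trip, so the query bounds sit exactly on the GeoPointField encoding grid. A small sketch, not part of the patch, using only methods added elsewhere in this change; the coordinates are arbitrary:

import org.apache.lucene.spatial.util.GeoEncodingUtils;

public class MortonRoundTripSketch {
  public static void main(String[] args) {
    double lon = -122.41942, lat = 37.77493;
    long hash = GeoEncodingUtils.mortonHash(lon, lat);
    double qLon = GeoEncodingUtils.mortonUnhashLon(hash);
    double qLat = GeoEncodingUtils.mortonUnhashLat(hash);
    // with BITS = 31 the quantization step is 360/2^31 (~1.7e-7) degrees of longitude and half
    // that for latitude, comfortably inside GeoEncodingUtils.TOLERANCE (1e-6)
    System.out.println((lon - qLon) + ", " + (lat - qLat));
  }
}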
@ -0,0 +1,161 @@
|
|||
package org.apache.lucene.spatial.search;
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.lucene.index.TermsEnum;
|
||||
import org.apache.lucene.spatial.document.GeoPointField;
|
||||
import org.apache.lucene.spatial.util.GeoEncodingUtils;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.BytesRefBuilder;
|
||||
import org.apache.lucene.util.LegacyNumericUtils;
|
||||
|
||||
/**
|
||||
* Decomposes a given {@link GeoPointMultiTermQuery} into a set of terms that represent the query criteria using
|
||||
* {@link org.apache.lucene.spatial.document.GeoPointField.TermEncoding#NUMERIC} method defined by
|
||||
* {@link org.apache.lucene.analysis.LegacyNumericTokenStream}. The terms are then enumerated by the
|
||||
* {@link GeoPointTermQueryConstantScoreWrapper} and all docs whose GeoPoint fields match the prefix terms or
|
||||
* pass the {@link GeoPointMultiTermQuery.CellComparator#postFilter} criteria are returned in the resulting DocIdSet.
|
||||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
@Deprecated
|
||||
final class GeoPointNumericTermsEnum extends GeoPointTermsEnum {
|
||||
private final List<Range> rangeBounds = new LinkedList<>();
|
||||
|
||||
// detail level should be a factor of PRECISION_STEP limiting the depth of recursion (and number of ranges)
|
||||
private final short DETAIL_LEVEL;
|
||||
|
||||
GeoPointNumericTermsEnum(final TermsEnum tenum, final GeoPointMultiTermQuery query) {
|
||||
super(tenum, query);
|
||||
DETAIL_LEVEL = (short)(((GeoEncodingUtils.BITS<<1)-this.maxShift)/2);
|
||||
computeRange(0L, (short) (((GeoEncodingUtils.BITS) << 1) - 1));
|
||||
assert rangeBounds.isEmpty() == false;
|
||||
Collections.sort(rangeBounds);
|
||||
}
|
||||
|
||||
/**
|
||||
* entry point for recursively computing ranges
|
||||
*/
|
||||
private final void computeRange(long term, final short shift) {
|
||||
final long split = term | (0x1L<<shift);
|
||||
assert shift < 64;
|
||||
final long upperMax;
|
||||
if (shift < 63) {
|
||||
upperMax = term | ((1L << (shift+1))-1);
|
||||
} else {
|
||||
upperMax = 0xffffffffffffffffL;
|
||||
}
|
||||
final long lowerMax = split-1;
|
||||
|
||||
relateAndRecurse(term, lowerMax, shift);
|
||||
relateAndRecurse(split, upperMax, shift);
|
||||
}
|
||||
|
||||
/**
|
||||
* recurse to higher level precision cells to find ranges along the space-filling curve that fall within the
|
||||
* query box
|
||||
*
|
||||
* @param start starting value on the space-filling curve for a cell at a given res
|
||||
* @param end ending value on the space-filling curve for a cell at a given res
|
||||
* @param res spatial res represented as a bit shift (MSB is lower res)
|
||||
*/
|
||||
private void relateAndRecurse(final long start, final long end, final short res) {
|
||||
final double minLon = GeoEncodingUtils.mortonUnhashLon(start);
|
||||
final double minLat = GeoEncodingUtils.mortonUnhashLat(start);
|
||||
final double maxLon = GeoEncodingUtils.mortonUnhashLon(end);
|
||||
final double maxLat = GeoEncodingUtils.mortonUnhashLat(end);
|
||||
|
||||
final short level = (short)((GeoEncodingUtils.BITS<<1)-res>>>1);
|
||||
|
||||
// if cell is within and a factor of the precision step, or it crosses the edge of the shape add the range
|
||||
final boolean within = res % GeoPointField.PRECISION_STEP == 0 && relationImpl.cellWithin(minLon, minLat, maxLon, maxLat);
|
||||
if (within || (level == DETAIL_LEVEL && relationImpl.cellIntersectsShape(minLon, minLat, maxLon, maxLat))) {
|
||||
final short nextRes = (short)(res-1);
|
||||
if (nextRes % GeoPointField.PRECISION_STEP == 0) {
|
||||
rangeBounds.add(new Range(start, nextRes, !within));
|
||||
rangeBounds.add(new Range(start|(1L<<nextRes), nextRes, !within));
|
||||
} else {
|
||||
rangeBounds.add(new Range(start, res, !within));
|
||||
}
|
||||
} else if (level < DETAIL_LEVEL && relationImpl.cellIntersectsMBR(minLon, minLat, maxLon, maxLat)) {
|
||||
computeRange(start, (short) (res - 1));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected final BytesRef peek() {
|
||||
rangeBounds.get(0).fillBytesRef(this.nextSubRangeBRB);
|
||||
return nextSubRangeBRB.get();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void nextRange() {
|
||||
currentRange = rangeBounds.remove(0);
|
||||
super.nextRange();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected final BytesRef nextSeekTerm(BytesRef term) {
|
||||
while (hasNext()) {
|
||||
if (currentRange == null) {
|
||||
nextRange();
|
||||
}
|
||||
// if the new upper bound is before the term parameter, the sub-range is never a hit
|
||||
if (term != null && term.compareTo(currentCell) > 0) {
|
||||
nextRange();
|
||||
if (!rangeBounds.isEmpty()) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
// never seek backwards, so use current term if lower bound is smaller
|
||||
return (term != null && term.compareTo(currentCell) > 0) ? term : currentCell;
|
||||
}
|
||||
|
||||
// no more sub-range enums available
|
||||
assert rangeBounds.isEmpty();
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected final boolean hasNext() {
|
||||
return rangeBounds.isEmpty() == false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Internal class to represent a range along the space filling curve
|
||||
*/
|
||||
protected final class Range extends BaseRange {
|
||||
Range(final long lower, final short shift, boolean boundary) {
|
||||
super(lower, shift, boundary);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode as a BytesRef using a reusable object. This allows us to lazily create the BytesRef (which is
|
||||
* quite expensive), only when we need it.
|
||||
*/
|
||||
@Override
|
||||
protected void fillBytesRef(BytesRefBuilder result) {
|
||||
assert result != null;
|
||||
LegacyNumericUtils.longToPrefixCoded(start, shift, result);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,237 @@
|
|||
package org.apache.lucene.spatial.search;
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import org.apache.lucene.index.TermsEnum;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.BytesRefBuilder;
|
||||
import org.apache.lucene.spatial.document.GeoPointField;
|
||||
import org.apache.lucene.spatial.util.GeoEncodingUtils;
|
||||
|
||||
import static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonHash;
|
||||
import static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonUnhashLat;
|
||||
import static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonUnhashLon;
|
||||
import static org.apache.lucene.spatial.util.GeoEncodingUtils.geoCodedToPrefixCoded;
|
||||
import static org.apache.lucene.spatial.util.GeoEncodingUtils.prefixCodedToGeoCoded;
|
||||
import static org.apache.lucene.spatial.util.GeoEncodingUtils.getPrefixCodedShift;
|
||||
|
||||
/**
|
||||
* Decomposes a given {@link GeoPointMultiTermQuery} into a set of terms that represent the query criteria using
|
||||
* {@link org.apache.lucene.spatial.document.GeoPointField.TermEncoding#PREFIX} method defined by
|
||||
* {@link GeoPointField}. The terms are then enumerated by the
|
||||
* {@link GeoPointTermQueryConstantScoreWrapper} and all docs whose GeoPoint fields match the prefix terms or pass
|
||||
* the {@link GeoPointMultiTermQuery.CellComparator#postFilter} criteria are returned in the
|
||||
* resulting DocIdSet.
|
||||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
final class GeoPointPrefixTermsEnum extends GeoPointTermsEnum {
|
||||
private final long start;
|
||||
|
||||
private short shift;
|
||||
|
||||
// current range as long
|
||||
private long currStart;
|
||||
private long currEnd;
|
||||
|
||||
private final Range nextRange = new Range(-1, shift, true);
|
||||
|
||||
private boolean hasNext = false;
|
||||
|
||||
private boolean withinOnly = false;
|
||||
private long lastWithin;
|
||||
|
||||
public GeoPointPrefixTermsEnum(final TermsEnum tenum, final GeoPointMultiTermQuery query) {
|
||||
super(tenum, query);
|
||||
this.start = mortonHash(query.minLon, query.minLat);
|
||||
this.currentRange = new Range(0, shift, true);
|
||||
// start shift at maxShift value (from computeMaxShift)
|
||||
this.shift = maxShift;
|
||||
final long mask = (1L << shift) - 1;
|
||||
this.currStart = start & ~mask;
|
||||
this.currEnd = currStart | mask;
|
||||
}
|
||||
|
||||
private boolean within(final double minLon, final double minLat, final double maxLon, final double maxLat) {
|
||||
return relationImpl.cellWithin(minLon, minLat, maxLon, maxLat);
|
||||
}
|
||||
|
||||
private boolean boundary(final double minLon, final double minLat, final double maxLon, final double maxLat) {
|
||||
return shift == maxShift && relationImpl.cellIntersectsShape(minLon, minLat, maxLon, maxLat);
|
||||
}
|
||||
|
||||
private boolean nextWithin() {
|
||||
if (withinOnly == false) {
|
||||
return false;
|
||||
}
|
||||
currStart += (1L << shift);
|
||||
setNextRange(false);
|
||||
currentRange.set(nextRange);
|
||||
hasNext = true;
|
||||
|
||||
withinOnly = lastWithin != currStart;
|
||||
if (withinOnly == false) advanceVariables();
|
||||
return true;
|
||||
}
|
||||
|
||||
private void nextRelation() {
|
||||
double minLon = mortonUnhashLon(currStart);
|
||||
double minLat = mortonUnhashLat(currStart);
|
||||
double maxLon;
|
||||
double maxLat;
|
||||
boolean isWithin;
|
||||
do {
|
||||
maxLon = mortonUnhashLon(currEnd);
|
||||
maxLat = mortonUnhashLat(currEnd);
|
||||
|
||||
// within or a boundary
|
||||
if ((isWithin = within(minLon, minLat, maxLon, maxLat) == true) || boundary(minLon, minLat, maxLon, maxLat) == true) {
|
||||
final int m;
|
||||
if (isWithin == false || (m = shift % GeoPointField.PRECISION_STEP) == 0) {
|
||||
setNextRange(isWithin == false);
|
||||
advanceVariables();
|
||||
break;
|
||||
} else if (shift < 54) {
|
||||
withinOnly = true;
|
||||
shift = (short)(shift - m);
|
||||
lastWithin = currEnd & ~((1L << shift) - 1);
|
||||
setNextRange(false);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// within cell but not at a depth factor of PRECISION_STEP
|
||||
if (isWithin == true || (relationImpl.cellIntersectsMBR(minLon, minLat, maxLon , maxLat) == true && shift != maxShift)) {
|
||||
// descend: currStart need not change since shift handles end of range
|
||||
currEnd = currStart | (1L<<--shift) - 1;
|
||||
} else {
|
||||
advanceVariables();
|
||||
minLon = mortonUnhashLon(currStart);
|
||||
minLat = mortonUnhashLat(currStart);
|
||||
}
|
||||
} while(shift < 63);
|
||||
}
|
||||
|
||||
private void setNextRange(final boolean boundary) {
|
||||
nextRange.start = currStart;
|
||||
nextRange.shift = shift;
|
||||
nextRange.boundary = boundary;
|
||||
}
|
||||
|
||||
private void advanceVariables() {
|
||||
/** set next variables */
|
||||
long shiftMask = 1L << shift;
|
||||
// pop-up if shift bit is set
|
||||
while ( (currStart & shiftMask) == shiftMask) {
|
||||
shiftMask = 1L << ++shift;
|
||||
}
|
||||
final long shiftMOne = shiftMask - 1;
|
||||
currStart = currStart & ~shiftMOne | shiftMask;
|
||||
currEnd = currStart | shiftMOne;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected final BytesRef peek() {
|
||||
nextRange.fillBytesRef(nextSubRangeBRB);
|
||||
return super.peek();
|
||||
}
|
||||
|
||||
protected void seek(long term, short res) {
|
||||
if (term < currStart && res < maxShift) {
|
||||
throw new IllegalArgumentException("trying to seek backwards");
|
||||
} else if (term == currStart) {
|
||||
return;
|
||||
}
|
||||
shift = res;
|
||||
currStart = term;
|
||||
currEnd = currStart | ((1L<<shift)-1);
|
||||
withinOnly = false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void nextRange() {
|
||||
hasNext = false;
|
||||
super.nextRange();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected final boolean hasNext() {
|
||||
if (hasNext == true || nextWithin()) {
|
||||
return true;
|
||||
}
|
||||
nextRelation();
|
||||
if (currentRange.compareTo(nextRange) != 0) {
|
||||
currentRange.set(nextRange);
|
||||
return (hasNext = true);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected final BytesRef nextSeekTerm(BytesRef term) {
|
||||
while (hasNext()) {
|
||||
nextRange();
|
||||
if (term == null) {
|
||||
return currentCell;
|
||||
}
|
||||
|
||||
final int comparison = term.compareTo(currentCell);
|
||||
if (comparison > 0) {
|
||||
seek(GeoEncodingUtils.prefixCodedToGeoCoded(term), (short)(64-GeoEncodingUtils.getPrefixCodedShift(term)));
|
||||
continue;
|
||||
}
|
||||
return currentCell;
|
||||
}
|
||||
|
||||
// no more sub-range enums available
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected AcceptStatus accept(BytesRef term) {
|
||||
// range < term or range is null
|
||||
while (currentCell == null || term.compareTo(currentCell) > 0) {
|
||||
// no more ranges, be gone
|
||||
if (hasNext() == false) {
|
||||
return AcceptStatus.END;
|
||||
}
|
||||
|
||||
// peek next range, if the range > term then seek
|
||||
final int peekCompare = term.compareTo(peek());
|
||||
if (peekCompare < 0) {
|
||||
return AcceptStatus.NO_AND_SEEK;
|
||||
} else if (peekCompare > 0) {
|
||||
seek(prefixCodedToGeoCoded(term), (short)(64 - getPrefixCodedShift(term)));
|
||||
}
|
||||
nextRange();
|
||||
}
|
||||
return AcceptStatus.YES;
|
||||
}
|
||||
|
||||
protected final class Range extends BaseRange {
|
||||
public Range(final long start, final short res, final boolean boundary) {
|
||||
super(start, res, boundary);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void fillBytesRef(BytesRefBuilder result) {
|
||||
assert result != null;
|
||||
geoCodedToPrefixCoded(start, shift, result);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,114 +0,0 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.lucene.spatial.search;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.Terms;
|
||||
import org.apache.lucene.index.TermsEnum;
|
||||
import org.apache.lucene.search.MultiTermQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.spatial.util.GeoUtils;
|
||||
import org.apache.lucene.util.AttributeSource;
|
||||
|
||||
/**
|
||||
* TermQuery for GeoPointField for overriding {@link org.apache.lucene.search.MultiTermQuery} methods specific to
|
||||
* Geospatial operations
|
||||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
abstract class GeoPointTermQuery extends MultiTermQuery {
|
||||
// simple bounding box optimization - no objects used to avoid dependencies
|
||||
/** minimum longitude value (in degrees) */
|
||||
protected final double minLon;
|
||||
/** minimum latitude value (in degrees) */
|
||||
protected final double minLat;
|
||||
/** maximum longitude value (in degrees) */
|
||||
protected final double maxLon;
|
||||
/** maximum latitude value (in degrees) */
|
||||
protected final double maxLat;
|
||||
|
||||
/**
|
||||
* Constructs a query matching terms that cannot be represented with a single
|
||||
* Term.
|
||||
*/
|
||||
public GeoPointTermQuery(String field, final double minLon, final double minLat, final double maxLon, final double maxLat) {
|
||||
super(field);
|
||||
|
||||
if (GeoUtils.isValidLon(minLon) == false) {
|
||||
throw new IllegalArgumentException("invalid minLon " + minLon);
|
||||
}
|
||||
if (GeoUtils.isValidLon(maxLon) == false) {
|
||||
throw new IllegalArgumentException("invalid maxLon " + maxLon);
|
||||
}
|
||||
if (GeoUtils.isValidLat(minLat) == false) {
|
||||
throw new IllegalArgumentException("invalid minLat " + minLat);
|
||||
}
|
||||
if (GeoUtils.isValidLat(maxLat) == false) {
|
||||
throw new IllegalArgumentException("invalid maxLat " + maxLat);
|
||||
}
|
||||
this.minLon = minLon;
|
||||
this.minLat = minLat;
|
||||
this.maxLon = maxLon;
|
||||
this.maxLat = maxLat;
|
||||
|
||||
this.rewriteMethod = GEO_CONSTANT_SCORE_REWRITE;
|
||||
}
|
||||
|
||||
private static final RewriteMethod GEO_CONSTANT_SCORE_REWRITE = new RewriteMethod() {
|
||||
@Override
|
||||
public Query rewrite(IndexReader reader, MultiTermQuery query) {
|
||||
return new GeoPointTermQueryConstantScoreWrapper<>((GeoPointTermQuery)query);
|
||||
}
|
||||
};
|
||||
|
||||
/** override package protected method */
|
||||
@Override
|
||||
protected abstract TermsEnum getTermsEnum(final Terms terms, AttributeSource atts) throws IOException;
|
||||
|
||||
/** check if this instance equals another instance */
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
if (!super.equals(o)) return false;
|
||||
|
||||
GeoPointTermQuery that = (GeoPointTermQuery) o;
|
||||
|
||||
if (Double.compare(that.minLon, minLon) != 0) return false;
|
||||
if (Double.compare(that.minLat, minLat) != 0) return false;
|
||||
if (Double.compare(that.maxLon, maxLon) != 0) return false;
|
||||
return Double.compare(that.maxLat, maxLat) == 0;
|
||||
}
|
||||
|
||||
/** compute hashcode */
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = super.hashCode();
|
||||
long temp;
|
||||
temp = Double.doubleToLongBits(minLon);
|
||||
result = 31 * result + (int) (temp ^ (temp >>> 32));
|
||||
temp = Double.doubleToLongBits(minLat);
|
||||
result = 31 * result + (int) (temp ^ (temp >>> 32));
|
||||
temp = Double.doubleToLongBits(maxLon);
|
||||
result = 31 * result + (int) (temp ^ (temp >>> 32));
|
||||
temp = Double.doubleToLongBits(maxLat);
|
||||
result = 31 * result + (int) (temp ^ (temp >>> 32));
|
||||
return result;
|
||||
}
|
||||
}
|
|
@@ -33,15 +33,18 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.DocIdSetBuilder;
import org.apache.lucene.spatial.util.GeoUtils;

import static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonUnhashLat;
import static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonUnhashLon;


/**
* Custom ConstantScoreWrapper for {@code GeoPointTermQuery} that cuts over to DocValues
* Custom ConstantScoreWrapper for {@code GeoPointMultiTermQuery} that cuts over to DocValues
* for post filtering boundary ranges. Multi-valued GeoPoint documents are supported.
*
* @lucene.experimental
*/
final class GeoPointTermQueryConstantScoreWrapper <Q extends GeoPointTermQuery> extends Query {
final class GeoPointTermQueryConstantScoreWrapper <Q extends GeoPointMultiTermQuery> extends Query {
protected final Q query;

protected GeoPointTermQueryConstantScoreWrapper(Q query) {

@@ -95,7 +98,7 @@ final class GeoPointTermQueryConstantScoreWrapper <Q extends GeoPointTermQuery>
sdv.setDocument(docId);
for (int i=0; i<sdv.count(); ++i) {
hash = sdv.valueAt(i);
if (termsEnum.postFilter(GeoUtils.mortonUnhashLon(hash), GeoUtils.mortonUnhashLat(hash))) {
if (termsEnum.postFilter(mortonUnhashLon(hash), mortonUnhashLat(hash))) {
builder.add(docId);
break;
}
@ -16,173 +16,60 @@
|
|||
*/
|
||||
package org.apache.lucene.spatial.search;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.lucene.spatial.document.GeoPointField;
|
||||
import org.apache.lucene.index.FilteredTermsEnum;
|
||||
import org.apache.lucene.index.TermsEnum;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.BytesRefBuilder;
|
||||
import org.apache.lucene.spatial.util.GeoRelationUtils;
|
||||
import org.apache.lucene.spatial.util.GeoUtils;
|
||||
import org.apache.lucene.util.LegacyNumericUtils;
|
||||
import org.apache.lucene.spatial.document.GeoPointField.TermEncoding;
|
||||
import org.apache.lucene.spatial.search.GeoPointMultiTermQuery.CellComparator;
|
||||
|
||||
/**
|
||||
* computes all ranges along a space-filling curve that represents
|
||||
* the given bounding box and enumerates all terms contained within those ranges
|
||||
* Base class for {@link GeoPointNumericTermsEnum} and {@link GeoPointPrefixTermsEnum} which compares
|
||||
* candidate GeoPointField encoded terms against terms matching the defined query criteria.
|
||||
*
|
||||
* @lucene.experimental
|
||||
*/
|
||||
abstract class GeoPointTermsEnum extends FilteredTermsEnum {
|
||||
protected final double minLon;
|
||||
protected final double minLat;
|
||||
protected final double maxLon;
|
||||
protected final double maxLat;
|
||||
protected final short maxShift;
|
||||
|
||||
protected Range currentRange;
|
||||
private final BytesRefBuilder currentCell = new BytesRefBuilder();
|
||||
private final BytesRefBuilder nextSubRange = new BytesRefBuilder();
|
||||
protected BaseRange currentRange;
|
||||
protected BytesRef currentCell;
|
||||
protected final BytesRefBuilder currentCellBRB = new BytesRefBuilder();
|
||||
protected final BytesRefBuilder nextSubRangeBRB = new BytesRefBuilder();
|
||||
|
||||
private final List<Range> rangeBounds = new LinkedList<>();
|
||||
protected final CellComparator relationImpl;
|
||||
|
||||
// detail level should be a factor of PRECISION_STEP limiting the depth of recursion (and number of ranges)
|
||||
protected final short DETAIL_LEVEL;
|
||||
|
||||
GeoPointTermsEnum(final TermsEnum tenum, final double minLon, final double minLat,
|
||||
final double maxLon, final double maxLat) {
|
||||
GeoPointTermsEnum(final TermsEnum tenum, final GeoPointMultiTermQuery query) {
|
||||
super(tenum);
|
||||
final long rectMinHash = GeoUtils.mortonHash(minLon, minLat);
|
||||
final long rectMaxHash = GeoUtils.mortonHash(maxLon, maxLat);
|
||||
this.minLon = GeoUtils.mortonUnhashLon(rectMinHash);
|
||||
this.minLat = GeoUtils.mortonUnhashLat(rectMinHash);
|
||||
this.maxLon = GeoUtils.mortonUnhashLon(rectMaxHash);
|
||||
this.maxLat = GeoUtils.mortonUnhashLat(rectMaxHash);
|
||||
DETAIL_LEVEL = (short)(((GeoUtils.BITS<<1)-computeMaxShift())/2);
|
||||
|
||||
computeRange(0L, (short) ((GeoUtils.BITS << 1) - 1));
|
||||
assert rangeBounds.isEmpty() == false;
|
||||
Collections.sort(rangeBounds);
|
||||
this.maxShift = query.maxShift;
|
||||
this.relationImpl = query.cellComparator;
|
||||
}
|
||||
|
||||
/**
|
||||
* entry point for recursively computing ranges
|
||||
*/
|
||||
private final void computeRange(long term, final short shift) {
|
||||
final long split = term | (0x1L<<shift);
|
||||
assert shift < 64;
|
||||
final long upperMax;
|
||||
if (shift < 63) {
|
||||
upperMax = term | ((1L << (shift+1))-1);
|
||||
} else {
|
||||
upperMax = 0xffffffffffffffffL;
|
||||
static GeoPointTermsEnum newInstance(final TermsEnum terms, final GeoPointMultiTermQuery query) {
|
||||
if (query.termEncoding == TermEncoding.PREFIX) {
|
||||
return new GeoPointPrefixTermsEnum(terms, query);
|
||||
} else if (query.termEncoding == TermEncoding.NUMERIC) {
|
||||
return new GeoPointNumericTermsEnum(terms, query);
|
||||
}
|
||||
final long lowerMax = split-1;
|
||||
|
||||
relateAndRecurse(term, lowerMax, shift);
|
||||
relateAndRecurse(split, upperMax, shift);
|
||||
}
|
||||
|
||||
/**
|
||||
* recurse to higher level precision cells to find ranges along the space-filling curve that fall within the
|
||||
* query box
|
||||
*
|
||||
* @param start starting value on the space-filling curve for a cell at a given res
|
||||
* @param end ending value on the space-filling curve for a cell at a given res
|
||||
* @param res spatial res represented as a bit shift (MSB is lower res)
|
||||
*/
|
||||
private void relateAndRecurse(final long start, final long end, final short res) {
|
||||
final double minLon = GeoUtils.mortonUnhashLon(start);
|
||||
final double minLat = GeoUtils.mortonUnhashLat(start);
|
||||
final double maxLon = GeoUtils.mortonUnhashLon(end);
|
||||
final double maxLat = GeoUtils.mortonUnhashLat(end);
|
||||
|
||||
final short level = (short)((GeoUtils.BITS<<1)-res>>>1);
|
||||
|
||||
// if cell is within and a factor of the precision step, or it crosses the edge of the shape add the range
|
||||
final boolean within = res % GeoPointField.PRECISION_STEP == 0 && cellWithin(minLon, minLat, maxLon, maxLat);
|
||||
if (within || (level == DETAIL_LEVEL && cellIntersectsShape(minLon, minLat, maxLon, maxLat))) {
|
||||
final short nextRes = (short)(res-1);
|
||||
if (nextRes % GeoPointField.PRECISION_STEP == 0) {
|
||||
rangeBounds.add(new Range(start, nextRes, !within));
|
||||
rangeBounds.add(new Range(start|(1L<<nextRes), nextRes, !within));
|
||||
} else {
|
||||
rangeBounds.add(new Range(start, res, !within));
|
||||
}
|
||||
} else if (level < DETAIL_LEVEL && cellIntersectsMBR(minLon, minLat, maxLon, maxLat)) {
|
||||
computeRange(start, (short) (res - 1));
|
||||
}
|
||||
}
|
||||
|
||||
protected short computeMaxShift() {
|
||||
// in this case a factor of 4 brings the detail level to ~0.002/0.001 degrees lon/lat respectively (or ~222m/111m)
|
||||
return GeoPointField.PRECISION_STEP * 4;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine whether the quad-cell crosses the shape
|
||||
*/
|
||||
protected abstract boolean cellCrosses(final double minLon, final double minLat, final double maxLon, final double maxLat);
|
||||
|
||||
/**
|
||||
* Determine whether quad-cell is within the shape
|
||||
*/
|
||||
protected abstract boolean cellWithin(final double minLon, final double minLat, final double maxLon, final double maxLat);
|
||||
|
||||
/**
|
||||
* Default shape is a rectangle, so this returns the same as {@code cellIntersectsMBR}
|
||||
*/
|
||||
protected abstract boolean cellIntersectsShape(final double minLon, final double minLat, final double maxLon, final double maxLat);
|
||||
|
||||
/**
|
||||
* Primary driver for cells intersecting shape boundaries
|
||||
*/
|
||||
protected boolean cellIntersectsMBR(final double minLon, final double minLat, final double maxLon, final double maxLat) {
|
||||
return GeoRelationUtils.rectIntersects(minLon, minLat, maxLon, maxLat, this.minLon, this.minLat, this.maxLon, this.maxLat);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return whether quad-cell contains the bounding box of this shape
|
||||
*/
|
||||
protected boolean cellContains(final double minLon, final double minLat, final double maxLon, final double maxLat) {
|
||||
return GeoRelationUtils.rectWithin(this.minLon, this.minLat, this.maxLon, this.maxLat, minLon, minLat, maxLon, maxLat);
|
||||
throw new IllegalArgumentException("Invalid GeoPoint TermEncoding " + query.termEncoding);
|
||||
}
|
||||
|
||||
public boolean boundaryTerm() {
|
||||
if (currentRange == null) {
|
||||
if (currentCell == null) {
|
||||
throw new IllegalStateException("GeoPointTermsEnum empty or not initialized");
|
||||
}
|
||||
return currentRange.boundary;
|
||||
}
|
||||
|
||||
private void nextRange() {
|
||||
currentRange = rangeBounds.remove(0);
|
||||
currentRange.fillBytesRef(currentCell);
|
||||
protected BytesRef peek() {
|
||||
return nextSubRangeBRB.get();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected final BytesRef nextSeekTerm(BytesRef term) {
|
||||
while (!rangeBounds.isEmpty()) {
|
||||
if (currentRange == null) {
|
||||
nextRange();
|
||||
}
|
||||
abstract protected boolean hasNext();
|
||||
|
||||
// if the new upper bound is before the term parameter, the sub-range is never a hit
|
||||
if (term != null && term.compareTo(currentCell.get()) > 0) {
|
||||
nextRange();
|
||||
if (!rangeBounds.isEmpty()) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
// never seek backwards, so use current term if lower bound is smaller
|
||||
return (term != null && term.compareTo(currentCell.get()) > 0) ?
|
||||
term : currentCell.get();
|
||||
}
|
||||
|
||||
// no more sub-range enums available
|
||||
assert rangeBounds.isEmpty();
|
||||
return null;
|
||||
protected void nextRange() {
|
||||
currentRange.fillBytesRef(currentCellBRB);
|
||||
currentCell = currentCellBRB.get();
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -196,13 +83,12 @@ abstract class GeoPointTermsEnum extends FilteredTermsEnum {
|
|||
@Override
|
||||
protected AcceptStatus accept(BytesRef term) {
|
||||
// validate value is in range
|
||||
while (currentCell == null || term.compareTo(currentCell.get()) > 0) {
|
||||
if (rangeBounds.isEmpty()) {
|
||||
while (currentCell == null || term.compareTo(currentCell) > 0) {
|
||||
if (hasNext() == false) {
|
||||
return AcceptStatus.END;
|
||||
}
|
||||
// peek next sub-range, only seek if the current term is smaller than next lower bound
|
||||
rangeBounds.get(0).fillBytesRef(this.nextSubRange);
|
||||
if (term.compareTo(this.nextSubRange.get()) < 0) {
|
||||
if (term.compareTo(peek()) < 0) {
|
||||
return AcceptStatus.NO_AND_SEEK;
|
||||
}
|
||||
// step forward to next range without seeking, as next range is less or equal current term
|
||||
|
@ -212,17 +98,19 @@ abstract class GeoPointTermsEnum extends FilteredTermsEnum {
|
|||
return AcceptStatus.YES;
|
||||
}
|
||||
|
||||
protected abstract boolean postFilter(final double lon, final double lat);
|
||||
protected boolean postFilter(final double lon, final double lat) {
|
||||
return relationImpl.postFilter(lon, lat);
|
||||
}
|
||||
|
||||
/**
|
||||
* Internal class to represent a range along the space filling curve
|
||||
*/
|
||||
protected final class Range implements Comparable<Range> {
|
||||
final short shift;
|
||||
final long start;
|
||||
final boolean boundary;
|
||||
abstract class BaseRange implements Comparable<BaseRange> {
|
||||
protected short shift;
|
||||
protected long start;
|
||||
protected boolean boundary;
|
||||
|
||||
Range(final long lower, final short shift, boolean boundary) {
|
||||
BaseRange(final long lower, final short shift, boolean boundary) {
|
||||
this.boundary = boundary;
|
||||
this.start = lower;
|
||||
this.shift = shift;
|
||||
|
@ -232,18 +120,21 @@ abstract class GeoPointTermsEnum extends FilteredTermsEnum {
|
|||
* Encode as a BytesRef using a reusable object. This allows us to lazily create the BytesRef (which is
|
||||
* quite expensive), only when we need it.
|
||||
*/
|
||||
private void fillBytesRef(BytesRefBuilder result) {
|
||||
assert result != null;
|
||||
LegacyNumericUtils.longToPrefixCoded(start, shift, result);
|
||||
}
|
||||
abstract protected void fillBytesRef(BytesRefBuilder result);
|
||||
|
||||
@Override
|
||||
public int compareTo(Range other) {
|
||||
public int compareTo(BaseRange other) {
|
||||
final int result = Short.compare(this.shift, other.shift);
|
||||
if (result == 0) {
|
||||
return Long.compare(this.start, other.start);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
protected void set(BaseRange other) {
|
||||
this.start = other.start;
|
||||
this.shift = other.shift;
|
||||
this.boundary = other.boundary;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,157 @@
|
|||
package org.apache.lucene.spatial.util;
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.util.BitUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;

import static org.apache.lucene.spatial.util.GeoUtils.MIN_LON_INCL;
import static org.apache.lucene.spatial.util.GeoUtils.MIN_LAT_INCL;

/**
 * Basic reusable geopoint encoding methods
 *
 * @lucene.experimental
 */
public final class GeoEncodingUtils {
  /** number of bits used for quantizing latitude and longitude values */
  public static final short BITS = 31;
  private static final double LON_SCALE = (0x1L<<BITS)/360.0D;
  private static final double LAT_SCALE = (0x1L<<BITS)/180.0D;

  /**
   * The maximum term length (used for <code>byte[]</code> buffer size)
   * for encoding <code>geoEncoded</code> values.
   * @see #geoCodedToPrefixCodedBytes(long, int, BytesRefBuilder)
   */
  public static final int BUF_SIZE_LONG = 28/8 + 1;

  /** rounding error for quantized latitude and longitude values */
  public static final double TOLERANCE = 1E-6;

  // No instance:
  private GeoEncodingUtils() {
  }

  public static final Long mortonHash(final double lon, final double lat) {
    return BitUtil.interleave(scaleLon(lon), scaleLat(lat));
  }

  public static final double mortonUnhashLon(final long hash) {
    return unscaleLon(BitUtil.deinterleave(hash));
  }

  public static final double mortonUnhashLat(final long hash) {
    return unscaleLat(BitUtil.deinterleave(hash >>> 1));
  }

  protected static final long scaleLon(final double val) {
    return (long) ((val-MIN_LON_INCL) * LON_SCALE);
  }

  protected static final long scaleLat(final double val) {
    return (long) ((val-MIN_LAT_INCL) * LAT_SCALE);
  }

  protected static final double unscaleLon(final long val) {
    return (val / LON_SCALE) + MIN_LON_INCL;
  }

  protected static final double unscaleLat(final long val) {
    return (val / LAT_SCALE) + MIN_LAT_INCL;
  }

  /**
   * Compare two position values within a {@link GeoEncodingUtils#TOLERANCE} factor
   */
  public static double compare(final double v1, final double v2) {
    final double delta = v1-v2;
    return Math.abs(delta) <= TOLERANCE ? 0 : delta;
  }

  /**
   * Convert a geocoded morton long into a prefix coded geo term
   */
  public static void geoCodedToPrefixCoded(long hash, int shift, BytesRefBuilder bytes) {
    geoCodedToPrefixCodedBytes(hash, shift, bytes);
  }

  /**
   * Convert a prefix coded geo term back into the geocoded morton long
   */
  public static long prefixCodedToGeoCoded(final BytesRef val) {
    final long result = fromBytes((byte)0, (byte)0, (byte)0, (byte)0,
        val.bytes[val.offset+0], val.bytes[val.offset+1], val.bytes[val.offset+2], val.bytes[val.offset+3]);
    return result << 32;
  }

  /**
   * GeoTerms are coded using 4 prefix bytes + 1 byte to record number of prefix bits
   *
   * example prefix at shift 54 (yields 10 significant prefix bits):
   *  pppppppp pp000000 00000000 00000000 00001010
   *  (byte 1) (byte 2) (byte 3) (byte 4) (sigbits)
   */
  private static void geoCodedToPrefixCodedBytes(final long hash, final int shift, final BytesRefBuilder bytes) {
    // ensure shift is 32..63
    if (shift < 32 || shift > 63) {
      throw new IllegalArgumentException("Illegal shift value, must be 32..63; got shift=" + shift);
    }
    int nChars = BUF_SIZE_LONG + 1; // one extra for the byte that contains the number of significant bits
    bytes.setLength(nChars);
    bytes.grow(nChars--);
    final int sigBits = 64 - shift;
    bytes.setByteAt(BUF_SIZE_LONG, (byte)(sigBits));
    long sortableBits = hash;
    sortableBits >>>= shift;
    sortableBits <<= 32 - sigBits;
    do {
      bytes.setByteAt(--nChars, (byte)(sortableBits));
      sortableBits >>>= 8;
    } while (nChars > 0);
  }

  /** Get the prefix coded geo term shift value */
  public static int getPrefixCodedShift(final BytesRef val) {
    final int shift = val.bytes[val.offset + BUF_SIZE_LONG];
    if (shift > 63 || shift < 0)
      throw new NumberFormatException("Invalid shift value (" + shift + ") in prefixCoded bytes (is encoded value really a geo point?)");
    return shift;
  }

  /** Converts 8 bytes to a long value */
  protected static long fromBytes(byte b1, byte b2, byte b3, byte b4, byte b5, byte b6, byte b7, byte b8) {
    return ((long)b1 & 255L) << 56 | ((long)b2 & 255L) << 48 | ((long)b3 & 255L) << 40
        | ((long)b4 & 255L) << 32 | ((long)b5 & 255L) << 24 | ((long)b6 & 255L) << 16
        | ((long)b7 & 255L) << 8 | (long)b8 & 255L;
  }

  /** Converts a long value into a bit string (useful for debugging) */
  public static String geoTermToString(long term) {
    StringBuilder s = new StringBuilder(64);
    final int numberOfLeadingZeros = Long.numberOfLeadingZeros(term);
    for (int i = 0; i < numberOfLeadingZeros; i++) {
      s.append('0');
    }
    if (term != 0) {
      s.append(Long.toBinaryString(term));
    }
    return s.toString();
  }
}
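The class above quantizes longitude/latitude onto a 31-bit-per-dimension grid, interleaves the two values into a single morton (Z-order) long, and prefix-codes that long into four bytes plus one trailing byte holding the significant-bit count (64 - shift; the 00001010 in the javadoc diagram is 10, matching shift 54). Editor's note: the following is a minimal round-trip sketch, not part of the patch; it assumes the class above is compiled and on the classpath, and the demo class name is invented for illustration.

import org.apache.lucene.spatial.util.GeoEncodingUtils;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;

public class GeoEncodingUtilsDemo {
  public static void main(String[] args) {
    double lon = -96.774, lat = 32.76342;

    // quantize and interleave lon/lat into one morton long
    long hash = GeoEncodingUtils.mortonHash(lon, lat);

    // unhashing recovers the coordinates to within TOLERANCE (1E-6 degrees)
    double lonBack = GeoEncodingUtils.mortonUnhashLon(hash);
    double latBack = GeoEncodingUtils.mortonUnhashLat(hash);
    System.out.println(GeoEncodingUtils.compare(lon, lonBack) == 0);  // true
    System.out.println(GeoEncodingUtils.compare(lat, latBack) == 0);  // true

    // prefix-code the hash at shift 32, i.e. all 32 high bits are significant
    BytesRefBuilder builder = new BytesRefBuilder();
    GeoEncodingUtils.geoCodedToPrefixCoded(hash, 32, builder);
    BytesRef term = builder.get();

    // the trailing byte records 64 - shift significant bits
    System.out.println(GeoEncodingUtils.getPrefixCodedShift(term));   // 32

    // decoding restores the high 32 bits of the original hash
    long decoded = GeoEncodingUtils.prefixCodedToGeoCoded(term);
    System.out.println(decoded == ((hash >>> 32) << 32));             // true
  }
}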
@@ -40,7 +40,7 @@ public class GeoHashUtils {

  /** maximum precision for geohash strings */
  public static final int PRECISION = 12;
  private static final short MORTON_OFFSET = (GeoUtils.BITS<<1) - (PRECISION*5);
  private static final short MORTON_OFFSET = (GeoEncodingUtils.BITS<<1) - (PRECISION*5);

  // No instance:
  private GeoHashUtils() {

@@ -52,7 +52,7 @@ public class GeoHashUtils {
  public static final long longEncode(final double lon, final double lat, final int level) {
    // shift to appropriate level
    final short msf = (short)(((12 - level) * 5) + MORTON_OFFSET);
    return ((BitUtil.flipFlop(GeoUtils.mortonHash(lon, lat)) >>> msf) << 4) | level;
    return ((BitUtil.flipFlop(GeoEncodingUtils.mortonHash(lon, lat)) >>> msf) << 4) | level;
  }

  /**

@@ -118,7 +118,7 @@ public class GeoHashUtils {
   */
  public static final String stringEncode(final double lon, final double lat, final int level) {
    // convert to geohashlong
    final long ghLong = fromMorton(GeoUtils.mortonHash(lon, lat), level);
    final long ghLong = fromMorton(GeoEncodingUtils.mortonHash(lon, lat), level);
    return stringEncode(ghLong);

  }

@@ -139,7 +139,7 @@ public class GeoHashUtils {

    StringBuilder geoHash = new StringBuilder();
    short precision = 0;
    final short msf = (GeoUtils.BITS<<1)-5;
    final short msf = (GeoEncodingUtils.BITS<<1)-5;
    long mask = 31L<<msf;
    do {
      geoHash.append(BASE_32[(int)((mask & hashedVal)>>>(msf-(precision*5)))]);
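These hunks only swap GeoUtils references for GeoEncodingUtils; the geohash encoding itself is unchanged: the point is morton-hashed, shifted down to 5*level significant bits, and the level is kept in the low four bits of the returned long. Editor's note: an illustrative usage sketch follows, assuming GeoHashUtils lives in org.apache.lucene.spatial.util like the neighboring classes; it is not part of the patch.

import org.apache.lucene.spatial.util.GeoHashUtils;

public class GeoHashDemo {
  public static void main(String[] args) {
    double lon = -96.774, lat = 32.76342;

    // level-6 geohash packed into a long; the low 4 bits carry the level itself
    long ghLong = GeoHashUtils.longEncode(lon, lat, 6);
    System.out.println("level = " + (ghLong & 15));   // 6

    // string form: each 5-bit group is mapped to one base-32 character
    String geohash = GeoHashUtils.stringEncode(lon, lat, GeoHashUtils.PRECISION);
    System.out.println(geohash);
  }
}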
@@ -174,14 +174,14 @@ public class GeoRelationUtils {
        c2 = a2*shapeX[p+1] + b2*shapeY[p+1];
        s = (1/d)*(b2*c1 - b1*c2);
        t = (1/d)*(a1*c2 - a2*c1);
        x00 = StrictMath.min(bbox[b][0], bbox[b+1][0]) - GeoUtils.TOLERANCE;
        x01 = StrictMath.max(bbox[b][0], bbox[b+1][0]) + GeoUtils.TOLERANCE;
        y00 = StrictMath.min(bbox[b][1], bbox[b+1][1]) - GeoUtils.TOLERANCE;
        y01 = StrictMath.max(bbox[b][1], bbox[b+1][1]) + GeoUtils.TOLERANCE;
        x10 = StrictMath.min(shapeX[p], shapeX[p+1]) - GeoUtils.TOLERANCE;
        x11 = StrictMath.max(shapeX[p], shapeX[p+1]) + GeoUtils.TOLERANCE;
        y10 = StrictMath.min(shapeY[p], shapeY[p+1]) - GeoUtils.TOLERANCE;
        y11 = StrictMath.max(shapeY[p], shapeY[p+1]) + GeoUtils.TOLERANCE;
        x00 = StrictMath.min(bbox[b][0], bbox[b+1][0]) - GeoEncodingUtils.TOLERANCE;
        x01 = StrictMath.max(bbox[b][0], bbox[b+1][0]) + GeoEncodingUtils.TOLERANCE;
        y00 = StrictMath.min(bbox[b][1], bbox[b+1][1]) - GeoEncodingUtils.TOLERANCE;
        y01 = StrictMath.max(bbox[b][1], bbox[b+1][1]) + GeoEncodingUtils.TOLERANCE;
        x10 = StrictMath.min(shapeX[p], shapeX[p+1]) - GeoEncodingUtils.TOLERANCE;
        x11 = StrictMath.max(shapeX[p], shapeX[p+1]) + GeoEncodingUtils.TOLERANCE;
        y10 = StrictMath.min(shapeY[p], shapeY[p+1]) - GeoEncodingUtils.TOLERANCE;
        y11 = StrictMath.max(shapeY[p], shapeY[p+1]) + GeoEncodingUtils.TOLERANCE;
        // check whether the intersection point is touching one of the line segments
        boolean touching = ((x00 == s && y00 == t) || (x01 == s && y01 == t))
            || ((x10 == s && y10 == t) || (x11 == s && y11 == t));
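The s/t assignments in this hunk read as the closed-form intersection of the two infinite lines a1*x + b1*y = c1 and a2*x + b2*y = c2 (Cramer's rule), assuming d is the 2x2 determinant a1*b2 - a2*b1 computed earlier in the method, which is not visible in the hunk. Editor's note: a standalone sketch of that closed form, purely illustrative and not part of the change.

public class LineIntersectDemo {
  /** Solves a1*x + b1*y = c1 and a2*x + b2*y = c2 via Cramer's rule; returns null for parallel lines. */
  static double[] intersect(double a1, double b1, double c1, double a2, double b2, double c2) {
    double d = a1 * b2 - a2 * b1;        // determinant of the 2x2 system
    if (d == 0) {
      return null;                       // parallel (or coincident) lines: no unique intersection
    }
    double s = (b2 * c1 - b1 * c2) / d;  // x coordinate, matching the patch's s
    double t = (a1 * c2 - a2 * c1) / d;  // y coordinate, matching the patch's t
    return new double[] {s, t};
  }

  public static void main(String[] args) {
    // x + y = 2 and x - y = 0 intersect at (1, 1)
    double[] p = intersect(1, 1, 2, 1, -1, 0);
    System.out.println("x=" + p[0] + " y=" + p[1]);
  }
}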
@@ -18,8 +18,6 @@ package org.apache.lucene.spatial.util;

import java.util.ArrayList;

import org.apache.lucene.util.BitUtil;

import static java.lang.Math.max;
import static java.lang.Math.min;
import static java.lang.Math.PI;

@@ -30,6 +28,7 @@ import static org.apache.lucene.util.SloppyMath.cos;
import static org.apache.lucene.util.SloppyMath.sin;
import static org.apache.lucene.util.SloppyMath.TO_DEGREES;
import static org.apache.lucene.util.SloppyMath.TO_RADIANS;
import static org.apache.lucene.spatial.util.GeoEncodingUtils.TOLERANCE;
import static org.apache.lucene.spatial.util.GeoProjectionUtils.MAX_LAT_RADIANS;
import static org.apache.lucene.spatial.util.GeoProjectionUtils.MAX_LON_RADIANS;
import static org.apache.lucene.spatial.util.GeoProjectionUtils.MIN_LAT_RADIANS;

@@ -43,13 +42,6 @@ import static org.apache.lucene.spatial.util.GeoProjectionUtils.SEMIMAJOR_AXIS;
 * @lucene.experimental
 */
public final class GeoUtils {
  /** number of bits used for quantizing latitude and longitude values */
  public static final short BITS = 31;
  private static final double LON_SCALE = (0x1L<<BITS)/360.0D;
  private static final double LAT_SCALE = (0x1L<<BITS)/180.0D;
  /** rounding error for quantized latitude and longitude values */
  public static final double TOLERANCE = 1E-6;

  /** Minimum longitude value. */
  public static final double MIN_LON_INCL = -180.0D;

@@ -66,44 +58,14 @@ public final class GeoUtils {
  private GeoUtils() {
  }

  /**
   * encode longitude, latitude geopoint values using morton encoding method
   * https://en.wikipedia.org/wiki/Z-order_curve
   */
  public static final Long mortonHash(final double lon, final double lat) {
    return BitUtil.interleave(scaleLon(lon), scaleLat(lat));
  /** validates latitude value is within standard +/-90 coordinate bounds */
  public static boolean isValidLat(double lat) {
    return Double.isNaN(lat) == false && lat >= MIN_LAT_INCL && lat <= MAX_LAT_INCL;
  }

  /** decode longitude value from morton encoded geo point */
  public static final double mortonUnhashLon(final long hash) {
    return unscaleLon(BitUtil.deinterleave(hash));
  }

  /** decode latitude value from morton encoded geo point */
  public static final double mortonUnhashLat(final long hash) {
    return unscaleLat(BitUtil.deinterleave(hash >>> 1));
  }

  private static final long scaleLon(final double val) {
    return (long) ((val-MIN_LON_INCL) * LON_SCALE);
  }

  private static final long scaleLat(final double val) {
    return (long) ((val-MIN_LAT_INCL) * LAT_SCALE);
  }

  private static final double unscaleLon(final long val) {
    return (val / LON_SCALE) + MIN_LON_INCL;
  }

  private static final double unscaleLat(final long val) {
    return (val / LAT_SCALE) + MIN_LAT_INCL;
  }

  /** Compare two position values within a {@link GeoUtils#TOLERANCE} factor */
  public static double compare(final double v1, final double v2) {
    final double delta = v1-v2;
    return abs(delta) <= TOLERANCE ? 0 : delta;
  /** validates longitude value is within standard +/-180 coordinate bounds */
  public static boolean isValidLon(double lon) {
    return Double.isNaN(lon) == false && lon >= MIN_LON_INCL && lon <= MAX_LON_INCL;
  }

  /** Puts longitude in range of -180 to +180. */

@@ -130,28 +92,15 @@ public final class GeoUtils {
    return (off <= 180 ? off : 360-off) - 90;
  }

  /** Converts long value to bit string (useful for debugging) */
  public static String geoTermToString(long term) {
    StringBuilder s = new StringBuilder(64);
    final int numberOfLeadingZeros = Long.numberOfLeadingZeros(term);
    for (int i = 0; i < numberOfLeadingZeros; i++) {
      s.append('0');
    }
    if (term != 0) {
      s.append(Long.toBinaryString(term));
    }
    return s.toString();
  }

  /**
   * Converts a given circle (defined as a point/radius) to an approximated line-segment polygon
   *
   * @param lon longitudinal center of circle (in degrees)
   * @param lat latitudinal center of circle (in degrees)
   * @param lon longitudinal center of circle (in degrees)
   * @param lat latitudinal center of circle (in degrees)
   * @param radiusMeters distance radius of circle (in meters)
   * @return a list of lon/lat points representing the circle
   */
  @SuppressWarnings({"unchecked","rawtypes"})
  @SuppressWarnings({"unchecked", "rawtypes"})
  public static ArrayList<double[]> circleToPoly(final double lon, final double lat, final double radiusMeters) {
    double angle;
    // a little under-sampling (to limit the number of polygonal points): using archimedes estimation of pi

@@ -161,9 +110,9 @@ public final class GeoUtils {
    double[] lats = new double[sides];

    double[] pt = new double[2];
    final int sidesLen = sides-1;
    for (int i=0; i<sidesLen; ++i) {
      angle = (i*360/sides);
    final int sidesLen = sides - 1;
    for (int i = 0; i < sidesLen; ++i) {
      angle = (i * 360 / sides);
      pt = pointFromLonLatBearingGreatCircle(lon, lat, angle, radiusMeters, pt);
      lons[i] = pt[0];
      lats[i] = pt[1];

@@ -235,14 +184,4 @@ public final class GeoUtils {
    return new GeoRect(max(minLon - TOLERANCE, MIN_LON_INCL), min(maxLon + TOLERANCE, MAX_LON_INCL),
        max(minLat - TOLERANCE, MIN_LAT_INCL), min(maxLat + TOLERANCE, MAX_LAT_INCL));
  }

  /** validates latitude value is within standard +/-90 coordinate bounds */
  public static boolean isValidLat(double lat) {
    return Double.isNaN(lat) == false && lat >= MIN_LAT_INCL && lat <= MAX_LAT_INCL;
  }

  /** validates longitude value is within standard +/-180 coordinate bounds */
  public static boolean isValidLon(double lon) {
    return Double.isNaN(lon) == false && lon >= MIN_LON_INCL && lon <= MAX_LON_INCL;
  }
}
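After the refactor, GeoUtils keeps only the coordinate validators, the normalization helpers, and the circle-to-polygon approximation; the morton encoding itself now lives in GeoEncodingUtils. Editor's note: a small usage sketch follows, purely illustrative; the exact layout of the list returned by circleToPoly is not visible in these hunks, so only its size is printed.

import java.util.ArrayList;

import org.apache.lucene.spatial.util.GeoUtils;

public class GeoUtilsDemo {
  public static void main(String[] args) {
    System.out.println(GeoUtils.isValidLat(91.0));   // false: above +90
    System.out.println(GeoUtils.isValidLon(-179.5)); // true

    // approximate a 1 km circle around a point by a line-segment polygon
    ArrayList<double[]> poly = GeoUtils.circleToPoly(-96.774, 32.76342, 1000.0);
    System.out.println("arrays returned: " + poly.size());
  }
}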
@@ -27,7 +27,9 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.spatial.util.GeoEncodingUtils;
import org.apache.lucene.spatial.document.GeoPointField;
import org.apache.lucene.spatial.document.GeoPointField.TermEncoding;
import org.apache.lucene.spatial.util.BaseGeoPointTestCase;
import org.apache.lucene.spatial.util.GeoRect;
import org.apache.lucene.spatial.util.GeoRelationUtils;

@@ -50,6 +52,8 @@ public class TestGeoPointQuery extends BaseGeoPointTestCase {
  private static Directory directory = null;
  private static IndexReader reader = null;
  private static IndexSearcher searcher = null;
  private static TermEncoding termEncoding = null;
  private static FieldType fieldType = null;

  @Override
  protected boolean forceSmall() {

@@ -58,58 +62,58 @@ public class TestGeoPointQuery extends BaseGeoPointTestCase {

  @Override
  protected void addPointToDoc(String field, Document doc, double lat, double lon) {
    doc.add(new GeoPointField(field, lon, lat, Field.Store.NO));
    doc.add(new GeoPointField(field, lon, lat, fieldType));
  }

  @Override
  protected Query newRectQuery(String field, GeoRect rect) {
    return new GeoPointInBBoxQuery(field, rect.minLon, rect.minLat, rect.maxLon, rect.maxLat);
    return new GeoPointInBBoxQuery(field, termEncoding, rect.minLon, rect.minLat, rect.maxLon, rect.maxLat);
  }

  @Override
  protected Query newDistanceQuery(String field, double centerLat, double centerLon, double radiusMeters) {
    return new GeoPointDistanceQuery(field, centerLon, centerLat, radiusMeters);
    return new GeoPointDistanceQuery(field, termEncoding, centerLon, centerLat, radiusMeters);
  }

  @Override
  protected Query newDistanceRangeQuery(String field, double centerLat, double centerLon, double minRadiusMeters, double radiusMeters) {
    return new GeoPointDistanceRangeQuery(field, centerLon, centerLat, minRadiusMeters, radiusMeters);
    return new GeoPointDistanceRangeQuery(field, termEncoding, centerLon, centerLat, minRadiusMeters, radiusMeters);
  }

  @Override
  protected Query newPolygonQuery(String field, double[] lats, double[] lons) {
    return new GeoPointInPolygonQuery(field, lons, lats);
    return new GeoPointInPolygonQuery(field, termEncoding, lons, lats);
  }

  @BeforeClass
  public static void beforeClass() throws Exception {
    directory = newDirectory();
    termEncoding = TermEncoding.PREFIX;// randomTermEncoding();
    fieldType = randomFieldType();

    RandomIndexWriter writer = new RandomIndexWriter(random(), directory,
        newIndexWriterConfig(new MockAnalyzer(random()))
            .setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000))
            .setMergePolicy(newLogMergePolicy()));

    // create some simple geo points
    final FieldType storedPoint = new FieldType(GeoPointField.TYPE_STORED);
    // this is a simple systematic test
    GeoPointField[] pts = new GeoPointField[] {
        new GeoPointField(FIELD_NAME, -96.774, 32.763420, storedPoint),
        new GeoPointField(FIELD_NAME, -96.7759895324707, 32.7559529921407, storedPoint),
        new GeoPointField(FIELD_NAME, -96.77701950073242, 32.77866942010977, storedPoint),
        new GeoPointField(FIELD_NAME, -96.7706036567688, 32.7756745755423, storedPoint),
        new GeoPointField(FIELD_NAME, -139.73458170890808, 27.703618681345585, storedPoint),
        new GeoPointField(FIELD_NAME, -96.4538113027811, 32.94823588839368, storedPoint),
        new GeoPointField(FIELD_NAME, -96.65084838867188, 33.06047141970814, storedPoint),
        new GeoPointField(FIELD_NAME, -96.7772, 32.778650, storedPoint),
        new GeoPointField(FIELD_NAME, -177.23537676036358, -88.56029371730983, storedPoint),
        new GeoPointField(FIELD_NAME, -26.779373834241003, 33.541429799076354, storedPoint),
        new GeoPointField(FIELD_NAME, -77.35379276106497, 26.774024500421728, storedPoint),
        new GeoPointField(FIELD_NAME, -14.796283808944777, -90.0, storedPoint),
        new GeoPointField(FIELD_NAME, -178.8538113027811, 32.94823588839368, storedPoint),
        new GeoPointField(FIELD_NAME, 178.8538113027811, 32.94823588839368, storedPoint),
        new GeoPointField(FIELD_NAME, -73.998776, 40.720611, storedPoint),
        new GeoPointField(FIELD_NAME, -179.5, -44.5, storedPoint)};
        new GeoPointField(FIELD_NAME, -96.774, 32.763420, fieldType),
        new GeoPointField(FIELD_NAME, -96.7759895324707, 32.7559529921407, fieldType),
        new GeoPointField(FIELD_NAME, -96.77701950073242, 32.77866942010977, fieldType),
        new GeoPointField(FIELD_NAME, -96.7706036567688, 32.7756745755423, fieldType),
        new GeoPointField(FIELD_NAME, -139.73458170890808, 27.703618681345585, fieldType),
        new GeoPointField(FIELD_NAME, -96.4538113027811, 32.94823588839368, fieldType),
        new GeoPointField(FIELD_NAME, -96.65084838867188, 33.06047141970814, fieldType),
        new GeoPointField(FIELD_NAME, -96.7772, 32.778650, fieldType),
        new GeoPointField(FIELD_NAME, -177.23537676036358, -88.56029371730983, fieldType),
        new GeoPointField(FIELD_NAME, -26.779373834241003, 33.541429799076354, fieldType),
        new GeoPointField(FIELD_NAME, -77.35379276106497, 26.774024500421728, fieldType),
        new GeoPointField(FIELD_NAME, -14.796283808944777, -90.0, fieldType),
        new GeoPointField(FIELD_NAME, -178.8538113027811, 32.94823588839368, fieldType),
        new GeoPointField(FIELD_NAME, 178.8538113027811, 32.94823588839368, fieldType),
        new GeoPointField(FIELD_NAME, -73.998776, 40.720611, fieldType),
        new GeoPointField(FIELD_NAME, -179.5, -44.5, fieldType)};

    for (GeoPointField p : pts) {
      Document doc = new Document();

@@ -146,27 +150,38 @@ public class TestGeoPointQuery extends BaseGeoPointTestCase {
    directory = null;
  }

  private static TermEncoding randomTermEncoding() {
    return random().nextBoolean() ? TermEncoding.NUMERIC : TermEncoding.PREFIX;
  }

  private static FieldType randomFieldType() {
    if (termEncoding == TermEncoding.PREFIX) {
      return GeoPointField.PREFIX_TYPE_NOT_STORED;
    }
    return GeoPointField.NUMERIC_TYPE_NOT_STORED;
  }

  private TopDocs bboxQuery(double minLon, double minLat, double maxLon, double maxLat, int limit) throws Exception {
    GeoPointInBBoxQuery q = new GeoPointInBBoxQuery(FIELD_NAME, minLon, minLat, maxLon, maxLat);
    GeoPointInBBoxQuery q = new GeoPointInBBoxQuery(FIELD_NAME, termEncoding, minLon, minLat, maxLon, maxLat);
    return searcher.search(q, limit);
  }

  private TopDocs polygonQuery(double[] lon, double[] lat, int limit) throws Exception {
    GeoPointInPolygonQuery q = new GeoPointInPolygonQuery(FIELD_NAME, lon, lat);
    GeoPointInPolygonQuery q = new GeoPointInPolygonQuery(FIELD_NAME, termEncoding, lon, lat);
    return searcher.search(q, limit);
  }

  private TopDocs geoDistanceQuery(double lon, double lat, double radius, int limit) throws Exception {
    GeoPointDistanceQuery q = new GeoPointDistanceQuery(FIELD_NAME, lon, lat, radius);
    GeoPointDistanceQuery q = new GeoPointDistanceQuery(FIELD_NAME, termEncoding, lon, lat, radius);
    return searcher.search(q, limit);
  }

  @Override
  protected Boolean rectContainsPoint(GeoRect rect, double pointLat, double pointLon) {
    if (GeoUtils.compare(pointLon, rect.minLon) == 0.0 ||
        GeoUtils.compare(pointLon, rect.maxLon) == 0.0 ||
        GeoUtils.compare(pointLat, rect.minLat) == 0.0 ||
        GeoUtils.compare(pointLat, rect.maxLat) == 0.0) {
    if (GeoEncodingUtils.compare(pointLon, rect.minLon) == 0.0 ||
        GeoEncodingUtils.compare(pointLon, rect.maxLon) == 0.0 ||
        GeoEncodingUtils.compare(pointLat, rect.minLat) == 0.0 ||
        GeoEncodingUtils.compare(pointLat, rect.maxLat) == 0.0) {
      // Point is very close to rect boundary
      return null;
    }

@@ -207,12 +222,12 @@ public class TestGeoPointQuery extends BaseGeoPointTestCase {

  private static boolean radiusQueryCanBeWrong(double centerLat, double centerLon, double ptLon, double ptLat,
      final double radius) {
    final long hashedCntr = GeoUtils.mortonHash(centerLon, centerLat);
    centerLon = GeoUtils.mortonUnhashLon(hashedCntr);
    centerLat = GeoUtils.mortonUnhashLat(hashedCntr);
    final long hashedPt = GeoUtils.mortonHash(ptLon, ptLat);
    ptLon = GeoUtils.mortonUnhashLon(hashedPt);
    ptLat = GeoUtils.mortonUnhashLat(hashedPt);
    final long hashedCntr = GeoEncodingUtils.mortonHash(centerLon, centerLat);
    centerLon = GeoEncodingUtils.mortonUnhashLon(hashedCntr);
    centerLat = GeoEncodingUtils.mortonUnhashLat(hashedCntr);
    final long hashedPt = GeoEncodingUtils.mortonHash(ptLon, ptLat);
    ptLon = GeoEncodingUtils.mortonUnhashLon(hashedPt);
    ptLat = GeoEncodingUtils.mortonUnhashLat(hashedPt);

    double ptDistance = SloppyMath.haversin(centerLat, centerLon, ptLat, ptLon)*1000.0;
    double delta = StrictMath.abs(ptDistance - radius);
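The helper above re-quantizes both the query center and the candidate point through mortonHash/mortonUnhash before measuring their haversine distance, so the test can tolerate hits that land within quantization error of the radius boundary. Editor's note: a standalone sketch of that quantize-then-measure step follows, using only the GeoEncodingUtils and SloppyMath calls already shown above; the coordinates and radius are made-up values.

import org.apache.lucene.spatial.util.GeoEncodingUtils;
import org.apache.lucene.util.SloppyMath;

public class RadiusSlopDemo {
  public static void main(String[] args) {
    double centerLon = -96.774, centerLat = 32.76342;
    double ptLon = -96.78, ptLat = 32.76, radiusMeters = 600.0;

    // snap both points onto the quantized grid the index actually stores
    long c = GeoEncodingUtils.mortonHash(centerLon, centerLat);
    long p = GeoEncodingUtils.mortonHash(ptLon, ptLat);
    double qCenterLon = GeoEncodingUtils.mortonUnhashLon(c);
    double qCenterLat = GeoEncodingUtils.mortonUnhashLat(c);
    double qPtLon = GeoEncodingUtils.mortonUnhashLon(p);
    double qPtLat = GeoEncodingUtils.mortonUnhashLat(p);

    // distance between the quantized points; the * 1000.0 mirrors the test above
    double distMeters = SloppyMath.haversin(qCenterLat, qCenterLon, qPtLat, qPtLon) * 1000.0;
    double delta = StrictMath.abs(distMeters - radiusMeters);
    System.out.println("distance=" + distMeters + "m, |distance - radius|=" + delta + "m");
  }
}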
@@ -227,7 +242,7 @@ public class TestGeoPointQuery extends BaseGeoPointTestCase {

  private TopDocs geoDistanceRangeQuery(double lon, double lat, double minRadius, double maxRadius, int limit)
      throws Exception {
    GeoPointDistanceRangeQuery q = new GeoPointDistanceRangeQuery(FIELD_NAME, lon, lat, minRadius, maxRadius);
    GeoPointDistanceRangeQuery q = new GeoPointDistanceRangeQuery(FIELD_NAME, termEncoding, lon, lat, minRadius, maxRadius);
    return searcher.search(q, limit);
  }

@@ -347,9 +362,9 @@ public class TestGeoPointQuery extends BaseGeoPointTestCase {
  }

  public void testMortonEncoding() throws Exception {
    long hash = GeoUtils.mortonHash(180, 90);
    assertEquals(180.0, GeoUtils.mortonUnhashLon(hash), 0);
    assertEquals(90.0, GeoUtils.mortonUnhashLat(hash), 0);
    long hash = GeoEncodingUtils.mortonHash(180, 90);
    assertEquals(180.0, GeoEncodingUtils.mortonUnhashLon(hash), 0);
    assertEquals(90.0, GeoEncodingUtils.mortonUnhashLat(hash), 0);
  }

  public void testEncodeDecode() throws Exception {

@@ -359,12 +374,12 @@ public class TestGeoPointQuery extends BaseGeoPointTestCase {
      double lat = randomLat(small);
      double lon = randomLon(small);

      long enc = GeoUtils.mortonHash(lon, lat);
      double latEnc = GeoUtils.mortonUnhashLat(enc);
      double lonEnc = GeoUtils.mortonUnhashLon(enc);
      long enc = GeoEncodingUtils.mortonHash(lon, lat);
      double latEnc = GeoEncodingUtils.mortonUnhashLat(enc);
      double lonEnc = GeoEncodingUtils.mortonUnhashLon(enc);

      assertEquals("lat=" + lat + " latEnc=" + latEnc + " diff=" + (lat - latEnc), lat, latEnc, GeoUtils.TOLERANCE);
      assertEquals("lon=" + lon + " lonEnc=" + lonEnc + " diff=" + (lon - lonEnc), lon, lonEnc, GeoUtils.TOLERANCE);
      assertEquals("lat=" + lat + " latEnc=" + latEnc + " diff=" + (lat - latEnc), lat, latEnc, GeoEncodingUtils.TOLERANCE);
      assertEquals("lon=" + lon + " lonEnc=" + lonEnc + " diff=" + (lon - lonEnc), lon, lonEnc, GeoEncodingUtils.TOLERANCE);
    }
  }

@@ -375,13 +390,13 @@ public class TestGeoPointQuery extends BaseGeoPointTestCase {
      double lat = randomLat(small);
      double lon = randomLon(small);

      long enc = GeoUtils.mortonHash(lon, lat);
      double latEnc = GeoUtils.mortonUnhashLat(enc);
      double lonEnc = GeoUtils.mortonUnhashLon(enc);
      long enc = GeoEncodingUtils.mortonHash(lon, lat);
      double latEnc = GeoEncodingUtils.mortonUnhashLat(enc);
      double lonEnc = GeoEncodingUtils.mortonUnhashLon(enc);

      long enc2 = GeoUtils.mortonHash(lon, lat);
      double latEnc2 = GeoUtils.mortonUnhashLat(enc2);
      double lonEnc2 = GeoUtils.mortonUnhashLon(enc2);
      long enc2 = GeoEncodingUtils.mortonHash(lon, lat);
      double latEnc2 = GeoEncodingUtils.mortonUnhashLat(enc2);
      double lonEnc2 = GeoEncodingUtils.mortonUnhashLon(enc2);
      assertEquals(latEnc, latEnc2, 0.0);
      assertEquals(lonEnc, lonEnc2, 0.0);
    }
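Editor's note: the test above wires the new TermEncoding parameter through every query constructor. The following end-to-end sketch shows the same API outside the test framework; it is illustrative only. It assumes GeoPointInBBoxQuery lives in org.apache.lucene.spatial.search at this commit, uses a plain IndexWriter with StandardAnalyzer instead of RandomIndexWriter/MockAnalyzer, and the field name "location" is invented.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.spatial.document.GeoPointField;
import org.apache.lucene.spatial.document.GeoPointField.TermEncoding;
import org.apache.lucene.spatial.search.GeoPointInBBoxQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class GeoPointQuerySketch {
  public static void main(String[] args) throws Exception {
    try (Directory dir = new RAMDirectory();
         IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
      Document doc = new Document();
      // prefix-encoded, unstored point field, as chosen by randomFieldType() above
      doc.add(new GeoPointField("location", -96.774, 32.76342, GeoPointField.PREFIX_TYPE_NOT_STORED));
      writer.addDocument(doc);
      writer.commit();

      try (DirectoryReader reader = DirectoryReader.open(dir)) {
        IndexSearcher searcher = new IndexSearcher(reader);
        // bounding box query built with the new TermEncoding parameter (lon/lat argument order as in the test)
        TopDocs hits = searcher.search(
            new GeoPointInBBoxQuery("location", TermEncoding.PREFIX, -97.0, 32.0, -96.0, 33.0), 10);
        System.out.println("hits: " + hits.totalHits);
      }
    }
  }
}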