mirror of https://github.com/apache/lucene.git

commit 79b62ee731: Merge remote-tracking branch 'origin/master'
@@ -31,6 +31,7 @@
     <buildFile url="file://$PROJECT_DIR$/lucene/replicator/build.xml" />
     <buildFile url="file://$PROJECT_DIR$/lucene/sandbox/build.xml" />
     <buildFile url="file://$PROJECT_DIR$/lucene/spatial/build.xml" />
+    <buildFile url="file://$PROJECT_DIR$/lucene/spatial-extras/build.xml" />
     <buildFile url="file://$PROJECT_DIR$/lucene/suggest/build.xml" />
     <buildFile url="file://$PROJECT_DIR$/lucene/test-framework/build.xml" />
     <buildFile url="file://$PROJECT_DIR$/lucene/tools/build.xml" />
@@ -36,6 +36,7 @@
     <module group="Lucene/Other" filepath="$PROJECT_DIR$/lucene/replicator/replicator.iml" />
     <module group="Lucene/Other" filepath="$PROJECT_DIR$/lucene/sandbox/sandbox.iml" />
     <module group="Lucene/Other" filepath="$PROJECT_DIR$/lucene/spatial/spatial.iml" />
+    <module group="Lucene/Other" filepath="$PROJECT_DIR$/lucene/spatial-extras/spatial-extras.iml" />
     <module group="Lucene/Other" filepath="$PROJECT_DIR$/lucene/spatial3d/spatial3d.iml" />
     <module group="Lucene/Other" filepath="$PROJECT_DIR$/lucene/suggest/suggest.iml" />
     <module group="Lucene/Other" filepath="$PROJECT_DIR$/lucene/tools/tools.iml" />
@@ -204,6 +204,14 @@
       <option name="TEST_SEARCH_SCOPE"><value defaultName="singleModule" /></option>
       <patterns><pattern testClass=".*\.Test[^.]*|.*\.[^.]*Test" /></patterns>
     </configuration>
+    <configuration default="false" name="Module spatial-extras" type="JUnit" factoryName="JUnit">
+      <module name="spatial-extras" />
+      <option name="TEST_OBJECT" value="pattern" />
+      <option name="WORKING_DIRECTORY" value="file://$PROJECT_DIR$/idea-build/lucene/spatial-extras" />
+      <option name="VM_PARAMETERS" value="-ea -DtempDir=temp" />
+      <option name="TEST_SEARCH_SCOPE"><value defaultName="singleModule" /></option>
+      <patterns><pattern testClass=".*\.Test[^.]*|.*\.[^.]*Test" /></patterns>
+    </configuration>
     <configuration default="false" name="Module spatial3d" type="JUnit" factoryName="JUnit">
       <module name="spatial3d" />
       <option name="TEST_OBJECT" value="pattern" />
@@ -333,7 +341,7 @@
       <patterns><pattern testClass=".*\.Test[^.]*|.*\.[^.]*Test" /></patterns>
     </configuration>
 
-    <list size="40">
+    <list size="41">
       <item index="0" class="java.lang.String" itemvalue="JUnit.Lucene core" />
       <item index="1" class="java.lang.String" itemvalue="JUnit.Module analyzers-common" />
       <item index="2" class="java.lang.String" itemvalue="JUnit.Module analyzers-icu" />
@@ -359,21 +367,22 @@
       <item index="22" class="java.lang.String" itemvalue="JUnit.Module replicator" />
       <item index="23" class="java.lang.String" itemvalue="JUnit.Module sandbox" />
       <item index="24" class="java.lang.String" itemvalue="JUnit.Module spatial" />
-      <item index="25" class="java.lang.String" itemvalue="JUnit.Module spatial3d" />
-      <item index="26" class="java.lang.String" itemvalue="JUnit.Module suggest" />
-      <item index="27" class="java.lang.String" itemvalue="JUnit.Solr core" />
-      <item index="28" class="java.lang.String" itemvalue="JUnit.Solr analysis-extras contrib" />
-      <item index="29" class="java.lang.String" itemvalue="JUnit.Solr clustering contrib" />
-      <item index="30" class="java.lang.String" itemvalue="JUnit.Solr dataimporthandler contrib" />
-      <item index="31" class="java.lang.String" itemvalue="JUnit.Solr dataimporthandler-extras contrib" />
-      <item index="32" class="java.lang.String" itemvalue="JUnit.Solr extraction contrib" />
-      <item index="33" class="java.lang.String" itemvalue="JUnit.Solr map-reduce contrib" />
-      <item index="34" class="java.lang.String" itemvalue="JUnit.Solr morphlines-cell contrib" />
-      <item index="35" class="java.lang.String" itemvalue="JUnit.Solr morphlines-core contrib" />
-      <item index="36" class="java.lang.String" itemvalue="JUnit.Solr langid contrib" />
-      <item index="37" class="java.lang.String" itemvalue="JUnit.Solr uima contrib" />
-      <item index="38" class="java.lang.String" itemvalue="JUnit.Solr velocity contrib" />
-      <item index="39" class="java.lang.String" itemvalue="JUnit.Solrj" />
+      <item index="25" class="java.lang.String" itemvalue="JUnit.Module spatial-extras" />
+      <item index="26" class="java.lang.String" itemvalue="JUnit.Module spatial3d" />
+      <item index="27" class="java.lang.String" itemvalue="JUnit.Module suggest" />
+      <item index="28" class="java.lang.String" itemvalue="JUnit.Solr core" />
+      <item index="29" class="java.lang.String" itemvalue="JUnit.Solr analysis-extras contrib" />
+      <item index="30" class="java.lang.String" itemvalue="JUnit.Solr clustering contrib" />
+      <item index="31" class="java.lang.String" itemvalue="JUnit.Solr dataimporthandler contrib" />
+      <item index="32" class="java.lang.String" itemvalue="JUnit.Solr dataimporthandler-extras contrib" />
+      <item index="33" class="java.lang.String" itemvalue="JUnit.Solr extraction contrib" />
+      <item index="34" class="java.lang.String" itemvalue="JUnit.Solr map-reduce contrib" />
+      <item index="35" class="java.lang.String" itemvalue="JUnit.Solr morphlines-cell contrib" />
+      <item index="36" class="java.lang.String" itemvalue="JUnit.Solr morphlines-core contrib" />
+      <item index="37" class="java.lang.String" itemvalue="JUnit.Solr langid contrib" />
+      <item index="38" class="java.lang.String" itemvalue="JUnit.Solr uima contrib" />
+      <item index="39" class="java.lang.String" itemvalue="JUnit.Solr velocity contrib" />
+      <item index="40" class="java.lang.String" itemvalue="JUnit.Solrj" />
     </list>
   </component>
 </project>
@@ -24,7 +24,7 @@
     <orderEntry type="library" scope="TEST" name="JUnit" level="project" />
     <orderEntry type="module" scope="TEST" module-name="lucene-test-framework" />
     <orderEntry type="module" scope="TEST" module-name="benchmark-conf" />
-    <orderEntry type="module" module-name="spatial" />
+    <orderEntry type="module" module-name="spatial-extras" />
     <orderEntry type="module" module-name="facet" />
     <orderEntry type="module" module-name="highlighter" />
     <orderEntry type="module" module-name="icu" />
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+  <component name="NewModuleRootManager" inherit-compiler-output="false">
+    <output url="file://$MODULE_DIR$/../../idea-build/lucene/spatial-extras/classes/java" />
+    <output-test url="file://$MODULE_DIR$/../../idea-build/lucene/spatial-extras/classes/test" />
+    <exclude-output />
+    <content url="file://$MODULE_DIR$">
+      <sourceFolder url="file://$MODULE_DIR$/src/java" isTestSource="false" />
+      <sourceFolder url="file://$MODULE_DIR$/src/test" isTestSource="true" />
+      <sourceFolder url="file://$MODULE_DIR$/src/test-files" type="java-test-resource" />
+    </content>
+    <orderEntry type="inheritedJdk" />
+    <orderEntry type="sourceFolder" forTests="false" />
+    <orderEntry type="module-library" exported="">
+      <library>
+        <CLASSES>
+          <root url="file://$MODULE_DIR$/lib" />
+        </CLASSES>
+        <JAVADOC />
+        <SOURCES />
+        <jarDirectory url="file://$MODULE_DIR$/lib" recursive="false" />
+      </library>
+    </orderEntry>
+    <orderEntry type="library" scope="TEST" name="JUnit" level="project" />
+    <orderEntry type="module" scope="TEST" module-name="lucene-test-framework" />
+    <orderEntry type="module" module-name="lucene-core" />
+    <orderEntry type="module" module-name="queries" />
+    <orderEntry type="module" module-name="misc" />
+    <orderEntry type="module" module-name="spatial3d" />
+    <orderEntry type="module" module-name="analysis-common" scope="TEST"/>
+  </component>
+</module>
@@ -7,27 +7,12 @@
     <content url="file://$MODULE_DIR$">
       <sourceFolder url="file://$MODULE_DIR$/src/java" isTestSource="false" />
       <sourceFolder url="file://$MODULE_DIR$/src/test" isTestSource="true" />
-      <sourceFolder url="file://$MODULE_DIR$/src/test-files" type="java-test-resource" />
     </content>
     <orderEntry type="inheritedJdk" />
     <orderEntry type="sourceFolder" forTests="false" />
-    <orderEntry type="module-library" exported="">
-      <library>
-        <CLASSES>
-          <root url="file://$MODULE_DIR$/lib" />
-        </CLASSES>
-        <JAVADOC />
-        <SOURCES />
-        <jarDirectory url="file://$MODULE_DIR$/lib" recursive="false" />
-      </library>
-    </orderEntry>
     <orderEntry type="library" scope="TEST" name="JUnit" level="project" />
     <orderEntry type="module" scope="TEST" module-name="lucene-test-framework" />
     <orderEntry type="module" module-name="lucene-core" />
-    <orderEntry type="module" module-name="queries" />
-    <orderEntry type="module" module-name="misc" />
-    <orderEntry type="module" module-name="spatial3d" />
-    <orderEntry type="module" module-name="analysis-common" scope="TEST"/>
   </component>
 </module>
@@ -16,7 +16,7 @@
     <orderEntry type="library" name="Solr example library" level="project" />
     <orderEntry type="module" module-name="solrj" />
     <orderEntry type="module" module-name="kuromoji" />
-    <orderEntry type="module" module-name="spatial" />
+    <orderEntry type="module" module-name="spatial-extras" />
     <orderEntry type="module" module-name="grouping" />
     <orderEntry type="module" module-name="highlighter" />
     <orderEntry type="module" module-name="icu" />
@@ -25,7 +25,7 @@
     <orderEntry type="module" scope="TEST" module-name="queryparser" />
     <orderEntry type="module" scope="TEST" module-name="queries" />
     <orderEntry type="module" scope="TEST" module-name="suggest" />
-    <orderEntry type="module" scope="TEST" module-name="spatial" />
+    <orderEntry type="module" scope="TEST" module-name="spatial-extras" />
     <orderEntry type="module" scope="TEST" module-name="misc" />
     <orderEntry type="module" scope="TEST" module-name="join" />
     <orderEntry type="module" scope="TEST" module-name="expressions" />
@@ -60,6 +60,7 @@
     <module>replicator</module>
     <module>sandbox</module>
     <module>spatial</module>
+    <module>spatial-extras</module>
     <module>spatial3d</module>
     <module>suggest</module>
   </modules>
@@ -0,0 +1,62 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+  -->
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.lucene</groupId>
+    <artifactId>lucene-parent</artifactId>
+    <version>@version@</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+  <groupId>org.apache.lucene</groupId>
+  <artifactId>lucene-spatial-extras</artifactId>
+  <packaging>jar</packaging>
+  <name>Lucene Spatial Extras</name>
+  <description>
+    Advanced Spatial Shape Strategies for Apache Lucene
+  </description>
+  <properties>
+    <module-directory>lucene/spatial-extras</module-directory>
+    <relative-top-level>../../..</relative-top-level>
+    <module-path>${relative-top-level}/${module-directory}</module-path>
+  </properties>
+  <dependencies>
+    <dependency>
+      <!-- lucene-test-framework dependency must be declared before lucene-core -->
+      <groupId>org.apache.lucene</groupId>
+      <artifactId>lucene-test-framework</artifactId>
+      <scope>test</scope>
+    </dependency>
+    @lucene-spatial-extras.internal.dependencies@
+    @lucene-spatial-extras.external.dependencies@
+    @lucene-spatial-extras.internal.test.dependencies@
+    @lucene-spatial-extras.external.test.dependencies@
+  </dependencies>
+  <build>
+    <sourceDirectory>${module-path}/src/java</sourceDirectory>
+    <testSourceDirectory>${module-path}/src/test</testSourceDirectory>
+    <testResources>
+      <testResource>
+        <directory>${module-path}/src/test-files</directory>
+      </testResource>
+    </testResources>
+  </build>
+</project>
@@ -31,7 +31,7 @@
   <packaging>jar</packaging>
   <name>Lucene Spatial</name>
   <description>
-    Spatial Strategies for Apache Lucene
+    Geospatial Indexing and Query for Apache Lucene
   </description>
   <properties>
     <module-directory>lucene/spatial</module-directory>
@@ -643,7 +643,7 @@ def verifyUnpacked(java, project, artifact, unpackPath, gitRevision, version, te
 
   if project == 'lucene':
     # TODO: clean this up to not be a list of modules that we must maintain
-    extras = ('analysis', 'backward-codecs', 'benchmark', 'classification', 'codecs', 'core', 'demo', 'docs', 'expressions', 'facet', 'grouping', 'highlighter', 'join', 'memory', 'misc', 'queries', 'queryparser', 'replicator', 'sandbox', 'spatial', 'spatial3d', 'suggest', 'test-framework', 'licenses')
+    extras = ('analysis', 'backward-codecs', 'benchmark', 'classification', 'codecs', 'core', 'demo', 'docs', 'expressions', 'facet', 'grouping', 'highlighter', 'join', 'memory', 'misc', 'queries', 'queryparser', 'replicator', 'sandbox', 'spatial', 'spatial-extras', 'spatial3d', 'suggest', 'test-framework', 'licenses')
     if isSrc:
       extras += ('build.xml', 'common-build.xml', 'module-build.xml', 'ivy-settings.xml', 'ivy-versions.properties', 'ivy-ignore-conflicts.properties', 'version.properties', 'tools', 'site')
     else:
@@ -98,12 +98,18 @@ API Changes
 * LUCENE-6917: Deprecate and rename NumericXXX classes to
   LegacyNumericXXX in favor of points (Mike McCandless)
 
-* LUCENE-6947: SortField.missingValue is now protected. You can read its value
-  using the new SortField.getMissingValue getter. (Adrien Grand)
+* LUCENE-6947: SortField.missingValue is now protected. You can read its
+  value using the new SortField.getMissingValue getter. (Adrien Grand)
+
+* LUCENE-7028: Remove duplicate method in LegacyNumericUtils.
+  (Uwe Schindler)
+
+* LUCENE-7052, LUCENE-7053: Remove custom comparators from BytesRef
+  class and solely use natural byte[] comparator throughout codebase.
+  This also simplifies API of BytesRefHash. It also replaces the natural
+  comparator in ArrayUtil by Java 8's Comparator#naturalOrder().
+  (Mike McCandless, Uwe Schindler, Robert Muir)
 
 Optimizations
 
 * LUCENE-6891: Use prefix coding when writing points in

@@ -115,6 +121,12 @@ Optimizations
   merging to merge sort the already sorted segments instead of
   re-indexing (Mike McCandless)
 
+* LUCENE-6793: LegacyNumericRangeQuery.hashCode() is now less subject to hash
+  collisions. (J.B. Langston via Adrien Grand)
+
+* LUCENE-7050: TermsQuery is now cached more aggressively by the default
+  query caching policy. (Adrien Grand)
+
 Changes in Runtime Behavior
 
 * LUCENE-6789: IndexSearcher's default Similarity is changed to BM25Similarity.
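The LUCENE-7052/7053 entry is the thread running through most of the Java hunks below: every call site that used to pass BytesRef.getUTF8SortedAsUnicodeComparator() now relies on BytesRef's natural Comparable order. A small self-contained sketch of what that order means — unsigned, byte-wise lexicographic comparison — follows; it is illustrative only, not Lucene code:

import java.util.Arrays;

public class UnsignedByteOrderDemo {
  // Same contract as the new BytesRef.compareTo: bytes are compared as
  // unsigned values, and a shorter array that is a prefix sorts first.
  static int compareUnsignedLexicographic(byte[] a, byte[] b) {
    int stop = Math.min(a.length, b.length);
    for (int i = 0; i < stop; i++) {
      int diff = (a[i] & 0xff) - (b[i] & 0xff);
      if (diff != 0) {
        return diff;
      }
    }
    return a.length - b.length;
  }

  public static void main(String[] args) {
    byte[] x = { (byte) 0xC3, (byte) 0xA9 }; // UTF-8 for "é"
    byte[] y = { 0x7A };                     // UTF-8 for "z"
    // Signed comparison would put 0xC3 (-61) first; unsigned puts it last,
    // which matches Unicode code point order for UTF-8 encoded text.
    System.out.println(compareUnsignedLexicographic(x, y) > 0); // true
  }
}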
@@ -205,7 +205,7 @@ public final class StemmerOverrideFilter extends TokenFilter {
       ByteSequenceOutputs outputs = ByteSequenceOutputs.getSingleton();
       org.apache.lucene.util.fst.Builder<BytesRef> builder = new org.apache.lucene.util.fst.Builder<>(
           FST.INPUT_TYPE.BYTE4, outputs);
-      final int[] sort = hash.sort(BytesRef.getUTF8SortedAsUnicodeComparator());
+      final int[] sort = hash.sort();
       IntsRefBuilder intsSpare = new IntsRefBuilder();
       final int size = hash.size();
       BytesRef spare = new BytesRef();
@@ -171,7 +171,7 @@
     <pathelement path="${analyzers-common.jar}"/>
     <pathelement path="${queryparser.jar}"/>
     <pathelement path="${facet.jar}"/>
-    <pathelement path="${spatial.jar}"/>
+    <pathelement path="${spatial-extras.jar}"/>
     <pathelement path="${queries.jar}"/>
     <pathelement path="${codecs.jar}"/>
     <pathelement path="${join.jar}"/>

@@ -185,7 +185,7 @@
   </path>
 
   <target name="javadocs" depends="javadocs-memory,javadocs-highlighter,javadocs-analyzers-common,
-    javadocs-queryparser,javadocs-facet,javadocs-spatial,compile-core,check-javadocs-uptodate"
+    javadocs-queryparser,javadocs-facet,javadocs-spatial-extras,compile-core,check-javadocs-uptodate"
     unless="javadocs-uptodate-${name}">
     <invoke-module-javadoc>
       <links>

@@ -194,7 +194,7 @@
       <link href="../analyzers-common"/>
       <link href="../queryparser"/>
       <link href="../facet"/>
-      <link href="../spatial"/>
+      <link href="../spatial-extras"/>
     </links>
     </invoke-module-javadoc>
   </target>

@@ -277,7 +277,7 @@
     <echo>Benchmark output in JIRA table format is in file: ${shingle.jira.output.file}</echo>
   </target>
 
-  <target name="init" depends="module-build.init,jar-memory,jar-highlighter,jar-analyzers-common,jar-queryparser,jar-facet,jar-spatial,jar-codecs,jar-join"/>
+  <target name="init" depends="module-build.init,jar-memory,jar-highlighter,jar-analyzers-common,jar-queryparser,jar-facet,jar-spatial-extras,jar-codecs,jar-join"/>
 
   <target name="compile-test" depends="copy-alg-files-for-testing,module-build.compile-test"/>
   <target name="copy-alg-files-for-testing" description="copy .alg files as resources for testing">
@@ -179,7 +179,8 @@
     <!-- queries: problems -->
     <!-- queryparser: problems -->
     <!-- sandbox: problems -->
-    <!-- spatial: problems -->
+    <check-missing-javadocs dir="build/docs/spatial" level="method"/>
+    <!-- spatial-extras: problems -->
     <check-missing-javadocs dir="build/docs/suggest" level="method"/>
     <!-- test-framework: problems -->
 
@@ -372,7 +372,7 @@ public class BlockTermsReader extends FieldsProducer {
         // is after current term but before next index term:
         if (indexIsCurrent) {
 
-          final int cmp = BytesRef.getUTF8SortedAsUnicodeComparator().compare(term.get(), target);
+          final int cmp = term.get().compareTo(target);
 
           if (cmp == 0) {
             // Already at the requested term

@@ -390,7 +390,7 @@ public class BlockTermsReader extends FieldsProducer {
             didIndexNext = true;
           }
 
-          if (nextIndexTerm == null || BytesRef.getUTF8SortedAsUnicodeComparator().compare(target, nextIndexTerm) < 0) {
+          if (nextIndexTerm == null || target.compareTo(nextIndexTerm) < 0) {
             // Optimization: requested term is within the
             // same term block we are now in; skip seeking
             // (but do scanning):
@@ -112,7 +112,7 @@ class SortedDocValuesWriter extends DocValuesWriter {
     final int valueCount = hash.size();
     final PackedLongValues ords = pending.build();
 
-    final int[] sortedValues = hash.sort(BytesRef.getUTF8SortedAsUnicodeComparator());
+    final int[] sortedValues = hash.sort();
     final int[] ordMap = new int[valueCount];
 
     for(int ord=0;ord<valueCount;ord++) {

@@ -152,7 +152,7 @@ class SortedSetDocValuesWriter extends DocValuesWriter {
     final PackedLongValues ords = pending.build();
     final PackedLongValues ordCounts = pendingCounts.build();
 
-    final int[] sortedValues = hash.sort(BytesRef.getUTF8SortedAsUnicodeComparator());
+    final int[] sortedValues = hash.sort();
     final int[] ordMap = new int[valueCount];
 
     for(int ord=0;ord<valueCount;ord++) {
@@ -93,7 +93,7 @@ abstract class TermsHashPerField implements Comparable<TermsHashPerField> {
   /** Collapse the hash table and sort in-place; also sets
    *  this.sortedTermIDs to the results */
   public int[] sortPostings() {
-    sortedTermIDs = bytesHash.sort(BytesRef.getUTF8SortedAsUnicodeComparator());
+    sortedTermIDs = bytesHash.sort();
     return sortedTermIDs;
   }
 
@@ -59,9 +59,6 @@ public class FuzzyTermsEnum extends TermsEnum {
 
   private float bottom;
   private BytesRef bottomTerm;
-
-  // TODO: chicken-and-egg
-  private final Comparator<BytesRef> termComparator = BytesRef.getUTF8SortedAsUnicodeComparator();
 
   protected final float minSimilarity;
   protected final float scale_factor;

@@ -193,7 +190,7 @@ public class FuzzyTermsEnum extends TermsEnum {
     int oldMaxEdits = maxEdits;
 
     // true if the last term encountered is lexicographically equal or after the bottom term in the PQ
-    boolean termAfter = bottomTerm == null || (lastTerm != null && termComparator.compare(lastTerm, bottomTerm) >= 0);
+    boolean termAfter = bottomTerm == null || (lastTerm != null && lastTerm.compareTo(bottomTerm) >= 0);
 
     // as long as the max non-competitive boost is >= the max boost
     // for some edit distance, keep dropping the max edit distance.
@@ -348,12 +348,12 @@ public final class LegacyNumericRangeQuery<T extends Number> extends MultiTermQu
   @Override
   public final int hashCode() {
     int hash = super.hashCode();
-    hash += precisionStep^0x64365465;
-    if (min != null) hash += min.hashCode()^0x14fa55fb;
-    if (max != null) hash += max.hashCode()^0x733fa5fe;
-    return hash +
-      (Boolean.valueOf(minInclusive).hashCode()^0x14fa55fb)+
-      (Boolean.valueOf(maxInclusive).hashCode()^0x733fa5fe);
+    hash = 31 * hash + precisionStep;
+    hash = 31 * hash + Objects.hashCode(min);
+    hash = 31 * hash + Objects.hashCode(max);
+    hash = 31 * hash + Objects.hashCode(minInclusive);
+    hash = 31 * hash + Objects.hashCode(maxInclusive);
+    return hash;
   }
 
   // members (package private, to be also fast accessible by NumericRangeTermEnum)
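This hashCode rewrite (and the matching ones in PointInSetQuery and PointRangeQuery further down, per LUCENE-6793) swaps commutative XOR/add mixing for the standard 31-multiplier chain, which is order-sensitive: swapping two field values changes the hash. A small illustrative sketch, not taken from the Lucene sources:

import java.util.Objects;

public class HashMixDemo {
  // Commutative mixing: swapping min and max yields the same hash.
  static int additiveHash(Integer min, Integer max) {
    return Objects.hashCode(min) + Objects.hashCode(max);
  }

  // Order-sensitive mixing, as in the new LegacyNumericRangeQuery.hashCode().
  static int chainedHash(Integer min, Integer max) {
    int hash = 17;
    hash = 31 * hash + Objects.hashCode(min); // Objects.hashCode tolerates null
    hash = 31 * hash + Objects.hashCode(max);
    return hash;
  }

  public static void main(String[] args) {
    System.out.println(additiveHash(1, 2) == additiveHash(2, 1)); // true: a collision
    System.out.println(chainedHash(1, 2) == chainedHash(2, 1));   // false
  }
}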
@@ -19,11 +19,13 @@ package org.apache.lucene.search;
 
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Objects;
 
 import org.apache.lucene.document.BinaryPoint;
 import org.apache.lucene.document.DoublePoint;
 import org.apache.lucene.document.FloatPoint;
 import org.apache.lucene.document.IntPoint;
 import org.apache.lucene.document.LongPoint;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.PointValues.IntersectVisitor;

@@ -35,14 +37,27 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.BytesRefIterator;
 import org.apache.lucene.util.DocIdSetBuilder;
 import org.apache.lucene.util.NumericUtils;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.lucene.util.StringHelper;
 
-/** Finds all documents whose point value, previously indexed with e.g. {@link org.apache.lucene.document.LongPoint}, is contained in the
- *  specified set */
-public class PointInSetQuery extends Query {
+/**
+ * Abstract query class to find all documents whose single or multi-dimensional point values, previously indexed with e.g. {@link IntPoint},
+ * is contained in the specified set.
+ *
+ * <p>
+ * This is for subclasses and works on the underlying binary encoding: to
+ * create range queries for lucene's standard {@code Point} types, refer to factory
+ * methods on those classes, e.g. {@link IntPoint#newSetQuery IntPoint.newSetQuery()} for
+ * fields indexed with {@link IntPoint}.
+ *
+ * @see IntPoint
+ * @see LongPoint
+ * @see FloatPoint
+ * @see DoublePoint
+ * @see BinaryPoint
+ *
+ * @lucene.experimental */
+public abstract class PointInSetQuery extends Query {
   // A little bit overkill for us, since all of our "terms" are always in the same field:
   final PrefixCodedTerms sortedPackedPoints;
   final int sortedPackedPointsHashCode;
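The rewritten javadoc above points users at the per-type factory methods rather than at PointInSetQuery itself, which is now abstract. A minimal usage sketch against that factory API — the field name and values here are made up for illustration:

import org.apache.lucene.document.IntPoint;
import org.apache.lucene.search.Query;

public class PointSetQueryUsage {
  public static void main(String[] args) {
    // Matches documents whose indexed IntPoint field "year" (hypothetical
    // field name) has any of the given values; per the diff, the values are
    // sorted and prefix-coded internally by the PointInSetQuery subclass.
    Query q = IntPoint.newSetQuery("year", 1999, 2003, 2016);
    System.out.println(q);
  }
}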
@@ -119,18 +134,16 @@ public class PointInSetQuery extends Query {
 
         DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc());
 
-        int[] hitCount = new int[1];
-
         if (numDims == 1) {
 
           // We optimize this common case, effectively doing a merge sort of the indexed values vs the queried set:
-          values.intersect(field, new MergePointVisitor(sortedPackedPoints.iterator(), hitCount, result));
+          values.intersect(field, new MergePointVisitor(sortedPackedPoints, result));
 
         } else {
           // NOTE: this is naive implementation, where for each point we re-walk the KD tree to intersect.  We could instead do a similar
           // optimization as the 1D case, but I think it'd mean building a query-time KD tree so we could efficiently intersect against the
           // index, which is probably tricky!
-          SinglePointVisitor visitor = new SinglePointVisitor(hitCount, result);
+          SinglePointVisitor visitor = new SinglePointVisitor(result);
           TermIterator iterator = sortedPackedPoints.iterator();
           for (BytesRef point = iterator.next(); point != null; point = iterator.next()) {
             visitor.setPoint(point);

@@ -138,8 +151,7 @@ public class PointInSetQuery extends Query {
           }
         }
 
-        // NOTE: hitCount[0] will be over-estimate in multi-valued case
-        return new ConstantScoreScorer(this, score(), result.build(hitCount[0]).iterator());
+        return new ConstantScoreScorer(this, score(), result.build().iterator());
       }
     };
   }
@@ -149,17 +161,23 @@ public class PointInSetQuery extends Query {
   private class MergePointVisitor implements IntersectVisitor {
 
     private final DocIdSetBuilder result;
-    private final int[] hitCount;
-    private final TermIterator iterator;
+    private TermIterator iterator;
     private BytesRef nextQueryPoint;
+    private final byte[] lastMaxPackedValue;
     private final BytesRef scratch = new BytesRef();
+    private final PrefixCodedTerms sortedPackedPoints;
 
-    public MergePointVisitor(TermIterator iterator, int[] hitCount, DocIdSetBuilder result) throws IOException {
-      this.hitCount = hitCount;
+    public MergePointVisitor(PrefixCodedTerms sortedPackedPoints, DocIdSetBuilder result) throws IOException {
       this.result = result;
-      this.iterator = iterator;
-      nextQueryPoint = iterator.next();
+      this.sortedPackedPoints = sortedPackedPoints;
+      lastMaxPackedValue = new byte[bytesPerDim];
       scratch.length = bytesPerDim;
+      resetIterator();
+    }
+
+    private void resetIterator() {
+      this.iterator = sortedPackedPoints.iterator();
+      nextQueryPoint = iterator.next();
     }
 
     @Override
@@ -169,7 +187,6 @@ public class PointInSetQuery extends Query {
 
     @Override
     public void visit(int docID) {
-      hitCount[0]++;
       result.add(docID);
     }
 

@@ -180,7 +197,6 @@ public class PointInSetQuery extends Query {
         int cmp = nextQueryPoint.compareTo(scratch);
         if (cmp == 0) {
           // Query point equals index point, so collect and return
-          hitCount[0]++;
           result.add(docID);
           break;
         } else if (cmp < 0) {

@@ -195,6 +211,14 @@ public class PointInSetQuery extends Query {
 
     @Override
     public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
+
+      // NOTE: this is messy ... we need it in cases where a single vistor (us) is shared across multiple leaf readers
+      // (e.g. SlowCompositeReaderWrapper), in which case we need to reset our iterator to re-start the merge sort. Maybe we should instead
+      // add an explicit .start() to IntersectVisitor, and clarify the semantics that in the 1D case all cells will be visited in order?
+      if (StringHelper.compare(bytesPerDim, lastMaxPackedValue, 0, minPackedValue, 0) > 0) {
+        resetIterator();
+      }
+      System.arraycopy(maxPackedValue, 0, lastMaxPackedValue, 0, bytesPerDim);
 
       while (nextQueryPoint != null) {
         scratch.bytes = minPackedValue;
@@ -229,11 +253,9 @@ public class PointInSetQuery extends Query {
   private class SinglePointVisitor implements IntersectVisitor {
 
     private final DocIdSetBuilder result;
-    private final int[] hitCount;
     private final byte[] pointBytes;
 
-    public SinglePointVisitor(int[] hitCount, DocIdSetBuilder result) {
-      this.hitCount = hitCount;
+    public SinglePointVisitor(DocIdSetBuilder result) {
       this.result = result;
       this.pointBytes = new byte[bytesPerDim * numDims];
     }

@@ -251,7 +273,6 @@ public class PointInSetQuery extends Query {
 
     @Override
     public void visit(int docID) {
-      hitCount[0]++;
       result.add(docID);
     }
 

@@ -260,7 +281,6 @@ public class PointInSetQuery extends Query {
       assert packedValue.length == pointBytes.length;
       if (Arrays.equals(packedValue, pointBytes)) {
         // The point for this doc matches the point we are querying on
-        hitCount[0]++;
         result.add(docID);
       }
     }
@@ -301,9 +321,9 @@ public class PointInSetQuery extends Query {
   @Override
   public int hashCode() {
     int hash = super.hashCode();
-    hash += sortedPackedPointsHashCode^0x14fa55fb;
-    hash += numDims^0x14fa55fb;
-    hash += bytesPerDim^0x14fa55fb;
+    hash = 31 * hash + sortedPackedPointsHashCode;
+    hash = 31 * hash + numDims;
+    hash = 31 * hash + bytesPerDim;
     return hash;
   }
 
@@ -354,17 +374,5 @@ public class PointInSetQuery extends Query {
    * @param value single value, never null
    * @return human readable value for debugging
    */
-  protected String toString(byte[] value) {
-    assert value != null;
-    StringBuilder sb = new StringBuilder();
-    sb.append("binary(");
-    for (int i = 0; i < value.length; i++) {
-      if (i > 0) {
-        sb.append(' ');
-      }
-      sb.append(Integer.toHexString(value[i] & 0xFF));
-    }
-    sb.append(')');
-    return sb.toString();
-  }
+  protected abstract String toString(byte[] value);
 }
@@ -51,6 +51,8 @@ import org.apache.lucene.util.StringHelper;
  * @see FloatPoint
  * @see DoublePoint
  * @see BinaryPoint
+ *
+ * @lucene.experimental
  */
 public abstract class PointRangeQuery extends Query {
   final String field;

@@ -219,7 +221,6 @@ public abstract class PointRangeQuery extends Query {
 
         DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc());
 
-        int[] hitCount = new int[1];
         values.intersect(field,
             new IntersectVisitor() {
 

@@ -230,7 +231,6 @@ public abstract class PointRangeQuery extends Query {
 
               @Override
               public void visit(int docID) {
-                hitCount[0]++;
                 result.add(docID);
               }
 

@@ -249,7 +249,6 @@ public abstract class PointRangeQuery extends Query {
                 }
 
                 // Doc is in-bounds
-                hitCount[0]++;
                 result.add(docID);
               }
 

@@ -278,8 +277,7 @@ public abstract class PointRangeQuery extends Query {
               }
             });
 
-        // NOTE: hitCount[0] will be over-estimate in multi-valued case
-        return new ConstantScoreScorer(this, score(), result.build(hitCount[0]).iterator());
+        return new ConstantScoreScorer(this, score(), result.build().iterator());
       }
     };
   }
@@ -287,12 +285,12 @@ public abstract class PointRangeQuery extends Query {
   @Override
   public int hashCode() {
     int hash = super.hashCode();
-    hash += Arrays.hashCode(lowerPoint)^0x14fa55fb;
-    hash += Arrays.hashCode(upperPoint)^0x733fa5fe;
-    hash += Arrays.hashCode(lowerInclusive)^0x14fa55fb;
-    hash += Arrays.hashCode(upperInclusive)^0x733fa5fe;
-    hash += numDims^0x14fa55fb;
-    hash += Objects.hashCode(bytesPerDim);
+    hash = 31 * hash + Arrays.hashCode(lowerPoint);
+    hash = 31 * hash + Arrays.hashCode(upperPoint);
+    hash = 31 * hash + Arrays.hashCode(lowerInclusive);
+    hash = 31 * hash + Arrays.hashCode(upperInclusive);
+    hash = 31 * hash + numDims;
+    hash = 31 * hash + Objects.hashCode(bytesPerDim);
     return hash;
   }
 
@@ -109,7 +109,7 @@ public abstract class ScoringRewrite<B> extends TermCollectingRewrite<B> {
 
     final int size = col.terms.size();
     if (size > 0) {
-      final int sort[] = col.terms.sort(BytesRef.getUTF8SortedAsUnicodeComparator());
+      final int sort[] = col.terms.sort();
       final float[] boost = col.array.boost;
       final TermContext[] termStates = col.array.termState;
       for (int i = 0; i < size; i++) {
@@ -316,7 +316,7 @@ public class SortField {
     return Objects.hash(field, type, reverse, comparatorSource, missingValue);
   }
 
-  private Comparator<BytesRef> bytesComparator = BytesRef.getUTF8SortedAsUnicodeComparator();
+  private Comparator<BytesRef> bytesComparator = Comparator.naturalOrder();
 
   public void setBytesComparator(Comparator<BytesRef> b) {
     bytesComparator = b;
@@ -37,13 +37,27 @@ public final class UsageTrackingQueryCachingPolicy implements QueryCachingPolicy
   // the hash code that we use as a sentinel in the ring buffer.
   private static final int SENTINEL = Integer.MIN_VALUE;
 
+  private static boolean isPointQuery(Query query) {
+    // we need to check for super classes because we occasionally use anonymous
+    // sub classes of eg. PointRangeQuery
+    for (Class<?> clazz = query.getClass(); clazz != Query.class; clazz = clazz.getSuperclass()) {
+      final String simpleName = clazz.getSimpleName();
+      if (simpleName.startsWith("Point") && simpleName.endsWith("Query")) {
+        return true;
+      }
+    }
+    return false;
+  }
+
   static boolean isCostly(Query query) {
     // This does not measure the cost of iterating over the filter (for this we
     // already have the DocIdSetIterator#cost API) but the cost to build the
     // DocIdSet in the first place
     return query instanceof MultiTermQuery ||
         query instanceof MultiTermQueryConstantScoreWrapper ||
-        query instanceof PointRangeQuery;
+        isPointQuery(query) ||
+        // can't refer to TermsQuery directly as it is in another module
+        "TermsQuery".equals(query.getClass().getSimpleName());
   }
 
   static boolean isCheap(Query query) {
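The superclass walk in isPointQuery matters because an anonymous subclass has an empty simple name, so a pure name check on the concrete class would miss it; walking up the hierarchy finds the named Point...Query ancestor. A standalone sketch of the same technique, with illustrative class names rather than Lucene's:

public class SuperclassWalkDemo {
  static class PointDemoQuery {}                                    // matches "Point*Query" by name
  static PointDemoQuery anonymous = new PointDemoQuery() {};        // getSimpleName() is ""

  static boolean isPointQuery(Object query) {
    // An anonymous subclass has an empty simple name, so walk up the
    // hierarchy until a named class matches the Point...Query convention.
    for (Class<?> clazz = query.getClass(); clazz != Object.class; clazz = clazz.getSuperclass()) {
      String simpleName = clazz.getSimpleName();
      if (simpleName.startsWith("Point") && simpleName.endsWith("Query")) {
        return true;
      }
    }
    return false;
  }

  public static void main(String[] args) {
    System.out.println(isPointQuery(anonymous)); // true, via the superclass
  }
}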
@@ -323,7 +323,7 @@ public class MMapDirectory extends FSDirectory {
   private static final BufferCleaner CLEANER;
 
   static {
-    final Object hack = AccessController.doPrivileged((PrivilegedAction<Object>) MMapDirectory::initUnmapHack);
+    final Object hack = AccessController.doPrivileged((PrivilegedAction<Object>) MMapDirectory::unmapHackImpl);
     if (hack instanceof BufferCleaner) {
       CLEANER = (BufferCleaner) hack;
       UNMAP_SUPPORTED = true;

@@ -336,7 +336,7 @@ public class MMapDirectory extends FSDirectory {
   }
 
   @SuppressForbidden(reason = "Needs access to private APIs in DirectBuffer and sun.misc.Cleaner to enable hack")
-  private static Object initUnmapHack() {
+  private static Object unmapHackImpl() {
     final Lookup lookup = lookup();
     try {
       final Class<?> directBufferClass = Class.forName("java.nio.DirectByteBuffer");

@@ -388,10 +388,10 @@ public class MMapDirectory extends FSDirectory {
     } catch (ReflectiveOperationException e) {
       return "Unmapping is not supported on this platform, because internal Java APIs are not compatible to this Lucene version: " + e;
     } catch (SecurityException e) {
-      return "Unmapping is not supported, because not all required permissions are given to the Lucene JAR file. " +
-          "Please grant at least the following permissions: RuntimePermission(\"accessClassInPackage.sun.misc\"), " +
+      return "Unmapping is not supported, because not all required permissions are given to the Lucene JAR file: " + e +
+          " [Please grant at least the following permissions: RuntimePermission(\"accessClassInPackage.sun.misc\"), " +
           "RuntimePermission(\"accessClassInPackage.jdk.internal.ref\"), and " +
-          "ReflectPermission(\"suppressAccessChecks\")";
+          "ReflectPermission(\"suppressAccessChecks\")]";
     }
   }
 
@@ -620,22 +620,6 @@ public final class ArrayUtil {
     return result;
   }
 
-  private static class NaturalComparator<T extends Comparable<? super T>> implements Comparator<T> {
-    NaturalComparator() {}
-    @Override
-    public int compare(T o1, T o2) {
-      return o1.compareTo(o2);
-    }
-  }
-
-  private static final Comparator<?> NATURAL_COMPARATOR = new NaturalComparator<>();
-
-  /** Get the natural {@link Comparator} for the provided object class. */
-  @SuppressWarnings("unchecked")
-  public static <T extends Comparable<? super T>> Comparator<T> naturalComparator() {
-    return (Comparator<T>) NATURAL_COMPARATOR;
-  }
-
   /** Swap values stored in slots <code>i</code> and <code>j</code> */
   public static <T> void swap(T[] arr, int i, int j) {
     final T tmp = arr[i];

@@ -672,7 +656,7 @@ public final class ArrayUtil {
    */
   public static <T extends Comparable<? super T>> void introSort(T[] a, int fromIndex, int toIndex) {
     if (toIndex-fromIndex <= 1) return;
-    introSort(a, fromIndex, toIndex, ArrayUtil.<T>naturalComparator());
+    introSort(a, fromIndex, toIndex, Comparator.naturalOrder());
   }
 
   /**

@@ -712,7 +696,7 @@ public final class ArrayUtil {
    */
   public static <T extends Comparable<? super T>> void timSort(T[] a, int fromIndex, int toIndex) {
     if (toIndex-fromIndex <= 1) return;
-    timSort(a, fromIndex, toIndex, ArrayUtil.<T>naturalComparator());
+    timSort(a, fromIndex, toIndex, Comparator.naturalOrder());
   }
 
   /**
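The hand-rolled NaturalComparator deleted above is exactly what java.util.Comparator.naturalOrder() has provided since Java 8: a shared, type-safe singleton that delegates to Comparable.compareTo. A quick illustrative check of the equivalence (not Lucene code):

import java.util.Arrays;
import java.util.Comparator;

public class NaturalOrderCheck {
  public static void main(String[] args) {
    Integer[] a = { 3, 1, 2 };
    Integer[] b = a.clone();
    // JDK singleton comparator, equivalent to the removed NaturalComparator.
    Arrays.sort(a, Comparator.<Integer>naturalOrder());
    // A plain Comparable-based sort delegates to Integer.compareTo as well.
    Arrays.sort(b);
    System.out.println(Arrays.equals(a, b)); // true
  }
}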
@@ -18,7 +18,6 @@ package org.apache.lucene.util;
 
 
 import java.util.Arrays;
-import java.util.Comparator;
 
 /** Represents byte[], as a slice (offset + length) into an
  *  existing byte[]. The {@link #bytes} member should never be null;

@@ -30,6 +29,10 @@ import java.util.Comparator;
  * Using code like {@code new String(bytes, offset, length)} to do this
  * is <b>wrong</b>, as it does not respect the correct character set
  * and may return wrong results (depending on the platform's defaults)!
+ *
+ * <p>{@code BytesRef} implements {@link Comparable}. The underlying byte arrays
+ * are sorted lexicographically, numerically treating elements as unsigned.
+ * This is identical to Unicode codepoint order.
  */
 public final class BytesRef implements Comparable<BytesRef>,Cloneable {
   /** An empty byte array for convenience */
@@ -169,106 +172,29 @@ public final class BytesRef implements Comparable<BytesRef>,Cloneable {
   /** Unsigned byte order comparison */
   @Override
   public int compareTo(BytesRef other) {
-    return utf8SortedAsUnicodeSortOrder.compare(this, other);
-  }
-
-  private final static Comparator<BytesRef> utf8SortedAsUnicodeSortOrder = new UTF8SortedAsUnicodeComparator();
-
-  public static Comparator<BytesRef> getUTF8SortedAsUnicodeComparator() {
-    return utf8SortedAsUnicodeSortOrder;
-  }
-
-  private static class UTF8SortedAsUnicodeComparator implements Comparator<BytesRef> {
-    // Only singleton
-    private UTF8SortedAsUnicodeComparator() {};
-
-    @Override
-    public int compare(BytesRef a, BytesRef b) {
-      final byte[] aBytes = a.bytes;
-      int aUpto = a.offset;
-      final byte[] bBytes = b.bytes;
-      int bUpto = b.offset;
-
-      final int aStop = aUpto + Math.min(a.length, b.length);
-      while(aUpto < aStop) {
-        int aByte = aBytes[aUpto++] & 0xff;
-        int bByte = bBytes[bUpto++] & 0xff;
-
-        int diff = aByte - bByte;
-        if (diff != 0) {
-          return diff;
-        }
-      }
-
-      // One is a prefix of the other, or, they are equal:
-      return a.length - b.length;
-    }
-  }
-
-  /** @deprecated This comparator is only a transition mechanism */
-  @Deprecated
-  private final static Comparator<BytesRef> utf8SortedAsUTF16SortOrder = new UTF8SortedAsUTF16Comparator();
-
-  /** @deprecated This comparator is only a transition mechanism */
-  @Deprecated
-  public static Comparator<BytesRef> getUTF8SortedAsUTF16Comparator() {
-    return utf8SortedAsUTF16SortOrder;
-  }
-
-  /** @deprecated This comparator is only a transition mechanism */
-  @Deprecated
-  private static class UTF8SortedAsUTF16Comparator implements Comparator<BytesRef> {
-    // Only singleton
-    private UTF8SortedAsUTF16Comparator() {};
-
-    @Override
-    public int compare(BytesRef a, BytesRef b) {
-
-      final byte[] aBytes = a.bytes;
-      int aUpto = a.offset;
-      final byte[] bBytes = b.bytes;
-      int bUpto = b.offset;
-
-      final int aStop;
-      if (a.length < b.length) {
-        aStop = aUpto + a.length;
-      } else {
-        aStop = aUpto + b.length;
-      }
-
-      while(aUpto < aStop) {
-        int aByte = aBytes[aUpto++] & 0xff;
-        int bByte = bBytes[bUpto++] & 0xff;
-
-        if (aByte != bByte) {
-
-          // See http://icu-project.org/docs/papers/utf16_code_point_order.html#utf-8-in-utf-16-order
-
-          // We know the terms are not equal, but, we may
-          // have to carefully fixup the bytes at the
-          // difference to match UTF16's sort order:
-
-          // NOTE: instead of moving supplementary code points (0xee and 0xef) to the unused 0xfe and 0xff,
-          // we move them to the unused 0xfc and 0xfd [reserved for future 6-byte character sequences]
-          // this reserves 0xff for preflex's term reordering (surrogate dance), and if unicode grows such
-          // that 6-byte sequences are needed we have much bigger problems anyway.
-          if (aByte >= 0xee && bByte >= 0xee) {
-            if ((aByte & 0xfe) == 0xee) {
-              aByte += 0xe;
-            }
-            if ((bByte&0xfe) == 0xee) {
-              bByte += 0xe;
-            }
-          }
-          return aByte - bByte;
-        }
-      }
-
-      // One is a prefix of the other, or, they are equal:
-      return a.length - b.length;
-    }
-  }
+    // TODO: Once we are on Java 9 replace this by java.util.Arrays#compareUnsigned()
+    // which is implemented by a Hotspot intrinsic! Also consider building a
+    // Multi-Release-JAR!
+    final byte[] aBytes = this.bytes;
+    int aUpto = this.offset;
+    final byte[] bBytes = other.bytes;
+    int bUpto = other.offset;
+
+    final int aStop = aUpto + Math.min(this.length, other.length);
+    while(aUpto < aStop) {
+      int aByte = aBytes[aUpto++] & 0xff;
+      int bByte = bBytes[bUpto++] & 0xff;
+
+      int diff = aByte - bByte;
+      if (diff != 0) {
+        return diff;
+      }
+    }
+
+    // One is a prefix of the other, or, they are equal:
+    return this.length - other.length;
+  }
 
   /**
    * Creates a new BytesRef that points to a copy of the bytes from
   * <code>other</code>
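The TODO in the new compareTo points at java.util.Arrays#compareUnsigned, available since Java 9 with the same semantics (unsigned lexicographic, shorter prefix first) and a HotSpot intrinsic behind it. A hedged sketch of what that replacement could look like — this uses the real JDK API, but the body is not code from the commit:

import java.util.Arrays;

public class CompareUnsignedSketch {
  // Possible Java 9+ body for BytesRef.compareTo: compares the two slices
  // byte-wise as unsigned values, with a shorter prefix sorting first.
  static int compareSlices(byte[] aBytes, int aOff, int aLen,
                           byte[] bBytes, int bOff, int bLen) {
    return Arrays.compareUnsigned(aBytes, aOff, aOff + aLen,
                                  bBytes, bOff, bOff + bLen);
  }

  public static void main(String[] args) {
    byte[] a = { 0x01, (byte) 0xFF };
    byte[] b = { 0x01, 0x02 };
    System.out.println(compareSlices(a, 0, 2, b, 0, 2) > 0); // true: 0xFF unsigned > 0x02
  }
}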
@@ -18,7 +18,6 @@ package org.apache.lucene.util;
 
 
 import java.util.Arrays;
-import java.util.Comparator;
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.lucene.util.ByteBlockPool.DirectAllocator;

@@ -156,11 +155,8 @@ public final class BytesRefHash {
    * Note: This is a destructive operation. {@link #clear()} must be called in
    * order to reuse this {@link BytesRefHash} instance.
    * </p>
-   *
-   * @param comp
-   *          the {@link Comparator} used for sorting
    */
-  public int[] sort(final Comparator<BytesRef> comp) {
+  public int[] sort() {
     final int[] compact = compact();
     new IntroSorter() {
       @Override

@@ -176,7 +172,7 @@ public final class BytesRefHash {
         assert bytesStart.length > id1 && bytesStart.length > id2;
         pool.setBytesRef(scratch1, bytesStart[id1]);
         pool.setBytesRef(scratch2, bytesStart[id2]);
-        return comp.compare(scratch1, scratch2);
+        return scratch1.compareTo(scratch2);
       }
 
       @Override

@@ -191,7 +187,7 @@ public final class BytesRefHash {
         final int id = compact[j];
         assert bytesStart.length > id;
         pool.setBytesRef(scratch2, bytesStart[id]);
-        return comp.compare(pivot, scratch2);
+        return pivot.compareTo(scratch2);
       }
 
       private final BytesRef pivot = new BytesRef(),
@@ -146,7 +146,7 @@ public final class CollectionUtil {
   public static <T extends Comparable<? super T>> void introSort(List<T> list) {
     final int size = list.size();
     if (size <= 1) return;
-    introSort(list, ArrayUtil.<T>naturalComparator());
+    introSort(list, Comparator.naturalOrder());
   }
 
   // Tim sorts:

@@ -172,7 +172,7 @@ public final class CollectionUtil {
   public static <T extends Comparable<? super T>> void timSort(List<T> list) {
     final int size = list.size();
     if (size <= 1) return;
-    timSort(list, ArrayUtil.<T>naturalComparator());
+    timSort(list, Comparator.naturalOrder());
   }
 
 }
@@ -169,21 +169,9 @@ public final class DocIdSetBuilder {
    * Build a {@link DocIdSet} from the accumulated doc IDs.
    */
   public DocIdSet build() {
-    return build(-1);
-  }
-
-  /**
-   * Expert: build a {@link DocIdSet} with a hint on the cost that the resulting
-   * {@link DocIdSet} would have.
-   */
-  public DocIdSet build(long costHint) {
     try {
       if (bitSet != null) {
-        if (costHint == -1) {
-          return new BitDocIdSet(bitSet);
-        } else {
-          return new BitDocIdSet(bitSet, costHint);
-        }
+        return new BitDocIdSet(bitSet);
       } else {
         LSBRadixSorter sorter = new LSBRadixSorter();
         sorter.sort(buffer, 0, bufferSize);
@@ -53,10 +53,6 @@ import org.apache.lucene.index.TermsEnum;
  * {@link org.apache.lucene.search.LegacyNumericRangeQuery} implements the query part
  * for the same data types.
  *
- * <p>This class can also be used, to generate lexicographically sortable (according to
- * {@link BytesRef#getUTF8SortedAsUTF16Comparator()}) representations of numeric data
- * types for other usages (e.g. sorting).
- *
  * @lucene.internal
  *
  * @deprecated Please use {@link org.apache.lucene.index.PointValues} instead.
@@ -174,7 +174,7 @@ public class OfflineSorter {
   private final Comparator<BytesRef> comparator;
 
   /** Default comparator: sorts in binary (codepoint) order */
-  public static final Comparator<BytesRef> DEFAULT_COMPARATOR = BytesRef.getUTF8SortedAsUnicodeComparator();
+  public static final Comparator<BytesRef> DEFAULT_COMPARATOR = Comparator.naturalOrder();
 
   /**
    * Defaults constructor.
@@ -104,8 +104,8 @@ public class TestMultiFields extends LuceneTestCase {
 
       if (VERBOSE) {
         List<BytesRef> termsList = new ArrayList<>(uniqueTerms);
-        Collections.sort(termsList, BytesRef.getUTF8SortedAsUTF16Comparator());
-        System.out.println("TEST: terms in UTF16 order:");
+        Collections.sort(termsList);
+        System.out.println("TEST: terms in UTF-8 order:");
         for(BytesRef b : termsList) {
           System.out.println("  " + UnicodeUtil.toHexString(b.utf8ToString()) + " " + b);
           for(int docID : docs.get(b)) {
@@ -1822,7 +1822,12 @@ public class TestPointQueries extends LuceneTestCase {
           public BytesRef next() {
             return new BytesRef(new byte[3]);
           }
-        });
+        }) {
+        @Override
+        protected String toString(byte[] point) {
+          return Arrays.toString(point);
+        }
+      };
     });
     assertEquals("packed point length should be 12 but got 3; field=\"foo\" numDims=3 bytesPerDim=4", expected.getMessage());
   }
@@ -90,9 +90,8 @@ public class TestBytesRefArray extends LuceneTestCase {
         stringList.add(randomRealisticUnicodeString);
       }
 
-      Collections.sort(stringList);
-      BytesRefIterator iter = list.iterator(BytesRef
-          .getUTF8SortedAsUTF16Comparator());
+      Collections.sort(stringList, TestUtil.STRING_CODEPOINT_COMPARATOR);
+      BytesRefIterator iter = list.iterator(Comparator.naturalOrder());
       int i = 0;
       BytesRef next;
       while ((next = iter.next()) != null) {
@@ -20,11 +20,11 @@ package org.apache.lucene.util;
 import java.util.BitSet;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Map.Entry;
 import java.util.Map;
 import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
+import java.util.Map.Entry;
 
 import org.apache.lucene.util.BytesRefHash.MaxBytesLengthExceededException;
 import org.junit.Before;

@@ -168,14 +168,16 @@ public class TestBytesRefHash extends LuceneTestCase {
 
   /**
    * Test method for
-   * {@link org.apache.lucene.util.BytesRefHash#sort(java.util.Comparator)}.
+   * {@link org.apache.lucene.util.BytesRefHash#sort()}.
    */
   @Test
   public void testSort() {
     BytesRefBuilder ref = new BytesRefBuilder();
     int num = atLeast(2);
     for (int j = 0; j < num; j++) {
-      SortedSet<String> strings = new TreeSet<>();
+
+      // Sorts by unicode code point order (is there a simple way, e.g. a Collator?)
+      SortedSet<String> strings = new TreeSet<>(TestUtil.STRING_CODEPOINT_COMPARATOR);
       for (int i = 0; i < 797; i++) {
         String str;
         do {

@@ -185,9 +187,7 @@ public class TestBytesRefHash extends LuceneTestCase {
         hash.add(ref.get());
         strings.add(str);
       }
-      // We use the UTF-16 comparator here, because we need to be able to
-      // compare to native String.compareTo() [UTF-16]:
-      int[] sort = hash.sort(BytesRef.getUTF8SortedAsUTF16Comparator());
+      int[] sort = hash.sort();
       assertTrue(strings.size() < sort.length);
       int i = 0;
       BytesRef scratch = new BytesRef();
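These tests switch from String's UTF-16 order to TestUtil.STRING_CODEPOINT_COMPARATOR so the expected ordering matches the unsigned-UTF-8 order now produced by hash.sort(). The two orders diverge only for supplementary characters: surrogate code units (0xD800-0xDFFF) sort above some BMP characters in UTF-16 but the code points they encode sort above all BMP characters. The helper name comes from the diff; a self-contained sketch of such a comparator, illustrative rather than Lucene's actual body:

import java.util.Comparator;

public class CodePointOrderDemo {
  // Compares strings by Unicode code point, matching unsigned UTF-8 byte order.
  static final Comparator<String> BY_CODE_POINT = (a, b) -> {
    int i = 0, j = 0;
    while (i < a.length() && j < b.length()) {
      int cpA = a.codePointAt(i);
      int cpB = b.codePointAt(j);
      if (cpA != cpB) {
        return Integer.compare(cpA, cpB);
      }
      i += Character.charCount(cpA);
      j += Character.charCount(cpB);
    }
    return Integer.compare(a.length() - i, b.length() - j);
  };

  public static void main(String[] args) {
    String bmp = "\uFFFD";          // U+FFFD, a high BMP character
    String supp = "\uD83D\uDE00";   // U+1F600, encoded as a surrogate pair
    // UTF-16 unit order: the lead surrogate 0xD83D sorts before 0xFFFD...
    System.out.println(bmp.compareTo(supp) > 0);               // true
    // ...but by code point, U+1F600 comes after U+FFFD.
    System.out.println(BY_CODE_POINT.compare(bmp, supp) < 0);  // true
  }
}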
@@ -17,6 +17,8 @@
 package org.apache.lucene.util;
 
 
+import java.util.Comparator;
+
 import org.junit.runner.RunWith;
 
 import com.carrotsearch.randomizedtesting.RandomizedRunner;

@@ -30,7 +32,7 @@ public class TestInPlaceMergeSorter extends BaseSortTestCase {
 
   @Override
   public Sorter newSorter(Entry[] arr) {
-    return new ArrayInPlaceMergeSorter<>(arr, ArrayUtil.<Entry>naturalComparator());
+    return new ArrayInPlaceMergeSorter<>(arr, Comparator.naturalOrder());
   }
 
 }

@@ -16,7 +16,7 @@
  */
 package org.apache.lucene.util;
 
-
+import java.util.Comparator;
 
 public class TestIntroSorter extends BaseSortTestCase {
 

@@ -26,7 +26,7 @@ public class TestIntroSorter extends BaseSortTestCase {
 
   @Override
   public Sorter newSorter(Entry[] arr) {
-    return new ArrayIntroSorter<>(arr, ArrayUtil.<Entry>naturalComparator());
+    return new ArrayIntroSorter<>(arr, Comparator.naturalOrder());
   }
 
 }
@@ -16,6 +16,7 @@
  */
 package org.apache.lucene.util;
 
+import java.util.Comparator;
 
 public class TestTimSorter extends BaseSortTestCase {
 

@@ -25,6 +26,6 @@ public class TestTimSorter extends BaseSortTestCase {
 
   @Override
   public Sorter newSorter(Entry[] arr) {
-    return new ArrayTimSorter<>(arr, ArrayUtil.<Entry>naturalComparator(), TestUtil.nextInt(random(), 0, arr.length));
+    return new ArrayTimSorter<>(arr, Comparator.naturalOrder(), TestUtil.nextInt(random(), 0, arr.length));
   }
 }
@@ -134,7 +134,6 @@ public class TestUnicodeUtil extends LuceneTestCase {
 
   public void testUTF8toUTF32() {
     int[] utf32 = new int[0];
-    int[] codePoints = new int[20];
     int num = atLeast(50000);
     for (int i = 0; i < num; i++) {
       final String s = TestUtil.randomUnicodeString(random());

@@ -143,21 +142,15 @@ public class TestUnicodeUtil extends LuceneTestCase {
       utf32 = ArrayUtil.grow(utf32, utf8Len);
       final int utf32Len = UnicodeUtil.UTF8toUTF32(new BytesRef(utf8, 0, utf8Len), utf32);
 
-      int charUpto = 0;
-      int intUpto = 0;
-      while(charUpto < s.length()) {
-        final int cp = s.codePointAt(charUpto);
-        codePoints[intUpto++] = cp;
-        charUpto += Character.charCount(cp);
-      }
-      if (!ArrayUtil.equals(codePoints, 0, utf32, 0, intUpto)) {
+      int[] codePoints = s.codePoints().toArray();
+      if (!ArrayUtil.equals(codePoints, 0, utf32, 0, codePoints.length)) {
         System.out.println("FAILED");
         for(int j=0;j<s.length();j++) {
           System.out.println("  char[" + j + "]=" + Integer.toHexString(s.charAt(j)));
         }
         System.out.println();
-        assertEquals(intUpto, utf32Len);
-        for(int j=0;j<intUpto;j++) {
+        assertEquals(codePoints.length, utf32Len);
+        for(int j=0;j<codePoints.length;j++) {
          System.out.println("  " + Integer.toHexString(utf32[j]) + " vs " + Integer.toHexString(codePoints[j]));
         }
         fail("mismatch");
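The manual codePointAt/charCount walk deleted above is what String.codePoints() (Java 8) does internally; the stream form also sizes the result array exactly instead of relying on a fixed-size scratch buffer. A minimal equivalence check, illustrative rather than from the commit:

public class CodePointsStreamDemo {
  public static void main(String[] args) {
    String s = "a\uD83D\uDE00b"; // 4 UTF-16 units, 3 code points
    // Manual walk, as in the removed test code:
    int[] manual = new int[s.length()];
    int charUpto = 0, intUpto = 0;
    while (charUpto < s.length()) {
      int cp = s.codePointAt(charUpto);
      manual[intUpto++] = cp;
      charUpto += Character.charCount(cp);
    }
    // Stream form, as in the new test code:
    int[] streamed = s.codePoints().toArray();
    System.out.println(streamed.length == intUpto); // true: both yield 3
  }
}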
@@ -55,7 +55,7 @@ class TermsIncludingScoreQuery extends Query {
     this.terms = terms;
     this.scores = scores;
     this.originalQuery = originalQuery;
-    this.ords = terms.sort(BytesRef.getUTF8SortedAsUnicodeComparator());
+    this.ords = terms.sort();
     this.unwrittenOriginalQuery = originalQuery;
   }
 
@ -26,7 +26,6 @@ import org.apache.lucene.util.BytesRef;
|
|||
import org.apache.lucene.util.BytesRefHash;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Comparator;
|
||||
|
||||
/**
|
||||
* A query that has an array of terms from a specific field. This query will match documents have one or more terms in
|
||||
|
@ -48,7 +47,7 @@ class TermsQuery extends MultiTermQuery {
|
|||
super(field);
|
||||
this.fromQuery = fromQuery;
|
||||
this.terms = terms;
|
||||
ords = terms.sort(BytesRef.getUTF8SortedAsUnicodeComparator());
|
||||
ords = terms.sort();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -100,7 +99,6 @@ class TermsQuery extends MultiTermQuery {
|
|||
|
||||
private final BytesRef lastTerm;
|
||||
private final BytesRef spare = new BytesRef();
|
||||
private final Comparator<BytesRef> comparator;
|
||||
|
||||
private BytesRef seekTerm;
|
||||
private int upto = 0;
|
||||
|
@ -109,7 +107,6 @@ class TermsQuery extends MultiTermQuery {
|
|||
super(tenum);
|
||||
this.terms = terms;
|
||||
this.ords = ords;
|
||||
comparator = BytesRef.getUTF8SortedAsUnicodeComparator();
|
||||
lastElement = terms.size() - 1;
|
||||
lastTerm = terms.get(ords[lastElement], new BytesRef());
|
||||
seekTerm = terms.get(ords[upto], spare);
|
||||
|
@ -124,12 +121,12 @@ class TermsQuery extends MultiTermQuery {
|
|||
|
||||
@Override
|
||||
protected AcceptStatus accept(BytesRef term) throws IOException {
|
||||
if (comparator.compare(term, lastTerm) > 0) {
|
||||
if (term.compareTo(lastTerm) > 0) {
|
||||
return AcceptStatus.END;
|
||||
}
|
||||
|
||||
BytesRef currentTerm = terms.get(ords[upto], spare);
|
||||
if (comparator.compare(term, currentTerm) == 0) {
|
||||
if (term.compareTo(currentTerm) == 0) {
|
||||
if (upto == lastElement) {
|
||||
return AcceptStatus.YES;
|
||||
} else {
|
||||
|
@ -148,7 +145,7 @@ class TermsQuery extends MultiTermQuery {
|
|||
// typically the terms dict is a superset of query's terms so it's unusual that we have to skip many of
|
||||
// our terms so we don't do a binary search here
|
||||
seekTerm = terms.get(ords[++upto], spare);
|
||||
} while ((cmp = comparator.compare(seekTerm, term)) < 0);
|
||||
} while ((cmp = seekTerm.compareTo(term)) < 0);
|
||||
if (cmp == 0) {
|
||||
if (upto == lastElement) {
|
||||
return AcceptStatus.YES;
|
||||
|
|
|
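These call sites all rest on the same fact: BytesRef implements Comparable<BytesRef> using unsigned byte order, which for UTF-8 bytes coincides with Unicode code point order, so the BytesRef.getUTF8SortedAsUnicodeComparator() singleton is redundant and its uses can be dropped. A minimal sketch, assuming this version's BytesRef:

    import org.apache.lucene.util.BytesRef;

    BytesRef a = new BytesRef("apple");
    BytesRef b = new BytesRef("banana");
    // Unsigned byte-wise comparison; no explicit comparator needed, which is
    // why the TreeSet in TestJoinUtil below can use its natural ordering.
    assert a.compareTo(b) < 0;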
@@ -1130,7 +1130,7 @@ public class TestJoinUtil extends LuceneTestCase {
      Terms terms = slowCompositeReader.terms(toField);
      if (terms != null) {
        PostingsEnum postingsEnum = null;
        SortedSet<BytesRef> joinValues = new TreeSet<>(BytesRef.getUTF8SortedAsUnicodeComparator());
        SortedSet<BytesRef> joinValues = new TreeSet<>();
        joinValues.addAll(joinValueToJoinScores.keySet());
        for (BytesRef joinValue : joinValues) {
          TermsEnum termsEnum = terms.iterator();
@@ -19,7 +19,6 @@ package org.apache.lucene.index.memory;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Map;
import java.util.SortedMap;

@@ -758,7 +757,7 @@ public class MemoryIndex {
   */
  public void sortTerms() {
    if (sortedTerms == null) {
      sortedTerms = terms.sort(BytesRef.getUTF8SortedAsUnicodeComparator());
      sortedTerms = terms.sort();
    }
  }

@@ -950,12 +949,12 @@ public class MemoryIndex {
    }

    private final int binarySearch(BytesRef b, BytesRef bytesRef, int low,
        int high, BytesRefHash hash, int[] ords, Comparator<BytesRef> comparator) {
        int high, BytesRefHash hash, int[] ords) {
      int mid = 0;
      while (low <= high) {
        mid = (low + high) >>> 1;
        hash.get(ords[mid], bytesRef);
        final int cmp = comparator.compare(bytesRef, b);
        final int cmp = bytesRef.compareTo(b);
        if (cmp < 0) {
          low = mid + 1;
        } else if (cmp > 0) {

@@ -964,20 +963,20 @@ public class MemoryIndex {
          return mid;
        }
      }
      assert comparator.compare(bytesRef, b) != 0;
      assert bytesRef.compareTo(b) != 0;
      return -(low + 1);
    }

    @Override
    public boolean seekExact(BytesRef text) {
      termUpto = binarySearch(text, br, 0, info.terms.size()-1, info.terms, info.sortedTerms, BytesRef.getUTF8SortedAsUnicodeComparator());
      termUpto = binarySearch(text, br, 0, info.terms.size()-1, info.terms, info.sortedTerms);
      return termUpto >= 0;
    }

    @Override
    public SeekStatus seekCeil(BytesRef text) {
      termUpto = binarySearch(text, br, 0, info.terms.size()-1, info.terms, info.sortedTerms, BytesRef.getUTF8SortedAsUnicodeComparator());
      termUpto = binarySearch(text, br, 0, info.terms.size()-1, info.terms, info.sortedTerms);
      if (termUpto < 0) { // not found; choose successor
        termUpto = -termUpto-1;
        if (termUpto >= info.terms.size()) {
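The binary search above probes the hash through an ords[] indirection: ords holds the term ids of the BytesRefHash in sorted order, and a scratch BytesRef is refilled on every probe. A standalone sketch of the same pattern (method and variable names are illustrative, not from the patch):

    import org.apache.lucene.util.BytesRef;
    import org.apache.lucene.util.BytesRefHash;

    // Returns the position of key within ords, or -(insertionPoint + 1) when
    // absent, mirroring the Arrays.binarySearch convention MemoryIndex uses.
    static int ordBinarySearch(BytesRef key, BytesRefHash hash, int[] ords) {
      BytesRef scratch = new BytesRef();
      int low = 0, high = ords.length - 1;
      while (low <= high) {
        int mid = (low + high) >>> 1;
        hash.get(ords[mid], scratch);       // materialize the mid-th smallest term
        int cmp = scratch.compareTo(key);   // natural UTF-8/Unicode order
        if (cmp < 0) low = mid + 1;
        else if (cmp > 0) high = mid - 1;
        else return mid;
      }
      return -(low + 1);
    }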
@@ -640,6 +640,28 @@
    <property name="spatial-javadocs.uptodate" value="true"/>
  </target>

  <property name="spatial-extras.jar" value="${common.dir}/build/spatial-extras/lucene-spatial-extras-${version}.jar"/>
  <target name="check-spatial-extras-uptodate" unless="spatial-extras.uptodate">
    <module-uptodate name="spatial-extras" jarfile="${spatial-extras.jar}" property="spatial-extras.uptodate"/>
  </target>
  <target name="jar-spatial-extras" unless="spatial-extras.uptodate" depends="check-spatial-extras-uptodate">
    <ant dir="${common.dir}/spatial-extras" target="jar-core" inheritAll="false">
      <propertyset refid="uptodate.and.compiled.properties"/>
    </ant>
    <property name="spatial-extras.uptodate" value="true"/>
  </target>

  <property name="spatial-extras-javadoc.jar" value="${common.dir}/build/spatial-extras/lucene-spatial-extras-${version}-javadoc.jar"/>
  <target name="check-spatial-extras-javadocs-uptodate" unless="spatial-extras-javadocs.uptodate">
    <module-uptodate name="spatial-extras" jarfile="${spatial-extras-javadoc.jar}" property="spatial-extras-javadocs.uptodate"/>
  </target>
  <target name="javadocs-spatial-extras" unless="spatial-extras-javadocs.uptodate" depends="check-spatial-extras-javadocs-uptodate">
    <ant dir="${common.dir}/spatial-extras" target="javadocs" inheritAll="false">
      <propertyset refid="uptodate.and.compiled.properties"/>
    </ant>
    <property name="spatial-extras-javadocs.uptodate" value="true"/>
  </target>

  <property name="suggest.jar" value="${common.dir}/build/suggest/lucene-suggest-${version}.jar"/>
  <target name="check-suggest-uptodate" unless="suggest.uptodate">
    <module-uptodate name="suggest" jarfile="${suggest.jar}" property="suggest.uptodate"/>
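These targets follow the existing per-module pattern: check-spatial-extras-uptodate compares the module jar against its sources via module-uptodate, and jar-spatial-extras / javadocs-spatial-extras descend into lucene/spatial-extras only when that check fails. From the lucene/ directory they would presumably be invoked like any other module target, e.g. ant jar-spatial-extras or ant javadocs-spatial-extras.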
@@ -34,6 +34,8 @@ import org.apache.lucene.index.Fields;
import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.FilterLeafReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;

@@ -49,6 +51,7 @@ import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.UsageTrackingQueryCachingPolicy;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;

@@ -325,4 +328,26 @@ public class TermsQueryTest extends LuceneTestCase {
    TermsQuery query = new TermsQuery(new Term("field", new BytesRef(new byte[] { (byte) 0xff, (byte) 0xfe })));
    assertEquals("field:[ff fe]", query.toString());
  }

  public void testIsConsideredCostlyByQueryCache() throws IOException {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig();
    IndexWriter w = new IndexWriter(dir, iwc);
    Document doc = new Document();
    for (int i = 0; i < 10000; ++i) {
      w.addDocument(doc);
    }
    w.forceMerge(1);
    DirectoryReader reader = DirectoryReader.open(w);
    w.close();
    TermsQuery query = new TermsQuery(new Term("foo", "bar"), new Term("foo", "baz"));
    UsageTrackingQueryCachingPolicy policy = new UsageTrackingQueryCachingPolicy();
    assertFalse(policy.shouldCache(query, getOnlySegmentReader(reader).getContext()));
    policy.onUse(query);
    policy.onUse(query);
    // cached after two uses
    assertTrue(policy.shouldCache(query, getOnlySegmentReader(reader).getContext()));
    reader.close();
    dir.close();
  }
}
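The new test leans on UsageTrackingQueryCachingPolicy's heuristic that expensive queries are worth caching sooner: as its "cached after two uses" comment suggests, a TermsQuery is classified as costly, so two onUse calls are enough to flip shouldCache from false to true, whereas a cheap query would need a longer usage history.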
@@ -22,6 +22,7 @@ import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.PointDistanceQuery;
import org.apache.lucene.search.PointInPolygonQuery;
import org.apache.lucene.search.PointRangeQuery;
import org.apache.lucene.search.Query;

@@ -37,6 +38,7 @@ import org.apache.lucene.spatial.util.GeoUtils;
 * This field defines static factory methods for creating common queries:
 * <ul>
 *   <li>{@link #newBoxQuery newBoxQuery()} for matching points within a bounding box.
 *   <li>{@link #newDistanceQuery newDistanceQuery()} for matching points within a specified distance.
 *   <li>{@link #newPolygonQuery newPolygonQuery()} for matching points within an arbitrary polygon.
 * </ul>
 * <p>

@@ -206,6 +208,13 @@ public class LatLonPoint extends Field {
    };
  }

  /**
   * Create a query for matching points within the specified distance of the supplied location.
   */
  public static Query newDistanceQuery(String field, double latitude, double longitude, double radiusMeters) {
    return new PointDistanceQuery(field, latitude, longitude, radiusMeters);
  }

  /**
   * Create a query for matching a polygon.
   * <p>
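A hedged usage sketch of the new factory (the field name, coordinates, and the surrounding IndexSearcher are illustrative, not part of the patch):

    // Match documents whose "location" point lies within 10 km of the given spot.
    Query q = LatLonPoint.newDistanceQuery("location", 40.7128, -74.0060, 10000.0);
    TopDocs hits = searcher.search(q, 10);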
@@ -98,7 +98,7 @@ public class DocValuesTermsQuery extends Query {
    this.field = Objects.requireNonNull(field);
    Objects.requireNonNull(terms, "Collection of terms must not be null");
    this.terms = terms.toArray(new BytesRef[terms.size()]);
    ArrayUtil.timSort(this.terms, BytesRef.getUTF8SortedAsUnicodeComparator());
    ArrayUtil.timSort(this.terms);
  }

  public DocValuesTermsQuery(String field, BytesRef... terms) {
@@ -0,0 +1,179 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.search;

import java.io.IOException;

import org.apache.lucene.document.LatLonPoint;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.index.PointValues.IntersectVisitor;
import org.apache.lucene.index.PointValues.Relation;
import org.apache.lucene.spatial.util.GeoDistanceUtils;
import org.apache.lucene.spatial.util.GeoRect;
import org.apache.lucene.spatial.util.GeoUtils;
import org.apache.lucene.util.DocIdSetBuilder;
import org.apache.lucene.util.NumericUtils;

/**
 * Distance query for {@link LatLonPoint}.
 */
public class PointDistanceQuery extends Query {
  final String field;
  final double latitude;
  final double longitude;
  final double radiusMeters;

  public PointDistanceQuery(String field, double latitude, double longitude, double radiusMeters) {
    if (field == null) {
      throw new IllegalArgumentException("field cannot be null");
    }
    if (GeoUtils.isValidLat(latitude) == false) {
      throw new IllegalArgumentException("latitude: '" + latitude + "' is invalid");
    }
    if (GeoUtils.isValidLon(longitude) == false) {
      throw new IllegalArgumentException("longitude: '" + longitude + "' is invalid");
    }
    this.field = field;
    this.latitude = latitude;
    this.longitude = longitude;
    this.radiusMeters = radiusMeters;
  }

  @Override
  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
    GeoRect box = GeoUtils.circleToBBox(longitude, latitude, radiusMeters);
    final GeoRect box1;
    final GeoRect box2;

    // crosses dateline: split
    if (box.maxLon < box.minLon) {
      box1 = new GeoRect(-180.0, box.maxLon, box.minLat, box.maxLat);
      box2 = new GeoRect(box.minLon, 180.0, box.minLat, box.maxLat);
    } else {
      box1 = box;
      box2 = null;
    }

    return new ConstantScoreWeight(this) {

      @Override
      public Scorer scorer(LeafReaderContext context) throws IOException {
        LeafReader reader = context.reader();
        PointValues values = reader.getPointValues();
        if (values == null) {
          // No docs in this segment had any points fields
          return null;
        }

        DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc());
        values.intersect(field,
            new IntersectVisitor() {
              @Override
              public void visit(int docID) {
                result.add(docID);
              }

              @Override
              public void visit(int docID, byte[] packedValue) {
                assert packedValue.length == 8;
                double lat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(packedValue, 0));
                double lon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(packedValue, Integer.BYTES));
                if (GeoDistanceUtils.haversin(latitude, longitude, lat, lon) <= radiusMeters) {
                  visit(docID);
                }
              }

              // algorithm: we create a bounding box (two bounding boxes if we cross the dateline).
              // 1. check our bounding box(es) first. if the subtree is entirely outside of those, bail.
              // 2. see if the subtree is fully contained. if the subtree is enormous along the x axis, wrapping half way around the world, etc: then this can't work, just go to step 3.
              // 3. recurse naively.
              @Override
              public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
                double latMin = LatLonPoint.decodeLat(NumericUtils.bytesToInt(minPackedValue, 0));
                double lonMin = LatLonPoint.decodeLon(NumericUtils.bytesToInt(minPackedValue, Integer.BYTES));
                double latMax = LatLonPoint.decodeLat(NumericUtils.bytesToInt(maxPackedValue, 0));
                double lonMax = LatLonPoint.decodeLon(NumericUtils.bytesToInt(maxPackedValue, Integer.BYTES));

                if ((latMax < box1.minLat || lonMax < box1.minLon || latMin > box1.maxLat || lonMin > box1.maxLon) &&
                    (box2 == null || latMax < box2.minLat || lonMax < box2.minLon || latMin > box2.maxLat || lonMin > box2.maxLon)) {
                  // we are fully outside of bounding box(es), don't proceed any further.
                  return Relation.CELL_OUTSIDE_QUERY;
                } else if (lonMax - longitude < 90 && longitude - lonMin < 90 &&
                    GeoDistanceUtils.haversin(latitude, longitude, latMin, lonMin) <= radiusMeters &&
                    GeoDistanceUtils.haversin(latitude, longitude, latMin, lonMax) <= radiusMeters &&
                    GeoDistanceUtils.haversin(latitude, longitude, latMax, lonMin) <= radiusMeters &&
                    GeoDistanceUtils.haversin(latitude, longitude, latMax, lonMax) <= radiusMeters) {
                  // we are fully enclosed, collect everything within this subtree
                  return Relation.CELL_INSIDE_QUERY;
                } else {
                  // recurse: it's inside our bounding box(es), but not fully, or it wraps around.
                  return Relation.CELL_CROSSES_QUERY;
                }
              }
            });

        return new ConstantScoreScorer(this, score(), result.build().iterator());
      }
    };
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = super.hashCode();
    result = prime * result + field.hashCode();
    long temp;
    temp = Double.doubleToLongBits(latitude);
    result = prime * result + (int) (temp ^ (temp >>> 32));
    temp = Double.doubleToLongBits(longitude);
    result = prime * result + (int) (temp ^ (temp >>> 32));
    temp = Double.doubleToLongBits(radiusMeters);
    result = prime * result + (int) (temp ^ (temp >>> 32));
    return result;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) return true;
    if (!super.equals(obj)) return false;
    if (getClass() != obj.getClass()) return false;
    PointDistanceQuery other = (PointDistanceQuery) obj;
    if (!field.equals(other.field)) return false;
    if (Double.doubleToLongBits(latitude) != Double.doubleToLongBits(other.latitude)) return false;
    if (Double.doubleToLongBits(longitude) != Double.doubleToLongBits(other.longitude)) return false;
    if (Double.doubleToLongBits(radiusMeters) != Double.doubleToLongBits(other.radiusMeters)) return false;
    return true;
  }

  @Override
  public String toString(String field) {
    StringBuilder sb = new StringBuilder();
    if (!this.field.equals(field)) {
      sb.append(this.field);
      sb.append(':');
    }
    sb.append(latitude);
    sb.append(",");
    sb.append(longitude);
    sb.append(" +/- ");
    sb.append(radiusMeters);
    sb.append(" meters");
    return sb.toString();
  }
}
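Both the per-point and per-cell tests in this query reduce to a haversine distance check. A self-contained sketch of that formula, assuming a mean earth radius of 6371008.7714 meters (the patch itself delegates to GeoDistanceUtils.haversin, whose exact constants may differ):

    // Great-circle distance in meters between two (lat, lon) pairs in degrees.
    static double haversinMeters(double lat1, double lon1, double lat2, double lon2) {
      double dLat = Math.toRadians(lat2 - lat1);
      double dLon = Math.toRadians(lon2 - lon1);
      double a = Math.sin(dLat / 2) * Math.sin(dLat / 2)
               + Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2))
                 * Math.sin(dLon / 2) * Math.sin(dLon / 2);
      return 2 * 6371008.7714 * Math.asin(Math.sqrt(a));
    }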
@@ -111,12 +111,10 @@ public class PointInPolygonQuery extends Query {
        }

        DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc());
        int[] hitCount = new int[1];
        values.intersect(field,
            new IntersectVisitor() {
              @Override
              public void visit(int docID) {
                hitCount[0]++;
                result.add(docID);
              }

@@ -126,7 +124,6 @@ public class PointInPolygonQuery extends Query {
                double lat = LatLonPoint.decodeLat(NumericUtils.bytesToInt(packedValue, 0));
                double lon = LatLonPoint.decodeLon(NumericUtils.bytesToInt(packedValue, Integer.BYTES));
                if (GeoRelationUtils.pointInPolygon(polyLons, polyLats, lat, lon)) {
                  hitCount[0]++;
                  result.add(docID);
                }
              }

@@ -155,8 +152,7 @@ public class PointInPolygonQuery extends Query {
              }
            });

        // NOTE: hitCount[0] will be over-estimate in multi-valued case
        return new ConstantScoreScorer(this, score(), result.build(hitCount[0]).iterator());
        return new ConstantScoreScorer(this, score(), result.build().iterator());
      }
    };
  }
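The deleted hitCount[] tally existed only to feed a cost estimate into DocIdSetBuilder.build(long), and as the removed NOTE admits it over-counted in the multi-valued case; with the no-arg build() used now, the builder presumably tracks its own cost, so the counter can go away entirely.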
@@ -16,11 +16,29 @@
 */
package org.apache.lucene.document;

import java.io.IOException;
import java.util.BitSet;

import org.apache.lucene.codecs.FilterCodec;
import org.apache.lucene.codecs.PointFormat;
import org.apache.lucene.codecs.PointReader;
import org.apache.lucene.codecs.PointWriter;
import org.apache.lucene.codecs.lucene60.Lucene60PointReader;
import org.apache.lucene.codecs.lucene60.Lucene60PointWriter;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.spatial.util.GeoDistanceUtils;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.bkd.BKDWriter;

/** Simple tests for {@link LatLonPoint} */
public class TestLatLonPoint extends LuceneTestCase {

@@ -53,4 +71,92 @@ public class TestLatLonPoint extends LuceneTestCase {
    // looks crazy due to lossiness
    assertEquals("field:[17.99999997485429 TO 18.999999999068677},[-65.9999999217689 TO -64.99999998137355}", LatLonPoint.newBoxQuery("field", 18, 19, -66, -65).toString());
  }

  public void testRadiusRandom() throws Exception {
    for (int iters = 0; iters < 100; iters++) {
      doRandomTest(10, 100);
    }
  }

  @Nightly
  public void testRadiusRandomHuge() throws Exception {
    for (int iters = 0; iters < 10; iters++) {
      doRandomTest(2000, 100);
    }
  }

  private void doRandomTest(int numDocs, int numQueries) throws IOException {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig();
    int pointsInLeaf = 2 + random().nextInt(4);
    iwc.setCodec(new FilterCodec("Lucene60", TestUtil.getDefaultCodec()) {
      @Override
      public PointFormat pointFormat() {
        return new PointFormat() {
          @Override
          public PointWriter fieldsWriter(SegmentWriteState writeState) throws IOException {
            return new Lucene60PointWriter(writeState, pointsInLeaf, BKDWriter.DEFAULT_MAX_MB_SORT_IN_HEAP);
          }

          @Override
          public PointReader fieldsReader(SegmentReadState readState) throws IOException {
            return new Lucene60PointReader(readState);
          }
        };
      }
    });
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);

    for (int i = 0; i < numDocs; i++) {
      double latRaw = -90 + 180.0 * random().nextDouble();
      double lonRaw = -180 + 360.0 * random().nextDouble();
      // pre-normalize up front, so we can just use quantized value for testing and do simple exact comparisons
      double lat = LatLonPoint.decodeLat(LatLonPoint.encodeLat(latRaw));
      double lon = LatLonPoint.decodeLon(LatLonPoint.encodeLon(lonRaw));
      Document doc = new Document();
      doc.add(new LatLonPoint("field", lat, lon));
      doc.add(new StoredField("lat", lat));
      doc.add(new StoredField("lon", lon));
      writer.addDocument(doc);
    }
    IndexReader reader = writer.getReader();
    IndexSearcher searcher = new IndexSearcher(reader);

    for (int i = 0; i < numQueries; i++) {
      double lat = -90 + 180.0 * random().nextDouble();
      double lon = -180 + 360.0 * random().nextDouble();
      double radius = 50000000 * random().nextDouble();

      BitSet expected = new BitSet();
      for (int doc = 0; doc < reader.maxDoc(); doc++) {
        double docLatitude = reader.document(doc).getField("lat").numericValue().doubleValue();
        double docLongitude = reader.document(doc).getField("lon").numericValue().doubleValue();
        double distance = GeoDistanceUtils.haversin(lat, lon, docLatitude, docLongitude);
        if (distance <= radius) {
          expected.set(doc);
        }
      }

      TopDocs topDocs = searcher.search(LatLonPoint.newDistanceQuery("field", lat, lon, radius), reader.maxDoc(), Sort.INDEXORDER);
      BitSet actual = new BitSet();
      for (ScoreDoc doc : topDocs.scoreDocs) {
        actual.set(doc.doc);
      }

      try {
        assertEquals(expected, actual);
      } catch (AssertionError e) {
        for (int doc = 0; doc < reader.maxDoc(); doc++) {
          double docLatitude = reader.document(doc).getField("lat").numericValue().doubleValue();
          double docLongitude = reader.document(doc).getField("lon").numericValue().doubleValue();
          double distance = GeoDistanceUtils.haversin(lat, lon, docLatitude, docLongitude);
          System.out.println("" + doc + ": (" + docLatitude + "," + docLongitude + "), distance=" + distance);
        }
        throw e;
      }
    }
    reader.close();
    writer.close();
    dir.close();
  }
}
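doRandomTest pre-quantizes every coordinate before indexing so the brute-force check and the query see exactly the same values. A small sketch of the round-trip it relies on, assuming (as the test's own comment does) that decode(encode(x)) is a fixed point of the encoding:

    double latRaw = 40.123456789;
    double lat1 = LatLonPoint.decodeLat(LatLonPoint.encodeLat(latRaw)); // quantized once
    double lat2 = LatLonPoint.decodeLat(LatLonPoint.encodeLat(lat1));   // quantized again
    assert lat1 == lat2;   // stable, so exact == comparisons in the test are safe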
@@ -39,8 +39,7 @@ public class TestLatLonPointQueries extends BaseGeoPointTestCase {

  @Override
  protected Query newDistanceQuery(String field, double centerLat, double centerLon, double radiusMeters) {
    // return new BKDDistanceQuery(field, centerLat, centerLon, radiusMeters);
    return null;
    return LatLonPoint.newDistanceQuery(field, centerLat, centerLon, radiusMeters);
  }

  @Override

@@ -58,6 +57,53 @@ public class TestLatLonPointQueries extends BaseGeoPointTestCase {

    assert Double.isNaN(pointLat) == false;

    int rectLatMinEnc = LatLonPoint.encodeLat(rect.minLat);
    int rectLatMaxEnc = LatLonPoint.encodeLat(rect.maxLat);
    int rectLonMinEnc = LatLonPoint.encodeLon(rect.minLon);
    int rectLonMaxEnc = LatLonPoint.encodeLon(rect.maxLon);

    int pointLatEnc = LatLonPoint.encodeLat(pointLat);
    int pointLonEnc = LatLonPoint.encodeLon(pointLon);

    if (rect.minLon < rect.maxLon) {
      return pointLatEnc >= rectLatMinEnc &&
             pointLatEnc < rectLatMaxEnc &&
             pointLonEnc >= rectLonMinEnc &&
             pointLonEnc < rectLonMaxEnc;
    } else {
      // Rect crosses dateline:
      return pointLatEnc >= rectLatMinEnc &&
             pointLatEnc < rectLatMaxEnc &&
             (pointLonEnc >= rectLonMinEnc ||
              pointLonEnc < rectLonMaxEnc);
    }
  }

  @Override
  protected double quantizeLat(double latRaw) {
    return LatLonPoint.decodeLat(LatLonPoint.encodeLat(latRaw));
  }

  @Override
  protected double quantizeLon(double lonRaw) {
    return LatLonPoint.decodeLon(LatLonPoint.encodeLon(lonRaw));
  }

  // todo reconcile with GeoUtils (see LUCENE-6996)
  public static double compare(final double v1, final double v2) {
    final double delta = v1-v2;
    return Math.abs(delta) <= BKD_TOLERANCE ? 0 : delta;
  }

  @Override
  protected Boolean polyRectContainsPoint(GeoRect rect, double pointLat, double pointLon) {
    // TODO write better random polygon tests

    assert Double.isNaN(pointLat) == false;

    // TODO: this comment is wrong! we have fixed the quantization error (we now pre-quantize all randomly generated test points) yet the test
    // still fails if we remove this evil "return null":

    // false positive/negatives due to quantization error exist for both rectangles and polygons
    if (compare(pointLat, rect.minLat) == 0
        || compare(pointLat, rect.maxLat) == 0

@@ -88,18 +134,6 @@ public class TestLatLonPointQueries extends BaseGeoPointTestCase {
    }
  }

  // todo reconcile with GeoUtils (see LUCENE-6996)
  public static double compare(final double v1, final double v2) {
    final double delta = v1-v2;
    return Math.abs(delta) <= BKD_TOLERANCE ? 0 : delta;
  }

  @Override
  protected Boolean polyRectContainsPoint(GeoRect rect, double pointLat, double pointLon) {
    // TODO write better random polygon tests
    return rectContainsPoint(rect, pointLat, pointLon);
  }

  @Override
  protected Boolean circleContainsPoint(double centerLat, double centerLon, double radiusMeters, double pointLat, double pointLon) {
    double distanceMeters = GeoDistanceUtils.haversin(centerLat, centerLon, pointLat, pointLon);
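Note that rectContainsPoint decides containment in the encoded integer space, with half-open [min, max) bounds, so the expected answer matches what the index stores rather than the raw doubles. A tiny sketch of the idea (the values are illustrative):

    // Compare in encoded space, exactly as the index does; near a cell boundary
    // this can disagree with a raw double comparison, which is the point.
    int pointLatEnc   = LatLonPoint.encodeLat(18.999999999);
    int rectLatMaxEnc = LatLonPoint.encodeLat(19.0);
    boolean inside = pointLatEnc < rectLatMaxEnc;   // half-open upper bound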
@@ -0,0 +1,57 @@
<?xml version="1.0"?>
<!--
    Licensed to the Apache Software Foundation (ASF) under one
    or more contributor license agreements. See the NOTICE file
    distributed with this work for additional information
    regarding copyright ownership. The ASF licenses this file
    to you under the Apache License, Version 2.0 (the
    "License"); you may not use this file except in compliance
    with the License. You may obtain a copy of the License at

        http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing,
    software distributed under the License is distributed on an
    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
    KIND, either express or implied. See the License for the
    specific language governing permissions and limitations
    under the License.
-->
<project name="spatial-extras" default="default">
  <description>
    Geospatial search
  </description>

  <import file="../module-build.xml"/>

  <path id="spatialjar">
    <fileset dir="lib"/>
  </path>

  <path id="classpath">
    <path refid="base.classpath"/>
    <path refid="spatialjar"/>
    <pathelement path="${queries.jar}" />
    <pathelement path="${misc.jar}" />
    <pathelement path="${spatial3d.jar}" />
  </path>

  <path id="test.classpath">
    <path refid="test.base.classpath" />
    <path refid="spatialjar"/>
    <pathelement path="src/test-files" />
  </path>

  <target name="compile-core" depends="jar-queries,jar-misc,jar-spatial3d,common.compile-core" />

  <target name="javadocs" depends="javadocs-queries,javadocs-misc,javadocs-spatial3d,compile-core,check-javadocs-uptodate"
          unless="javadocs-uptodate-${name}">
    <invoke-module-javadoc>
      <links>
        <link href="../queries"/>
        <link href="../misc"/>
        <link href="../spatial3d"/>
      </links>
    </invoke-module-javadoc>
  </target>
</project>
@@ -0,0 +1,36 @@
<!--
    Licensed to the Apache Software Foundation (ASF) under one
    or more contributor license agreements. See the NOTICE file
    distributed with this work for additional information
    regarding copyright ownership. The ASF licenses this file
    to you under the Apache License, Version 2.0 (the
    "License"); you may not use this file except in compliance
    with the License. You may obtain a copy of the License at

        http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing,
    software distributed under the License is distributed on an
    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
    KIND, either express or implied. See the License for the
    specific language governing permissions and limitations
    under the License.
-->
<ivy-module version="2.0" xmlns:maven="http://ant.apache.org/ivy/maven">
  <info organisation="org.apache.lucene" module="spatial-extras"/>
  <configurations defaultconfmapping="compile->master;test->master">
    <conf name="compile" transitive="false"/>
    <conf name="test" transitive="false"/>
  </configurations>
  <dependencies>
    <dependency org="com.spatial4j" name="spatial4j" rev="${/com.spatial4j/spatial4j}" conf="compile"/>

    <dependency org="com.spatial4j" name="spatial4j" rev="${/com.spatial4j/spatial4j}" conf="test">
      <artifact name="spatial4j" type="test" ext="jar" maven:classifier="tests" />
    </dependency>

    <dependency org="org.slf4j" name="slf4j-api" rev="${/org.slf4j/slf4j-api}" conf="test"/>

    <exclude org="*" ext="*" matcher="regexp" type="${ivy.exclude.types}"/>
  </dependencies>
</ivy-module>
@@ -586,6 +586,3 @@ public class BBoxStrategy extends SpatialStrategy {
  }
}
@@ -14,8 +14,8 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
/**
 * Bounding Box Spatial Strategy
 * <p>
 * Index a shape extent using 4 numeric fields and a flag to say if it crosses the dateline
@@ -30,7 +30,6 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;

/**
 * A Query that considers an "indexQuery" to have approximate results, and a follow-on
@@ -0,0 +1,26 @@
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
  Licensed to the Apache Software Foundation (ASF) under one or more
  contributor license agreements. See the NOTICE file distributed with
  this work for additional information regarding copyright ownership.
  The ASF licenses this file to You under the Apache License, Version 2.0
  (the "License"); you may not use this file except in compliance with
  the License. You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-->
<!-- not a package-info.java, because we already defined this package in spatial/ -->
<html>
  <head>
    <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
  </head>
  <body>
    Lucene advanced spatial search
  </body>
</html>
@@ -115,7 +115,7 @@ public class PrefixTreeFacetCounter {
    //AbstractVisitingPrefixTreeFilter is a Lucene Filter. We don't need a filter; we use it for its great prefix-tree
    // traversal code. TODO consider refactoring if/when it makes sense (more use cases than this)
    new AbstractVisitingPrefixTreeQuery(queryShape, strategy.getFieldName(), tree, facetLevel, scanLevel) {

      @Override
      public String toString(String field) {
        return "anonPrefixTreeQuery"; // un-used
@@ -190,7 +190,3 @@ public class RecursivePrefixTreeStrategy extends PrefixTreeStrategy {
    throw new UnsupportedSpatialOperation(op);
  }
}
@@ -31,7 +31,6 @@ import org.apache.lucene.spatial.prefix.tree.Cell;
import org.apache.lucene.spatial.prefix.tree.CellIterator;
import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;

/**

@@ -89,7 +88,7 @@ public class WithinPrefixTreeQuery extends AbstractVisitingPrefixTreeQuery {
    result = 31 * result + (bufferedQueryShape != null ? bufferedQueryShape.hashCode() : 0);
    return result;
  }

  @Override
  public String toString(String field) {
    return getClass().getSimpleName() + "(" +
@@ -14,8 +14,8 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
/**
 * Prefix Tree Strategy.
 */
package org.apache.lucene.spatial.prefix;
@@ -14,8 +14,8 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
/**
 * This package is about SpatialPrefixTree and any supporting classes.
 * A SpatialPrefixTree supports spatial indexing by index-time tokens
 * where adding characters to a string gives greater resolution.
Some files were not shown because too many files have changed in this diff.