Mirror of https://github.com/apache/lucene.git (synced 2025-03-03 23:09:36 +00:00)

commit e325973119
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/lucene-solr
@@ -14,6 +14,7 @@
     <orderEntry type="library" scope="TEST" name="JUnit" level="project" />
     <orderEntry type="module" scope="TEST" module-name="lucene-test-framework" />
     <orderEntry type="module" module-name="grouping" />
+    <orderEntry type="module" module-name="backward-codecs" />
     <orderEntry type="module" module-name="lucene-core" />
   </component>
 </module>
@@ -17,5 +17,6 @@
     <orderEntry type="module" module-name="lucene-core" />
     <orderEntry type="module" module-name="queries" />
     <orderEntry type="module" module-name="sandbox" />
+    <orderEntry type="module" module-name="backward-codecs" />
   </component>
 </module>
@@ -27,6 +27,7 @@
     <orderEntry type="module" module-name="queries" />
     <orderEntry type="module" module-name="misc" />
     <orderEntry type="module" module-name="spatial3d" />
+    <orderEntry type="module" module-name="backward-codecs" />
     <orderEntry type="module" module-name="analysis-common" scope="TEST"/>
   </component>
 </module>
@@ -20,6 +20,7 @@
     <orderEntry type="module" module-name="queries" />
     <orderEntry type="module" module-name="solr-core" />
     <orderEntry type="module" module-name="solrj" />
+    <orderEntry type="module" module-name="backward-codecs" />
   </component>
 </module>

@@ -31,5 +31,6 @@
     <orderEntry type="module" module-name="queryparser" />
     <orderEntry type="module" module-name="join" />
     <orderEntry type="module" module-name="sandbox" />
+    <orderEntry type="module" module-name="backward-codecs" />
   </component>
 </module>
@@ -32,5 +32,6 @@
     <orderEntry type="module" scope="TEST" module-name="expressions" />
     <orderEntry type="module" scope="TEST" module-name="icu" />
     <orderEntry type="module" scope="TEST" module-name="analysis-extras" />
+    <orderEntry type="module" scope="TEST" module-name="backward-codecs" />
   </component>
 </module>
@@ -217,7 +217,9 @@ def main():
   update_changes('lucene/CHANGES.txt', c.version)
   update_changes('solr/CHANGES.txt', c.version, get_solr_init_changes())

-  if current_version.is_back_compat_with(c.version):
+  is_back_compat = current_version.major == c.version.major or current_version.is_back_compat_with(c.version)
+
+  if is_back_compat:
     add_constant(c.version, not c.is_latest_version)
   else:
     print('\nNot adding constant for version %s because it is no longer supported' % c.version)
@@ -232,7 +234,7 @@ def main():
     print('\nTODO: ')
     print(' - Move backcompat oldIndexes to unsupportedIndexes in TestBackwardsCompatibility')
     print(' - Update IndexFormatTooOldException throw cases')
-  elif current_version.is_back_compat_with(c.version):
+  elif is_back_compat:
     print('\nTesting changes')
     check_lucene_version_tests()
     check_solr_version_tests()
@@ -218,7 +218,7 @@ def check_cmdline_tools():  # Fail fast if there are cmdline tool problems
   if os.system('git --version >/dev/null 2>/dev/null'):
     raise RuntimeError('"git --version" returned a non-zero exit code.')
   antVersion = os.popen('ant -version').read().strip()
-  if not antVersion.startswith('Apache Ant(TM) version 1.8'):
+  if not antVersion.startswith('Apache Ant(TM) version 1.8') and not antVersion.startswith('Apache Ant(TM) version 1.9'):
     raise RuntimeError('ant version is not 1.8.X: "%s"' % antVersion)

 def main():
@@ -30,6 +30,9 @@ Other

 * LUCENE-7360: Remove Explanation.toHtml() (Alan Woodward)

+======================= Lucene 6.3.0 =======================
+(No Changes)
+
 ======================= Lucene 6.2.0 =======================

 API Changes
@@ -38,6 +41,9 @@ API Changes

 New Features

+* LUCENE-7388: Add point based IntRangeField, FloatRangeField, LongRangeField along with
+  supporting queries and tests (Nick Knize)
+
 * LUCENE-7381: Add point based DoubleRangeField and RangeFieldQuery for
   indexing and querying on Ranges up to 4 dimensions (Nick Knize)

@@ -85,6 +91,12 @@ Bug Fixes
 * LUCENE-7391: Fix performance regression in MemoryIndex's fields() introduced
   in Lucene 6. (Steve Mason via David Smiley)

+* SOLR-9413: Fix analysis/kuromoji's CSVUtil.quoteEscape logic, add TestCSVUtil test.
+  (AppChecker, Christine Poerschke)
+
+* LUCENE-7419: Fix performance bug with TokenStream.end(), where it would lookup
+  PositionIncrementAttribute every time. (Mike McCandless, Robert Muir)
+
 Improvements

 * LUCENE-7323: Compound file writing now verifies the incoming
@@ -142,6 +154,13 @@ Improvements
   because the ICU word-breaking algorithm has some issues. This allows for the previous
   tokenization used before Lucene 5. (AM, Robert Muir)

+* LUCENE-7409: Changed MMapDirectory's unmapping to work safer, but still with
+  no guarantees. This uses a store-store barrier and yields the current thread
+  before unmapping to allow in-flight requests to finish. The new code no longer
+  uses WeakIdentityMap as it delegates all ByteBuffer reads through a new
+  ByteBufferGuard wrapper that is shared between all ByteBufferIndexInput clones.
+  (Robert Muir, Uwe Schindler)
+
 Optimizations

 * LUCENE-7330, LUCENE-7339: Speed up conjunction queries. (Adrien Grand)
@@ -101,7 +101,7 @@ public final class CSVUtil {
     String result = original;

     if (result.indexOf('\"') >= 0) {
-      result.replace("\"", ESCAPED_QUOTE);
+      result = result.replace("\"", ESCAPED_QUOTE);
     }
     if(result.indexOf(COMMA) >= 0) {
       result = "\"" + result + "\"";
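Note on the fix above: java.lang.String is immutable, so replace() returns a new
string and leaves the receiver untouched; the unassigned call was a no-op and
quotes were never escaped. A minimal standalone sketch (ReplaceDemo is a
hypothetical name, not part of this commit):

public class ReplaceDemo {
  public static void main(String[] args) {
    String s = "say \"hi\"";
    s.replace("\"", "\"\"");      // returned value discarded; s is unchanged
    System.out.println(s);        // prints: say "hi"
    s = s.replace("\"", "\"\"");  // the fix: keep the returned string
    System.out.println(s);        // prints: say ""hi""
  }
}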
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.analysis.ja;
+
+import java.io.IOException;
+
+import org.apache.lucene.analysis.ja.util.CSVUtil;
+import org.apache.lucene.util.LuceneTestCase;
+
+/*
+ * Tests for the CSVUtil class.
+ */
+public class TestCSVUtil extends LuceneTestCase {
+
+  public void testQuoteEscapeQuotes() throws IOException {
+    final String input = "\"Let It Be\" is a song and album by The Beatles.";
+    final String expectedOutput = input.replace("\"", "\"\"");
+    implTestQuoteEscape(input, expectedOutput);
+  }
+
+  public void testQuoteEscapeComma() throws IOException {
+    final String input = "To be, or not to be ...";
+    final String expectedOutput = '"' + input + '"';
+    implTestQuoteEscape(input, expectedOutput);
+  }
+
+  public void testQuoteEscapeQuotesAndComma() throws IOException {
+    final String input = "\"To be, or not to be ...\" is a well-known phrase from Shakespeare's Hamlet.";
+    final String expectedOutput = '"' + input.replace("\"", "\"\"") + '"';
+    implTestQuoteEscape(input, expectedOutput);
+  }
+
+  private void implTestQuoteEscape(String input, String expectedOutput) throws IOException {
+    final String actualOutput = CSVUtil.quoteEscape(input);
+    assertEquals(expectedOutput, actualOutput);
+  }
+
+}
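Usage sketch (hypothetical CsvRowDemo class, not part of this commit; it assumes
only the CSVUtil.quoteEscape(String) behavior the tests above verify: embedded
quotes are doubled, and values containing a comma are additionally wrapped in
quotes):

import org.apache.lucene.analysis.ja.util.CSVUtil;

public class CsvRowDemo {
  public static void main(String[] args) {
    String[] cells = {"plain", "has, comma", "has \"quotes\""};
    StringBuilder row = new StringBuilder();
    for (int i = 0; i < cells.length; i++) {
      if (i > 0) row.append(',');
      row.append(CSVUtil.quoteEscape(cells[i]));  // escape each cell before joining
    }
    System.out.println(row);  // plain,"has, comma",has ""quotes""
  }
}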
@@ -14,9 +14,11 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.lucene.document;
+package org.apache.lucene.legacy;


+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoublePoint;
 import org.apache.lucene.index.IndexOptions;


@@ -49,7 +51,7 @@ import org.apache.lucene.index.IndexOptions;
  * LegacyFloatField}.
  *
  * <p>To perform range querying or filtering against a
- * <code>LegacyDoubleField</code>, use {@link org.apache.lucene.search.LegacyNumericRangeQuery}.
+ * <code>LegacyDoubleField</code>, use {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}.
  * To sort according to a
  * <code>LegacyDoubleField</code>, use the normal numeric sort types, eg
  * {@link org.apache.lucene.search.SortField.Type#DOUBLE}. <code>LegacyDoubleField</code>
@@ -79,11 +81,11 @@ import org.apache.lucene.index.IndexOptions;
  * but may result in faster range search performance. The
  * default value, 16, was selected for a reasonable tradeoff
  * of disk space consumption versus performance. You can
- * create a custom {@link FieldType} and invoke the {@link
- * FieldType#setNumericPrecisionStep} method if you'd
+ * create a custom {@link LegacyFieldType} and invoke the {@link
+ * LegacyFieldType#setNumericPrecisionStep} method if you'd
  * like to change the value. Note that you must also
  * specify a congruent value when creating {@link
- * org.apache.lucene.search.LegacyNumericRangeQuery}.
+ * org.apache.lucene.legacy.LegacyNumericRangeQuery}.
  * For low cardinality fields larger precision steps are good.
  * If the cardinality is < 100, it is fair
  * to use {@link Integer#MAX_VALUE}, which produces one
@@ -91,9 +93,9 @@ import org.apache.lucene.index.IndexOptions;
  *
  * <p>For more information on the internals of numeric trie
  * indexing, including the <a
- * href="../search/LegacyNumericRangeQuery.html#precisionStepDesc"><code>precisionStep</code></a>
- * configuration, see {@link org.apache.lucene.search.LegacyNumericRangeQuery}. The format of
- * indexed values is described in {@link org.apache.lucene.util.LegacyNumericUtils}.
+ * href="LegacyNumericRangeQuery.html#precisionStepDesc"><code>precisionStep</code></a>
+ * configuration, see {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}. The format of
+ * indexed values is described in {@link org.apache.lucene.legacy.LegacyNumericUtils}.
  *
  * <p>If you only need to sort by numeric value, and never
  * run range querying/filtering, you can index using a
@@ -101,7 +103,7 @@ import org.apache.lucene.index.IndexOptions;
  * This will minimize disk space consumed. </p>
  *
  * <p>More advanced users can instead use {@link
- * org.apache.lucene.analysis.LegacyNumericTokenStream} directly, when indexing numbers. This
+ * org.apache.lucene.legacy.LegacyNumericTokenStream} directly, when indexing numbers. This
  * class is a wrapper around this token stream type for
  * easier, more intuitive usage.</p>
  *
@@ -111,18 +113,18 @@ import org.apache.lucene.index.IndexOptions;
  */

 @Deprecated
-public final class LegacyDoubleField extends Field {
+public final class LegacyDoubleField extends LegacyField {

   /**
    * Type for a LegacyDoubleField that is not stored:
    * normalization factors, frequencies, and positions are omitted.
    */
-  public static final FieldType TYPE_NOT_STORED = new FieldType();
+  public static final LegacyFieldType TYPE_NOT_STORED = new LegacyFieldType();
   static {
     TYPE_NOT_STORED.setTokenized(true);
     TYPE_NOT_STORED.setOmitNorms(true);
     TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS);
-    TYPE_NOT_STORED.setNumericType(FieldType.LegacyNumericType.DOUBLE);
+    TYPE_NOT_STORED.setNumericType(LegacyNumericType.DOUBLE);
     TYPE_NOT_STORED.freeze();
   }

@@ -130,19 +132,19 @@ public final class LegacyDoubleField extends Field {
    * Type for a stored LegacyDoubleField:
    * normalization factors, frequencies, and positions are omitted.
    */
-  public static final FieldType TYPE_STORED = new FieldType();
+  public static final LegacyFieldType TYPE_STORED = new LegacyFieldType();
   static {
     TYPE_STORED.setTokenized(true);
     TYPE_STORED.setOmitNorms(true);
     TYPE_STORED.setIndexOptions(IndexOptions.DOCS);
-    TYPE_STORED.setNumericType(FieldType.LegacyNumericType.DOUBLE);
+    TYPE_STORED.setNumericType(LegacyNumericType.DOUBLE);
     TYPE_STORED.setStored(true);
     TYPE_STORED.freeze();
   }

   /** Creates a stored or un-stored LegacyDoubleField with the provided value
    *  and default <code>precisionStep</code> {@link
-   *  org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16).
+   *  org.apache.lucene.legacy.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16).
    *  @param name field name
    *  @param value 64-bit double value
    *  @param stored Store.YES if the content should also be stored
@@ -154,17 +156,17 @@ public final class LegacyDoubleField extends Field {
   }

   /** Expert: allows you to customize the {@link
-   *  FieldType}.
+   *  LegacyFieldType}.
    *  @param name field name
    *  @param value 64-bit double value
-   *  @param type customized field type: must have {@link FieldType#numericType()}
-   *         of {@link org.apache.lucene.document.FieldType.LegacyNumericType#DOUBLE}.
+   *  @param type customized field type: must have {@link LegacyFieldType#numericType()}
+   *         of {@link LegacyNumericType#DOUBLE}.
    *  @throws IllegalArgumentException if the field name or type is null, or
    *          if the field type does not have a DOUBLE numericType()
    */
-  public LegacyDoubleField(String name, double value, FieldType type) {
+  public LegacyDoubleField(String name, double value, LegacyFieldType type) {
     super(name, type);
-    if (type.numericType() != FieldType.LegacyNumericType.DOUBLE) {
+    if (type.numericType() != LegacyNumericType.DOUBLE) {
       throw new IllegalArgumentException("type.numericType() must be DOUBLE but got " + type.numericType());
     }
     fieldsData = Double.valueOf(value);
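Usage sketch for the relocated class (hypothetical LegacyDoubleFieldDemo, not
part of this commit; the constructor and Store parameter follow the javadoc
above):

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.legacy.LegacyDoubleField;

public class LegacyDoubleFieldDemo {
  public static Document priceDoc() {
    Document doc = new Document();
    // Store.YES keeps the original value alongside the indexed trie terms.
    doc.add(new LegacyDoubleField("price", 19.99, Field.Store.YES));
    return doc;
  }
}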
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.legacy;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexOptions;
+
+/**
+ * Field extension with support for legacy numerics
+ * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
+ */
+@Deprecated
+public class LegacyField extends Field {
+
+  /**
+   * Expert: creates a field with no initial value.
+   * Intended only for custom LegacyField subclasses.
+   * @param name field name
+   * @param type field type
+   * @throws IllegalArgumentException if either the name or type
+   *         is null.
+   */
+  public LegacyField(String name, LegacyFieldType type) {
+    super(name, type);
+  }
+
+  @Override
+  public TokenStream tokenStream(Analyzer analyzer, TokenStream reuse) {
+    if (fieldType().indexOptions() == IndexOptions.NONE) {
+      // Not indexed
+      return null;
+    }
+    final LegacyFieldType fieldType = (LegacyFieldType) fieldType();
+    final LegacyNumericType numericType = fieldType.numericType();
+    if (numericType != null) {
+      if (!(reuse instanceof LegacyNumericTokenStream && ((LegacyNumericTokenStream)reuse).getPrecisionStep() == fieldType.numericPrecisionStep())) {
+        // lazy init the TokenStream as it is heavy to instantiate
+        // (attributes,...) if not needed (stored field loading)
+        reuse = new LegacyNumericTokenStream(fieldType.numericPrecisionStep());
+      }
+      final LegacyNumericTokenStream nts = (LegacyNumericTokenStream) reuse;
+      // initialize value in TokenStream
+      final Number val = (Number) fieldsData;
+      switch (numericType) {
+      case INT:
+        nts.setIntValue(val.intValue());
+        break;
+      case LONG:
+        nts.setLongValue(val.longValue());
+        break;
+      case FLOAT:
+        nts.setFloatValue(val.floatValue());
+        break;
+      case DOUBLE:
+        nts.setDoubleValue(val.doubleValue());
+        break;
+      default:
+        throw new AssertionError("Should never get here");
+      }
+      return reuse;
+    }
+    return super.tokenStream(analyzer, reuse);
+  }
+
+  @Override
+  public void setTokenStream(TokenStream tokenStream) {
+    final LegacyFieldType fieldType = (LegacyFieldType) fieldType();
+    if (fieldType.numericType() != null) {
+      throw new IllegalArgumentException("cannot set private TokenStream on numeric fields");
+    }
+    super.setTokenStream(tokenStream);
+  }
+
+}
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.legacy;
+
+import org.apache.lucene.document.FieldType;
+import org.apache.lucene.index.IndexOptions;
+
+/**
+ * FieldType extension with support for legacy numerics
+ * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
+ */
+@Deprecated
+public final class LegacyFieldType extends FieldType {
+  private LegacyNumericType numericType;
+  private int numericPrecisionStep = LegacyNumericUtils.PRECISION_STEP_DEFAULT;
+
+  /**
+   * Create a new mutable LegacyFieldType with all of the properties from <code>ref</code>
+   */
+  public LegacyFieldType(LegacyFieldType ref) {
+    super(ref);
+    this.numericType = ref.numericType;
+    this.numericPrecisionStep = ref.numericPrecisionStep;
+  }
+
+  /**
+   * Create a new FieldType with default properties.
+   */
+  public LegacyFieldType() {
+  }
+
+  /**
+   * Specifies the field's numeric type.
+   * @param type numeric type, or null if the field has no numeric type.
+   * @throws IllegalStateException if this FieldType is frozen against
+   *         future modifications.
+   * @see #numericType()
+   *
+   * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
+   */
+  @Deprecated
+  public void setNumericType(LegacyNumericType type) {
+    checkIfFrozen();
+    numericType = type;
+  }
+
+  /**
+   * LegacyNumericType: if non-null then the field's value will be indexed
+   * numerically so that {@link org.apache.lucene.legacy.LegacyNumericRangeQuery} can be used at
+   * search time.
+   * <p>
+   * The default is <code>null</code> (no numeric type)
+   * @see #setNumericType(LegacyNumericType)
+   *
+   * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
+   */
+  @Deprecated
+  public LegacyNumericType numericType() {
+    return numericType;
+  }
+
+  /**
+   * Sets the numeric precision step for the field.
+   * @param precisionStep numeric precision step for the field
+   * @throws IllegalArgumentException if precisionStep is less than 1.
+   * @throws IllegalStateException if this FieldType is frozen against
+   *         future modifications.
+   * @see #numericPrecisionStep()
+   *
+   * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
+   */
+  @Deprecated
+  public void setNumericPrecisionStep(int precisionStep) {
+    checkIfFrozen();
+    if (precisionStep < 1) {
+      throw new IllegalArgumentException("precisionStep must be >= 1 (got " + precisionStep + ")");
+    }
+    this.numericPrecisionStep = precisionStep;
+  }
+
+  /**
+   * Precision step for numeric field.
+   * <p>
+   * This has no effect if {@link #numericType()} returns null.
+   * <p>
+   * The default is {@link org.apache.lucene.legacy.LegacyNumericUtils#PRECISION_STEP_DEFAULT}
+   * @see #setNumericPrecisionStep(int)
+   *
+   * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
+   */
+  @Deprecated
+  public int numericPrecisionStep() {
+    return numericPrecisionStep;
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = super.hashCode();
+    result = prime * result + numericPrecisionStep;
+    result = prime * result + ((numericType == null) ? 0 : numericType.hashCode());
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (!super.equals(obj)) {
+      return false;
+    }
+    if (getClass() != obj.getClass()) return false;
+    LegacyFieldType other = (LegacyFieldType) obj;
+    if (numericPrecisionStep != other.numericPrecisionStep) return false;
+    if (numericType != other.numericType) return false;
+    return true;
+  }
+
+  /** Prints a Field for human consumption. */
+  @Override
+  public String toString() {
+    StringBuilder result = new StringBuilder();
+    result.append(super.toString());
+    if (indexOptions() != IndexOptions.NONE) {
+      if (result.length() > 0) {
+        result.append(",");
+      }
+      if (numericType != null) {
+        result.append(",numericType=");
+        result.append(numericType);
+        result.append(",numericPrecisionStep=");
+        result.append(numericPrecisionStep);
+      }
+    }
+    return result.toString();
+  }
+}
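Configuration sketch (hypothetical LegacyFieldTypeDemo, not part of this commit;
it mirrors the TYPE_NOT_STORED statics elsewhere in this patch but with a
coarser precision step, which per the javadoc above trades fewer indexed terms
per value for slower range queries):

import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.legacy.LegacyFieldType;
import org.apache.lucene.legacy.LegacyNumericType;

public class LegacyFieldTypeDemo {
  public static final LegacyFieldType COARSE_DOUBLE = new LegacyFieldType();
  static {
    COARSE_DOUBLE.setTokenized(true);
    COARSE_DOUBLE.setOmitNorms(true);
    COARSE_DOUBLE.setIndexOptions(IndexOptions.DOCS);
    COARSE_DOUBLE.setNumericType(LegacyNumericType.DOUBLE);
    COARSE_DOUBLE.setNumericPrecisionStep(32);  // default for doubles is 16
    COARSE_DOUBLE.freeze();  // freeze before use, as in the statics above
  }
}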
@@ -14,11 +14,11 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
-package org.apache.lucene.document;
+package org.apache.lucene.legacy;

+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.FloatPoint;
 import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.util.LegacyNumericUtils;

 /**
  * <p>
@@ -49,7 +49,7 @@ import org.apache.lucene.util.LegacyNumericUtils;
  * LegacyDoubleField}.
  *
  * <p>To perform range querying or filtering against a
- * <code>LegacyFloatField</code>, use {@link org.apache.lucene.search.LegacyNumericRangeQuery}.
+ * <code>LegacyFloatField</code>, use {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}.
  * To sort according to a
  * <code>LegacyFloatField</code>, use the normal numeric sort types, eg
  * {@link org.apache.lucene.search.SortField.Type#FLOAT}. <code>LegacyFloatField</code>
@@ -79,11 +79,11 @@ import org.apache.lucene.util.LegacyNumericUtils;
  * but may result in faster range search performance. The
  * default value, 8, was selected for a reasonable tradeoff
  * of disk space consumption versus performance. You can
- * create a custom {@link FieldType} and invoke the {@link
- * FieldType#setNumericPrecisionStep} method if you'd
+ * create a custom {@link LegacyFieldType} and invoke the {@link
+ * LegacyFieldType#setNumericPrecisionStep} method if you'd
  * like to change the value. Note that you must also
  * specify a congruent value when creating {@link
- * org.apache.lucene.search.LegacyNumericRangeQuery}.
+ * org.apache.lucene.legacy.LegacyNumericRangeQuery}.
  * For low cardinality fields larger precision steps are good.
  * If the cardinality is < 100, it is fair
  * to use {@link Integer#MAX_VALUE}, which produces one
@@ -91,9 +91,9 @@ import org.apache.lucene.util.LegacyNumericUtils;
  *
  * <p>For more information on the internals of numeric trie
  * indexing, including the <a
- * href="../search/LegacyNumericRangeQuery.html#precisionStepDesc"><code>precisionStep</code></a>
- * configuration, see {@link org.apache.lucene.search.LegacyNumericRangeQuery}. The format of
- * indexed values is described in {@link org.apache.lucene.util.LegacyNumericUtils}.
+ * href="LegacyNumericRangeQuery.html#precisionStepDesc"><code>precisionStep</code></a>
+ * configuration, see {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}. The format of
+ * indexed values is described in {@link org.apache.lucene.legacy.LegacyNumericUtils}.
  *
  * <p>If you only need to sort by numeric value, and never
  * run range querying/filtering, you can index using a
@@ -101,7 +101,7 @@ import org.apache.lucene.util.LegacyNumericUtils;
  * This will minimize disk space consumed. </p>
  *
  * <p>More advanced users can instead use {@link
- * org.apache.lucene.analysis.LegacyNumericTokenStream} directly, when indexing numbers. This
+ * org.apache.lucene.legacy.LegacyNumericTokenStream} directly, when indexing numbers. This
  * class is a wrapper around this token stream type for
  * easier, more intuitive usage.</p>
  *
@@ -111,18 +111,18 @@ import org.apache.lucene.util.LegacyNumericUtils;
  */

 @Deprecated
-public final class LegacyFloatField extends Field {
+public final class LegacyFloatField extends LegacyField {

   /**
    * Type for a LegacyFloatField that is not stored:
    * normalization factors, frequencies, and positions are omitted.
    */
-  public static final FieldType TYPE_NOT_STORED = new FieldType();
+  public static final LegacyFieldType TYPE_NOT_STORED = new LegacyFieldType();
   static {
     TYPE_NOT_STORED.setTokenized(true);
     TYPE_NOT_STORED.setOmitNorms(true);
     TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS);
-    TYPE_NOT_STORED.setNumericType(FieldType.LegacyNumericType.FLOAT);
+    TYPE_NOT_STORED.setNumericType(LegacyNumericType.FLOAT);
     TYPE_NOT_STORED.setNumericPrecisionStep(LegacyNumericUtils.PRECISION_STEP_DEFAULT_32);
     TYPE_NOT_STORED.freeze();
   }
@@ -131,12 +131,12 @@ public final class LegacyFloatField extends Field {
    * Type for a stored LegacyFloatField:
    * normalization factors, frequencies, and positions are omitted.
    */
-  public static final FieldType TYPE_STORED = new FieldType();
+  public static final LegacyFieldType TYPE_STORED = new LegacyFieldType();
   static {
     TYPE_STORED.setTokenized(true);
     TYPE_STORED.setOmitNorms(true);
     TYPE_STORED.setIndexOptions(IndexOptions.DOCS);
-    TYPE_STORED.setNumericType(FieldType.LegacyNumericType.FLOAT);
+    TYPE_STORED.setNumericType(LegacyNumericType.FLOAT);
     TYPE_STORED.setNumericPrecisionStep(LegacyNumericUtils.PRECISION_STEP_DEFAULT_32);
     TYPE_STORED.setStored(true);
     TYPE_STORED.freeze();
@@ -144,7 +144,7 @@ public final class LegacyFloatField extends Field {

   /** Creates a stored or un-stored LegacyFloatField with the provided value
    *  and default <code>precisionStep</code> {@link
-   *  org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT_32} (8).
+   *  org.apache.lucene.legacy.LegacyNumericUtils#PRECISION_STEP_DEFAULT_32} (8).
    *  @param name field name
    *  @param value 32-bit float value
    *  @param stored Store.YES if the content should also be stored
@@ -156,17 +156,17 @@ public final class LegacyFloatField extends Field {
   }

   /** Expert: allows you to customize the {@link
-   *  FieldType}.
+   *  LegacyFieldType}.
    *  @param name field name
    *  @param value 32-bit float value
-   *  @param type customized field type: must have {@link FieldType#numericType()}
-   *         of {@link org.apache.lucene.document.FieldType.LegacyNumericType#FLOAT}.
+   *  @param type customized field type: must have {@link LegacyFieldType#numericType()}
+   *         of {@link LegacyNumericType#FLOAT}.
    *  @throws IllegalArgumentException if the field name or type is null, or
    *          if the field type does not have a FLOAT numericType()
    */
-  public LegacyFloatField(String name, float value, FieldType type) {
+  public LegacyFloatField(String name, float value, LegacyFieldType type) {
     super(name, type);
-    if (type.numericType() != FieldType.LegacyNumericType.FLOAT) {
+    if (type.numericType() != LegacyNumericType.FLOAT) {
       throw new IllegalArgumentException("type.numericType() must be FLOAT but got " + type.numericType());
     }
     fieldsData = Float.valueOf(value);
@@ -14,11 +14,12 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
-package org.apache.lucene.document;
+package org.apache.lucene.legacy;


+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.IntPoint;
 import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.util.LegacyNumericUtils;

 /**
  * <p>
@@ -49,7 +50,7 @@ import org.apache.lucene.util.LegacyNumericUtils;
  * LegacyDoubleField}.
  *
  * <p>To perform range querying or filtering against a
- * <code>LegacyIntField</code>, use {@link org.apache.lucene.search.LegacyNumericRangeQuery}.
+ * <code>LegacyIntField</code>, use {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}.
  * To sort according to a
  * <code>LegacyIntField</code>, use the normal numeric sort types, eg
  * {@link org.apache.lucene.search.SortField.Type#INT}. <code>LegacyIntField</code>
@@ -79,11 +80,11 @@ import org.apache.lucene.util.LegacyNumericUtils;
  * but may result in faster range search performance. The
  * default value, 8, was selected for a reasonable tradeoff
  * of disk space consumption versus performance. You can
- * create a custom {@link FieldType} and invoke the {@link
- * FieldType#setNumericPrecisionStep} method if you'd
+ * create a custom {@link LegacyFieldType} and invoke the {@link
+ * LegacyFieldType#setNumericPrecisionStep} method if you'd
  * like to change the value. Note that you must also
  * specify a congruent value when creating {@link
- * org.apache.lucene.search.LegacyNumericRangeQuery}.
+ * org.apache.lucene.legacy.LegacyNumericRangeQuery}.
  * For low cardinality fields larger precision steps are good.
  * If the cardinality is < 100, it is fair
  * to use {@link Integer#MAX_VALUE}, which produces one
@@ -91,9 +92,9 @@ import org.apache.lucene.util.LegacyNumericUtils;
  *
  * <p>For more information on the internals of numeric trie
  * indexing, including the <a
- * href="../search/LegacyNumericRangeQuery.html#precisionStepDesc"><code>precisionStep</code></a>
- * configuration, see {@link org.apache.lucene.search.LegacyNumericRangeQuery}. The format of
- * indexed values is described in {@link org.apache.lucene.util.LegacyNumericUtils}.
+ * href="LegacyNumericRangeQuery.html#precisionStepDesc"><code>precisionStep</code></a>
+ * configuration, see {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}. The format of
+ * indexed values is described in {@link org.apache.lucene.legacy.LegacyNumericUtils}.
  *
  * <p>If you only need to sort by numeric value, and never
  * run range querying/filtering, you can index using a
@@ -101,7 +102,7 @@ import org.apache.lucene.util.LegacyNumericUtils;
  * This will minimize disk space consumed. </p>
  *
  * <p>More advanced users can instead use {@link
- * org.apache.lucene.analysis.LegacyNumericTokenStream} directly, when indexing numbers. This
+ * org.apache.lucene.legacy.LegacyNumericTokenStream} directly, when indexing numbers. This
  * class is a wrapper around this token stream type for
  * easier, more intuitive usage.</p>
  *
@@ -111,18 +112,18 @@ import org.apache.lucene.util.LegacyNumericUtils;
  */

 @Deprecated
-public final class LegacyIntField extends Field {
+public final class LegacyIntField extends LegacyField {

   /**
    * Type for an LegacyIntField that is not stored:
    * normalization factors, frequencies, and positions are omitted.
    */
-  public static final FieldType TYPE_NOT_STORED = new FieldType();
+  public static final LegacyFieldType TYPE_NOT_STORED = new LegacyFieldType();
   static {
     TYPE_NOT_STORED.setTokenized(true);
     TYPE_NOT_STORED.setOmitNorms(true);
     TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS);
-    TYPE_NOT_STORED.setNumericType(FieldType.LegacyNumericType.INT);
+    TYPE_NOT_STORED.setNumericType(LegacyNumericType.INT);
     TYPE_NOT_STORED.setNumericPrecisionStep(LegacyNumericUtils.PRECISION_STEP_DEFAULT_32);
     TYPE_NOT_STORED.freeze();
   }
@@ -131,12 +132,12 @@ public final class LegacyIntField extends Field {
    * Type for a stored LegacyIntField:
    * normalization factors, frequencies, and positions are omitted.
    */
-  public static final FieldType TYPE_STORED = new FieldType();
+  public static final LegacyFieldType TYPE_STORED = new LegacyFieldType();
   static {
     TYPE_STORED.setTokenized(true);
     TYPE_STORED.setOmitNorms(true);
     TYPE_STORED.setIndexOptions(IndexOptions.DOCS);
-    TYPE_STORED.setNumericType(FieldType.LegacyNumericType.INT);
+    TYPE_STORED.setNumericType(LegacyNumericType.INT);
     TYPE_STORED.setNumericPrecisionStep(LegacyNumericUtils.PRECISION_STEP_DEFAULT_32);
     TYPE_STORED.setStored(true);
     TYPE_STORED.freeze();
@@ -144,7 +145,7 @@ public final class LegacyIntField extends Field {

   /** Creates a stored or un-stored LegacyIntField with the provided value
    *  and default <code>precisionStep</code> {@link
-   *  org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT_32} (8).
+   *  org.apache.lucene.legacy.LegacyNumericUtils#PRECISION_STEP_DEFAULT_32} (8).
    *  @param name field name
    *  @param value 32-bit integer value
    *  @param stored Store.YES if the content should also be stored
@@ -156,17 +157,17 @@ public final class LegacyIntField extends Field {
   }

   /** Expert: allows you to customize the {@link
-   *  FieldType}.
+   *  LegacyFieldType}.
    *  @param name field name
    *  @param value 32-bit integer value
-   *  @param type customized field type: must have {@link FieldType#numericType()}
-   *         of {@link org.apache.lucene.document.FieldType.LegacyNumericType#INT}.
+   *  @param type customized field type: must have {@link LegacyFieldType#numericType()}
+   *         of {@link LegacyNumericType#INT}.
    *  @throws IllegalArgumentException if the field name or type is null, or
    *          if the field type does not have a INT numericType()
    */
-  public LegacyIntField(String name, int value, FieldType type) {
+  public LegacyIntField(String name, int value, LegacyFieldType type) {
     super(name, type);
-    if (type.numericType() != FieldType.LegacyNumericType.INT) {
+    if (type.numericType() != LegacyNumericType.INT) {
       throw new IllegalArgumentException("type.numericType() must be INT but got " + type.numericType());
     }
     fieldsData = Integer.valueOf(value);
@@ -14,9 +14,11 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
-package org.apache.lucene.document;
+package org.apache.lucene.legacy;


+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.LongPoint;
 import org.apache.lucene.index.IndexOptions;


@@ -59,7 +61,7 @@ import org.apache.lucene.index.IndexOptions;
  * <code>long</code> value.
  *
  * <p>To perform range querying or filtering against a
- * <code>LegacyLongField</code>, use {@link org.apache.lucene.search.LegacyNumericRangeQuery}.
+ * <code>LegacyLongField</code>, use {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}.
  * To sort according to a
  * <code>LegacyLongField</code>, use the normal numeric sort types, eg
  * {@link org.apache.lucene.search.SortField.Type#LONG}. <code>LegacyLongField</code>
@@ -89,11 +91,11 @@ import org.apache.lucene.index.IndexOptions;
  * but may result in faster range search performance. The
  * default value, 16, was selected for a reasonable tradeoff
  * of disk space consumption versus performance. You can
- * create a custom {@link FieldType} and invoke the {@link
- * FieldType#setNumericPrecisionStep} method if you'd
+ * create a custom {@link LegacyFieldType} and invoke the {@link
+ * LegacyFieldType#setNumericPrecisionStep} method if you'd
  * like to change the value. Note that you must also
  * specify a congruent value when creating {@link
- * org.apache.lucene.search.LegacyNumericRangeQuery}.
+ * org.apache.lucene.legacy.LegacyNumericRangeQuery}.
  * For low cardinality fields larger precision steps are good.
  * If the cardinality is < 100, it is fair
  * to use {@link Integer#MAX_VALUE}, which produces one
@@ -101,9 +103,9 @@ import org.apache.lucene.index.IndexOptions;
  *
  * <p>For more information on the internals of numeric trie
  * indexing, including the <a
- * href="../search/LegacyNumericRangeQuery.html#precisionStepDesc"><code>precisionStep</code></a>
- * configuration, see {@link org.apache.lucene.search.LegacyNumericRangeQuery}. The format of
- * indexed values is described in {@link org.apache.lucene.util.LegacyNumericUtils}.
+ * href="LegacyNumericRangeQuery.html#precisionStepDesc"><code>precisionStep</code></a>
+ * configuration, see {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}. The format of
+ * indexed values is described in {@link org.apache.lucene.legacy.LegacyNumericUtils}.
  *
  * <p>If you only need to sort by numeric value, and never
  * run range querying/filtering, you can index using a
@@ -111,7 +113,7 @@ import org.apache.lucene.index.IndexOptions;
  * This will minimize disk space consumed.
  *
  * <p>More advanced users can instead use {@link
- * org.apache.lucene.analysis.LegacyNumericTokenStream} directly, when indexing numbers. This
+ * org.apache.lucene.legacy.LegacyNumericTokenStream} directly, when indexing numbers. This
  * class is a wrapper around this token stream type for
  * easier, more intuitive usage.</p>
  *
@@ -121,18 +123,18 @@ import org.apache.lucene.index.IndexOptions;
  */

 @Deprecated
-public final class LegacyLongField extends Field {
+public final class LegacyLongField extends LegacyField {

   /**
    * Type for a LegacyLongField that is not stored:
    * normalization factors, frequencies, and positions are omitted.
    */
-  public static final FieldType TYPE_NOT_STORED = new FieldType();
+  public static final LegacyFieldType TYPE_NOT_STORED = new LegacyFieldType();
   static {
     TYPE_NOT_STORED.setTokenized(true);
     TYPE_NOT_STORED.setOmitNorms(true);
     TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS);
-    TYPE_NOT_STORED.setNumericType(FieldType.LegacyNumericType.LONG);
+    TYPE_NOT_STORED.setNumericType(LegacyNumericType.LONG);
     TYPE_NOT_STORED.freeze();
   }

@@ -140,19 +142,19 @@ public final class LegacyLongField extends Field {
    * Type for a stored LegacyLongField:
    * normalization factors, frequencies, and positions are omitted.
    */
-  public static final FieldType TYPE_STORED = new FieldType();
+  public static final LegacyFieldType TYPE_STORED = new LegacyFieldType();
   static {
     TYPE_STORED.setTokenized(true);
     TYPE_STORED.setOmitNorms(true);
     TYPE_STORED.setIndexOptions(IndexOptions.DOCS);
-    TYPE_STORED.setNumericType(FieldType.LegacyNumericType.LONG);
+    TYPE_STORED.setNumericType(LegacyNumericType.LONG);
     TYPE_STORED.setStored(true);
     TYPE_STORED.freeze();
   }

   /** Creates a stored or un-stored LegacyLongField with the provided value
    *  and default <code>precisionStep</code> {@link
-   *  org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16).
+   *  org.apache.lucene.legacy.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16).
    *  @param name field name
    *  @param value 64-bit long value
    *  @param stored Store.YES if the content should also be stored
@@ -164,17 +166,17 @@ public final class LegacyLongField extends Field {
   }

   /** Expert: allows you to customize the {@link
-   *  FieldType}.
+   *  LegacyFieldType}.
    *  @param name field name
    *  @param value 64-bit long value
-   *  @param type customized field type: must have {@link FieldType#numericType()}
-   *         of {@link org.apache.lucene.document.FieldType.LegacyNumericType#LONG}.
+   *  @param type customized field type: must have {@link LegacyFieldType#numericType()}
+   *         of {@link LegacyNumericType#LONG}.
    *  @throws IllegalArgumentException if the field name or type is null, or
    *          if the field type does not have a LONG numericType()
    */
-  public LegacyLongField(String name, long value, FieldType type) {
+  public LegacyLongField(String name, long value, LegacyFieldType type) {
     super(name, type);
-    if (type.numericType() != FieldType.LegacyNumericType.LONG) {
+    if (type.numericType() != LegacyNumericType.LONG) {
       throw new IllegalArgumentException("type.numericType() must be LONG but got " + type.numericType());
     }
     fieldsData = Long.valueOf(value);
@@ -14,7 +14,7 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
-package org.apache.lucene.search;
+package org.apache.lucene.legacy;


 import java.io.IOException;
@@ -22,8 +22,6 @@ import java.util.LinkedList;
 import java.util.Objects;

 import org.apache.lucene.document.DoublePoint;
-import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.FieldType.LegacyNumericType;
 import org.apache.lucene.document.FloatPoint;
 import org.apache.lucene.document.IntPoint;
 import org.apache.lucene.document.LongPoint;
@@ -31,18 +29,21 @@ import org.apache.lucene.index.FilteredTermsEnum;
 import org.apache.lucene.index.PointValues;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.MultiTermQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermRangeQuery;
 import org.apache.lucene.util.AttributeSource;
 import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.LegacyNumericUtils;
 import org.apache.lucene.util.NumericUtils;
 import org.apache.lucene.index.Term; // for javadocs

 /**
  * <p>A {@link Query} that matches numeric values within a
  * specified range. To use this, you must first index the
- * numeric values using {@link org.apache.lucene.document.LegacyIntField}, {@link
- * org.apache.lucene.document.LegacyFloatField}, {@link org.apache.lucene.document.LegacyLongField} or {@link org.apache.lucene.document.LegacyDoubleField} (expert: {@link
- * org.apache.lucene.analysis.LegacyNumericTokenStream}). If your terms are instead textual,
+ * numeric values using {@link org.apache.lucene.legacy.LegacyIntField}, {@link
+ * org.apache.lucene.legacy.LegacyFloatField}, {@link org.apache.lucene.legacy.LegacyLongField} or {@link org.apache.lucene.legacy.LegacyDoubleField} (expert: {@link
+ * org.apache.lucene.legacy.LegacyNumericTokenStream}). If your terms are instead textual,
  * you should use {@link TermRangeQuery}.</p>
  *
  * <p>You create a new LegacyNumericRangeQuery with the static
@@ -96,7 +97,7 @@ import org.apache.lucene.index.Term; // for javadocs
  * (all numerical values like doubles, longs, floats, and ints are converted to
  * lexicographic sortable string representations and stored with different precisions
  * (for a more detailed description of how the values are stored,
- * see {@link org.apache.lucene.util.LegacyNumericUtils}). A range is then divided recursively into multiple intervals for searching:
+ * see {@link org.apache.lucene.legacy.LegacyNumericUtils}). A range is then divided recursively into multiple intervals for searching:
  * The center of the range is searched only with the lowest possible precision in the <em>trie</em>,
  * while the boundaries are matched more exactly. This reduces the number of terms dramatically.</p>
  *
@@ -112,7 +113,7 @@ import org.apache.lucene.index.Term; // for javadocs
  * <h3><a name="precisionStepDesc">Precision Step</a></h3>
  * <p>You can choose any <code>precisionStep</code> when encoding values.
  * Lower step values mean more precisions and so more terms in index (and index gets larger). The number
- * of indexed terms per value is (those are generated by {@link org.apache.lucene.analysis.LegacyNumericTokenStream}):
+ * of indexed terms per value is (those are generated by {@link org.apache.lucene.legacy.LegacyNumericTokenStream}):
  * <p style="font-family:serif">
  * indexedTermsPerValue = <b>ceil</b><big>(</big>bitsPerValue / precisionStep<big>)</big>
  * </p>
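(Worked example of the formula above: a long has bitsPerValue = 64, so the
default precisionStep of 16 yields ceil(64/16) = 4 indexed terms per value,
while precisionStep 8 would yield 8.)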
@@ -148,8 +149,8 @@ import org.apache.lucene.index.Term; // for javadocs
  * <li>Steps <b>≥64</b> for <em>long/double</em> and <b>≥32</b> for <em>int/float</em> produces one token
  * per value in the index and querying is as slow as a conventional {@link TermRangeQuery}. But it can be used
  * to produce fields, that are solely used for sorting (in this case simply use {@link Integer#MAX_VALUE} as
- * <code>precisionStep</code>). Using {@link org.apache.lucene.document.LegacyIntField},
- * {@link org.apache.lucene.document.LegacyLongField}, {@link org.apache.lucene.document.LegacyFloatField} or {@link org.apache.lucene.document.LegacyDoubleField} for sorting
+ * <code>precisionStep</code>). Using {@link org.apache.lucene.legacy.LegacyIntField},
+ * {@link org.apache.lucene.legacy.LegacyLongField}, {@link org.apache.lucene.legacy.LegacyFloatField} or {@link org.apache.lucene.legacy.LegacyDoubleField} for sorting
  * is ideal, because building the field cache is much faster than with text-only numbers.
  * These fields have one term per value and therefore also work with term enumeration for building distinct lists
  * (e.g. facets / preselected values to search for).
@@ -199,12 +200,12 @@ public final class LegacyNumericRangeQuery<T extends Number> extends MultiTermQu
   public static LegacyNumericRangeQuery<Long> newLongRange(final String field, final int precisionStep,
     Long min, Long max, final boolean minInclusive, final boolean maxInclusive
   ) {
-    return new LegacyNumericRangeQuery<>(field, precisionStep, FieldType.LegacyNumericType.LONG, min, max, minInclusive, maxInclusive);
+    return new LegacyNumericRangeQuery<>(field, precisionStep, LegacyNumericType.LONG, min, max, minInclusive, maxInclusive);
   }

   /**
    * Factory that creates a <code>LegacyNumericRangeQuery</code>, that queries a <code>long</code>
-   * range using the default <code>precisionStep</code> {@link org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16).
+   * range using the default <code>precisionStep</code> {@link org.apache.lucene.legacy.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16).
    * You can have half-open ranges (which are in fact </≤ or >/≥ queries)
    * by setting the min or max value to <code>null</code>. By setting inclusive to false, it will
    * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too.
@@ -212,7 +213,7 @@ public final class LegacyNumericRangeQuery<T extends Number> extends MultiTermQu
   public static LegacyNumericRangeQuery<Long> newLongRange(final String field,
     Long min, Long max, final boolean minInclusive, final boolean maxInclusive
   ) {
-    return new LegacyNumericRangeQuery<>(field, LegacyNumericUtils.PRECISION_STEP_DEFAULT, FieldType.LegacyNumericType.LONG, min, max, minInclusive, maxInclusive);
+    return new LegacyNumericRangeQuery<>(field, LegacyNumericUtils.PRECISION_STEP_DEFAULT, LegacyNumericType.LONG, min, max, minInclusive, maxInclusive);
   }

   /**
@@ -225,12 +226,12 @@ public final class LegacyNumericRangeQuery<T extends Number> extends MultiTermQu
   public static LegacyNumericRangeQuery<Integer> newIntRange(final String field, final int precisionStep,
     Integer min, Integer max, final boolean minInclusive, final boolean maxInclusive
   ) {
-    return new LegacyNumericRangeQuery<>(field, precisionStep, FieldType.LegacyNumericType.INT, min, max, minInclusive, maxInclusive);
+    return new LegacyNumericRangeQuery<>(field, precisionStep, LegacyNumericType.INT, min, max, minInclusive, maxInclusive);
   }

   /**
    * Factory that creates a <code>LegacyNumericRangeQuery</code>, that queries a <code>int</code>
-   * range using the default <code>precisionStep</code> {@link org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT_32} (8).
+   * range using the default <code>precisionStep</code> {@link org.apache.lucene.legacy.LegacyNumericUtils#PRECISION_STEP_DEFAULT_32} (8).
    * You can have half-open ranges (which are in fact </≤ or >/≥ queries)
    * by setting the min or max value to <code>null</code>. By setting inclusive to false, it will
    * match all documents excluding the bounds, with inclusive on, the boundaries are hits, too.
@@ -238,7 +239,7 @@ public final class LegacyNumericRangeQuery<T extends Number> extends MultiTermQu
   public static LegacyNumericRangeQuery<Integer> newIntRange(final String field,
     Integer min, Integer max, final boolean minInclusive, final boolean maxInclusive
   ) {
-    return new LegacyNumericRangeQuery<>(field, LegacyNumericUtils.PRECISION_STEP_DEFAULT_32, FieldType.LegacyNumericType.INT, min, max, minInclusive, maxInclusive);
+    return new LegacyNumericRangeQuery<>(field, LegacyNumericUtils.PRECISION_STEP_DEFAULT_32, LegacyNumericType.INT, min, max, minInclusive, maxInclusive);
   }

   /**
@@ -253,12 +254,12 @@ public final class LegacyNumericRangeQuery<T extends Number> extends MultiTermQu
   public static LegacyNumericRangeQuery<Double> newDoubleRange(final String field, final int precisionStep,
     Double min, Double max, final boolean minInclusive, final boolean maxInclusive
   ) {
-    return new LegacyNumericRangeQuery<>(field, precisionStep, FieldType.LegacyNumericType.DOUBLE, min, max, minInclusive, maxInclusive);
+    return new LegacyNumericRangeQuery<>(field, precisionStep, LegacyNumericType.DOUBLE, min, max, minInclusive, maxInclusive);
   }

   /**
    * Factory that creates a <code>LegacyNumericRangeQuery</code>, that queries a <code>double</code>
-   * range using the default <code>precisionStep</code> {@link org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16).
+   * range using the default <code>precisionStep</code> {@link org.apache.lucene.legacy.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16).
    * You can have half-open ranges (which are in fact </≤ or >/≥ queries)
    * by setting the min or max value to <code>null</code>.
    * {@link Double#NaN} will never match a half-open range, to hit {@code NaN} use a query
@@ -268,7 +269,7 @@ public final class LegacyNumericRangeQuery<T extends Number> extends MultiTermQu
   public static LegacyNumericRangeQuery<Double> newDoubleRange(final String field,
     Double min, Double max, final boolean minInclusive, final boolean maxInclusive
   ) {
-    return new LegacyNumericRangeQuery<>(field, LegacyNumericUtils.PRECISION_STEP_DEFAULT, FieldType.LegacyNumericType.DOUBLE, min, max, minInclusive, maxInclusive);
+    return new LegacyNumericRangeQuery<>(field, LegacyNumericUtils.PRECISION_STEP_DEFAULT, LegacyNumericType.DOUBLE, min, max, minInclusive, maxInclusive);
   }

   /**
@@ -283,12 +284,12 @@ public final class LegacyNumericRangeQuery<T extends Number> extends MultiTermQu
   public static LegacyNumericRangeQuery<Float> newFloatRange(final String field, final int precisionStep,
     Float min, Float max, final boolean minInclusive, final boolean maxInclusive
   ) {
-    return new LegacyNumericRangeQuery<>(field, precisionStep, FieldType.LegacyNumericType.FLOAT, min, max, minInclusive, maxInclusive);
+    return new LegacyNumericRangeQuery<>(field, precisionStep, LegacyNumericType.FLOAT, min, max, minInclusive, maxInclusive);
  }

   /**
    * Factory that creates a <code>LegacyNumericRangeQuery</code>, that queries a <code>float</code>
-   * range using the default <code>precisionStep</code> {@link org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT_32} (8).
+   * range using the default <code>precisionStep</code> {@link org.apache.lucene.legacy.LegacyNumericUtils#PRECISION_STEP_DEFAULT_32} (8).
    * You can have half-open ranges (which are in fact </≤ or >/≥ queries)
    * by setting the min or max value to <code>null</code>.
    * {@link Float#NaN} will never match a half-open range, to hit {@code NaN} use a query
@@ -298,7 +299,7 @@ public final class LegacyNumericRangeQuery<T extends Number> extends MultiTermQu
   public static LegacyNumericRangeQuery<Float> newFloatRange(final String field,
     Float min, Float max, final boolean minInclusive, final boolean maxInclusive
   ) {
-    return new LegacyNumericRangeQuery<>(field, LegacyNumericUtils.PRECISION_STEP_DEFAULT_32, FieldType.LegacyNumericType.FLOAT, min, max, minInclusive, maxInclusive);
+    return new LegacyNumericRangeQuery<>(field, LegacyNumericUtils.PRECISION_STEP_DEFAULT_32, LegacyNumericType.FLOAT, min, max, minInclusive, maxInclusive);
   }

   @Override @SuppressWarnings("unchecked")
@ -369,7 +370,7 @@ public final class LegacyNumericRangeQuery<T extends Number> extends MultiTermQu
|
||||
|
||||
// members (package private, to be also fast accessible by NumericRangeTermEnum)
|
||||
final int precisionStep;
|
||||
final FieldType.LegacyNumericType dataType;
|
||||
final LegacyNumericType dataType;
|
||||
final T min, max;
|
||||
final boolean minInclusive,maxInclusive;
|
||||
|
||||
@ -389,8 +390,8 @@ public final class LegacyNumericRangeQuery<T extends Number> extends MultiTermQu
|
||||
* <p>
|
||||
* WARNING: This term enumeration is not guaranteed to be always ordered by
|
||||
* {@link Term#compareTo}.
|
||||
* The ordering depends on how {@link org.apache.lucene.util.LegacyNumericUtils#splitLongRange} and
|
||||
* {@link org.apache.lucene.util.LegacyNumericUtils#splitIntRange} generates the sub-ranges. For
|
||||
* The ordering depends on how {@link org.apache.lucene.legacy.LegacyNumericUtils#splitLongRange} and
|
||||
* {@link org.apache.lucene.legacy.LegacyNumericUtils#splitIntRange} generates the sub-ranges. For
|
||||
* {@link MultiTermQuery} ordering is not relevant.
|
||||
*/
|
||||
private final class NumericRangeTermsEnum extends FilteredTermsEnum {
|
||||
@ -406,10 +407,10 @@ public final class LegacyNumericRangeQuery<T extends Number> extends MultiTermQu
|
||||
case DOUBLE: {
|
||||
// lower
|
||||
long minBound;
|
||||
if (dataType == FieldType.LegacyNumericType.LONG) {
|
||||
if (dataType == LegacyNumericType.LONG) {
|
||||
minBound = (min == null) ? Long.MIN_VALUE : min.longValue();
|
||||
} else {
|
||||
assert dataType == FieldType.LegacyNumericType.DOUBLE;
|
||||
assert dataType == LegacyNumericType.DOUBLE;
|
||||
minBound = (min == null) ? LONG_NEGATIVE_INFINITY
|
||||
: NumericUtils.doubleToSortableLong(min.doubleValue());
|
||||
}
|
||||
@ -420,10 +421,10 @@ public final class LegacyNumericRangeQuery<T extends Number> extends MultiTermQu
|
||||
|
||||
// upper
|
||||
long maxBound;
|
||||
if (dataType == FieldType.LegacyNumericType.LONG) {
|
||||
if (dataType == LegacyNumericType.LONG) {
|
||||
maxBound = (max == null) ? Long.MAX_VALUE : max.longValue();
|
||||
} else {
|
||||
assert dataType == FieldType.LegacyNumericType.DOUBLE;
|
||||
assert dataType == LegacyNumericType.DOUBLE;
|
||||
maxBound = (max == null) ? LONG_POSITIVE_INFINITY
|
||||
: NumericUtils.doubleToSortableLong(max.doubleValue());
|
||||
}
|
||||
@ -446,10 +447,10 @@ public final class LegacyNumericRangeQuery<T extends Number> extends MultiTermQu
|
||||
case FLOAT: {
|
||||
// lower
|
||||
int minBound;
|
||||
if (dataType == FieldType.LegacyNumericType.INT) {
|
||||
if (dataType == LegacyNumericType.INT) {
|
||||
minBound = (min == null) ? Integer.MIN_VALUE : min.intValue();
|
||||
} else {
|
||||
assert dataType == FieldType.LegacyNumericType.FLOAT;
|
||||
assert dataType == LegacyNumericType.FLOAT;
|
||||
minBound = (min == null) ? INT_NEGATIVE_INFINITY
|
||||
: NumericUtils.floatToSortableInt(min.floatValue());
|
||||
}
|
||||
@ -463,7 +464,7 @@ public final class LegacyNumericRangeQuery<T extends Number> extends MultiTermQu
|
||||
if (dataType == LegacyNumericType.INT) {
|
||||
maxBound = (max == null) ? Integer.MAX_VALUE : max.intValue();
|
||||
} else {
|
||||
assert dataType == FieldType.LegacyNumericType.FLOAT;
|
||||
assert dataType == LegacyNumericType.FLOAT;
|
||||
maxBound = (max == null) ? INT_POSITIVE_INFINITY
|
||||
: NumericUtils.floatToSortableInt(max.floatValue());
|
||||
}
|
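For reference, a minimal usage sketch of the factory methods above (not part of the commit; the field name, searcher, and bounds are hypothetical). Passing null for one bound produces a half-open range, as the javadoc describes:

import org.apache.lucene.legacy.LegacyNumericRangeQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TopDocs;

// Sketch: match all documents whose "price" field (indexed with LegacyIntField
// at the default precision step) is <= 100; null lower bound = no lower limit.
Query q = LegacyNumericRangeQuery.newIntRange("price", null, 100, true, true);
TopDocs hits = searcher.search(q, 10);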
@ -14,11 +14,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.analysis;
package org.apache.lucene.legacy;


import java.util.Objects;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
@ -29,16 +30,15 @@ import org.apache.lucene.util.AttributeImpl
import org.apache.lucene.util.AttributeReflector;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.LegacyNumericUtils;
import org.apache.lucene.util.NumericUtils;

/**
* <b>Expert:</b> This class provides a {@link TokenStream}
* for indexing numeric values that can be used by {@link
* org.apache.lucene.search.LegacyNumericRangeQuery}.
* org.apache.lucene.legacy.LegacyNumericRangeQuery}.
*
* <p>Note that for simple usage, {@link org.apache.lucene.document.LegacyIntField}, {@link
* org.apache.lucene.document.LegacyLongField}, {@link org.apache.lucene.document.LegacyFloatField} or {@link org.apache.lucene.document.LegacyDoubleField} is
* <p>Note that for simple usage, {@link org.apache.lucene.legacy.LegacyIntField}, {@link
* org.apache.lucene.legacy.LegacyLongField}, {@link org.apache.lucene.legacy.LegacyFloatField} or {@link org.apache.lucene.legacy.LegacyDoubleField} is
* recommended. These fields disable norms and
* term freqs, as they are not usually needed during
* searching. If you need to change these settings, you
@ -81,9 +81,9 @@ import org.apache.lucene.util.NumericUtils;
* than one numeric field, use a separate <code>LegacyNumericTokenStream</code>
* instance for each.</p>
*
* <p>See {@link org.apache.lucene.search.LegacyNumericRangeQuery} for more details on the
* <p>See {@link org.apache.lucene.legacy.LegacyNumericRangeQuery} for more details on the
* <a
* href="../search/LegacyNumericRangeQuery.html#precisionStepDesc"><code>precisionStep</code></a>
* href="LegacyNumericRangeQuery.html#precisionStepDesc"><code>precisionStep</code></a>
* parameter as well as how numeric fields work under the hood.</p>
*
* @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
@ -140,7 +140,7 @@ public final class LegacyNumericTokenStream extends TokenStream {
}
}

/** Implementation of {@link org.apache.lucene.analysis.LegacyNumericTokenStream.LegacyNumericTermAttribute}.
/** Implementation of {@link org.apache.lucene.legacy.LegacyNumericTokenStream.LegacyNumericTermAttribute}.
* @lucene.internal
* @since 4.0
*/
@ -240,7 +240,7 @@ public final class LegacyNumericTokenStream extends TokenStream {

/**
* Creates a token stream for numeric values using the default <code>precisionStep</code>
* {@link org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16). The stream is not yet initialized,
* {@link org.apache.lucene.legacy.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16). The stream is not yet initialized,
* before using set a value using the various set<em>???</em>Value() methods.
*/
public LegacyNumericTokenStream() {
@ -0,0 +1,34 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.legacy;

/** Data type of the numeric value
* @since 3.2
*
* @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
*/
@Deprecated
public enum LegacyNumericType {
/** 32-bit integer numeric type */
INT,
/** 64-bit long numeric type */
LONG,
/** 32-bit float numeric type */
FLOAT,
/** 64-bit double numeric type */
DOUBLE
}
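For reference, a sketch (not part of the commit) of dispatching on this enum, which is now a top-level type in org.apache.lucene.legacy rather than nested inside FieldType; the helper name and inclusive bounds are illustrative only:

import org.apache.lucene.legacy.LegacyNumericRangeQuery;
import org.apache.lucene.legacy.LegacyNumericType;
import org.apache.lucene.search.Query;

// Sketch: build an inclusive range query whose factory method is chosen by the data type.
static Query newInclusiveRange(LegacyNumericType type, String field, Number min, Number max) {
  switch (type) {
    case INT:    return LegacyNumericRangeQuery.newIntRange(field, (Integer) min, (Integer) max, true, true);
    case LONG:   return LegacyNumericRangeQuery.newLongRange(field, (Long) min, (Long) max, true, true);
    case FLOAT:  return LegacyNumericRangeQuery.newFloatRange(field, (Float) min, (Float) max, true, true);
    default:     return LegacyNumericRangeQuery.newDoubleRange(field, (Double) min, (Double) max, true, true);
  }
}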
@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.util;
package org.apache.lucene.legacy;


import java.io.IOException;
@ -23,6 +23,8 @@ import org.apache.lucene.index.FilterLeafReader;
import org.apache.lucene.index.FilteredTermsEnum;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;

/**
* This is a helper class to generate prefix-encoded representations for numerical values
@ -41,9 +43,9 @@ import org.apache.lucene.index.TermsEnum;
* during encoding.
*
* <p>For easy usage, the trie algorithm is implemented for indexing inside
* {@link org.apache.lucene.analysis.LegacyNumericTokenStream} that can index <code>int</code>, <code>long</code>,
* {@link org.apache.lucene.legacy.LegacyNumericTokenStream} that can index <code>int</code>, <code>long</code>,
* <code>float</code>, and <code>double</code>. For querying,
* {@link org.apache.lucene.search.LegacyNumericRangeQuery} implements the query part
* {@link org.apache.lucene.legacy.LegacyNumericRangeQuery} implements the query part
* for the same data types.
*
* @lucene.internal
@ -59,15 +61,15 @@ public final class LegacyNumericUtils {
private LegacyNumericUtils() {} // no instance!

/**
* The default precision step used by {@link org.apache.lucene.document.LegacyLongField},
* {@link org.apache.lucene.document.LegacyDoubleField}, {@link org.apache.lucene.analysis.LegacyNumericTokenStream}, {@link
* org.apache.lucene.search.LegacyNumericRangeQuery}.
* The default precision step used by {@link org.apache.lucene.legacy.LegacyLongField},
* {@link org.apache.lucene.legacy.LegacyDoubleField}, {@link org.apache.lucene.legacy.LegacyNumericTokenStream}, {@link
* org.apache.lucene.legacy.LegacyNumericRangeQuery}.
*/
public static final int PRECISION_STEP_DEFAULT = 16;

/**
* The default precision step used by {@link org.apache.lucene.document.LegacyIntField} and
* {@link org.apache.lucene.document.LegacyFloatField}.
* The default precision step used by {@link org.apache.lucene.legacy.LegacyIntField} and
* {@link org.apache.lucene.legacy.LegacyFloatField}.
*/
public static final int PRECISION_STEP_DEFAULT_32 = 8;

@ -99,7 +101,7 @@ public final class LegacyNumericUtils {

/**
* Returns prefix coded bits after reducing the precision by <code>shift</code> bits.
* This is method is used by {@link org.apache.lucene.analysis.LegacyNumericTokenStream}.
* This is method is used by {@link org.apache.lucene.legacy.LegacyNumericTokenStream}.
* After encoding, {@code bytes.offset} will always be 0.
* @param val the numeric value
* @param shift how many bits to strip from the right
@ -126,7 +128,7 @@ public final class LegacyNumericUtils {

/**
* Returns prefix coded bits after reducing the precision by <code>shift</code> bits.
* This is method is used by {@link org.apache.lucene.analysis.LegacyNumericTokenStream}.
* This is method is used by {@link org.apache.lucene.legacy.LegacyNumericTokenStream}.
* After encoding, {@code bytes.offset} will always be 0.
* @param val the numeric value
* @param shift how many bits to strip from the right
@ -230,7 +232,7 @@ public final class LegacyNumericUtils {
* {@link org.apache.lucene.search.BooleanQuery} for each call to its
* {@link LongRangeBuilder#addRange(BytesRef,BytesRef)}
* method.
* <p>This method is used by {@link org.apache.lucene.search.LegacyNumericRangeQuery}.
* <p>This method is used by {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}.
*/
public static void splitLongRange(final LongRangeBuilder builder,
final int precisionStep, final long minBound, final long maxBound
@ -244,7 +246,7 @@ public final class LegacyNumericUtils {
* {@link org.apache.lucene.search.BooleanQuery} for each call to its
* {@link IntRangeBuilder#addRange(BytesRef,BytesRef)}
* method.
* <p>This method is used by {@link org.apache.lucene.search.LegacyNumericRangeQuery}.
* <p>This method is used by {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}.
*/
public static void splitIntRange(final IntRangeBuilder builder,
final int precisionStep, final int minBound, final int maxBound
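For reference, a short sketch of the prefix encoding this class provides, mirroring the calls the TestNumericRangeQuery32 changes later in this commit use to build TermRangeQuery bounds; the value 42 is arbitrary:

import org.apache.lucene.legacy.LegacyNumericUtils;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;

// Sketch: encode an int at full precision (shift 0) into its prefix-coded term form.
BytesRefBuilder b = new BytesRefBuilder();
LegacyNumericUtils.intToPrefixCoded(42, 0, b);
BytesRef encoded = b.toBytesRef();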
@ -0,0 +1,21 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

/**
* Deprecated stuff!
*/
package org.apache.lucene.legacy;
@ -47,8 +47,6 @@ import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.FloatDocValuesField;
import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LegacyIntField;
import org.apache.lucene.document.LegacyLongField;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedDocValuesField;
@ -57,9 +55,12 @@ import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.legacy.LegacyIntField;
import org.apache.lucene.legacy.LegacyLongField;
import org.apache.lucene.legacy.LegacyNumericRangeQuery;
import org.apache.lucene.legacy.LegacyNumericUtils;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.BaseDirectoryWrapper;
@ -72,7 +73,6 @@ import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.LegacyNumericUtils;
import org.apache.lucene.util.LineFileDocs;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;

@ -0,0 +1,196 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.legacy;

import java.io.StringReader;

import org.apache.lucene.analysis.CannedTokenStream;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.document.Field;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;

public class TestLegacyField extends LuceneTestCase {

public void testLegacyDoubleField() throws Exception {
Field fields[] = new Field[] {
new LegacyDoubleField("foo", 5d, Field.Store.NO),
new LegacyDoubleField("foo", 5d, Field.Store.YES)
};

for (Field field : fields) {
trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
field.setDoubleValue(6d); // ok
trySetIntValue(field);
trySetFloatValue(field);
trySetLongValue(field);
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);

assertEquals(6d, field.numericValue().doubleValue(), 0.0d);
}
}

public void testLegacyFloatField() throws Exception {
Field fields[] = new Field[] {
new LegacyFloatField("foo", 5f, Field.Store.NO),
new LegacyFloatField("foo", 5f, Field.Store.YES)
};

for (Field field : fields) {
trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
trySetDoubleValue(field);
trySetIntValue(field);
field.setFloatValue(6f); // ok
trySetLongValue(field);
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);

assertEquals(6f, field.numericValue().floatValue(), 0.0f);
}
}

public void testLegacyIntField() throws Exception {
Field fields[] = new Field[] {
new LegacyIntField("foo", 5, Field.Store.NO),
new LegacyIntField("foo", 5, Field.Store.YES)
};

for (Field field : fields) {
trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
trySetDoubleValue(field);
field.setIntValue(6); // ok
trySetFloatValue(field);
trySetLongValue(field);
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);

assertEquals(6, field.numericValue().intValue());
}
}

public void testLegacyLongField() throws Exception {
Field fields[] = new Field[] {
new LegacyLongField("foo", 5L, Field.Store.NO),
new LegacyLongField("foo", 5L, Field.Store.YES)
};

for (Field field : fields) {
trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
trySetDoubleValue(field);
trySetIntValue(field);
trySetFloatValue(field);
field.setLongValue(6); // ok
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);

assertEquals(6L, field.numericValue().longValue());
}
}

private void trySetByteValue(Field f) {
expectThrows(IllegalArgumentException.class, () -> {
f.setByteValue((byte) 10);
});
}

private void trySetBytesValue(Field f) {
expectThrows(IllegalArgumentException.class, () -> {
f.setBytesValue(new byte[] { 5, 5 });
});
}

private void trySetBytesRefValue(Field f) {
expectThrows(IllegalArgumentException.class, () -> {
f.setBytesValue(new BytesRef("bogus"));
});
}

private void trySetDoubleValue(Field f) {
expectThrows(IllegalArgumentException.class, () -> {
f.setDoubleValue(Double.MAX_VALUE);
});
}

private void trySetIntValue(Field f) {
expectThrows(IllegalArgumentException.class, () -> {
f.setIntValue(Integer.MAX_VALUE);
});
}

private void trySetLongValue(Field f) {
expectThrows(IllegalArgumentException.class, () -> {
f.setLongValue(Long.MAX_VALUE);
});
}

private void trySetFloatValue(Field f) {
expectThrows(IllegalArgumentException.class, () -> {
f.setFloatValue(Float.MAX_VALUE);
});
}

private void trySetReaderValue(Field f) {
expectThrows(IllegalArgumentException.class, () -> {
f.setReaderValue(new StringReader("BOO!"));
});
}

private void trySetShortValue(Field f) {
expectThrows(IllegalArgumentException.class, () -> {
f.setShortValue(Short.MAX_VALUE);
});
}

private void trySetStringValue(Field f) {
expectThrows(IllegalArgumentException.class, () -> {
f.setStringValue("BOO!");
});
}

private void trySetTokenStreamValue(Field f) {
expectThrows(IllegalArgumentException.class, () -> {
f.setTokenStream(new CannedTokenStream(new Token("foo", 0, 3)));
});
}

private void trySetBoost(Field f) {
expectThrows(IllegalArgumentException.class, () -> {
f.setBoost(5.0f);
});
}
}
@ -0,0 +1,81 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.legacy;


import java.io.IOException;

import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.CannedTokenStream;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.legacy.LegacyIntField;
import org.apache.lucene.legacy.LegacyNumericTokenStream;
import org.apache.lucene.legacy.LegacyNumericUtils;
import org.apache.lucene.legacy.LegacyNumericTokenStream.LegacyNumericTermAttribute;

/** test tokenstream reuse by DefaultIndexingChain */
public class TestLegacyFieldReuse extends BaseTokenStreamTestCase {

public void testNumericReuse() throws IOException {
LegacyIntField legacyIntField = new LegacyIntField("foo", 5, Field.Store.NO);

// passing null
TokenStream ts = legacyIntField.tokenStream(null, null);
assertTrue(ts instanceof LegacyNumericTokenStream);
assertEquals(LegacyNumericUtils.PRECISION_STEP_DEFAULT_32, ((LegacyNumericTokenStream)ts).getPrecisionStep());
assertNumericContents(5, ts);

// now reuse previous stream
legacyIntField = new LegacyIntField("foo", 20, Field.Store.NO);
TokenStream ts2 = legacyIntField.tokenStream(null, ts);
assertSame(ts, ts2);
assertNumericContents(20, ts);

// pass a bogus stream and ensure it's still ok
legacyIntField = new LegacyIntField("foo", 2343, Field.Store.NO);
TokenStream bogus = new CannedTokenStream(new Token("bogus", 0, 5));
ts = legacyIntField.tokenStream(null, bogus);
assertNotSame(bogus, ts);
assertNumericContents(2343, ts);

// pass another bogus stream (numeric, but different precision step!)
legacyIntField = new LegacyIntField("foo", 42, Field.Store.NO);
assert 3 != LegacyNumericUtils.PRECISION_STEP_DEFAULT;
bogus = new LegacyNumericTokenStream(3);
ts = legacyIntField.tokenStream(null, bogus);
assertNotSame(bogus, ts);
assertNumericContents(42, ts);
}

private void assertNumericContents(int value, TokenStream ts) throws IOException {
assertTrue(ts instanceof LegacyNumericTokenStream);
LegacyNumericTermAttribute numericAtt = ts.getAttribute(LegacyNumericTermAttribute.class);
ts.reset();
boolean seen = false;
while (ts.incrementToken()) {
if (numericAtt.getShift() == 0) {
assertEquals(value, numericAtt.getRawValue());
seen = true;
}
}
ts.end();
ts.close();
assertTrue(seen);
}
}
@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.util;
package org.apache.lucene.legacy;


import java.util.Arrays;
@ -22,6 +22,13 @@ import java.util.Collections;
import java.util.Iterator;
import java.util.Random;

import org.apache.lucene.legacy.LegacyNumericUtils;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.LongBitSet;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.NumericUtils;

public class TestLegacyNumericUtils extends LuceneTestCase {

public void testLongConversionAndOrdering() throws Exception {
@ -0,0 +1,164 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.legacy;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.legacy.LegacyDoubleField;
import org.apache.lucene.legacy.LegacyFloatField;
import org.apache.lucene.legacy.LegacyIntField;
import org.apache.lucene.legacy.LegacyLongField;
import org.apache.lucene.legacy.LegacyNumericUtils;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.NumericUtils;

public class TestLegacyTerms extends LuceneTestCase {

public void testEmptyIntFieldMinMax() throws Exception {
assertNull(LegacyNumericUtils.getMinInt(EMPTY_TERMS));
assertNull(LegacyNumericUtils.getMaxInt(EMPTY_TERMS));
}

public void testIntFieldMinMax() throws Exception {
Directory dir = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random(), dir);
int numDocs = atLeast(100);
int minValue = Integer.MAX_VALUE;
int maxValue = Integer.MIN_VALUE;
for(int i=0;i<numDocs;i++) {
Document doc = new Document();
int num = random().nextInt();
minValue = Math.min(num, minValue);
maxValue = Math.max(num, maxValue);
doc.add(new LegacyIntField("field", num, Field.Store.NO));
w.addDocument(doc);
}

IndexReader r = w.getReader();
Terms terms = MultiFields.getTerms(r, "field");
assertEquals(new Integer(minValue), LegacyNumericUtils.getMinInt(terms));
assertEquals(new Integer(maxValue), LegacyNumericUtils.getMaxInt(terms));

r.close();
w.close();
dir.close();
}

public void testEmptyLongFieldMinMax() throws Exception {
assertNull(LegacyNumericUtils.getMinLong(EMPTY_TERMS));
assertNull(LegacyNumericUtils.getMaxLong(EMPTY_TERMS));
}

public void testLongFieldMinMax() throws Exception {
Directory dir = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random(), dir);
int numDocs = atLeast(100);
long minValue = Long.MAX_VALUE;
long maxValue = Long.MIN_VALUE;
for(int i=0;i<numDocs;i++) {
Document doc = new Document();
long num = random().nextLong();
minValue = Math.min(num, minValue);
maxValue = Math.max(num, maxValue);
doc.add(new LegacyLongField("field", num, Field.Store.NO));
w.addDocument(doc);
}

IndexReader r = w.getReader();

Terms terms = MultiFields.getTerms(r, "field");
assertEquals(new Long(minValue), LegacyNumericUtils.getMinLong(terms));
assertEquals(new Long(maxValue), LegacyNumericUtils.getMaxLong(terms));

r.close();
w.close();
dir.close();
}

public void testFloatFieldMinMax() throws Exception {
Directory dir = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random(), dir);
int numDocs = atLeast(100);
float minValue = Float.POSITIVE_INFINITY;
float maxValue = Float.NEGATIVE_INFINITY;
for(int i=0;i<numDocs;i++) {
Document doc = new Document();
float num = random().nextFloat();
minValue = Math.min(num, minValue);
maxValue = Math.max(num, maxValue);
doc.add(new LegacyFloatField("field", num, Field.Store.NO));
w.addDocument(doc);
}

IndexReader r = w.getReader();
Terms terms = MultiFields.getTerms(r, "field");
assertEquals(minValue, NumericUtils.sortableIntToFloat(LegacyNumericUtils.getMinInt(terms)), 0.0f);
assertEquals(maxValue, NumericUtils.sortableIntToFloat(LegacyNumericUtils.getMaxInt(terms)), 0.0f);

r.close();
w.close();
dir.close();
}

public void testDoubleFieldMinMax() throws Exception {
Directory dir = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random(), dir);
int numDocs = atLeast(100);
double minValue = Double.POSITIVE_INFINITY;
double maxValue = Double.NEGATIVE_INFINITY;
for(int i=0;i<numDocs;i++) {
Document doc = new Document();
double num = random().nextDouble();
minValue = Math.min(num, minValue);
maxValue = Math.max(num, maxValue);
doc.add(new LegacyDoubleField("field", num, Field.Store.NO));
w.addDocument(doc);
}

IndexReader r = w.getReader();

Terms terms = MultiFields.getTerms(r, "field");

assertEquals(minValue, NumericUtils.sortableLongToDouble(LegacyNumericUtils.getMinLong(terms)), 0.0);
assertEquals(maxValue, NumericUtils.sortableLongToDouble(LegacyNumericUtils.getMaxLong(terms)), 0.0);

r.close();
w.close();
dir.close();
}

/**
* A complete empty Terms instance that has no terms in it and supports no optional statistics
*/
private static Terms EMPTY_TERMS = new Terms() {
public TermsEnum iterator() { return TermsEnum.EMPTY; }
public long size() { return -1; }
public long getSumTotalTermFreq() { return -1; }
public long getSumDocFreq() { return -1; }
public int getDocCount() { return -1; }
public boolean hasFreqs() { return false; }
public boolean hasOffsets() { return false; }
public boolean hasPositions() { return false; }
public boolean hasPayloads() { return false; }
};
}
@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.search;
package org.apache.lucene.legacy;


import java.util.Locale;
@ -24,9 +24,13 @@ import java.text.DecimalFormatSymbols;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.LegacyIntField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.legacy.LegacyIntField;
import org.apache.lucene.legacy.LegacyNumericRangeQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
@ -14,28 +14,26 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.search;
package org.apache.lucene.legacy;


import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.LegacyFloatField;
import org.apache.lucene.document.LegacyIntField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.LegacyNumericUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.TestLegacyNumericUtils; // NaN arrays
import org.apache.lucene.util.TestUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@ -63,31 +61,31 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
.setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000))
.setMergePolicy(newLogMergePolicy()));

final FieldType storedInt = new FieldType(LegacyIntField.TYPE_NOT_STORED);
final LegacyFieldType storedInt = new LegacyFieldType(LegacyIntField.TYPE_NOT_STORED);
storedInt.setStored(true);
storedInt.freeze();

final FieldType storedInt8 = new FieldType(storedInt);
final LegacyFieldType storedInt8 = new LegacyFieldType(storedInt);
storedInt8.setNumericPrecisionStep(8);

final FieldType storedInt4 = new FieldType(storedInt);
final LegacyFieldType storedInt4 = new LegacyFieldType(storedInt);
storedInt4.setNumericPrecisionStep(4);

final FieldType storedInt2 = new FieldType(storedInt);
final LegacyFieldType storedInt2 = new LegacyFieldType(storedInt);
storedInt2.setNumericPrecisionStep(2);

final FieldType storedIntNone = new FieldType(storedInt);
final LegacyFieldType storedIntNone = new LegacyFieldType(storedInt);
storedIntNone.setNumericPrecisionStep(Integer.MAX_VALUE);

final FieldType unstoredInt = LegacyIntField.TYPE_NOT_STORED;
final LegacyFieldType unstoredInt = LegacyIntField.TYPE_NOT_STORED;

final FieldType unstoredInt8 = new FieldType(unstoredInt);
final LegacyFieldType unstoredInt8 = new LegacyFieldType(unstoredInt);
unstoredInt8.setNumericPrecisionStep(8);

final FieldType unstoredInt4 = new FieldType(unstoredInt);
final LegacyFieldType unstoredInt4 = new LegacyFieldType(unstoredInt);
unstoredInt4.setNumericPrecisionStep(4);

final FieldType unstoredInt2 = new FieldType(unstoredInt);
final LegacyFieldType unstoredInt2 = new LegacyFieldType(unstoredInt);
unstoredInt2.setNumericPrecisionStep(2);

LegacyIntField
@ -350,132 +348,6 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
dir.close();
}

private void testRandomTrieAndClassicRangeQuery(int precisionStep) throws Exception {
String field="field"+precisionStep;
int totalTermCountT=0,totalTermCountC=0,termCountT,termCountC;
int num = TestUtil.nextInt(random(), 10, 20);
for (int i = 0; i < num; i++) {
int lower=(int)(random().nextDouble()*noDocs*distance)+startOffset;
int upper=(int)(random().nextDouble()*noDocs*distance)+startOffset;
if (lower>upper) {
int a=lower; lower=upper; upper=a;
}
final BytesRef lowerBytes, upperBytes;
BytesRefBuilder b = new BytesRefBuilder();
LegacyNumericUtils.intToPrefixCoded(lower, 0, b);
lowerBytes = b.toBytesRef();
LegacyNumericUtils.intToPrefixCoded(upper, 0, b);
upperBytes = b.toBytesRef();

// test inclusive range
LegacyNumericRangeQuery<Integer> tq= LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, true, true);
TermRangeQuery cq=new TermRangeQuery(field, lowerBytes, upperBytes, true, true);
TopDocs tTopDocs = searcher.search(tq, 1);
TopDocs cTopDocs = searcher.search(cq, 1);
assertEquals("Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal", cTopDocs.totalHits, tTopDocs.totalHits );
totalTermCountT += termCountT = countTerms(tq);
totalTermCountC += termCountC = countTerms(cq);
checkTermCounts(precisionStep, termCountT, termCountC);
// test exclusive range
tq= LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, false, false);
cq=new TermRangeQuery(field, lowerBytes, upperBytes, false, false);
tTopDocs = searcher.search(tq, 1);
cTopDocs = searcher.search(cq, 1);
assertEquals("Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal", cTopDocs.totalHits, tTopDocs.totalHits );
totalTermCountT += termCountT = countTerms(tq);
totalTermCountC += termCountC = countTerms(cq);
checkTermCounts(precisionStep, termCountT, termCountC);
// test left exclusive range
tq= LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, false, true);
cq=new TermRangeQuery(field, lowerBytes, upperBytes, false, true);
tTopDocs = searcher.search(tq, 1);
cTopDocs = searcher.search(cq, 1);
assertEquals("Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal", cTopDocs.totalHits, tTopDocs.totalHits );
totalTermCountT += termCountT = countTerms(tq);
totalTermCountC += termCountC = countTerms(cq);
checkTermCounts(precisionStep, termCountT, termCountC);
// test right exclusive range
tq= LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, true, false);
cq=new TermRangeQuery(field, lowerBytes, upperBytes, true, false);
tTopDocs = searcher.search(tq, 1);
cTopDocs = searcher.search(cq, 1);
assertEquals("Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal", cTopDocs.totalHits, tTopDocs.totalHits );
totalTermCountT += termCountT = countTerms(tq);
totalTermCountC += termCountC = countTerms(cq);
checkTermCounts(precisionStep, termCountT, termCountC);
}

checkTermCounts(precisionStep, totalTermCountT, totalTermCountC);
if (VERBOSE && precisionStep != Integer.MAX_VALUE) {
System.out.println("Average number of terms during random search on '" + field + "':");
System.out.println(" Numeric query: " + (((double)totalTermCountT)/(num * 4)));
System.out.println(" Classical query: " + (((double)totalTermCountC)/(num * 4)));
}
}

@Test
public void testEmptyEnums() throws Exception {
int count=3000;
int lower=(distance*3/2)+startOffset, upper=lower + count*distance + (distance/3);
// test empty enum
assert lower < upper;
assertTrue(0 < countTerms(LegacyNumericRangeQuery.newIntRange("field4", 4, lower, upper, true, true)));
assertEquals(0, countTerms(LegacyNumericRangeQuery.newIntRange("field4", 4, upper, lower, true, true)));
// test empty enum outside of bounds
lower = distance*noDocs+startOffset;
upper = 2 * lower;
assert lower < upper;
assertEquals(0, countTerms(LegacyNumericRangeQuery.newIntRange("field4", 4, lower, upper, true, true)));
}

private int countTerms(MultiTermQuery q) throws Exception {
final Terms terms = MultiFields.getTerms(reader, q.getField());
if (terms == null)
return 0;
final TermsEnum termEnum = q.getTermsEnum(terms);
assertNotNull(termEnum);
int count = 0;
BytesRef cur, last = null;
while ((cur = termEnum.next()) != null) {
count++;
if (last != null) {
assertTrue(last.compareTo(cur) < 0);
}
last = BytesRef.deepCopyOf(cur);
}
// LUCENE-3314: the results after next() already returned null are undefined,
// assertNull(termEnum.next());
return count;
}

private void checkTermCounts(int precisionStep, int termCountT, int termCountC) {
if (precisionStep == Integer.MAX_VALUE) {
assertEquals("Number of terms should be equal for unlimited precStep", termCountC, termCountT);
} else {
assertTrue("Number of terms for NRQ should be <= compared to classical TRQ", termCountT <= termCountC);
}
}

@Test
public void testRandomTrieAndClassicRangeQuery_8bit() throws Exception {
testRandomTrieAndClassicRangeQuery(8);
}

@Test
public void testRandomTrieAndClassicRangeQuery_4bit() throws Exception {
testRandomTrieAndClassicRangeQuery(4);
}

@Test
public void testRandomTrieAndClassicRangeQuery_2bit() throws Exception {
testRandomTrieAndClassicRangeQuery(2);
}

@Test
public void testRandomTrieAndClassicRangeQuery_NoTrie() throws Exception {
testRandomTrieAndClassicRangeQuery(Integer.MAX_VALUE);
}

private void testRangeSplit(int precisionStep) throws Exception {
String field="ascfield"+precisionStep;
// 10 random tests
@ -14,28 +14,26 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.search;
package org.apache.lucene.legacy;


import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.LegacyDoubleField;
import org.apache.lucene.document.LegacyLongField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.LegacyNumericUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.TestLegacyNumericUtils;
import org.apache.lucene.util.TestUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@ -63,37 +61,37 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
.setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000))
.setMergePolicy(newLogMergePolicy()));

final FieldType storedLong = new FieldType(LegacyLongField.TYPE_NOT_STORED);
final LegacyFieldType storedLong = new LegacyFieldType(LegacyLongField.TYPE_NOT_STORED);
storedLong.setStored(true);
storedLong.freeze();

final FieldType storedLong8 = new FieldType(storedLong);
final LegacyFieldType storedLong8 = new LegacyFieldType(storedLong);
storedLong8.setNumericPrecisionStep(8);

final FieldType storedLong4 = new FieldType(storedLong);
final LegacyFieldType storedLong4 = new LegacyFieldType(storedLong);
storedLong4.setNumericPrecisionStep(4);

final FieldType storedLong6 = new FieldType(storedLong);
final LegacyFieldType storedLong6 = new LegacyFieldType(storedLong);
storedLong6.setNumericPrecisionStep(6);

final FieldType storedLong2 = new FieldType(storedLong);
final LegacyFieldType storedLong2 = new LegacyFieldType(storedLong);
storedLong2.setNumericPrecisionStep(2);

final FieldType storedLongNone = new FieldType(storedLong);
final LegacyFieldType storedLongNone = new LegacyFieldType(storedLong);
storedLongNone.setNumericPrecisionStep(Integer.MAX_VALUE);

final FieldType unstoredLong = LegacyLongField.TYPE_NOT_STORED;
final LegacyFieldType unstoredLong = LegacyLongField.TYPE_NOT_STORED;

final FieldType unstoredLong8 = new FieldType(unstoredLong);
final LegacyFieldType unstoredLong8 = new LegacyFieldType(unstoredLong);
unstoredLong8.setNumericPrecisionStep(8);

final FieldType unstoredLong6 = new FieldType(unstoredLong);
final LegacyFieldType unstoredLong6 = new LegacyFieldType(unstoredLong);
unstoredLong6.setNumericPrecisionStep(6);

final FieldType unstoredLong4 = new FieldType(unstoredLong);
final LegacyFieldType unstoredLong4 = new LegacyFieldType(unstoredLong);
unstoredLong4.setNumericPrecisionStep(4);

final FieldType unstoredLong2 = new FieldType(unstoredLong);
final LegacyFieldType unstoredLong2 = new LegacyFieldType(unstoredLong);
unstoredLong2.setNumericPrecisionStep(2);

LegacyLongField
@ -374,137 +372,6 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
dir.close();
}

private void testRandomTrieAndClassicRangeQuery(int precisionStep) throws Exception {
String field="field"+precisionStep;
int totalTermCountT=0,totalTermCountC=0,termCountT,termCountC;
int num = TestUtil.nextInt(random(), 10, 20);
for (int i = 0; i < num; i++) {
long lower=(long)(random().nextDouble()*noDocs*distance)+startOffset;
long upper=(long)(random().nextDouble()*noDocs*distance)+startOffset;
if (lower>upper) {
long a=lower; lower=upper; upper=a;
}
final BytesRef lowerBytes, upperBytes;
BytesRefBuilder b = new BytesRefBuilder();
LegacyNumericUtils.longToPrefixCoded(lower, 0, b);
lowerBytes = b.toBytesRef();
LegacyNumericUtils.longToPrefixCoded(upper, 0, b);
upperBytes = b.toBytesRef();

// test inclusive range
LegacyNumericRangeQuery<Long> tq= LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, true, true);
TermRangeQuery cq=new TermRangeQuery(field, lowerBytes, upperBytes, true, true);
TopDocs tTopDocs = searcher.search(tq, 1);
TopDocs cTopDocs = searcher.search(cq, 1);
assertEquals("Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal", cTopDocs.totalHits, tTopDocs.totalHits );
totalTermCountT += termCountT = countTerms(tq);
totalTermCountC += termCountC = countTerms(cq);
checkTermCounts(precisionStep, termCountT, termCountC);
// test exclusive range
tq= LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, false, false);
cq=new TermRangeQuery(field, lowerBytes, upperBytes, false, false);
tTopDocs = searcher.search(tq, 1);
cTopDocs = searcher.search(cq, 1);
assertEquals("Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal", cTopDocs.totalHits, tTopDocs.totalHits );
totalTermCountT += termCountT = countTerms(tq);
totalTermCountC += termCountC = countTerms(cq);
checkTermCounts(precisionStep, termCountT, termCountC);
// test left exclusive range
tq= LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, false, true);
cq=new TermRangeQuery(field, lowerBytes, upperBytes, false, true);
tTopDocs = searcher.search(tq, 1);
cTopDocs = searcher.search(cq, 1);
assertEquals("Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal", cTopDocs.totalHits, tTopDocs.totalHits );
totalTermCountT += termCountT = countTerms(tq);
totalTermCountC += termCountC = countTerms(cq);
checkTermCounts(precisionStep, termCountT, termCountC);
// test right exclusive range
tq= LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, true, false);
cq=new TermRangeQuery(field, lowerBytes, upperBytes, true, false);
tTopDocs = searcher.search(tq, 1);
cTopDocs = searcher.search(cq, 1);
assertEquals("Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal", cTopDocs.totalHits, tTopDocs.totalHits );
totalTermCountT += termCountT = countTerms(tq);
totalTermCountC += termCountC = countTerms(cq);
checkTermCounts(precisionStep, termCountT, termCountC);
}

checkTermCounts(precisionStep, totalTermCountT, totalTermCountC);
if (VERBOSE && precisionStep != Integer.MAX_VALUE) {
System.out.println("Average number of terms during random search on '" + field + "':");
System.out.println(" Numeric query: " + (((double)totalTermCountT)/(num * 4)));
System.out.println(" Classical query: " + (((double)totalTermCountC)/(num * 4)));
}
}

@Test
public void testEmptyEnums() throws Exception {
int count=3000;
long lower=(distance*3/2)+startOffset, upper=lower + count*distance + (distance/3);
// test empty enum
assert lower < upper;
assertTrue(0 < countTerms(LegacyNumericRangeQuery.newLongRange("field4", 4, lower, upper, true, true)));
assertEquals(0, countTerms(LegacyNumericRangeQuery.newLongRange("field4", 4, upper, lower, true, true)));
// test empty enum outside of bounds
lower = distance*noDocs+startOffset;
upper = 2L * lower;
assert lower < upper;
assertEquals(0, countTerms(LegacyNumericRangeQuery.newLongRange("field4", 4, lower, upper, true, true)));
}

private int countTerms(MultiTermQuery q) throws Exception {
final Terms terms = MultiFields.getTerms(reader, q.getField());
if (terms == null)
return 0;
final TermsEnum termEnum = q.getTermsEnum(terms);
assertNotNull(termEnum);
int count = 0;
BytesRef cur, last = null;
while ((cur = termEnum.next()) != null) {
count++;
if (last != null) {
assertTrue(last.compareTo(cur) < 0);
}
last = BytesRef.deepCopyOf(cur);
}
// LUCENE-3314: the results after next() already returned null are undefined,
// assertNull(termEnum.next());
return count;
}

private void checkTermCounts(int precisionStep, int termCountT, int termCountC) {
if (precisionStep == Integer.MAX_VALUE) {
assertEquals("Number of terms should be equal for unlimited precStep", termCountC, termCountT);
} else {
assertTrue("Number of terms for NRQ should be <= compared to classical TRQ", termCountT <= termCountC);
}
}

@Test
public void testRandomTrieAndClassicRangeQuery_8bit() throws Exception {
testRandomTrieAndClassicRangeQuery(8);
}

@Test
public void testRandomTrieAndClassicRangeQuery_6bit() throws Exception {
testRandomTrieAndClassicRangeQuery(6);
}

@Test
public void testRandomTrieAndClassicRangeQuery_4bit() throws Exception {
testRandomTrieAndClassicRangeQuery(4);
}

@Test
public void testRandomTrieAndClassicRangeQuery_2bit() throws Exception {
testRandomTrieAndClassicRangeQuery(2);
}

@Test
public void testRandomTrieAndClassicRangeQuery_NoTrie() throws Exception {
testRandomTrieAndClassicRangeQuery(Integer.MAX_VALUE);
}

private void testRangeSplit(int precisionStep) throws Exception {
String field="ascfield"+precisionStep;
// 10 random tests
@ -14,15 +14,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.analysis;
package org.apache.lucene.legacy;


import org.apache.lucene.util.AttributeImpl;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LegacyNumericUtils;
import org.apache.lucene.analysis.LegacyNumericTokenStream.LegacyNumericTermAttributeImpl;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
import org.apache.lucene.analysis.tokenattributes.TestCharTermAttributeImpl;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.apache.lucene.legacy.LegacyNumericTokenStream;
import org.apache.lucene.legacy.LegacyNumericUtils;
import org.apache.lucene.legacy.LegacyNumericTokenStream.LegacyNumericTermAttributeImpl;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.CharTermAttributeImpl;

@ -150,20 +152,37 @@ public class TestNumericTokenStream extends BaseTokenStreamTestCase {
public void testAttributeClone() throws Exception {
LegacyNumericTermAttributeImpl att = new LegacyNumericTermAttributeImpl();
att.init(lvalue, 64, 8, 0); // set some value, to make getBytesRef() work
LegacyNumericTermAttributeImpl copy = TestCharTermAttributeImpl.assertCloneIsEqual(att);
LegacyNumericTermAttributeImpl copy = assertCloneIsEqual(att);
assertNotSame(att.getBytesRef(), copy.getBytesRef());
LegacyNumericTermAttributeImpl copy2 = TestCharTermAttributeImpl.assertCopyIsEqual(att);
LegacyNumericTermAttributeImpl copy2 = assertCopyIsEqual(att);
assertNotSame(att.getBytesRef(), copy2.getBytesRef());

// LUCENE-7027 test
att.init(lvalue, 64, 8, 64); // Exhausted TokenStream -> should return empty BytesRef
assertEquals(new BytesRef(), att.getBytesRef());
copy = TestCharTermAttributeImpl.assertCloneIsEqual(att);
copy = assertCloneIsEqual(att);
assertEquals(new BytesRef(), copy.getBytesRef());
assertNotSame(att.getBytesRef(), copy.getBytesRef());
copy2 = TestCharTermAttributeImpl.assertCopyIsEqual(att);
copy2 = assertCopyIsEqual(att);
assertEquals(new BytesRef(), copy2.getBytesRef());
assertNotSame(att.getBytesRef(), copy2.getBytesRef());
}

public static <T extends AttributeImpl> T assertCloneIsEqual(T att) {
@SuppressWarnings("unchecked")
T clone = (T) att.clone();
assertEquals("Clone must be equal", att, clone);
assertEquals("Clone's hashcode must be equal", att.hashCode(), clone.hashCode());
return clone;
}

public static <T extends AttributeImpl> T assertCopyIsEqual(T att) throws Exception {
@SuppressWarnings("unchecked")
T copy = (T) att.getClass().newInstance();
att.copyTo(copy);
assertEquals("Copied instance must be equal", att, copy);
assertEquals("Copied instance's hashcode must be equal", att.hashCode(), copy.hashCode());
return copy;
}

}
@ -22,7 +22,6 @@ import java.io.Closeable;
import java.lang.reflect.Modifier;

import org.apache.lucene.analysis.tokenattributes.PackedTokenAttributeImpl;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
@ -176,11 +175,7 @@ public abstract class TokenStream extends AttributeSource implements Closeable {
|
||||
* @throws IOException If an I/O error occurs
|
||||
*/
|
||||
public void end() throws IOException {
|
||||
clearAttributes(); // LUCENE-3849: don't consume dirty atts
|
||||
PositionIncrementAttribute posIncAtt = getAttribute(PositionIncrementAttribute.class);
|
||||
if (posIncAtt != null) {
|
||||
posIncAtt.setPositionIncrement(0);
|
||||
}
|
||||
endAttributes(); // LUCENE-3849: don't consume dirty atts
|
||||
}
|
||||
|
||||
/**
|
||||
|
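The per-attribute logic removed from TokenStream.end() now lives behind endAttributes() (see the AttributeImpl and AttributeSource hunks further down). The contract for stream implementors is unchanged: always call super.end() first, then apply any final state of your own. A minimal sketch with a hypothetical filter, not a class from this patch:

    import java.io.IOException;
    import org.apache.lucene.analysis.TokenFilter;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;

    final class PassThroughFilter extends TokenFilter {
      private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);

      PassThroughFilter(TokenStream in) {
        super(in);
      }

      @Override
      public boolean incrementToken() throws IOException {
        return input.incrementToken();
      }

      @Override
      public void end() throws IOException {
        super.end(); // lets every attribute apply its end-of-stream state first
        // any filter-specific final state (e.g. adjusting the final offset) goes here:
        offsetAtt.setOffset(offsetAtt.startOffset(), offsetAtt.endOffset());
      }
    }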
@ -138,6 +138,17 @@ public class PackedTokenAttributeImpl extends CharTermAttributeImpl
startOffset = endOffset = 0;
type = DEFAULT_TYPE;
}

/** Resets the attributes at end
*/
@Override
public void end() {
super.end();
positionIncrement = 0;
positionLength = 1;
startOffset = endOffset = 0;
type = DEFAULT_TYPE;
}

@Override
public PackedTokenAttributeImpl clone() {

@ -46,6 +46,11 @@ public class PositionIncrementAttributeImpl extends AttributeImpl implements Pos
this.positionIncrement = 1;
}

@Override
public void end() {
this.positionIncrement = 0;
}

@Override
public boolean equals(Object other) {
if (other == this) {

@ -21,7 +21,6 @@ import java.io.IOException;
import java.io.Reader;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.LegacyNumericTokenStream;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.BytesTermAttribute;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
@ -426,9 +425,6 @@ public class Field implements IndexableField {
if (type.indexOptions() == IndexOptions.NONE || !type.tokenized()) {
throw new IllegalArgumentException("TokenStream fields must be indexed and tokenized");
}
if (type.numericType() != null) {
throw new IllegalArgumentException("cannot set private TokenStream on numeric fields");
}
this.tokenStream = tokenStream;
}

@ -511,35 +507,6 @@
return null;
}

final FieldType.LegacyNumericType numericType = fieldType().numericType();
if (numericType != null) {
if (!(reuse instanceof LegacyNumericTokenStream && ((LegacyNumericTokenStream)reuse).getPrecisionStep() == type.numericPrecisionStep())) {
// lazy init the TokenStream as it is heavy to instantiate
// (attributes,...) if not needed (stored field loading)
reuse = new LegacyNumericTokenStream(type.numericPrecisionStep());
}
final LegacyNumericTokenStream nts = (LegacyNumericTokenStream) reuse;
// initialize value in TokenStream
final Number val = (Number) fieldsData;
switch (numericType) {
case INT:
nts.setIntValue(val.intValue());
break;
case LONG:
nts.setLongValue(val.longValue());
break;
case FLOAT:
nts.setFloatValue(val.floatValue());
break;
case DOUBLE:
nts.setDoubleValue(val.doubleValue());
break;
default:
throw new AssertionError("Should never get here");
}
return reuse;
}

if (!fieldType().tokenized()) {
if (stringValue() != null) {
if (!(reuse instanceof StringTokenStream)) {
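The block removed above was the legacy indexing path that expanded each numeric value into one prefix-coded token per precision-step shift; it moves to backward-codecs with the rest of the legacy numerics. For reference, a sketch of consuming such a stream directly, assuming the relocated org.apache.lucene.legacy classes (the value and step are arbitrary):

    import org.apache.lucene.legacy.LegacyNumericTokenStream;
    import org.apache.lucene.legacy.LegacyNumericTokenStream.LegacyNumericTermAttribute;

    public class NumericTokensDemo {
      public static void main(String[] args) throws Exception {
        try (LegacyNumericTokenStream ts = new LegacyNumericTokenStream(8)) {
          ts.setLongValue(1234L);
          LegacyNumericTermAttribute numericAtt = ts.getAttribute(LegacyNumericTermAttribute.class);
          ts.reset();
          // a 64-bit value with precisionStep=8 yields 8 tokens: shifts 0, 8, ..., 56
          while (ts.incrementToken()) {
            System.out.println("shift=" + numericAtt.getShift() + " rawValue=" + numericAtt.getRawValue());
          }
          ts.end();
        }
      }
    }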
@ -22,30 +22,12 @@ import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.util.LegacyNumericUtils;

/**
* Describes the properties of a field.
*/
public class FieldType implements IndexableFieldType {

/** Data type of the numeric value
* @since 3.2
*
* @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
*/
@Deprecated
public enum LegacyNumericType {
/** 32-bit integer numeric type */
INT,
/** 64-bit long numeric type */
LONG,
/** 32-bit float numeric type */
FLOAT,
/** 64-bit double numeric type */
DOUBLE
}

private boolean stored;
private boolean tokenized = true;
private boolean storeTermVectors;
@ -54,9 +36,7 @@ public class FieldType implements IndexableFieldType {
private boolean storeTermVectorPayloads;
private boolean omitNorms;
private IndexOptions indexOptions = IndexOptions.NONE;
private LegacyNumericType numericType;
private boolean frozen;
private int numericPrecisionStep = LegacyNumericUtils.PRECISION_STEP_DEFAULT;
private DocValuesType docValuesType = DocValuesType.NONE;
private int dimensionCount;
private int dimensionNumBytes;
@ -73,8 +53,6 @@ public class FieldType implements IndexableFieldType {
this.storeTermVectorPayloads = ref.storeTermVectorPayloads();
this.omitNorms = ref.omitNorms();
this.indexOptions = ref.indexOptions();
this.numericType = ref.numericType();
this.numericPrecisionStep = ref.numericPrecisionStep();
this.docValuesType = ref.docValuesType();
this.dimensionCount = ref.dimensionCount;
this.dimensionNumBytes = ref.dimensionNumBytes;
@ -297,70 +275,6 @@ public class FieldType implements IndexableFieldType {
this.indexOptions = value;
}

/**
* Specifies the field's numeric type.
* @param type numeric type, or null if the field has no numeric type.
* @throws IllegalStateException if this FieldType is frozen against
* future modifications.
* @see #numericType()
*
* @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
*/
@Deprecated
public void setNumericType(LegacyNumericType type) {
checkIfFrozen();
numericType = type;
}

/**
* LegacyNumericType: if non-null then the field's value will be indexed
* numerically so that {@link org.apache.lucene.search.LegacyNumericRangeQuery} can be used at
* search time.
* <p>
* The default is <code>null</code> (no numeric type)
* @see #setNumericType(org.apache.lucene.document.FieldType.LegacyNumericType)
*
* @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
*/
@Deprecated
public LegacyNumericType numericType() {
return numericType;
}

/**
* Sets the numeric precision step for the field.
* @param precisionStep numeric precision step for the field
* @throws IllegalArgumentException if precisionStep is less than 1.
* @throws IllegalStateException if this FieldType is frozen against
* future modifications.
* @see #numericPrecisionStep()
*
* @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
*/
@Deprecated
public void setNumericPrecisionStep(int precisionStep) {
checkIfFrozen();
if (precisionStep < 1) {
throw new IllegalArgumentException("precisionStep must be >= 1 (got " + precisionStep + ")");
}
this.numericPrecisionStep = precisionStep;
}

/**
* Precision step for numeric field.
* <p>
* This has no effect if {@link #numericType()} returns null.
* <p>
* The default is {@link org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT}
* @see #setNumericPrecisionStep(int)
*
* @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
*/
@Deprecated
public int numericPrecisionStep() {
return numericPrecisionStep;
}
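All of this numeric-type plumbing leaves FieldType; the deprecation notes point at the points API as the replacement. A before/after sketch of the migration (field names are illustrative):

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.LongPoint;
    import org.apache.lucene.document.StoredField;
    import org.apache.lucene.search.Query;

    public class PointsMigrationDemo {
      // before: a FieldType with setNumericType(LegacyNumericType.LONG) and a tuned
      // setNumericPrecisionStep(...); after: dimensional points, nothing to tune.
      public static Document makeDoc(long timestamp) {
        Document doc = new Document();
        doc.add(new LongPoint("timestamp", timestamp));   // indexed for range queries
        doc.add(new StoredField("timestamp", timestamp)); // points are index-only; store the value separately
        return doc;
      }

      public static Query rangeQuery(long min, long max) {
        return LongPoint.newRangeQuery("timestamp", min, max);
      }
    }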

/**
* Enables points indexing.
*/
@ -403,7 +317,7 @@ public class FieldType implements IndexableFieldType {

/** Prints a Field for human consumption. */
@Override
public final String toString() {
public String toString() {
StringBuilder result = new StringBuilder();
if (stored()) {
result.append("stored");
@ -434,12 +348,6 @@ public class FieldType implements IndexableFieldType {
result.append(",indexOptions=");
result.append(indexOptions);
}
if (numericType != null) {
result.append(",numericType=");
result.append(numericType);
result.append(",numericPrecisionStep=");
result.append(numericPrecisionStep);
}
}
if (dimensionCount != 0) {
if (result.length() > 0) {
@ -495,8 +403,6 @@ public class FieldType implements IndexableFieldType {
result = prime * result + dimensionNumBytes;
result = prime * result + ((docValuesType == null) ? 0 : docValuesType.hashCode());
result = prime * result + indexOptions.hashCode();
result = prime * result + numericPrecisionStep;
result = prime * result + ((numericType == null) ? 0 : numericType.hashCode());
result = prime * result + (omitNorms ? 1231 : 1237);
result = prime * result + (storeTermVectorOffsets ? 1231 : 1237);
result = prime * result + (storeTermVectorPayloads ? 1231 : 1237);
@ -517,8 +423,6 @@ public class FieldType implements IndexableFieldType {
if (dimensionNumBytes != other.dimensionNumBytes) return false;
if (docValuesType != other.docValuesType) return false;
if (indexOptions != other.indexOptions) return false;
if (numericPrecisionStep != other.numericPrecisionStep) return false;
if (numericType != other.numericType) return false;
if (omitNorms != other.omitNorms) return false;
if (storeTermVectorOffsets != other.storeTermVectorOffsets) return false;
if (storeTermVectorPayloads != other.storeTermVectorPayloads) return false;

@ -186,4 +186,33 @@ public class Rectangle {

return new Rectangle(minLat, maxLat, minLon, maxLon);
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;

Rectangle rectangle = (Rectangle) o;

if (Double.compare(rectangle.minLat, minLat) != 0) return false;
if (Double.compare(rectangle.minLon, minLon) != 0) return false;
if (Double.compare(rectangle.maxLat, maxLat) != 0) return false;
return Double.compare(rectangle.maxLon, maxLon) == 0;

}

@Override
public int hashCode() {
int result;
long temp;
temp = Double.doubleToLongBits(minLat);
result = (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(minLon);
result = 31 * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(maxLat);
result = 31 * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(maxLon);
result = 31 * result + (int) (temp ^ (temp >>> 32));
return result;
}
}
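The new equals/hashCode pair uses Double.compare and doubleToLongBits rather than ==, which keeps the contract total for NaN and distinguishes the two zeros. A quick plain-JDK illustration of why that matters:

    public class DoubleCompareDemo {
      public static void main(String[] args) {
        System.out.println(Double.NaN == Double.NaN);                // false
        System.out.println(Double.compare(Double.NaN, Double.NaN));  // 0: equal for compare()
        System.out.println(0.0 == -0.0);                             // true
        System.out.println(Double.compare(0.0, -0.0));               // 1: distinct for compare()
      }
    }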

@ -405,6 +405,7 @@ public class LRUQueryCache implements QueryCache, Accountable {
lock.lock();
try {
cache.clear();
// Note that this also clears the uniqueQueries map since mostRecentlyUsedQueries is the uniqueQueries.keySet view:
mostRecentlyUsedQueries.clear();
onClear();
} finally {
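The new comment relies on a standard java.util.Map guarantee: keySet() is a live view of the map, so clearing either side clears both. A tiny standalone demonstration:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.Set;

    public class KeySetViewDemo {
      public static void main(String[] args) {
        Map<String, Integer> uniqueQueries = new HashMap<>();
        Set<String> mostRecentlyUsed = uniqueQueries.keySet(); // view, not a copy
        uniqueQueries.put("q1", 1);
        uniqueQueries.put("q2", 2);
        mostRecentlyUsed.clear();          // clears the backing map as well
        System.out.println(uniqueQueries); // {}
      }
    }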

@ -0,0 +1,136 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.store;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicInteger;

/**
* A guard that is created for every {@link ByteBufferIndexInput} that tries on best effort
* to reject any access to the {@link ByteBuffer} behind, once it is unmapped. A single instance
* of this is used for the original and all clones, so once the original is closed and unmapped
* all clones also throw {@link AlreadyClosedException}, triggered by a {@link NullPointerException}.
* <p>
* This code tries to hopefully flush any CPU caches using a store-store barrier. It also yields the
* current thread to give other threads a chance to finish in-flight requests...
*/
final class ByteBufferGuard {

/**
* Pass in an implementation of this interface to cleanup ByteBuffers.
* MMapDirectory implements this to allow unmapping of bytebuffers with private Java APIs.
*/
@FunctionalInterface
static interface BufferCleaner {
void freeBuffer(String resourceDescription, ByteBuffer b) throws IOException;
}

private final String resourceDescription;
private final BufferCleaner cleaner;

/** Not volatile; see comments on visibility below! */
private boolean invalidated = false;

/** Used as a store-store barrier; see comments below! */
private final AtomicInteger barrier = new AtomicInteger();

/**
* Creates an instance to be used for a single {@link ByteBufferIndexInput} which
* must be shared by all of its clones.
*/
public ByteBufferGuard(String resourceDescription, BufferCleaner cleaner) {
this.resourceDescription = resourceDescription;
this.cleaner = cleaner;
}

/**
* Invalidates this guard and unmaps (if supported).
*/
public void invalidateAndUnmap(ByteBuffer... bufs) throws IOException {
if (cleaner != null) {
invalidated = true;
// This call should hopefully flush any CPU caches and as a result make
// the "invalidated" field update visible to other threads. We specifically
// don't make "invalidated" field volatile for performance reasons, hoping the
// JVM won't optimize away reads of that field and hardware should ensure
// caches are in sync after this call. This isn't entirely "fool-proof"
// (see LUCENE-7409 discussion), but it has been shown to work in practice
// and we count on this behavior.
barrier.lazySet(0);
// we give other threads a bit of time to finish reads on their ByteBuffer...:
Thread.yield();
// finally unmap the ByteBuffers:
for (ByteBuffer b : bufs) {
cleaner.freeBuffer(resourceDescription, b);
}
}
}

private void ensureValid() {
if (invalidated) {
// this triggers an AlreadyClosedException in ByteBufferIndexInput:
throw new NullPointerException();
}
}

public void getBytes(ByteBuffer receiver, byte[] dst, int offset, int length) {
ensureValid();
receiver.get(dst, offset, length);
}

public byte getByte(ByteBuffer receiver) {
ensureValid();
return receiver.get();
}

public short getShort(ByteBuffer receiver) {
ensureValid();
return receiver.getShort();
}

public int getInt(ByteBuffer receiver) {
ensureValid();
return receiver.getInt();
}

public long getLong(ByteBuffer receiver) {
ensureValid();
return receiver.getLong();
}

public byte getByte(ByteBuffer receiver, int pos) {
ensureValid();
return receiver.get(pos);
}

public short getShort(ByteBuffer receiver, int pos) {
ensureValid();
return receiver.getShort(pos);
}

public int getInt(ByteBuffer receiver, int pos) {
ensureValid();
return receiver.getInt(pos);
}

public long getLong(ByteBuffer receiver, int pos) {
ensureValid();
return receiver.getLong(pos);
}

}
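The guard pairs an unsynchronized boolean with an AtomicInteger.lazySet, trading a guaranteed happens-before edge for zero per-read cost; a racing reader can in theory still touch an unmapped buffer, which is why the class calls itself best effort. A stripped-down model of the idiom, independent of any Lucene class (all names here are illustrative):

    import java.util.concurrent.atomic.AtomicInteger;

    // Model of the ByteBufferGuard idiom: a plain (non-volatile) flag written once,
    // then "published" with a store-store barrier via AtomicInteger.lazySet. Readers
    // pay only a plain load on the hot path and eventually observe the flag.
    final class GuardModel {
      private boolean invalidated = false;                    // intentionally not volatile
      private final AtomicInteger barrier = new AtomicInteger();

      void invalidate() {
        invalidated = true;
        barrier.lazySet(0); // store-store barrier: orders the flag write before the free below
        Thread.yield();     // give in-flight readers a chance to finish
        // ... free the underlying resource here ...
      }

      int read(int[] data, int pos) {
        if (invalidated) {
          // stands in for the NullPointerException -> AlreadyClosedException path:
          throw new IllegalStateException("already closed");
        }
        return data[pos];   // hot path: no volatile read, no lock
      }
    }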

@ -21,9 +21,6 @@ import java.io.EOFException;
import java.io.IOException;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.util.Iterator;

import org.apache.lucene.util.WeakIdentityMap;

/**
* Base IndexInput implementation that uses an array
@ -37,35 +34,32 @@ import org.apache.lucene.util.WeakIdentityMap;
* are a power-of-two (<code>chunkSizePower</code>).
*/
abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessInput {
protected final BufferCleaner cleaner;
protected final long length;
protected final long chunkSizeMask;
protected final int chunkSizePower;
protected final ByteBufferGuard guard;

protected ByteBuffer[] buffers;
protected int curBufIndex = -1;
protected ByteBuffer curBuf; // redundant for speed: buffers[curBufIndex]

protected boolean isClone = false;
protected final WeakIdentityMap<ByteBufferIndexInput,Boolean> clones;

public static ByteBufferIndexInput newInstance(String resourceDescription, ByteBuffer[] buffers, long length, int chunkSizePower, BufferCleaner cleaner, boolean trackClones) {
final WeakIdentityMap<ByteBufferIndexInput,Boolean> clones = trackClones ? WeakIdentityMap.<ByteBufferIndexInput,Boolean>newConcurrentHashMap() : null;
public static ByteBufferIndexInput newInstance(String resourceDescription, ByteBuffer[] buffers, long length, int chunkSizePower, ByteBufferGuard guard) {
if (buffers.length == 1) {
return new SingleBufferImpl(resourceDescription, buffers[0], length, chunkSizePower, cleaner, clones);
return new SingleBufferImpl(resourceDescription, buffers[0], length, chunkSizePower, guard);
} else {
return new MultiBufferImpl(resourceDescription, buffers, 0, length, chunkSizePower, cleaner, clones);
return new MultiBufferImpl(resourceDescription, buffers, 0, length, chunkSizePower, guard);
}
}

ByteBufferIndexInput(String resourceDescription, ByteBuffer[] buffers, long length, int chunkSizePower, BufferCleaner cleaner, WeakIdentityMap<ByteBufferIndexInput,Boolean> clones) {
ByteBufferIndexInput(String resourceDescription, ByteBuffer[] buffers, long length, int chunkSizePower, ByteBufferGuard guard) {
super(resourceDescription);
this.buffers = buffers;
this.length = length;
this.chunkSizePower = chunkSizePower;
this.chunkSizeMask = (1L << chunkSizePower) - 1L;
this.clones = clones;
this.cleaner = cleaner;
this.guard = guard;
assert chunkSizePower >= 0 && chunkSizePower <= 30;
assert (length >>> chunkSizePower) < Integer.MAX_VALUE;
}
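The two asserts pin down the chunk geometry that the shift-and-mask addressing in the readXXX(long pos) overloads below depends on. A worked example of that arithmetic with the usual 1 GiB chunks (values chosen for illustration):

    public class ChunkAddressingDemo {
      public static void main(String[] args) {
        int chunkSizePower = 30;                          // 2^30 = 1 GiB chunks
        long chunkSizeMask = (1L << chunkSizePower) - 1L;
        long pos = (1L << 31) + 5;                        // absolute position in the file
        int bufferIndex = (int) (pos >> chunkSizePower);  // -> 2
        int offsetInBuffer = (int) (pos & chunkSizeMask); // -> 5
        System.out.println("buffer=" + bufferIndex + " offset=" + offsetInBuffer);
      }
    }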
@ -73,7 +67,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
@Override
public final byte readByte() throws IOException {
try {
return curBuf.get();
return guard.getByte(curBuf);
} catch (BufferUnderflowException e) {
do {
curBufIndex++;
@ -83,7 +77,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
curBuf = buffers[curBufIndex];
curBuf.position(0);
} while (!curBuf.hasRemaining());
return curBuf.get();
return guard.getByte(curBuf);
} catch (NullPointerException npe) {
throw new AlreadyClosedException("Already closed: " + this);
}
@ -92,11 +86,11 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
@Override
public final void readBytes(byte[] b, int offset, int len) throws IOException {
try {
curBuf.get(b, offset, len);
guard.getBytes(curBuf, b, offset, len);
} catch (BufferUnderflowException e) {
int curAvail = curBuf.remaining();
while (len > curAvail) {
curBuf.get(b, offset, curAvail);
guard.getBytes(curBuf, b, offset, curAvail);
len -= curAvail;
offset += curAvail;
curBufIndex++;
@ -107,7 +101,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
curBuf.position(0);
curAvail = curBuf.remaining();
}
curBuf.get(b, offset, len);
guard.getBytes(curBuf, b, offset, len);
} catch (NullPointerException npe) {
throw new AlreadyClosedException("Already closed: " + this);
}
@ -116,7 +110,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
@Override
public final short readShort() throws IOException {
try {
return curBuf.getShort();
return guard.getShort(curBuf);
} catch (BufferUnderflowException e) {
return super.readShort();
} catch (NullPointerException npe) {
@ -127,7 +121,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
@Override
public final int readInt() throws IOException {
try {
return curBuf.getInt();
return guard.getInt(curBuf);
} catch (BufferUnderflowException e) {
return super.readInt();
} catch (NullPointerException npe) {
@ -138,7 +132,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
@Override
public final long readLong() throws IOException {
try {
return curBuf.getLong();
return guard.getLong(curBuf);
} catch (BufferUnderflowException e) {
return super.readLong();
} catch (NullPointerException npe) {
@ -181,7 +175,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
public byte readByte(long pos) throws IOException {
try {
final int bi = (int) (pos >> chunkSizePower);
return buffers[bi].get((int) (pos & chunkSizeMask));
return guard.getByte(buffers[bi], (int) (pos & chunkSizeMask));
} catch (IndexOutOfBoundsException ioobe) {
throw new EOFException("seek past EOF: " + this);
} catch (NullPointerException npe) {
@ -207,7 +201,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
public short readShort(long pos) throws IOException {
final int bi = (int) (pos >> chunkSizePower);
try {
return buffers[bi].getShort((int) (pos & chunkSizeMask));
return guard.getShort(buffers[bi], (int) (pos & chunkSizeMask));
} catch (IndexOutOfBoundsException ioobe) {
// either it's a boundary, or read past EOF, fall back:
setPos(pos, bi);
@ -221,7 +215,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
public int readInt(long pos) throws IOException {
final int bi = (int) (pos >> chunkSizePower);
try {
return buffers[bi].getInt((int) (pos & chunkSizeMask));
return guard.getInt(buffers[bi], (int) (pos & chunkSizeMask));
} catch (IndexOutOfBoundsException ioobe) {
// either it's a boundary, or read past EOF, fall back:
setPos(pos, bi);
@ -235,7 +229,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
public long readLong(long pos) throws IOException {
final int bi = (int) (pos >> chunkSizePower);
try {
return buffers[bi].getLong((int) (pos & chunkSizeMask));
return guard.getLong(buffers[bi], (int) (pos & chunkSizeMask));
} catch (IndexOutOfBoundsException ioobe) {
// either it's a boundary, or read past EOF, fall back:
setPos(pos, bi);
@ -285,11 +279,6 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn

final ByteBufferIndexInput clone = newCloneInstance(getFullSliceDescription(sliceDescription), newBuffers, ofs, length);
clone.isClone = true;

// register the new clone in our clone list to clean it up on closing:
if (clones != null) {
this.clones.put(clone, Boolean.TRUE);
}

return clone;
}
@ -299,9 +288,9 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
protected ByteBufferIndexInput newCloneInstance(String newResourceDescription, ByteBuffer[] newBuffers, int offset, long length) {
if (newBuffers.length == 1) {
newBuffers[0].position(offset);
return new SingleBufferImpl(newResourceDescription, newBuffers[0].slice(), length, chunkSizePower, this.cleaner, this.clones);
return new SingleBufferImpl(newResourceDescription, newBuffers[0].slice(), length, chunkSizePower, this.guard);
} else {
return new MultiBufferImpl(newResourceDescription, newBuffers, offset, length, chunkSizePower, cleaner, clones);
return new MultiBufferImpl(newResourceDescription, newBuffers, offset, length, chunkSizePower, guard);
}
}

@ -335,25 +324,11 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
// make local copy, then un-set early
final ByteBuffer[] bufs = buffers;
unsetBuffers();
if (clones != null) {
clones.remove(this);
}

if (isClone) return;

// for extra safety unset also all clones' buffers:
if (clones != null) {
for (Iterator<ByteBufferIndexInput> it = this.clones.keyIterator(); it.hasNext();) {
final ByteBufferIndexInput clone = it.next();
assert clone.isClone;
clone.unsetBuffers();
}
this.clones.clear();
}

for (final ByteBuffer b : bufs) {
freeBuffer(b);
}
// tell the guard to invalidate and later unmap the bytebuffers (if supported):
guard.invalidateAndUnmap(bufs);
} finally {
unsetBuffers();
}
@ -367,31 +342,12 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
curBuf = null;
curBufIndex = 0;
}

/**
* Called when the contents of a buffer will be no longer needed.
*/
private void freeBuffer(ByteBuffer b) throws IOException {
if (cleaner != null) {
cleaner.freeBuffer(this, b);
}
}

/**
* Pass in an implementation of this interface to cleanup ByteBuffers.
* MMapDirectory implements this to allow unmapping of bytebuffers with private Java APIs.
*/
@FunctionalInterface
static interface BufferCleaner {
void freeBuffer(ByteBufferIndexInput parent, ByteBuffer b) throws IOException;
}

/** Optimization of ByteBufferIndexInput for when there is only one buffer */
static final class SingleBufferImpl extends ByteBufferIndexInput {

SingleBufferImpl(String resourceDescription, ByteBuffer buffer, long length, int chunkSizePower,
BufferCleaner cleaner, WeakIdentityMap<ByteBufferIndexInput,Boolean> clones) {
super(resourceDescription, new ByteBuffer[] { buffer }, length, chunkSizePower, cleaner, clones);
SingleBufferImpl(String resourceDescription, ByteBuffer buffer, long length, int chunkSizePower, ByteBufferGuard guard) {
super(resourceDescription, new ByteBuffer[] { buffer }, length, chunkSizePower, guard);
this.curBufIndex = 0;
this.curBuf = buffer;
buffer.position(0);
@ -426,7 +382,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
@Override
public byte readByte(long pos) throws IOException {
try {
return curBuf.get((int) pos);
return guard.getByte(curBuf, (int) pos);
} catch (IllegalArgumentException e) {
if (pos < 0) {
throw new IllegalArgumentException("Seeking to negative position: " + this, e);
@ -441,7 +397,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
@Override
public short readShort(long pos) throws IOException {
try {
return curBuf.getShort((int) pos);
return guard.getShort(curBuf, (int) pos);
} catch (IllegalArgumentException e) {
if (pos < 0) {
throw new IllegalArgumentException("Seeking to negative position: " + this, e);
@ -456,7 +412,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
@Override
public int readInt(long pos) throws IOException {
try {
return curBuf.getInt((int) pos);
return guard.getInt(curBuf, (int) pos);
} catch (IllegalArgumentException e) {
if (pos < 0) {
throw new IllegalArgumentException("Seeking to negative position: " + this, e);
@ -471,7 +427,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
@Override
public long readLong(long pos) throws IOException {
try {
return curBuf.getLong((int) pos);
return guard.getLong(curBuf, (int) pos);
} catch (IllegalArgumentException e) {
if (pos < 0) {
throw new IllegalArgumentException("Seeking to negative position: " + this, e);
@ -489,8 +445,8 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
private final int offset;

MultiBufferImpl(String resourceDescription, ByteBuffer[] buffers, int offset, long length, int chunkSizePower,
BufferCleaner cleaner, WeakIdentityMap<ByteBufferIndexInput,Boolean> clones) {
super(resourceDescription, buffers, length, chunkSizePower, cleaner, clones);
ByteBufferGuard guard) {
super(resourceDescription, buffers, length, chunkSizePower, guard);
this.offset = offset;
try {
seek(0L);

@ -36,7 +36,7 @@ import java.util.concurrent.Future;
import java.lang.invoke.MethodHandle;
import java.lang.reflect.Method;

import org.apache.lucene.store.ByteBufferIndexInput.BufferCleaner;
import org.apache.lucene.store.ByteBufferGuard.BufferCleaner;
import org.apache.lucene.util.Constants;
import org.apache.lucene.util.SuppressForbidden;

@ -240,7 +240,7 @@ public class MMapDirectory extends FSDirectory {
final boolean useUnmap = getUseUnmap();
return ByteBufferIndexInput.newInstance(resourceDescription,
map(resourceDescription, c, 0, c.size()),
c.size(), chunkSizePower, useUnmap ? CLEANER : null, useUnmap);
c.size(), chunkSizePower, new ByteBufferGuard(resourceDescription, useUnmap ? CLEANER : null));
}
}

@ -370,7 +370,7 @@ public class MMapDirectory extends FSDirectory {
final MethodHandle unmapper = filterReturnValue(directBufferCleanerMethod, guardWithTest(nonNullTest, cleanMethod, noop))
.asType(methodType(void.class, ByteBuffer.class));

return (BufferCleaner) (ByteBufferIndexInput parent, ByteBuffer buffer) -> {
return (BufferCleaner) (String resourceDescription, ByteBuffer buffer) -> {
if (directBufferClass.isInstance(buffer)) {
final Throwable error = AccessController.doPrivileged((PrivilegedAction<Throwable>) () -> {
try {
@ -381,7 +381,7 @@ public class MMapDirectory extends FSDirectory {
}
});
if (error != null) {
throw new IOException("Unable to unmap the mapped buffer: " + parent.toString(), error);
throw new IOException("Unable to unmap the mapped buffer: " + resourceDescription, error);
}
}
};
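Unmapping stays opt-in on MMapDirectory, and whether the private cleaner could be bound at all is exposed as a constant. Typical usage, assuming an index path of your choosing:

    import java.nio.file.Paths;
    import org.apache.lucene.store.MMapDirectory;

    public class MMapSetupDemo {
      public static void main(String[] args) throws Exception {
        MMapDirectory dir = new MMapDirectory(Paths.get("/tmp/test-index"));
        // reports whether the private unmap API could be bound on this JVM:
        System.out.println("unmap supported: " + MMapDirectory.UNMAP_SUPPORTED);
        dir.setUseUnmap(MMapDirectory.UNMAP_SUPPORTED); // closed inputs are then unmapped eagerly
        dir.close();
      }
    }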
@ -32,6 +32,17 @@ public abstract class AttributeImpl implements Cloneable, Attribute {
*/
public abstract void clear();

/**
* Clears the values in this AttributeImpl and resets it to its value
* at the end of the field. If this implementation implements more than one Attribute interface
* it clears all.
* <p>
* The default implementation simply calls {@link #clear()}
*/
public void end() {
clear();
}

/**
* This method returns the current attribute values as a string in the following format
* by calling the {@link #reflectWith(AttributeReflector)} method:

@ -270,6 +270,16 @@ public class AttributeSource {
state.attribute.clear();
}
}

/**
* Resets all Attributes in this AttributeSource by calling
* {@link AttributeImpl#end()} on each Attribute implementation.
*/
public final void endAttributes() {
for (State state = getCurrentState(); state != null; state = state.next) {
state.attribute.end();
}
}
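Together with the PositionIncrementAttributeImpl.end() override earlier, this pair is what lets TokenStream.end() collapse to a single endAttributes() call: each attribute carries its own end-of-stream state. A custom attribute only overrides end() when that state differs from clear(); a hypothetical sketch:

    import org.apache.lucene.util.Attribute;
    import org.apache.lucene.util.AttributeImpl;
    import org.apache.lucene.util.AttributeReflector;

    // Hypothetical attribute whose end-of-stream value differs from its cleared value.
    interface ScoreAttribute extends Attribute {
      void setScore(float score);
      float getScore();
    }

    class ScoreAttributeImpl extends AttributeImpl implements ScoreAttribute {
      private float score = 1f;

      @Override public void setScore(float score) { this.score = score; }
      @Override public float getScore() { return score; }

      @Override public void clear() { score = 1f; }  // per-token reset

      @Override public void end() { score = 0f; }    // end-of-stream sentinel

      @Override public void reflectWith(AttributeReflector reflector) {
        reflector.reflect(ScoreAttribute.class, "score", score);
      }

      @Override public void copyTo(AttributeImpl target) {
        ((ScoreAttribute) target).setScore(score);
      }
    }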

/**
* Removes all attributes and their implementations from this AttributeSource.

@ -59,6 +59,13 @@ public final class Version {
@Deprecated
public static final Version LUCENE_6_2_0 = new Version(6, 2, 0);

/**
* Match settings and bugs in Lucene's 6.3.0 release.
* @deprecated Use latest
*/
@Deprecated
public static final Version LUCENE_6_3_0 = new Version(6, 3, 0);

/**
* Match settings and bugs in Lucene's 7.0.0 release.
* <p>

@ -58,7 +58,7 @@ public class TestGrowableByteArrayDataOutput extends LuceneTestCase {
public void testWriteLargeStrings() throws Exception {
int minSizeForDoublePass = GrowableByteArrayDataOutput.MIN_UTF8_SIZE_TO_ENABLE_DOUBLE_PASS_ENCODING;

int num = atLeast(1000);
int num = atLeast(100);
for (int i = 0; i < num; i++) {
String unicode = TestUtil.randomRealisticUnicodeString(random(), minSizeForDoublePass, 10 * minSizeForDoublePass);
byte[] utf8 = new byte[unicode.length() * UnicodeUtil.MAX_UTF8_BYTES_PER_CHAR];
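The change from a fixed count to atLeast(100) is the recurring recipe in the test hunks that follow: LuceneTestCase.atLeast(n) returns at least n, scaled up by the randomized-testing multiplier and nightly runs, so heavy runs keep their old coverage while default runs get faster. The pattern in isolation:

    import org.apache.lucene.util.LuceneTestCase;

    public class AtLeastPatternTest extends LuceneTestCase {
      public void testPattern() {
        int iters = atLeast(100); // >= 100; scaled by the test multiplier / nightly mode
        for (int i = 0; i < iters; i++) {
          // ... one randomized round ...
        }
      }
    }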
@ -79,29 +79,7 @@ public class TestField extends LuceneTestCase {
assertEquals("DoublePoint <foo:6.0,7.0>", field.toString());
}

public void testLegacyDoubleField() throws Exception {
Field fields[] = new Field[] {
new LegacyDoubleField("foo", 5d, Field.Store.NO),
new LegacyDoubleField("foo", 5d, Field.Store.YES)
};

for (Field field : fields) {
trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
field.setDoubleValue(6d); // ok
trySetIntValue(field);
trySetFloatValue(field);
trySetLongValue(field);
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);

assertEquals(6d, field.numericValue().doubleValue(), 0.0d);
}
}

public void testDoubleDocValuesField() throws Exception {
DoubleDocValuesField field = new DoubleDocValuesField("foo", 5d);
@ -185,30 +163,6 @@ public class TestField extends LuceneTestCase {
assertEquals("FloatPoint <foo:6.0,7.0>", field.toString());
}

public void testLegacyFloatField() throws Exception {
Field fields[] = new Field[] {
new LegacyFloatField("foo", 5f, Field.Store.NO),
new LegacyFloatField("foo", 5f, Field.Store.YES)
};

for (Field field : fields) {
trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
trySetDoubleValue(field);
trySetIntValue(field);
field.setFloatValue(6f); // ok
trySetLongValue(field);
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);

assertEquals(6f, field.numericValue().floatValue(), 0.0f);
}
}

public void testIntPoint() throws Exception {
Field field = new IntPoint("foo", 5);

@ -253,30 +207,6 @@ public class TestField extends LuceneTestCase {
assertEquals("IntPoint <foo:6,7>", field.toString());
}

public void testLegacyIntField() throws Exception {
Field fields[] = new Field[] {
new LegacyIntField("foo", 5, Field.Store.NO),
new LegacyIntField("foo", 5, Field.Store.YES)
};

for (Field field : fields) {
trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
trySetDoubleValue(field);
field.setIntValue(6); // ok
trySetFloatValue(field);
trySetLongValue(field);
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);

assertEquals(6, field.numericValue().intValue());
}
}

public void testNumericDocValuesField() throws Exception {
NumericDocValuesField field = new NumericDocValuesField("foo", 5L);

@ -340,30 +270,6 @@ public class TestField extends LuceneTestCase {
assertEquals("LongPoint <foo:6,7>", field.toString());
}

public void testLegacyLongField() throws Exception {
Field fields[] = new Field[] {
new LegacyLongField("foo", 5L, Field.Store.NO),
new LegacyLongField("foo", 5L, Field.Store.YES)
};

for (Field field : fields) {
trySetBoost(field);
trySetByteValue(field);
trySetBytesValue(field);
trySetBytesRefValue(field);
trySetDoubleValue(field);
trySetIntValue(field);
trySetFloatValue(field);
field.setLongValue(6); // ok
trySetReaderValue(field);
trySetShortValue(field);
trySetStringValue(field);
trySetTokenStreamValue(field);

assertEquals(6L, field.numericValue().longValue());
}
}

public void testSortedBytesDocValuesField() throws Exception {
SortedDocValuesField field = new SortedDocValuesField("foo", new BytesRef("bar"));

@ -18,7 +18,6 @@ package org.apache.lucene.document;


import java.lang.reflect.Method;
import org.apache.lucene.document.FieldType.LegacyNumericType;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.PointValues;
@ -58,14 +57,6 @@ public class TestFieldType extends LuceneTestCase {
ft7.setOmitNorms(true);
assertFalse(ft7.equals(ft));

FieldType ft8 = new FieldType();
ft8.setNumericType(LegacyNumericType.DOUBLE);
assertFalse(ft8.equals(ft));

FieldType ft9 = new FieldType();
ft9.setNumericPrecisionStep(3);
assertFalse(ft9.equals(ft));

FieldType ft10 = new FieldType();
ft10.setStoreTermVectors(true);
assertFalse(ft10.equals(ft));
@ -30,7 +30,7 @@ public class TestGeoUtils extends LuceneTestCase {

// We rely heavily on GeoUtils.circleToBBox so we test it here:
public void testRandomCircleToBBox() throws Exception {
int iters = atLeast(1000);
int iters = atLeast(100);
for(int iter=0;iter<iters;iter++) {

double centerLat = GeoTestUtil.nextLatitude();
@ -89,7 +89,8 @@ public class TestGeoUtils extends LuceneTestCase {

// similar to testRandomCircleToBBox, but different, less evil, maybe simpler
public void testBoundingBoxOpto() {
for (int i = 0; i < 1000; i++) {
int iters = atLeast(100);
for (int i = 0; i < iters; i++) {
double lat = GeoTestUtil.nextLatitude();
double lon = GeoTestUtil.nextLongitude();
double radius = 50000000 * random().nextDouble();
@ -119,7 +120,8 @@ public class TestGeoUtils extends LuceneTestCase {

// test we can use haversinSortKey() for distance queries.
public void testHaversinOpto() {
for (int i = 0; i < 1000; i++) {
int iters = atLeast(100);
for (int i = 0; i < iters; i++) {
double lat = GeoTestUtil.nextLatitude();
double lon = GeoTestUtil.nextLongitude();
double radius = 50000000 * random().nextDouble();
@ -193,7 +195,8 @@ public class TestGeoUtils extends LuceneTestCase {
// TODO: does not really belong here, but we test it like this for now
// we can make a fake IndexReader to send boxes directly to Point visitors instead?
public void testCircleOpto() throws Exception {
for (int i = 0; i < 50; i++) {
int iters = atLeast(20);
for (int i = 0; i < iters; i++) {
// circle
final double centerLat = -90 + 180.0 * random().nextDouble();
final double centerLon = -180 + 360.0 * random().nextDouble();
@ -103,7 +103,8 @@ public class TestPolygon2D extends LuceneTestCase {

/** If polygon.contains(box) returns true, then any point in that box should return true as well */
public void testContainsRandom() throws Exception {
for (int i = 0; i < 1000; i++) {
int iters = atLeast(50);
for (int i = 0; i < iters; i++) {
Polygon polygon = nextPolygon();
Polygon2D impl = Polygon2D.create(polygon);

@ -175,7 +176,8 @@ public class TestPolygon2D extends LuceneTestCase {

/** If polygon.intersects(box) returns false, then any point in that box should return false as well */
public void testIntersectRandom() {
for (int i = 0; i < 100; i++) {
int iters = atLeast(10);
for (int i = 0; i < iters; i++) {
Polygon polygon = nextPolygon();
Polygon2D impl = Polygon2D.create(polygon);

@ -268,7 +270,8 @@ public class TestPolygon2D extends LuceneTestCase {

/** Tests current impl against original algorithm */
public void testContainsAgainstOriginal() {
for (int i = 0; i < 1000; i++) {
int iters = atLeast(100);
for (int i = 0; i < iters; i++) {
Polygon polygon = nextPolygon();
// currently we don't generate these, but this test does not want holes.
while (polygon.getHoles().length > 0) {
@ -24,16 +24,12 @@ import java.util.Collections;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.CannedTokenStream;
import org.apache.lucene.analysis.LegacyNumericTokenStream.LegacyNumericTermAttribute;
import org.apache.lucene.analysis.LegacyNumericTokenStream;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.LegacyIntField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LegacyNumericUtils;

/** test tokenstream reuse by DefaultIndexingChain */
public class TestFieldReuse extends BaseTokenStreamTestCase {
@ -61,7 +57,7 @@ public class TestFieldReuse extends BaseTokenStreamTestCase {

// pass a bogus stream and ensure it's still ok
stringField = new StringField("foo", "beer", Field.Store.NO);
TokenStream bogus = new LegacyNumericTokenStream();
TokenStream bogus = new CannedTokenStream();
ts = stringField.tokenStream(null, bogus);
assertNotSame(ts, bogus);
assertTokenStreamContents(ts,
@ -71,37 +67,6 @@ public class TestFieldReuse extends BaseTokenStreamTestCase {
);
}

public void testNumericReuse() throws IOException {
LegacyIntField legacyIntField = new LegacyIntField("foo", 5, Field.Store.NO);

// passing null
TokenStream ts = legacyIntField.tokenStream(null, null);
assertTrue(ts instanceof LegacyNumericTokenStream);
assertEquals(LegacyNumericUtils.PRECISION_STEP_DEFAULT_32, ((LegacyNumericTokenStream)ts).getPrecisionStep());
assertNumericContents(5, ts);

// now reuse previous stream
legacyIntField = new LegacyIntField("foo", 20, Field.Store.NO);
TokenStream ts2 = legacyIntField.tokenStream(null, ts);
assertSame(ts, ts2);
assertNumericContents(20, ts);

// pass a bogus stream and ensure it's still ok
legacyIntField = new LegacyIntField("foo", 2343, Field.Store.NO);
TokenStream bogus = new CannedTokenStream(new Token("bogus", 0, 5));
ts = legacyIntField.tokenStream(null, bogus);
assertNotSame(bogus, ts);
assertNumericContents(2343, ts);

// pass another bogus stream (numeric, but different precision step!)
legacyIntField = new LegacyIntField("foo", 42, Field.Store.NO);
assert 3 != LegacyNumericUtils.PRECISION_STEP_DEFAULT;
bogus = new LegacyNumericTokenStream(3);
ts = legacyIntField.tokenStream(null, bogus);
assertNotSame(bogus, ts);
assertNumericContents(42, ts);
}

static class MyField implements IndexableField {
TokenStream lastSeen;
TokenStream lastReturned;
@ -163,20 +128,4 @@ public class TestFieldReuse extends BaseTokenStreamTestCase {
iw.close();
dir.close();
}

private void assertNumericContents(int value, TokenStream ts) throws IOException {
assertTrue(ts instanceof LegacyNumericTokenStream);
LegacyNumericTermAttribute numericAtt = ts.getAttribute(LegacyNumericTermAttribute.class);
ts.reset();
boolean seen = false;
while (ts.incrementToken()) {
if (numericAtt.getShift() == 0) {
assertEquals(value, numericAtt.getRawValue());
seen = true;
}
}
ts.end();
ts.close();
assertTrue(seen);
}
}
@ -1217,7 +1217,7 @@ public class TestIndexSorting extends LuceneTestCase {
if (TEST_NIGHTLY) {
numDocs = atLeast(100000);
} else {
numDocs = atLeast(10000);
numDocs = atLeast(1000);
}
List<RandomDoc> docs = new ArrayList<>();

@ -72,6 +72,7 @@ public class TestIndexingSequenceNumbers extends LuceneTestCase {
dir.close();
}

@Slow
public void testStressUpdateSameID() throws Exception {
int iters = atLeast(100);
for(int iter=0;iter<iters;iter++) {
@ -144,6 +145,7 @@ public class TestIndexingSequenceNumbers extends LuceneTestCase {
long seqNo;
}

@Slow
public void testStressConcurrentCommit() throws Exception {
final int opCount = atLeast(10000);
final int idCount = TestUtil.nextInt(random(), 10, 1000);
@ -303,6 +305,7 @@ public class TestIndexingSequenceNumbers extends LuceneTestCase {
dir.close();
}

@Slow
public void testStressConcurrentDocValuesUpdatesCommit() throws Exception {
final int opCount = atLeast(10000);
final int idCount = TestUtil.nextInt(random(), 10, 1000);
@ -459,6 +462,7 @@ public class TestIndexingSequenceNumbers extends LuceneTestCase {
dir.close();
}

@Slow
public void testStressConcurrentAddAndDeleteAndCommit() throws Exception {
final int opCount = atLeast(10000);
final int idCount = TestUtil.nextInt(random(), 10, 1000);
@ -18,17 +18,11 @@ package org.apache.lucene.index;

import org.apache.lucene.analysis.CannedBinaryTokenStream;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.LegacyDoubleField;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.LegacyFloatField;
import org.apache.lucene.document.LegacyIntField;
import org.apache.lucene.document.LegacyLongField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LegacyNumericUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.TestUtil;

public class TestTerms extends LuceneTestCase {
@ -88,132 +82,4 @@ public class TestTerms extends LuceneTestCase {
w.close();
dir.close();
}

public void testEmptyIntFieldMinMax() throws Exception {
assertNull(LegacyNumericUtils.getMinInt(EMPTY_TERMS));
assertNull(LegacyNumericUtils.getMaxInt(EMPTY_TERMS));
}

public void testIntFieldMinMax() throws Exception {
Directory dir = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random(), dir);
int numDocs = atLeast(100);
int minValue = Integer.MAX_VALUE;
int maxValue = Integer.MIN_VALUE;
for(int i=0;i<numDocs;i++ ){
Document doc = new Document();
int num = random().nextInt();
minValue = Math.min(num, minValue);
maxValue = Math.max(num, maxValue);
doc.add(new LegacyIntField("field", num, Field.Store.NO));
w.addDocument(doc);
}

IndexReader r = w.getReader();
Terms terms = MultiFields.getTerms(r, "field");
assertEquals(new Integer(minValue), LegacyNumericUtils.getMinInt(terms));
assertEquals(new Integer(maxValue), LegacyNumericUtils.getMaxInt(terms));

r.close();
w.close();
dir.close();
}

public void testEmptyLongFieldMinMax() throws Exception {
assertNull(LegacyNumericUtils.getMinLong(EMPTY_TERMS));
assertNull(LegacyNumericUtils.getMaxLong(EMPTY_TERMS));
}

public void testLongFieldMinMax() throws Exception {
Directory dir = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random(), dir);
int numDocs = atLeast(100);
long minValue = Long.MAX_VALUE;
long maxValue = Long.MIN_VALUE;
for(int i=0;i<numDocs;i++ ){
Document doc = new Document();
long num = random().nextLong();
minValue = Math.min(num, minValue);
maxValue = Math.max(num, maxValue);
doc.add(new LegacyLongField("field", num, Field.Store.NO));
w.addDocument(doc);
}

IndexReader r = w.getReader();

Terms terms = MultiFields.getTerms(r, "field");
assertEquals(new Long(minValue), LegacyNumericUtils.getMinLong(terms));
assertEquals(new Long(maxValue), LegacyNumericUtils.getMaxLong(terms));

r.close();
w.close();
dir.close();
}

public void testFloatFieldMinMax() throws Exception {
Directory dir = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random(), dir);
int numDocs = atLeast(100);
float minValue = Float.POSITIVE_INFINITY;
float maxValue = Float.NEGATIVE_INFINITY;
for(int i=0;i<numDocs;i++ ){
Document doc = new Document();
float num = random().nextFloat();
minValue = Math.min(num, minValue);
maxValue = Math.max(num, maxValue);
doc.add(new LegacyFloatField("field", num, Field.Store.NO));
w.addDocument(doc);
}

IndexReader r = w.getReader();
Terms terms = MultiFields.getTerms(r, "field");
assertEquals(minValue, NumericUtils.sortableIntToFloat(LegacyNumericUtils.getMinInt(terms)), 0.0f);
assertEquals(maxValue, NumericUtils.sortableIntToFloat(LegacyNumericUtils.getMaxInt(terms)), 0.0f);

r.close();
w.close();
dir.close();
}

public void testDoubleFieldMinMax() throws Exception {
Directory dir = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random(), dir);
int numDocs = atLeast(100);
double minValue = Double.POSITIVE_INFINITY;
double maxValue = Double.NEGATIVE_INFINITY;
for(int i=0;i<numDocs;i++ ){
Document doc = new Document();
double num = random().nextDouble();
minValue = Math.min(num, minValue);
maxValue = Math.max(num, maxValue);
doc.add(new LegacyDoubleField("field", num, Field.Store.NO));
w.addDocument(doc);
}

IndexReader r = w.getReader();

Terms terms = MultiFields.getTerms(r, "field");

assertEquals(minValue, NumericUtils.sortableLongToDouble(LegacyNumericUtils.getMinLong(terms)), 0.0);
assertEquals(maxValue, NumericUtils.sortableLongToDouble(LegacyNumericUtils.getMaxLong(terms)), 0.0);

r.close();
w.close();
dir.close();
}

/**
* A complete empty Terms instance that has no terms in it and supports no optional statistics
*/
private static Terms EMPTY_TERMS = new Terms() {
public TermsEnum iterator() { return TermsEnum.EMPTY; }
public long size() { return -1; }
public long getSumTotalTermFreq() { return -1; }
public long getSumDocFreq() { return -1; }
public int getDocCount() { return -1; }
public boolean hasFreqs() { return false; }
public boolean hasOffsets() { return false; }
public boolean hasPositions() { return false; }
public boolean hasPayloads() { return false; }
};
}
@ -25,6 +25,8 @@ import java.util.Random;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
@ -66,7 +68,7 @@ public class TestBoolean2 extends LuceneTestCase {

  @BeforeClass
  public static void beforeClass() throws Exception {
    // in some runs, test immediate adjacency of matches - in others, force a full bucket gap betwen docs
    // in some runs, test immediate adjacency of matches - in others, force a full bucket gap between docs
    NUM_FILLER_DOCS = random().nextBoolean() ? 0 : BooleanScorer.SIZE;
    PRE_FILLER_DOCS = TestUtil.nextInt(random(), 0, (NUM_FILLER_DOCS / 2));

@ -77,13 +79,16 @@ public class TestBoolean2 extends LuceneTestCase {
    }

    RandomIndexWriter writer = new RandomIndexWriter(random(), directory, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
    // we'll make a ton of docs, disable store/norms/vectors
    FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
    ft.setOmitNorms(true);

    Document doc = new Document();
    for (int filler = 0; filler < PRE_FILLER_DOCS; filler++) {
      writer.addDocument(doc);
    }
    for (int i = 0; i < docFields.length; i++) {
      doc.add(newTextField(field, docFields[i], Field.Store.NO));
      doc.add(new Field(field, docFields[i], ft));
      writer.addDocument(doc);

      doc = new Document();
@ -148,12 +153,12 @@ public class TestBoolean2 extends LuceneTestCase {
        newIndexWriterConfig(new MockAnalyzer(random()))
            .setMaxBufferedDocs(TestUtil.nextInt(random(), 50, 1000)));
    doc = new Document();
    doc.add(newTextField("field2", "xxx", Field.Store.NO));
    doc.add(new Field("field2", "xxx", ft));
    for (int i = 0; i < NUM_EXTRA_DOCS/2; i++) {
      w.addDocument(doc);
    }
    doc = new Document();
    doc.add(newTextField("field2", "big bad bug", Field.Store.NO));
    doc.add(new Field("field2", "big bad bug", ft));
    for (int i = 0; i < NUM_EXTRA_DOCS/2; i++) {
      w.addDocument(doc);
    }

@ -26,6 +26,7 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
@ -43,6 +44,7 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.index.ThreadedIndexingAndSearchingTestCase;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LineFileDocs;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.NamedThreadFactory;
@ -533,4 +535,144 @@ public class TestSearcherManager extends ThreadedIndexingAndSearchingTestCase {
    sm.close();
    dir.close();
  }

  public void testConcurrentIndexCloseSearchAndRefresh() throws Exception {
    final Directory dir = newFSDirectory(createTempDir());
    AtomicReference<IndexWriter> writerRef = new AtomicReference<>();
    writerRef.set(new IndexWriter(dir, newIndexWriterConfig()));

    AtomicReference<SearcherManager> mgrRef = new AtomicReference<>();
    mgrRef.set(new SearcherManager(writerRef.get(), null));
    final AtomicBoolean stop = new AtomicBoolean();

    Thread indexThread = new Thread() {
      @Override
      public void run() {
        try {
          LineFileDocs docs = new LineFileDocs(random());
          long runTimeSec = TEST_NIGHTLY ? atLeast(10) : atLeast(2);
          long endTime = System.nanoTime() + runTimeSec * 1000000000;
          while (System.nanoTime() < endTime) {
            IndexWriter w = writerRef.get();
            w.addDocument(docs.nextDoc());
            if (random().nextInt(1000) == 17) {
              if (random().nextBoolean()) {
                w.close();
              } else {
                w.rollback();
              }
              writerRef.set(new IndexWriter(dir, newIndexWriterConfig()));
            }
          }
          docs.close();
          stop.set(true);
          if (VERBOSE) {
            System.out.println("TEST: index count=" + writerRef.get().maxDoc());
          }
        } catch (IOException ioe) {
          throw new RuntimeException(ioe);
        }
      }
    };

    Thread searchThread = new Thread() {
      @Override
      public void run() {
        try {
          long totCount = 0;
          while (stop.get() == false) {
            SearcherManager mgr = mgrRef.get();
            if (mgr != null) {
              IndexSearcher searcher;
              try {
                searcher = mgr.acquire();
              } catch (AlreadyClosedException ace) {
                // ok
                continue;
              }
              totCount += searcher.getIndexReader().maxDoc();
              mgr.release(searcher);
            }
          }
          if (VERBOSE) {
            System.out.println("TEST: search totCount=" + totCount);
          }
        } catch (IOException ioe) {
          throw new RuntimeException(ioe);
        }
      }
    };

    Thread refreshThread = new Thread() {
      @Override
      public void run() {
        try {
          int refreshCount = 0;
          int aceCount = 0;
          while (stop.get() == false) {
            SearcherManager mgr = mgrRef.get();
            if (mgr != null) {
              refreshCount++;
              try {
                mgr.maybeRefreshBlocking();
              } catch (AlreadyClosedException ace) {
                // ok
                aceCount++;
                continue;
              }
            }
          }
          if (VERBOSE) {
            System.out.println("TEST: refresh count=" + refreshCount + " aceCount=" + aceCount);
          }
        } catch (IOException ioe) {
          throw new RuntimeException(ioe);
        }
      }
    };

    Thread closeThread = new Thread() {
      @Override
      public void run() {
        try {
          int closeCount = 0;
          int aceCount = 0;
          while (stop.get() == false) {
            SearcherManager mgr = mgrRef.get();
            assert mgr != null;
            mgr.close();
            closeCount++;
            while (stop.get() == false) {
              try {
                mgrRef.set(new SearcherManager(writerRef.get(), null));
                break;
              } catch (AlreadyClosedException ace) {
                // ok
                aceCount++;
              }
            }
          }
          if (VERBOSE) {
            System.out.println("TEST: close count=" + closeCount + " aceCount=" + aceCount);
          }
        } catch (IOException ioe) {
          throw new RuntimeException(ioe);
        }
      }
    };

    indexThread.start();
    searchThread.start();
    refreshThread.start();
    closeThread.start();

    indexThread.join();
    searchThread.join();
    refreshThread.join();
    closeThread.join();

    mgrRef.get().close();
    writerRef.get().close();
    dir.close();
  }
}
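The new test drives four threads against one SearcherManager: an indexer that periodically closes or rolls back the writer, a searcher, a refresher, and a closer. The invariant it stresses is the standard acquire/try/finally-release discipline. A minimal sketch of that pattern, assuming an already-open IndexWriter (the countAllDocs name is illustrative):

import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.SearcherManager;

// A minimal sketch of the acquire/release discipline the test hammers on.
static long countAllDocs(IndexWriter writer) throws Exception {
  SearcherManager mgr = new SearcherManager(writer, null); // null = default SearcherFactory
  try {
    mgr.maybeRefreshBlocking();             // make recently indexed segments visible
    IndexSearcher searcher = mgr.acquire(); // ref-counted acquire
    try {
      return searcher.count(new MatchAllDocsQuery());
    } finally {
      mgr.release(searcher);                // always release; never close the acquired searcher
    }
  } finally {
    mgr.close();
  }
}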
@ -23,6 +23,7 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.LuceneTestCase.Slow;

import org.junit.BeforeClass;
import org.junit.Assume;
@ -34,6 +35,7 @@ import org.junit.Assume;
 * all use terms from the same set of source data as our regular docs (to emphasize the DocFreq factor in scoring),
 * in which case the queries will be wrapped so they can be excluded.
 */
@Slow // can this be sped up to be non-slow? filler docs make it quite a bit slower and many test methods...
public class TestSimpleExplanationsWithFillerDocs extends TestSimpleExplanations {

  /** num of empty docs injected between every doc in the index */
@ -19,6 +19,10 @@ package org.apache.lucene.store;

import java.io.IOException;
import java.nio.file.Path;
import java.util.Random;
import java.util.concurrent.CountDownLatch;

import org.junit.Ignore;

/**
 * Tests MMapDirectory
@ -39,4 +43,38 @@ public class TestMmapDirectory extends BaseDirectoryTestCase {
        MMapDirectory.UNMAP_SUPPORTED);
  }

  @Ignore("This test is for JVM testing purposes. There are no guarantees that it may not fail with SIGSEGV!")
  public void testAceWithThreads() throws Exception {
    for (int iter = 0; iter < 10; iter++) {
      Directory dir = getDirectory(createTempDir("testAceWithThreads"));
      IndexOutput out = dir.createOutput("test", IOContext.DEFAULT);
      Random random = random();
      for (int i = 0; i < 8 * 1024 * 1024; i++) {
        out.writeInt(random.nextInt());
      }
      out.close();
      IndexInput in = dir.openInput("test", IOContext.DEFAULT);
      IndexInput clone = in.clone();
      final byte accum[] = new byte[32 * 1024 * 1024];
      final CountDownLatch shotgun = new CountDownLatch(1);
      Thread t1 = new Thread(() -> {
        try {
          shotgun.await();
          for (int i = 0; i < 10; i++) {
            clone.seek(0);
            clone.readBytes(accum, 0, accum.length);
          }
        } catch (IOException | AlreadyClosedException ok) {
          // OK
        } catch (InterruptedException e) {
          throw new RuntimeException(e);
        }
      });
      t1.start();
      shotgun.countDown();
      in.close();
      t1.join();
      dir.close();
    }
  }
}
@ -26,6 +26,7 @@

  <path id="classpath">
    <pathelement path="${grouping.jar}"/>
    <pathelement path="${backward-codecs.jar}"/>
    <path refid="base.classpath"/>
  </path>

@ -34,13 +35,14 @@
    <pathelement location="${build.dir}/classes/java"/>
  </path>

  <target name="init" depends="module-build.init,jar-grouping"/>
  <target name="init" depends="module-build.init,jar-grouping,jar-backward-codecs"/>

  <target name="javadocs" depends="javadocs-grouping,compile-core,check-javadocs-uptodate"
  <target name="javadocs" depends="javadocs-grouping,javadocs-backward-codecs,compile-core,check-javadocs-uptodate"
          unless="javadocs-uptodate-${name}">
    <invoke-module-javadoc>
      <links>
        <link href="../grouping"/>
        <link href="../backward-codecs"/>
      </links>
    </invoke-module-javadoc>
  </target>

@ -19,8 +19,6 @@ package org.apache.lucene.search.join;
import java.io.IOException;
import java.util.function.LongConsumer;

import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.FieldType.LegacyNumericType;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReader;
@ -28,10 +26,11 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.legacy.LegacyNumericType;
import org.apache.lucene.legacy.LegacyNumericUtils;
import org.apache.lucene.search.SimpleCollector;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.LegacyNumericUtils;

abstract class DocValuesTermsCollector<DV> extends SimpleCollector {

@ -85,13 +84,13 @@ abstract class DocValuesTermsCollector<DV> extends SimpleCollector {
        return (l) -> LegacyNumericUtils.longToPrefixCoded(l, 0, bytes);
      default:
        throw new IllegalArgumentException("Unsupported "+type+
            ". Only "+ LegacyNumericType.INT+" and "+ FieldType.LegacyNumericType.LONG+" are supported."
            ". Only "+ LegacyNumericType.INT+" and "+ LegacyNumericType.LONG+" are supported."
            + "Field "+fieldName );
    }
  }

  /** this adapter is quite weird. ords are per doc index, don't use ords across different docs*/
  static Function<SortedSetDocValues> sortedNumericAsSortedSetDocValues(String field, FieldType.LegacyNumericType numTyp) {
  static Function<SortedSetDocValues> sortedNumericAsSortedSetDocValues(String field, LegacyNumericType numTyp) {
    return (ctx) -> {
      final SortedNumericDocValues numerics = DocValues.getSortedNumeric(ctx, field);
      final BytesRefBuilder bytes = new BytesRefBuilder();
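For context on the lambda above: longToPrefixCoded writes a numeric value as a sortable, prefix-coded term, and prefixCodedToLong inverts it. A minimal round-trip sketch, assuming LegacyNumericUtils keeps its pre-move signatures after relocating to org.apache.lucene.legacy:

import org.apache.lucene.legacy.LegacyNumericUtils;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;

// Encode a long at shift 0 (full precision) and decode it back.
BytesRefBuilder builder = new BytesRefBuilder();
LegacyNumericUtils.longToPrefixCoded(42L, 0, builder);
BytesRef term = builder.get();
long decoded = LegacyNumericUtils.prefixCodedToLong(term);
assert decoded == 42L;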
@ -26,7 +26,7 @@ import java.util.function.BiConsumer;
import java.util.function.LongFunction;

import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.FieldType.LegacyNumericType;
import org.apache.lucene.legacy.LegacyNumericType;
import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LongPoint;
@ -123,8 +123,8 @@ public final class JoinUtil {
   * @param multipleValuesPerDocument Whether the from field has multiple terms per document
   *        when true fromField might be {@link DocValuesType#SORTED_NUMERIC},
   *        otherwise fromField should be {@link DocValuesType#NUMERIC}
   * @param toField The to field to join to, should be {@link org.apache.lucene.document.LegacyIntField} or {@link org.apache.lucene.document.LegacyLongField}
   * @param numericType either {@link org.apache.lucene.document.FieldType.LegacyNumericType#INT} or {@link org.apache.lucene.document.FieldType.LegacyNumericType#LONG}, it should correspond to fromField and toField types
   * @param toField The to field to join to, should be {@link org.apache.lucene.legacy.LegacyIntField} or {@link org.apache.lucene.legacy.LegacyLongField}
   * @param numericType either {@link LegacyNumericType#INT} or {@link LegacyNumericType#LONG}, it should correspond to fromField and toField types
   * @param fromQuery The query to match documents on the from side
   * @param fromSearcher The searcher that executed the specified fromQuery
   * @param scoreMode Instructs how scores from the fromQuery are mapped to the returned query
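For orientation, a hedged usage sketch of the numeric join documented above, assuming the overload takes its arguments in the order the javadoc lists them (the field names, the query, and the open fromSearcher are illustrative):

import org.apache.lucene.index.Term;
import org.apache.lucene.legacy.LegacyNumericType;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.join.JoinUtil;
import org.apache.lucene.search.join.ScoreMode;

// Join "from" docs matching the category onto "to" docs sharing productId,
// where productId is indexed on the to side as a LegacyLongField.
Query fromQuery = new TermQuery(new Term("category", "books"));
Query joinQuery = JoinUtil.createJoinQuery(
    "productId",            // fromField
    false,                  // multipleValuesPerDocument
    "productId",            // toField, a LegacyLongField
    LegacyNumericType.LONG, // numericType matching both fields
    fromQuery,
    fromSearcher,           // IndexSearcher that executed fromQuery
    ScoreMode.None);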
@ -27,6 +27,7 @@ import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.legacy.LegacyNumericUtils;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
@ -37,7 +38,6 @@ import org.apache.lucene.util.BitSetIterator;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefHash;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.LegacyNumericUtils;

class TermsIncludingScoreQuery extends Query {

@ -37,12 +37,9 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoubleDocValuesField;
import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType.LegacyNumericType;
import org.apache.lucene.document.FloatDocValuesField;
import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LegacyIntField;
import org.apache.lucene.document.LegacyLongField;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedDocValuesField;
@ -59,6 +56,9 @@ import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.MultiDocValues;
import org.apache.lucene.index.MultiDocValues.OrdinalMap;
import org.apache.lucene.legacy.LegacyIntField;
import org.apache.lucene.legacy.LegacyLongField;
import org.apache.lucene.legacy.LegacyNumericType;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.NumericDocValues;
@ -45,7 +45,6 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LegacyLongField;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedDocValuesField;
@ -457,9 +456,6 @@ public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
    Document doc = new Document();
    long randomLong = random().nextLong();
    doc.add(new NumericDocValuesField("numeric", randomLong));
    if (random().nextBoolean()) {
      doc.add(new LegacyLongField("numeric", randomLong, Field.Store.NO));
    }
    int numValues = atLeast(5);
    for (int i = 0; i < numValues; i++) {
      randomLong = random().nextLong();
@ -468,9 +464,6 @@ public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
      // randomly duplicate field/value
      doc.add(new SortedNumericDocValuesField("sorted_numeric", randomLong));
    }
    if (random().nextBoolean()) {
      doc.add(new LegacyLongField("numeric", randomLong, Field.Store.NO));
    }
  }
  BytesRef randomTerm = new BytesRef(randomTerm());
  doc.add(new BinaryDocValuesField("binary", randomTerm));
@ -21,11 +21,10 @@ import java.util.HashMap;
import java.util.Map;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FloatDocValuesField;
import org.apache.lucene.document.LegacyFloatField;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.FieldInvertState;
@ -331,7 +330,7 @@ public class TestDiversifiedTopDocsCollector extends LuceneTestCase {
        new BytesRef(""));
    Field weeksAtNumberOneField = new FloatDocValuesField("weeksAtNumberOne", 0.0F);
    Field weeksStoredField = new LegacyFloatField("weeks", 0.0F, Store.YES);
    Field weeksStoredField = new StoredField("weeks", 0.0F);
    Field idField = newStringField("id", "", Field.Store.YES);
    Field songField = newTextField("song", "", Field.Store.NO);
    Field storedArtistField = newTextField("artistName", "", Field.Store.NO);
@ -265,6 +265,7 @@ public class TestMoreLikeThis extends LuceneTestCase {
    return writer.numDocs() - 1;
  }

  @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/LUCENE-7161")
  public void testMultiFieldShouldReturnPerFieldBooleanQuery() throws Exception {
    IndexReader reader = null;
    Directory dir = newDirectory();
@ -25,15 +25,17 @@
  <path id="classpath">
    <pathelement path="${queries.jar}"/>
    <pathelement path="${sandbox.jar}"/>
    <pathelement path="${backward-codecs.jar}"/>
    <path refid="base.classpath"/>
  </path>

  <target name="compile-core" depends="jar-queries,jar-sandbox,common.compile-core"/>
  <target name="compile-core" depends="jar-backward-codecs,jar-queries,jar-sandbox,common.compile-core"/>

  <target name="javadocs" depends="javadocs-queries,javadocs-sandbox,compile-core,check-javadocs-uptodate"
  <target name="javadocs" depends="javadocs-backward-codecs,javadocs-queries,javadocs-sandbox,compile-core,check-javadocs-uptodate"
          unless="javadocs-uptodate-${name}">
    <invoke-module-javadoc>
      <links>
        <link href="../backward-codecs"/>
        <link href="../queries"/>
        <link href="../sandbox"/>
      </links>

@ -16,7 +16,8 @@
 */
package org.apache.lucene.queryparser.flexible.standard.builders;

import org.apache.lucene.document.FieldType;
import org.apache.lucene.legacy.LegacyNumericRangeQuery;
import org.apache.lucene.legacy.LegacyNumericType;
import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages;
import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;
@ -25,12 +26,11 @@ import org.apache.lucene.queryparser.flexible.messages.MessageImpl;
import org.apache.lucene.queryparser.flexible.standard.config.LegacyNumericConfig;
import org.apache.lucene.queryparser.flexible.standard.nodes.LegacyNumericQueryNode;
import org.apache.lucene.queryparser.flexible.standard.nodes.LegacyNumericRangeQueryNode;
import org.apache.lucene.search.LegacyNumericRangeQuery;

/**
 * Builds {@link org.apache.lucene.search.LegacyNumericRangeQuery}s out of {@link LegacyNumericRangeQueryNode}s.
 * Builds {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}s out of {@link LegacyNumericRangeQueryNode}s.
 *
 * @see org.apache.lucene.search.LegacyNumericRangeQuery
 * @see org.apache.lucene.legacy.LegacyNumericRangeQuery
 * @see LegacyNumericRangeQueryNode
 * @deprecated Index with points and use {@link PointRangeQueryNodeBuilder} instead.
 */
@ -56,7 +56,7 @@ public class LegacyNumericRangeQueryNodeBuilder implements StandardQueryBuilder
    Number upperNumber = upperNumericNode.getValue();

    LegacyNumericConfig numericConfig = numericRangeNode.getNumericConfig();
    FieldType.LegacyNumericType numberType = numericConfig.getType();
    LegacyNumericType numberType = numericConfig.getType();
    String field = StringUtils.toString(numericRangeNode.getField());
    boolean minInclusive = numericRangeNode.isLowerInclusive();
    boolean maxInclusive = numericRangeNode.isUpperInclusive();
@ -19,14 +19,13 @@ package org.apache.lucene.queryparser.flexible.standard.config;
import java.text.NumberFormat;
import java.util.Objects;

import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.FieldType.LegacyNumericType;
import org.apache.lucene.legacy.LegacyNumericType;

/**
 * This class holds the configuration used to parse numeric queries and create
 * {@link org.apache.lucene.search.LegacyNumericRangeQuery}s.
 * {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}s.
 *
 * @see org.apache.lucene.search.LegacyNumericRangeQuery
 * @see org.apache.lucene.legacy.LegacyNumericRangeQuery
 * @see NumberFormat
 * @deprecated Index with Points instead and use {@link PointsConfig}
 */
@ -37,7 +36,7 @@ public class LegacyNumericConfig {

  private NumberFormat format;

  private FieldType.LegacyNumericType type;
  private LegacyNumericType type;

  /**
   * Constructs a {@link LegacyNumericConfig} object.
@ -52,7 +51,7 @@ public class LegacyNumericConfig {
   *
   * @see LegacyNumericConfig#setPrecisionStep(int)
   * @see LegacyNumericConfig#setNumberFormat(NumberFormat)
   * @see #setType(org.apache.lucene.document.FieldType.LegacyNumericType)
   * @see #setType(LegacyNumericType)
   */
  public LegacyNumericConfig(int precisionStep, NumberFormat format,
      LegacyNumericType type) {
@ -67,7 +66,7 @@ public class LegacyNumericConfig {
   *
   * @return the precision used to index the numeric values
   *
   * @see org.apache.lucene.search.LegacyNumericRangeQuery#getPrecisionStep()
   * @see org.apache.lucene.legacy.LegacyNumericRangeQuery#getPrecisionStep()
   */
  public int getPrecisionStep() {
    return precisionStep;
@ -79,7 +78,7 @@ public class LegacyNumericConfig {
   * @param precisionStep
   *        the precision used to index the numeric values
   *
   * @see org.apache.lucene.search.LegacyNumericRangeQuery#getPrecisionStep()
   * @see org.apache.lucene.legacy.LegacyNumericRangeQuery#getPrecisionStep()
   */
  public void setPrecisionStep(int precisionStep) {
    this.precisionStep = precisionStep;
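A short sketch of constructing one of these configs after the package move, using the three-argument constructor shown above (the precision step and locale are illustrative):

import java.text.NumberFormat;
import java.util.Locale;

import org.apache.lucene.legacy.LegacyNumericType;
import org.apache.lucene.queryparser.flexible.standard.config.LegacyNumericConfig;

// Treat a field as a legacy INT field indexed with precisionStep 8.
NumberFormat format = NumberFormat.getIntegerInstance(Locale.ROOT);
LegacyNumericConfig config = new LegacyNumericConfig(8, format, LegacyNumericType.INT);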
@ -16,8 +16,7 @@
 */
package org.apache.lucene.queryparser.flexible.standard.nodes;

import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.FieldType.LegacyNumericType;
import org.apache.lucene.legacy.LegacyNumericType;
import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages;
import org.apache.lucene.queryparser.flexible.messages.MessageImpl;
@ -57,13 +56,13 @@ public class LegacyNumericRangeQueryNode extends
  private static LegacyNumericType getNumericDataType(Number number) throws QueryNodeException {

    if (number instanceof Long) {
      return FieldType.LegacyNumericType.LONG;
      return LegacyNumericType.LONG;
    } else if (number instanceof Integer) {
      return FieldType.LegacyNumericType.INT;
      return LegacyNumericType.INT;
    } else if (number instanceof Double) {
      return LegacyNumericType.DOUBLE;
    } else if (number instanceof Float) {
      return FieldType.LegacyNumericType.FLOAT;
      return LegacyNumericType.FLOAT;
    } else {
      throw new QueryNodeException(
          new MessageImpl(
@ -16,19 +16,19 @@
 */
package org.apache.lucene.queryparser.xml.builders;

import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.LegacyNumericUtils;
import org.apache.lucene.legacy.LegacyNumericRangeQuery;
import org.apache.lucene.legacy.LegacyNumericUtils;
import org.apache.lucene.queryparser.xml.DOMUtils;
import org.apache.lucene.queryparser.xml.ParserException;
import org.apache.lucene.queryparser.xml.QueryBuilder;
import org.w3c.dom.Element;

/**
 * Creates a {@link org.apache.lucene.search.LegacyNumericRangeQuery}. The table below specifies the required
 * Creates a {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}. The table below specifies the required
 * attributes and the defaults if optional attributes are omitted. For more
 * detail on what each of the attributes actually do, consult the documentation
 * for {@link org.apache.lucene.search.LegacyNumericRangeQuery}:
 * for {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}:
 * <table summary="supported attributes">
 * <tr>
 * <th>Attribute name</th>
@ -32,15 +32,15 @@ import java.util.TimeZone;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.LegacyDoubleField;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType.LegacyNumericType;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.LegacyFloatField;
import org.apache.lucene.document.LegacyIntField;
import org.apache.lucene.document.LegacyLongField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.legacy.LegacyDoubleField;
import org.apache.lucene.legacy.LegacyFieldType;
import org.apache.lucene.legacy.LegacyFloatField;
import org.apache.lucene.legacy.LegacyIntField;
import org.apache.lucene.legacy.LegacyLongField;
import org.apache.lucene.legacy.LegacyNumericType;
import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax;
import org.apache.lucene.queryparser.flexible.standard.config.NumberDateFormat;
@ -179,7 +179,7 @@ public class TestLegacyNumericQueryParser extends LuceneTestCase {
    ;

    randomNumberMap.put(LegacyNumericType.LONG.name(), randomLong);
    randomNumberMap.put(FieldType.LegacyNumericType.INT.name(), randomInt);
    randomNumberMap.put(LegacyNumericType.INT.name(), randomInt);
    randomNumberMap.put(LegacyNumericType.FLOAT.name(), randomFloat);
    randomNumberMap.put(LegacyNumericType.DOUBLE.name(), randomDouble);
    randomNumberMap.put(DATE_FIELD_NAME, randomDate);
@ -201,7 +201,7 @@ public class TestLegacyNumericQueryParser extends LuceneTestCase {
      numericConfigMap.put(type.name(), new LegacyNumericConfig(PRECISION_STEP,
          NUMBER_FORMAT, type));

      FieldType ft = new FieldType(LegacyIntField.TYPE_NOT_STORED);
      LegacyFieldType ft = new LegacyFieldType(LegacyIntField.TYPE_NOT_STORED);
      ft.setNumericType(type);
      ft.setStored(true);
      ft.setNumericPrecisionStep(PRECISION_STEP);
@ -231,7 +231,7 @@ public class TestLegacyNumericQueryParser extends LuceneTestCase {

    numericConfigMap.put(DATE_FIELD_NAME, new LegacyNumericConfig(PRECISION_STEP,
        DATE_FORMAT, LegacyNumericType.LONG));
    FieldType ft = new FieldType(LegacyLongField.TYPE_NOT_STORED);
    LegacyFieldType ft = new LegacyFieldType(LegacyLongField.TYPE_NOT_STORED);
    ft.setStored(true);
    ft.setNumericPrecisionStep(PRECISION_STEP);
    LegacyLongField dateField = new LegacyLongField(DATE_FIELD_NAME, 0l, ft);
@ -268,10 +268,10 @@ public class TestLegacyNumericQueryParser extends LuceneTestCase {
        || DATE_FIELD_NAME.equals(fieldName)) {
      number = -number.longValue();

    } else if (FieldType.LegacyNumericType.DOUBLE.name().equals(fieldName)) {
    } else if (LegacyNumericType.DOUBLE.name().equals(fieldName)) {
      number = -number.doubleValue();

    } else if (FieldType.LegacyNumericType.FLOAT.name().equals(fieldName)) {
    } else if (LegacyNumericType.FLOAT.name().equals(fieldName)) {
      number = -number.floatValue();

    } else if (LegacyNumericType.INT.name().equals(fieldName)) {
@ -299,16 +299,16 @@ public class TestLegacyNumericQueryParser extends LuceneTestCase {
    numericFieldMap.get(LegacyNumericType.DOUBLE.name()).setDoubleValue(
        number.doubleValue());

    number = getNumberType(numberType, FieldType.LegacyNumericType.INT.name());
    numericFieldMap.get(FieldType.LegacyNumericType.INT.name()).setIntValue(
    number = getNumberType(numberType, LegacyNumericType.INT.name());
    numericFieldMap.get(LegacyNumericType.INT.name()).setIntValue(
        number.intValue());

    number = getNumberType(numberType, LegacyNumericType.LONG.name());
    numericFieldMap.get(FieldType.LegacyNumericType.LONG.name()).setLongValue(
    numericFieldMap.get(LegacyNumericType.LONG.name()).setLongValue(
        number.longValue());

    number = getNumberType(numberType, FieldType.LegacyNumericType.FLOAT.name());
    numericFieldMap.get(FieldType.LegacyNumericType.FLOAT.name()).setFloatValue(
    number = getNumberType(numberType, LegacyNumericType.FLOAT.name());
    numericFieldMap.get(LegacyNumericType.FLOAT.name()).setFloatValue(
        number.floatValue());

    number = getNumberType(numberType, DATE_FIELD_NAME);
@ -456,7 +456,7 @@ public class TestLegacyNumericQueryParser extends LuceneTestCase {

    StringBuilder sb = new StringBuilder();

    for (LegacyNumericType type : FieldType.LegacyNumericType.values()) {
    for (LegacyNumericType type : LegacyNumericType.values()) {
      String boundStr = numberToString(getNumberType(boundType, type.name()));

      sb.append("+").append(type.name()).append(operator).append('"').append(boundStr).append('"').append(' ');
@ -20,10 +20,10 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LegacyIntField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.legacy.LegacyIntField;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
@ -16,9 +16,9 @@
 */
package org.apache.lucene.queryparser.xml.builders;

import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.legacy.LegacyNumericRangeQuery;
import org.apache.lucene.queryparser.xml.ParserException;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
@ -0,0 +1,262 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.document;

import org.apache.lucene.document.RangeFieldQuery.QueryType;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;

/**
 * An indexed Float Range field.
 * <p>
 * This field indexes dimensional ranges defined as min/max pairs. It supports
 * up to a maximum of 4 dimensions (indexed as 8 numeric values), with 1 dimension representing a single float range,
 * 2 dimensions representing a bounding box, 3 dimensions a bounding cube, and 4 dimensions a tesseract.
 * <p>
 * Multiple values for the same field in one document are supported, and open ended ranges can be defined using
 * {@code Float.NEGATIVE_INFINITY} and {@code Float.POSITIVE_INFINITY}.
 *
 * <p>
 * This field defines the following static factory methods for common search operations over float ranges:
 * <ul>
 *   <li>{@link #newIntersectsQuery newIntersectsQuery()} matches ranges that intersect the defined search range.
 *   <li>{@link #newWithinQuery newWithinQuery()} matches ranges that are within the defined search range.
 *   <li>{@link #newContainsQuery newContainsQuery()} matches ranges that contain the defined search range.
 * </ul>
 */
public class FloatRangeField extends Field {
  /** stores float values so number of bytes is 4 */
  public static final int BYTES = Float.BYTES;

  /**
   * Create a new FloatRangeField type, from min/max parallel arrays
   *
   * @param name field name. must not be null.
   * @param min range min values; each entry is the min value for the dimension
   * @param max range max values; each entry is the max value for the dimension
   */
  public FloatRangeField(String name, final float[] min, final float[] max) {
    super(name, getType(min.length));
    setRangeValues(min, max);
  }

  /** set the field type */
  private static FieldType getType(int dimensions) {
    if (dimensions > 4) {
      throw new IllegalArgumentException("FloatRangeField does not support greater than 4 dimensions");
    }

    FieldType ft = new FieldType();
    // dimensions is set as 2*dimension size (min/max per dimension)
    ft.setDimensions(dimensions*2, BYTES);
    ft.freeze();
    return ft;
  }

  /**
   * Changes the values of the field.
   * @param min array of min values. (accepts {@code Float.NEGATIVE_INFINITY})
   * @param max array of max values. (accepts {@code Float.POSITIVE_INFINITY})
   * @throws IllegalArgumentException if {@code min} or {@code max} is invalid
   */
  public void setRangeValues(float[] min, float[] max) {
    checkArgs(min, max);
    if (min.length*2 != type.pointDimensionCount() || max.length*2 != type.pointDimensionCount()) {
      throw new IllegalArgumentException("field (name=" + name + ") uses " + type.pointDimensionCount()/2
          + " dimensions; cannot change to (incoming) " + min.length + " dimensions");
    }

    final byte[] bytes;
    if (fieldsData == null) {
      bytes = new byte[BYTES*2*min.length];
      fieldsData = new BytesRef(bytes);
    } else {
      bytes = ((BytesRef)fieldsData).bytes;
    }
    verifyAndEncode(min, max, bytes);
  }

  /** validate the arguments */
  private static void checkArgs(final float[] min, final float[] max) {
    if (min == null || max == null || min.length == 0 || max.length == 0) {
      throw new IllegalArgumentException("min/max range values cannot be null or empty");
    }
    if (min.length != max.length) {
      throw new IllegalArgumentException("min/max ranges must agree");
    }
    if (min.length > 4) {
      throw new IllegalArgumentException("FloatRangeField does not support greater than 4 dimensions");
    }
  }

  /**
   * Encodes the min, max ranges into a byte array
   */
  private static byte[] encode(float[] min, float[] max) {
    checkArgs(min, max);
    byte[] b = new byte[BYTES*2*min.length];
    verifyAndEncode(min, max, b);
    return b;
  }

  /**
   * encode the ranges into a sortable byte array ({@code Float.NaN} not allowed)
   * <p>
   * example for 4 dimensions (4 bytes per dimension value):
   * minD1 ... minD4 | maxD1 ... maxD4
   */
  static void verifyAndEncode(float[] min, float[] max, byte[] bytes) {
    for (int d=0,i=0,j=min.length*BYTES; d<min.length; ++d, i+=BYTES, j+=BYTES) {
      if (Float.isNaN(min[d])) {
        throw new IllegalArgumentException("invalid min value (" + Float.NaN + ")" + " in FloatRangeField");
      }
      if (Float.isNaN(max[d])) {
        throw new IllegalArgumentException("invalid max value (" + Float.NaN + ")" + " in FloatRangeField");
      }
      if (min[d] > max[d]) {
        throw new IllegalArgumentException("min value (" + min[d] + ") is greater than max value (" + max[d] + ")");
      }
      encode(min[d], bytes, i);
      encode(max[d], bytes, j);
    }
  }

  /** encode the given value into the byte array at the defined offset */
  private static void encode(float val, byte[] bytes, int offset) {
    NumericUtils.intToSortableBytes(NumericUtils.floatToSortableInt(val), bytes, offset);
  }

  /**
   * Get the min value for the given dimension
   * @param dimension the dimension, always positive
   * @return the decoded min value
   */
  public float getMin(int dimension) {
    if (dimension < 0 || dimension >= type.pointDimensionCount()/2) {
      throw new IllegalArgumentException("dimension request (" + dimension +
          ") out of bounds for field (name=" + name + " dimensions=" + type.pointDimensionCount()/2 + "). ");
    }
    return decodeMin(((BytesRef)fieldsData).bytes, dimension);
  }

  /**
   * Get the max value for the given dimension
   * @param dimension the dimension, always positive
   * @return the decoded max value
   */
  public float getMax(int dimension) {
    if (dimension < 0 || dimension >= type.pointDimensionCount()/2) {
      throw new IllegalArgumentException("dimension request (" + dimension +
          ") out of bounds for field (name=" + name + " dimensions=" + type.pointDimensionCount()/2 + "). ");
    }
    return decodeMax(((BytesRef)fieldsData).bytes, dimension);
  }

  /** decodes the min value (for the defined dimension) from the encoded input byte array */
  static float decodeMin(byte[] b, int dimension) {
    int offset = dimension*BYTES;
    return NumericUtils.sortableIntToFloat(NumericUtils.sortableBytesToInt(b, offset));
  }

  /** decodes the max value (for the defined dimension) from the encoded input byte array */
  static float decodeMax(byte[] b, int dimension) {
    int offset = b.length/2 + dimension*BYTES;
    return NumericUtils.sortableIntToFloat(NumericUtils.sortableBytesToInt(b, offset));
  }

  /**
   * Create a query for matching indexed ranges that intersect the defined range.
   * @param field field name. must not be null.
   * @param min array of min values. (accepts {@code Float.NEGATIVE_INFINITY})
   * @param max array of max values. (accepts {@code Float.POSITIVE_INFINITY})
   * @return query for matching intersecting ranges (overlap, within, or contains)
   * @throws IllegalArgumentException if {@code field} is null, {@code min} or {@code max} is invalid
   */
  public static Query newIntersectsQuery(String field, final float[] min, final float[] max) {
    return new RangeFieldQuery(field, encode(min, max), min.length, QueryType.INTERSECTS) {
      @Override
      protected String toString(byte[] ranges, int dimension) {
        return FloatRangeField.toString(ranges, dimension);
      }
    };
  }

  /**
   * Create a query for matching indexed float ranges that contain the defined range.
   * @param field field name. must not be null.
   * @param min array of min values. (accepts {@code Float.NEGATIVE_INFINITY})
   * @param max array of max values. (accepts {@code Float.POSITIVE_INFINITY})
   * @return query for matching ranges that contain the defined range
   * @throws IllegalArgumentException if {@code field} is null, {@code min} or {@code max} is invalid
   */
  public static Query newContainsQuery(String field, final float[] min, final float[] max) {
    return new RangeFieldQuery(field, encode(min, max), min.length, QueryType.CONTAINS) {
      @Override
      protected String toString(byte[] ranges, int dimension) {
        return FloatRangeField.toString(ranges, dimension);
      }
    };
  }

  /**
   * Create a query for matching indexed ranges that are within the defined range.
   * @param field field name. must not be null.
   * @param min array of min values. (accepts {@code Float.NEGATIVE_INFINITY})
   * @param max array of max values. (accepts {@code Float.POSITIVE_INFINITY})
   * @return query for matching ranges within the defined range
   * @throws IllegalArgumentException if {@code field} is null, {@code min} or {@code max} is invalid
   */
  public static Query newWithinQuery(String field, final float[] min, final float[] max) {
    checkArgs(min, max);
    return new RangeFieldQuery(field, encode(min, max), min.length, QueryType.WITHIN) {
      @Override
      protected String toString(byte[] ranges, int dimension) {
        return FloatRangeField.toString(ranges, dimension);
      }
    };
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append(getClass().getSimpleName());
    sb.append(" <");
    sb.append(name);
    sb.append(':');
    byte[] b = ((BytesRef)fieldsData).bytes;
    toString(b, 0);
    for (int d=1; d<type.pointDimensionCount(); ++d) {
      sb.append(' ');
      toString(b, d);
    }
    sb.append('>');

    return sb.toString();
  }

  /**
   * Returns the String representation for the range at the given dimension
   * @param ranges the encoded ranges, never null
   * @param dimension the dimension of interest
   * @return The string representation for the range at the provided dimension
   */
  private static String toString(byte[] ranges, int dimension) {
    return "[" + Float.toString(decodeMin(ranges, dimension)) + " : "
        + Float.toString(decodeMax(ranges, dimension)) + "]";
  }
}
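A hedged usage sketch of the new field and its factory queries (the field name and values are illustrative):

import org.apache.lucene.document.Document;
import org.apache.lucene.document.FloatRangeField;
import org.apache.lucene.search.Query;

// Index a one-dimensional range [10.5, 34.0].
Document doc = new Document();
doc.add(new FloatRangeField("temperature", new float[] {10.5f}, new float[] {34.0f}));

// Matches any indexed range that overlaps [20.0, 25.0].
Query intersects = FloatRangeField.newIntersectsQuery(
    "temperature", new float[] {20.0f}, new float[] {25.0f});

// Matches indexed ranges lying entirely at or below 30.0; open-ended on the low side.
Query within = FloatRangeField.newWithinQuery(
    "temperature", new float[] {Float.NEGATIVE_INFINITY}, new float[] {30.0f});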
@ -0,0 +1,262 @@
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.lucene.document;
|
||||
|
||||
import org.apache.lucene.document.RangeFieldQuery.QueryType;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.NumericUtils;
|
||||
|
||||
/**
|
||||
* An indexed Integer Range field.
|
||||
* <p>
|
||||
* This field indexes dimensional ranges defined as min/max pairs. It supports
|
||||
* up to a maximum of 4 dimensions (indexed as 8 numeric values). With 1 dimension representing a single integer range,
|
||||
* 2 dimensions representing a bounding box, 3 dimensions a bounding cube, and 4 dimensions a tesseract.
|
||||
* <p>
|
||||
* Multiple values for the same field in one document is supported, and open ended ranges can be defined using
|
||||
* {@code Integer.MIN_VALUE} and {@code Integer.MAX_VALUE}.
|
||||
*
|
||||
* <p>
|
||||
* This field defines the following static factory methods for common search operations over integer ranges:
|
||||
* <ul>
|
||||
* <li>{@link #newIntersectsQuery newIntersectsQuery()} matches ranges that intersect the defined search range.
|
||||
* <li>{@link #newWithinQuery newWithinQuery()} matches ranges that are within the defined search range.
|
||||
* <li>{@link #newContainsQuery newContainsQuery()} matches ranges that contain the defined search range.
|
||||
* </ul>
|
||||
*/
|
||||
public class IntRangeField extends Field {
|
||||
/** stores integer values so number of bytes is 4 */
|
||||
public static final int BYTES = Integer.BYTES;
|
||||
|
||||
/**
|
||||
* Create a new IntRangeField type, from min/max parallel arrays
|
||||
*
|
||||
* @param name field name. must not be null.
|
||||
* @param min range min values; each entry is the min value for the dimension
|
||||
* @param max range max values; each entry is the max value for the dimension
|
||||
*/
|
||||
public IntRangeField(String name, final int[] min, final int[] max) {
|
||||
super(name, getType(min.length));
|
||||
setRangeValues(min, max);
|
||||
}
|
||||
|
||||
/** set the field type */
|
||||
private static FieldType getType(int dimensions) {
|
||||
if (dimensions > 4) {
|
||||
throw new IllegalArgumentException("IntRangeField does not support greater than 4 dimensions");
|
||||
}
|
||||
|
||||
FieldType ft = new FieldType();
|
||||
// dimensions is set as 2*dimension size (min/max per dimension)
|
||||
ft.setDimensions(dimensions*2, BYTES);
|
||||
ft.freeze();
|
||||
return ft;
|
||||
}
|
||||
|
||||
/**
|
||||
* Changes the values of the field.
|
||||
* @param min array of min values. (accepts {@code Integer.NEGATIVE_INFINITY})
|
||||
* @param max array of max values. (accepts {@code Integer.POSITIVE_INFINITY})
|
||||
* @throws IllegalArgumentException if {@code min} or {@code max} is invalid
|
||||
*/
|
||||
public void setRangeValues(int[] min, int[] max) {
|
||||
checkArgs(min, max);
|
||||
if (min.length*2 != type.pointDimensionCount() || max.length*2 != type.pointDimensionCount()) {
|
||||
throw new IllegalArgumentException("field (name=" + name + ") uses " + type.pointDimensionCount()/2
|
||||
+ " dimensions; cannot change to (incoming) " + min.length + " dimensions");
|
||||
}
|
||||
|
||||
final byte[] bytes;
|
||||
if (fieldsData == null) {
|
||||
bytes = new byte[BYTES*2*min.length];
|
||||
fieldsData = new BytesRef(bytes);
|
||||
} else {
|
||||
bytes = ((BytesRef)fieldsData).bytes;
|
||||
}
|
||||
verifyAndEncode(min, max, bytes);
|
||||
}
|
||||
|
||||
/** validate the arguments */
|
||||
private static void checkArgs(final int[] min, final int[] max) {
|
||||
if (min == null || max == null || min.length == 0 || max.length == 0) {
|
||||
throw new IllegalArgumentException("min/max range values cannot be null or empty");
|
||||
}
|
||||
if (min.length != max.length) {
|
||||
throw new IllegalArgumentException("min/max ranges must agree");
|
||||
}
|
||||
if (min.length > 4) {
|
||||
throw new IllegalArgumentException("IntRangeField does not support greater than 4 dimensions");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Encodes the min, max ranges into a byte array
|
||||
*/
|
||||
private static byte[] encode(int[] min, int[] max) {
|
||||
checkArgs(min, max);
|
||||
byte[] b = new byte[BYTES*2*min.length];
|
||||
verifyAndEncode(min, max, b);
|
||||
return b;
|
||||
}
|
||||
|
||||
/**
|
||||
* encode the ranges into a sortable byte array ({@code Double.NaN} not allowed)
|
||||
* <p>
|
||||
* example for 4 dimensions (8 bytes per dimension value):
|
||||
* minD1 ... minD4 | maxD1 ... maxD4
|
||||
*/
|
||||
static void verifyAndEncode(int[] min, int[] max, byte[] bytes) {
|
||||
for (int d=0,i=0,j=min.length*BYTES; d<min.length; ++d, i+=BYTES, j+=BYTES) {
|
||||
if (Double.isNaN(min[d])) {
|
||||
throw new IllegalArgumentException("invalid min value (" + Double.NaN + ")" + " in IntRangeField");
|
||||
}
|
||||
if (Double.isNaN(max[d])) {
|
||||
throw new IllegalArgumentException("invalid max value (" + Double.NaN + ")" + " in IntRangeField");
|
||||
}
|
||||
if (min[d] > max[d]) {
|
||||
throw new IllegalArgumentException("min value (" + min[d] + ") is greater than max value (" + max[d] + ")");
|
||||
}
|
||||
encode(min[d], bytes, i);
|
||||
encode(max[d], bytes, j);
|
||||
}
|
||||
}
|
||||
|
||||
  /** encode the given value into the byte array at the defined offset */
  private static void encode(int val, byte[] bytes, int offset) {
    NumericUtils.intToSortableBytes(val, bytes, offset);
  }

  /**
   * Get the min value for the given dimension
   * @param dimension the dimension, always positive
   * @return the decoded min value
   */
  public int getMin(int dimension) {
    if (dimension < 0 || dimension >= type.pointDimensionCount()/2) {
      throw new IllegalArgumentException("dimension request (" + dimension +
          ") out of bounds for field (name=" + name + " dimensions=" + type.pointDimensionCount()/2 + "). ");
    }
    return decodeMin(((BytesRef)fieldsData).bytes, dimension);
  }

  /**
   * Get the max value for the given dimension
   * @param dimension the dimension, always positive
   * @return the decoded max value
   */
  public int getMax(int dimension) {
    if (dimension < 0 || dimension >= type.pointDimensionCount()/2) {
      throw new IllegalArgumentException("dimension request (" + dimension +
          ") out of bounds for field (name=" + name + " dimensions=" + type.pointDimensionCount()/2 + "). ");
    }
    return decodeMax(((BytesRef)fieldsData).bytes, dimension);
  }

  /** decodes the min value (for the defined dimension) from the encoded input byte array */
  static int decodeMin(byte[] b, int dimension) {
    int offset = dimension*BYTES;
    return NumericUtils.sortableBytesToInt(b, offset);
  }

  /** decodes the max value (for the defined dimension) from the encoded input byte array */
  static int decodeMax(byte[] b, int dimension) {
    int offset = b.length/2 + dimension*BYTES;
    return NumericUtils.sortableBytesToInt(b, offset);
  }

  /**
   * Create a query for matching indexed ranges that intersect the defined range.
   * @param field field name. must not be null.
   * @param min array of min values. (accepts {@code Integer.MIN_VALUE})
   * @param max array of max values. (accepts {@code Integer.MAX_VALUE})
   * @return query for matching intersecting ranges (overlap, within, or contains)
   * @throws IllegalArgumentException if {@code field} is null, {@code min} or {@code max} is invalid
   */
  public static Query newIntersectsQuery(String field, final int[] min, final int[] max) {
    return new RangeFieldQuery(field, encode(min, max), min.length, QueryType.INTERSECTS) {
      @Override
      protected String toString(byte[] ranges, int dimension) {
        return IntRangeField.toString(ranges, dimension);
      }
    };
  }

  /**
   * Create a query for matching indexed ranges that contain the defined range.
   * @param field field name. must not be null.
   * @param min array of min values. (accepts {@code Integer.MIN_VALUE})
   * @param max array of max values. (accepts {@code Integer.MAX_VALUE})
   * @return query for matching ranges that contain the defined range
   * @throws IllegalArgumentException if {@code field} is null, {@code min} or {@code max} is invalid
   */
  public static Query newContainsQuery(String field, final int[] min, final int[] max) {
    return new RangeFieldQuery(field, encode(min, max), min.length, QueryType.CONTAINS) {
      @Override
      protected String toString(byte[] ranges, int dimension) {
        return IntRangeField.toString(ranges, dimension);
      }
    };
  }

  /**
   * Create a query for matching indexed ranges that are within the defined range.
   * @param field field name. must not be null.
   * @param min array of min values. (accepts {@code Integer.MIN_VALUE})
   * @param max array of max values. (accepts {@code Integer.MAX_VALUE})
   * @return query for matching ranges within the defined range
   * @throws IllegalArgumentException if {@code field} is null, {@code min} or {@code max} is invalid
   */
  public static Query newWithinQuery(String field, final int[] min, final int[] max) {
    checkArgs(min, max);
    return new RangeFieldQuery(field, encode(min, max), min.length, QueryType.WITHIN) {
      @Override
      protected String toString(byte[] ranges, int dimension) {
        return IntRangeField.toString(ranges, dimension);
      }
    };
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append(getClass().getSimpleName());
    sb.append(" <");
    sb.append(name);
    sb.append(':');
    byte[] b = ((BytesRef)fieldsData).bytes;
    // append the String representation for each range dimension
    sb.append(toString(b, 0));
    for (int d=1; d<type.pointDimensionCount()/2; ++d) {
      sb.append(' ');
      sb.append(toString(b, d));
    }
    sb.append('>');

    return sb.toString();
  }

  /**
   * Returns the String representation for the range at the given dimension
   * @param ranges the encoded ranges, never null
   * @param dimension the dimension of interest
   * @return The string representation for the range at the provided dimension
   */
  private static String toString(byte[] ranges, int dimension) {
    return "[" + Integer.toString(decodeMin(ranges, dimension)) + " : "
        + Integer.toString(decodeMax(ranges, dimension)) + "]";
  }
}
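A minimal usage sketch for the factory methods above (field name and values are
hypothetical, not part of this patch); it assumes an already-open IndexWriter
`writer`:

    Document doc = new Document();
    // index a 2-d box with min=(-10, -10) and max=(10, 10)
    doc.add(new IntRangeField("intRange", new int[] {-10, -10}, new int[] {10, 10}));
    writer.addDocument(doc);

    // matches if the indexed box overlaps the query box anywhere
    Query intersects = IntRangeField.newIntersectsQuery("intRange", new int[] {0, 0}, new int[] {20, 20});
    // matches only if the indexed box lies entirely inside the query box
    Query within = IntRangeField.newWithinQuery("intRange", new int[] {-20, -20}, new int[] {20, 20});
    // matches only if the indexed box entirely covers the query box
    Query contains = IntRangeField.newContainsQuery("intRange", new int[] {-5, -5}, new int[] {5, 5});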

@ -0,0 +1,260 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.document;

import org.apache.lucene.document.RangeFieldQuery.QueryType;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;

/**
 * An indexed Long Range field.
 * <p>
 * This field indexes dimensional ranges defined as min/max pairs. It supports
 * up to a maximum of 4 dimensions (indexed as 8 numeric values), with 1 dimension representing a single long range,
 * 2 dimensions representing a bounding box, 3 dimensions a bounding cube, and 4 dimensions a tesseract.
 * <p>
 * Multiple values for the same field in one document are supported, and open ended ranges can be defined using
 * {@code Long.MIN_VALUE} and {@code Long.MAX_VALUE}.
 *
 * <p>
 * This field defines the following static factory methods for common search operations over long ranges:
 * <ul>
 *   <li>{@link #newIntersectsQuery newIntersectsQuery()} matches ranges that intersect the defined search range.
 *   <li>{@link #newWithinQuery newWithinQuery()} matches ranges that are within the defined search range.
 *   <li>{@link #newContainsQuery newContainsQuery()} matches ranges that contain the defined search range.
 * </ul>
 */
public class LongRangeField extends Field {
  /** stores long values so number of bytes is 8 */
  public static final int BYTES = Long.BYTES;

  /**
   * Create a new LongRangeField type, from min/max parallel arrays
   *
   * @param name field name. must not be null.
   * @param min range min values; each entry is the min value for the dimension
   * @param max range max values; each entry is the max value for the dimension
   */
  public LongRangeField(String name, final long[] min, final long[] max) {
    super(name, getType(min.length));
    setRangeValues(min, max);
  }

  /** set the field type */
  private static FieldType getType(int dimensions) {
    if (dimensions > 4) {
      throw new IllegalArgumentException("LongRangeField does not support greater than 4 dimensions");
    }

    FieldType ft = new FieldType();
    // dimensions is set as 2*dimension size (min/max per dimension)
    ft.setDimensions(dimensions*2, BYTES);
    ft.freeze();
    return ft;
  }

  /**
   * Changes the values of the field.
   * @param min array of min values. (accepts {@code Long.MIN_VALUE})
   * @param max array of max values. (accepts {@code Long.MAX_VALUE})
   * @throws IllegalArgumentException if {@code min} or {@code max} is invalid
   */
  public void setRangeValues(long[] min, long[] max) {
    checkArgs(min, max);
    if (min.length*2 != type.pointDimensionCount() || max.length*2 != type.pointDimensionCount()) {
      throw new IllegalArgumentException("field (name=" + name + ") uses " + type.pointDimensionCount()/2
          + " dimensions; cannot change to (incoming) " + min.length + " dimensions");
    }

    final byte[] bytes;
    if (fieldsData == null) {
      bytes = new byte[BYTES*2*min.length];
      fieldsData = new BytesRef(bytes);
    } else {
      bytes = ((BytesRef)fieldsData).bytes;
    }
    verifyAndEncode(min, max, bytes);
  }

  /** validate the arguments */
  private static void checkArgs(final long[] min, final long[] max) {
    if (min == null || max == null || min.length == 0 || max.length == 0) {
      throw new IllegalArgumentException("min/max range values cannot be null or empty");
    }
    if (min.length != max.length) {
      throw new IllegalArgumentException("min/max ranges must agree");
    }
    if (min.length > 4) {
      throw new IllegalArgumentException("LongRangeField does not support greater than 4 dimensions");
    }
  }

  /** Encodes the min, max ranges into a byte array */
  private static byte[] encode(long[] min, long[] max) {
    checkArgs(min, max);
    byte[] b = new byte[BYTES*2*min.length];
    verifyAndEncode(min, max, b);
    return b;
  }

  /**
   * encode the ranges into a sortable byte array
   * <p>
   * example for 4 dimensions (8 bytes per dimension value):
   * minD1 ... minD4 | maxD1 ... maxD4
   */
  static void verifyAndEncode(long[] min, long[] max, byte[] bytes) {
    for (int d=0,i=0,j=min.length*BYTES; d<min.length; ++d, i+=BYTES, j+=BYTES) {
      // unlike the float/double range fields there is no NaN to reject here;
      // only min <= max needs to be verified for long values
      if (min[d] > max[d]) {
        throw new IllegalArgumentException("min value (" + min[d] + ") is greater than max value (" + max[d] + ")");
      }
      encode(min[d], bytes, i);
      encode(max[d], bytes, j);
    }
  }

  /** encode the given value into the byte array at the defined offset */
  private static void encode(long val, byte[] bytes, int offset) {
    NumericUtils.longToSortableBytes(val, bytes, offset);
  }

  /**
   * Get the min value for the given dimension
   * @param dimension the dimension, always positive
   * @return the decoded min value
   */
  public long getMin(int dimension) {
    if (dimension < 0 || dimension >= type.pointDimensionCount()/2) {
      throw new IllegalArgumentException("dimension request (" + dimension +
          ") out of bounds for field (name=" + name + " dimensions=" + type.pointDimensionCount()/2 + "). ");
    }
    return decodeMin(((BytesRef)fieldsData).bytes, dimension);
  }

  /**
   * Get the max value for the given dimension
   * @param dimension the dimension, always positive
   * @return the decoded max value
   */
  public long getMax(int dimension) {
    if (dimension < 0 || dimension >= type.pointDimensionCount()/2) {
      throw new IllegalArgumentException("dimension request (" + dimension +
          ") out of bounds for field (name=" + name + " dimensions=" + type.pointDimensionCount()/2 + "). ");
    }
    return decodeMax(((BytesRef)fieldsData).bytes, dimension);
  }

  /** decodes the min value (for the defined dimension) from the encoded input byte array */
  static long decodeMin(byte[] b, int dimension) {
    int offset = dimension*BYTES;
    return NumericUtils.sortableBytesToLong(b, offset);
  }

  /** decodes the max value (for the defined dimension) from the encoded input byte array */
  static long decodeMax(byte[] b, int dimension) {
    int offset = b.length/2 + dimension*BYTES;
    return NumericUtils.sortableBytesToLong(b, offset);
  }

  /**
   * Create a query for matching indexed ranges that intersect the defined range.
   * @param field field name. must not be null.
   * @param min array of min values. (accepts {@code Long.MIN_VALUE})
   * @param max array of max values. (accepts {@code Long.MAX_VALUE})
   * @return query for matching intersecting ranges (overlap, within, or contains)
   * @throws IllegalArgumentException if {@code field} is null, {@code min} or {@code max} is invalid
   */
  public static Query newIntersectsQuery(String field, final long[] min, final long[] max) {
    return new RangeFieldQuery(field, encode(min, max), min.length, QueryType.INTERSECTS) {
      @Override
      protected String toString(byte[] ranges, int dimension) {
        return LongRangeField.toString(ranges, dimension);
      }
    };
  }

  /**
   * Create a query for matching indexed ranges that contain the defined range.
   * @param field field name. must not be null.
   * @param min array of min values. (accepts {@code Long.MIN_VALUE})
   * @param max array of max values. (accepts {@code Long.MAX_VALUE})
   * @return query for matching ranges that contain the defined range
   * @throws IllegalArgumentException if {@code field} is null, {@code min} or {@code max} is invalid
   */
  public static Query newContainsQuery(String field, final long[] min, final long[] max) {
    return new RangeFieldQuery(field, encode(min, max), min.length, QueryType.CONTAINS) {
      @Override
      protected String toString(byte[] ranges, int dimension) {
        return LongRangeField.toString(ranges, dimension);
      }
    };
  }

  /**
   * Create a query for matching indexed ranges that are within the defined range.
   * @param field field name. must not be null.
   * @param min array of min values. (accepts {@code Long.MIN_VALUE})
   * @param max array of max values. (accepts {@code Long.MAX_VALUE})
   * @return query for matching ranges within the defined range
   * @throws IllegalArgumentException if {@code field} is null, {@code min} or {@code max} is invalid
   */
  public static Query newWithinQuery(String field, final long[] min, final long[] max) {
    checkArgs(min, max);
    return new RangeFieldQuery(field, encode(min, max), min.length, QueryType.WITHIN) {
      @Override
      protected String toString(byte[] ranges, int dimension) {
        return LongRangeField.toString(ranges, dimension);
      }
    };
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append(getClass().getSimpleName());
    sb.append(" <");
    sb.append(name);
    sb.append(':');
    byte[] b = ((BytesRef)fieldsData).bytes;
    // append the String representation for each range dimension
    sb.append(toString(b, 0));
    for (int d=1; d<type.pointDimensionCount()/2; ++d) {
      sb.append(' ');
      sb.append(toString(b, d));
    }
    sb.append('>');

    return sb.toString();
  }

  /**
   * Returns the String representation for the range at the given dimension
   * @param ranges the encoded ranges, never null
   * @param dimension the dimension of interest
   * @return The string representation for the range at the provided dimension
   */
  private static String toString(byte[] ranges, int dimension) {
    return "[" + Long.toString(decodeMin(ranges, dimension)) + " : "
        + Long.toString(decodeMax(ranges, dimension)) + "]";
  }
}
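To make the byte layout documented in verifyAndEncode() concrete, a small
round-trip sketch for a hypothetical 2-dimensional long range (values are
assumptions; decodeMin/decodeMax and verifyAndEncode are package-private, so
this would have to run from org.apache.lucene.document):

    long[] min = {1L, 2L};
    long[] max = {3L, 4L};
    byte[] b = new byte[LongRangeField.BYTES * 2 * 2];  // 32 bytes: minD1 minD2 | maxD1 maxD2
    LongRangeField.verifyAndEncode(min, max, b);
    // mins start at offset 0; maxes start at b.length/2 == 16
    assert LongRangeField.decodeMin(b, 1) == 2L;        // read at offset 1*BYTES == 8
    assert LongRangeField.decodeMax(b, 0) == 3L;        // read at offset 16 + 0*BYTES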

@ -17,7 +17,6 @@
package org.apache.lucene.search;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

@ -41,16 +40,18 @@ import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;

/**
 * Abstract class to do basic tests for a RangeField query.
 * Abstract class to do basic tests for a RangeField query. Testing rigor inspired by {@code BaseGeoPointTestCase}
 */
public abstract class BaseRangeFieldQueryTestCase extends LuceneTestCase {
  protected abstract Field newRangeField(double[] min, double[] max);
  protected abstract Field newRangeField(Range box);

  protected abstract Query newIntersectsQuery(double[] min, double[] max);
  protected abstract Query newIntersectsQuery(Range box);

  protected abstract Query newContainsQuery(double[] min, double[] max);
  protected abstract Query newContainsQuery(Range box);

  protected abstract Query newWithinQuery(double[] min, double[] max);
  protected abstract Query newWithinQuery(Range box);

  protected abstract Range nextRange(int dimensions);

  protected int dimension() {
    return random().nextInt(4) + 1;
@ -82,18 +83,18 @@ public abstract class BaseRangeFieldQueryTestCase extends LuceneTestCase {
      System.out.println("TEST: numDocs=" + numDocs);
    }

    Box[][] boxes = new Box[numDocs][];
    Range[][] ranges = new Range[numDocs][];

    boolean haveRealDoc = true;

    nextdoc: for (int id=0; id<numDocs; ++id) {
      int x = random().nextInt(20);
      if (boxes[id] == null) {
        boxes[id] = new Box[] {nextBox(dimensions)};
      if (ranges[id] == null) {
        ranges[id] = new Range[] {nextRange(dimensions)};
      }
      if (x == 17) {
        // some docs don't have a box:
        boxes[id][0].min[0] = Double.NaN;
        ranges[id][0].isMissing = true;
        if (VERBOSE) {
          System.out.println("  id=" + id + " is missing");
        }
@ -103,19 +104,19 @@ public abstract class BaseRangeFieldQueryTestCase extends LuceneTestCase {
      if (multiValued == true && random().nextBoolean()) {
        // randomly add multi valued documents (up to 2 fields)
        int n = random().nextInt(2) + 1;
        boxes[id] = new Box[n];
        ranges[id] = new Range[n];
        for (int i=0; i<n; ++i) {
          boxes[id][i] = nextBox(dimensions);
          ranges[id][i] = nextRange(dimensions);
        }
      }

      if (id > 0 && x < 9 && haveRealDoc) {
        int oldID;
        int i=0;
        // don't step on missing boxes:
        // don't step on missing ranges:
        while (true) {
          oldID = random().nextInt(id);
          if (Double.isNaN(boxes[oldID][0].min[0]) == false) {
          if (ranges[oldID][0].isMissing == false) {
            break;
          } else if (++i > id) {
            continue nextdoc;
@ -125,11 +126,11 @@ public abstract class BaseRangeFieldQueryTestCase extends LuceneTestCase {
        if (x == dimensions*2) {
          // Fully identical box (use first box in case current is multivalued but old is not)
          for (int d=0; d<dimensions; ++d) {
            boxes[id][0].min[d] = boxes[oldID][0].min[d];
            boxes[id][0].max[d] = boxes[oldID][0].max[d];
            ranges[id][0].setMin(d, ranges[oldID][0].getMin(d));
            ranges[id][0].setMax(d, ranges[oldID][0].getMax(d));
          }
          if (VERBOSE) {
            System.out.println("  id=" + id + " box=" + boxes[id] + " (same box as doc=" + oldID + ")");
            System.out.println("  id=" + id + " box=" + ranges[id] + " (same box as doc=" + oldID + ")");
          }
        } else {
          for (int m = 0, even = dimensions % 2; m < dimensions * 2; ++m) {
@ -137,14 +138,14 @@ public abstract class BaseRangeFieldQueryTestCase extends LuceneTestCase {
            int d = (int)Math.floor(m/2);
            // current could be multivalue but old may not be, so use first box
            if (even == 0) {
              boxes[id][0].setVal(d, boxes[oldID][0].min[d]);
              ranges[id][0].setMin(d, ranges[oldID][0].getMin(d));
              if (VERBOSE) {
                System.out.println("  id=" + id + " box=" + boxes[id] + " (same min[" + d + "] as doc=" + oldID + ")");
                System.out.println("  id=" + id + " box=" + ranges[id] + " (same min[" + d + "] as doc=" + oldID + ")");
              }
            } else {
              boxes[id][0].setVal(d, boxes[oldID][0].max[d]);
              ranges[id][0].setMax(d, ranges[oldID][0].getMax(d));
              if (VERBOSE) {
                System.out.println("  id=" + id + " box=" + boxes[id] + " (same max[" + d + "] as doc=" + oldID + ")");
                System.out.println("  id=" + id + " box=" + ranges[id] + " (same max[" + d + "] as doc=" + oldID + ")");
              }
            }
          }
@ -152,20 +153,20 @@ public abstract class BaseRangeFieldQueryTestCase extends LuceneTestCase {
        }
      }
    }
    verify(boxes);
    verify(ranges);
  }

  private void verify(Box[][] boxes) throws Exception {
  private void verify(Range[][] ranges) throws Exception {
    IndexWriterConfig iwc = newIndexWriterConfig();
    // Else seeds may not reproduce:
    iwc.setMergeScheduler(new SerialMergeScheduler());
    // Else we can get O(N^2) merging
    int mbd = iwc.getMaxBufferedDocs();
    if (mbd != -1 && mbd < boxes.length/100) {
      iwc.setMaxBufferedDocs(boxes.length/100);
    if (mbd != -1 && mbd < ranges.length/100) {
      iwc.setMaxBufferedDocs(ranges.length/100);
    }
    Directory dir;
    if (boxes.length > 50000) {
    if (ranges.length > 50000) {
      dir = newFSDirectory(createTempDir(getClass().getSimpleName()));
    } else {
      dir = newDirectory();
@ -173,13 +174,13 @@ public abstract class BaseRangeFieldQueryTestCase extends LuceneTestCase {

    Set<Integer> deleted = new HashSet<>();
    IndexWriter w = new IndexWriter(dir, iwc);
    for (int id=0; id < boxes.length; ++id) {
    for (int id=0; id < ranges.length; ++id) {
      Document doc = new Document();
      doc.add(newStringField("id", ""+id, Field.Store.NO));
      doc.add(new NumericDocValuesField("id", id));
      if (Double.isNaN(boxes[id][0].min[0]) == false) {
        for (int n=0; n<boxes[id].length; ++n) {
          doc.add(newRangeField(boxes[id][n].min, boxes[id][n].max));
      if (ranges[id][0].isMissing == false) {
        for (int n=0; n<ranges[id].length; ++n) {
          doc.add(newRangeField(ranges[id][n]));
        }
      }
      w.addDocument(doc);
@ -200,7 +201,7 @@ public abstract class BaseRangeFieldQueryTestCase extends LuceneTestCase {
    w.close();
    IndexSearcher s = newSearcher(r);

    int dimensions = boxes[0][0].min.length;
    int dimensions = ranges[0][0].numDimensions();
    int iters = atLeast(25);
    NumericDocValues docIDToID = MultiDocValues.getNumericValues(r, "id");
    Bits liveDocs = MultiFields.getLiveDocs(s.getIndexReader());
@ -211,20 +212,20 @@ public abstract class BaseRangeFieldQueryTestCase extends LuceneTestCase {
        System.out.println("\nTEST: iter=" + iter + " s=" + s);
      }

      // occasionally test open ended bounding boxes
      Box queryBox = nextBox(dimensions);
      // occasionally test open ended bounding ranges
      Range queryRange = nextRange(dimensions);
      int rv = random().nextInt(3);
      Query query;
      Box.QueryType queryType;
      Range.QueryType queryType;
      if (rv == 0) {
        queryType = Box.QueryType.INTERSECTS;
        query = newIntersectsQuery(queryBox.min, queryBox.max);
        queryType = Range.QueryType.INTERSECTS;
        query = newIntersectsQuery(queryRange);
      } else if (rv == 1) {
        queryType = Box.QueryType.CONTAINS;
        query = newContainsQuery(queryBox.min, queryBox.max);
        queryType = Range.QueryType.CONTAINS;
        query = newContainsQuery(queryRange);
      } else {
        queryType = Box.QueryType.WITHIN;
        query = newWithinQuery(queryBox.min, queryBox.max);
        queryType = Range.QueryType.WITHIN;
        query = newWithinQuery(queryRange);
      }

      if (VERBOSE) {
@ -255,25 +256,25 @@ public abstract class BaseRangeFieldQueryTestCase extends LuceneTestCase {
        if (liveDocs != null && liveDocs.get(docID) == false) {
          // document is deleted
          expected = false;
        } else if (Double.isNaN(boxes[id][0].min[0])) {
        } else if (ranges[id][0].isMissing) {
          expected = false;
        } else {
          expected = expectedResult(queryBox, boxes[id], queryType);
          expected = expectedResult(queryRange, ranges[id], queryType);
        }

        if (hits.get(docID) != expected) {
          StringBuilder b = new StringBuilder();
          b.append("FAIL (iter " + iter + "): ");
          if (expected == true) {
            b.append("id=" + id + (boxes[id].length > 1 ? " (MultiValue) " : " ") + "should match but did not\n");
            b.append("id=" + id + (ranges[id].length > 1 ? " (MultiValue) " : " ") + "should match but did not\n");
          } else {
            b.append("id=" + id + " should not match but did\n");
          }
          b.append(" queryBox=" + queryBox + "\n");
          b.append(" box" + ((boxes[id].length > 1) ? "es=" : "=" ) + boxes[id][0]);
          for (int n=1; n<boxes[id].length; ++n) {
          b.append(" queryRange=" + queryRange + "\n");
          b.append(" box" + ((ranges[id].length > 1) ? "es=" : "=" ) + ranges[id][0]);
          for (int n=1; n<ranges[id].length; ++n) {
            b.append(", ");
            b.append(boxes[id][n]);
            b.append(ranges[id][n]);
          }
          b.append("\n queryType=" + queryType + "\n");
          b.append(" deleted?=" + (liveDocs != null && liveDocs.get(docID) == false));
@ -284,144 +285,51 @@ public abstract class BaseRangeFieldQueryTestCase extends LuceneTestCase {
    IOUtils.close(r, dir);
  }

  protected boolean expectedResult(Box queryBox, Box[] box, Box.QueryType queryType) {
    for (int i=0; i<box.length; ++i) {
      if (expectedBBoxQueryResult(queryBox, box[i], queryType) == true) {
  protected boolean expectedResult(Range queryRange, Range[] range, Range.QueryType queryType) {
    for (int i=0; i<range.length; ++i) {
      if (expectedBBoxQueryResult(queryRange, range[i], queryType) == true) {
        return true;
      }
    }
    return false;
  }

  protected boolean expectedBBoxQueryResult(Box queryBox, Box box, Box.QueryType queryType) {
    if (box.equals(queryBox)) {
  protected boolean expectedBBoxQueryResult(Range queryRange, Range range, Range.QueryType queryType) {
    if (queryRange.isEqual(range)) {
      return true;
    }
    Box.QueryType relation = box.relate(queryBox);
    if (queryType == Box.QueryType.INTERSECTS) {
    Range.QueryType relation = range.relate(queryRange);
    if (queryType == Range.QueryType.INTERSECTS) {
      return relation != null;
    }
    return relation == queryType;
  }

  protected double nextDoubleInternal() {
    if (rarely()) {
      return random().nextBoolean() ? Double.POSITIVE_INFINITY : Double.NEGATIVE_INFINITY;
    }
    double max = 100 / 2;
    return (max + max) * random().nextDouble() - max;
  }

  protected Box nextBox(int dimensions) {
    double[] min = new double[dimensions];
    double[] max = new double[dimensions];

    for (int d=0; d<dimensions; ++d) {
      min[d] = nextDoubleInternal();
      max[d] = nextDoubleInternal();
    }

    return new Box(min, max);
  }

  protected static class Box {
    double[] min;
    double[] max;
  abstract static class Range {
    protected boolean isMissing = false;

    enum QueryType { INTERSECTS, WITHIN, CONTAINS }

    Box(double[] min, double[] max) {
      assert min != null && max != null && min.length > 0 && max.length > 0
          : "test box: min/max cannot be null or empty";
      assert min.length == max.length : "test box: min/max length do not agree";
      this.min = new double[min.length];
      this.max = new double[max.length];
      for (int d=0; d<min.length; ++d) {
        this.min[d] = Math.min(min[d], max[d]);
        this.max[d] = Math.max(min[d], max[d]);
      }
    }
    protected abstract int numDimensions();
    protected abstract Object getMin(int dim);
    protected abstract void setMin(int dim, Object val);
    protected abstract Object getMax(int dim);
    protected abstract void setMax(int dim, Object val);
    protected abstract boolean isEqual(Range other);
    protected abstract boolean isDisjoint(Range other);
    protected abstract boolean isWithin(Range other);
    protected abstract boolean contains(Range other);

    protected void setVal(int dimension, double val) {
      if (val <= min[dimension]) {
        min[dimension] = val;
      } else {
        max[dimension] = val;
      }
    }

    @Override
    public boolean equals(Object o) {
      return o != null
          && getClass() == o.getClass()
          && equalTo(getClass().cast(o));
    }

    private boolean equalTo(Box o) {
      return Arrays.equals(min, o.min)
          && Arrays.equals(max, o.max);
    }

    @Override
    public int hashCode() {
      int result = Arrays.hashCode(min);
      result = 31 * result + Arrays.hashCode(max);
      return result;
    }

    QueryType relate(Box other) {
      // check disjoint
      for (int d=0; d<this.min.length; ++d) {
        if (this.min[d] > other.max[d] || this.max[d] < other.min[d]) {
          // disjoint:
          return null;
        }
      }

      // check within
      boolean within = true;
      for (int d=0; d<this.min.length; ++d) {
        if ((this.min[d] >= other.min[d] && this.max[d] <= other.max[d]) == false) {
          // not within:
          within = false;
          break;
        }
      }
      if (within == true) {
    protected QueryType relate(Range other) {
      if (isDisjoint(other)) {
        // if disjoint; return null:
        return null;
      } else if (isWithin(other)) {
        return QueryType.WITHIN;
      }

      // check contains
      boolean contains = true;
      for (int d=0; d<this.min.length; ++d) {
        if ((this.min[d] <= other.min[d] && this.max[d] >= other.max[d]) == false) {
          // not contains:
          contains = false;
          break;
        }
      }
      if (contains == true) {
      } else if (contains(other)) {
        return QueryType.CONTAINS;
      }
      return QueryType.INTERSECTS;
    }

    @Override
    public String toString() {
      StringBuilder b = new StringBuilder();
      b.append("Box(");
      b.append(min[0]);
      b.append(" TO ");
      b.append(max[0]);
      for (int d=1; d<min.length; ++d) {
        b.append(", ");
        b.append(min[d]);
        b.append(" TO ");
        b.append(max[d]);
      }
      b.append(")");

      return b.toString();
    }
  }
}
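A concrete 1-d example of the relate() contract above, with hypothetical values
(candidate range [2, 8], query range [0, 10]):

    // disjoint?  2 > 10 || 8 < 0    -> false, so relate() will not return null
    // within?    2 >= 0 && 8 <= 10  -> true,  so relate() returns QueryType.WITHIN
    // contains and the INTERSECTS fallback are only reached when within fails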

@ -16,6 +16,8 @@
 */
package org.apache.lucene.search;

import java.util.Arrays;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoubleRangeField;
import org.apache.lucene.index.IndexReader;
@ -23,25 +25,50 @@ import org.apache.lucene.index.RandomIndexWriter,
import org.apache.lucene.store.Directory;

/**
 * Random testing for RangeFieldQueries. Testing rigor inspired by {@code BaseGeoPointTestCase}
 * Random testing for RangeFieldQueries.
 */
public class TestDoubleRangeFieldQueries extends BaseRangeFieldQueryTestCase {
  private static final String FIELD_NAME = "rangeField";
  private static final String FIELD_NAME = "doubleRangeField";

  protected DoubleRangeField newRangeField(double[] min, double[] max) {
    return new DoubleRangeField(FIELD_NAME, min, max);
  private double nextDoubleInternal() {
    if (rarely()) {
      return random().nextBoolean() ? Double.POSITIVE_INFINITY : Double.NEGATIVE_INFINITY;
    }
    double max = Double.MAX_VALUE / 2;
    return (max + max) * random().nextDouble() - max;
  }

  protected Query newIntersectsQuery(double[] min, double[] max) {
    return DoubleRangeField.newIntersectsQuery(FIELD_NAME, min, max);
  @Override
  protected Range nextRange(int dimensions) {
    double[] min = new double[dimensions];
    double[] max = new double[dimensions];

    for (int d=0; d<dimensions; ++d) {
      min[d] = nextDoubleInternal();
      max[d] = nextDoubleInternal();
    }

    return new DoubleRange(min, max);
  }

  protected Query newContainsQuery(double[] min, double[] max) {
    return DoubleRangeField.newContainsQuery(FIELD_NAME, min, max);
  @Override
  protected DoubleRangeField newRangeField(Range r) {
    return new DoubleRangeField(FIELD_NAME, ((DoubleRange)r).min, ((DoubleRange)r).max);
  }

  protected Query newWithinQuery(double[] min, double[] max) {
    return DoubleRangeField.newWithinQuery(FIELD_NAME, min, max);
  @Override
  protected Query newIntersectsQuery(Range r) {
    return DoubleRangeField.newIntersectsQuery(FIELD_NAME, ((DoubleRange)r).min, ((DoubleRange)r).max);
  }

  @Override
  protected Query newContainsQuery(Range r) {
    return DoubleRangeField.newContainsQuery(FIELD_NAME, ((DoubleRange)r).min, ((DoubleRange)r).max);
  }

  @Override
  protected Query newWithinQuery(Range r) {
    return DoubleRangeField.newWithinQuery(FIELD_NAME, ((DoubleRange)r).min, ((DoubleRange)r).max);
  }

  /** Basic test */
@ -103,4 +130,111 @@ public class TestDoubleRangeFieldQueries extends BaseRangeFieldQueryTestCase {
    writer.close();
    dir.close();
  }

  /** DoubleRange test class implementation - use to validate DoubleRangeField */
  private class DoubleRange extends Range {
    double[] min;
    double[] max;

    DoubleRange(double[] min, double[] max) {
      assert min != null && max != null && min.length > 0 && max.length > 0
          : "test box: min/max cannot be null or empty";
      assert min.length == max.length : "test box: min/max length do not agree";
      this.min = new double[min.length];
      this.max = new double[max.length];
      for (int d=0; d<min.length; ++d) {
        // copy the values, swapping min/max if they were given out of order
        this.min[d] = Math.min(min[d], max[d]);
        this.max[d] = Math.max(min[d], max[d]);
      }
    }

    @Override
    protected int numDimensions() {
      return min.length;
    }

    @Override
    protected Double getMin(int dim) {
      return min[dim];
    }

    @Override
    protected void setMin(int dim, Object val) {
      min[dim] = (Double)val;
    }

    @Override
    protected Double getMax(int dim) {
      return max[dim];
    }

    @Override
    protected void setMax(int dim, Object val) {
      max[dim] = (Double)val;
    }

    @Override
    protected boolean isEqual(Range other) {
      DoubleRange o = (DoubleRange)other;
      return Arrays.equals(min, o.min) && Arrays.equals(max, o.max);
    }

    @Override
    protected boolean isDisjoint(Range o) {
      DoubleRange other = (DoubleRange)o;
      for (int d=0; d<this.min.length; ++d) {
        if (this.min[d] > other.max[d] || this.max[d] < other.min[d]) {
          // disjoint:
          return true;
        }
      }
      return false;
    }

    @Override
    protected boolean isWithin(Range o) {
      DoubleRange other = (DoubleRange)o;
      for (int d=0; d<this.min.length; ++d) {
        if ((this.min[d] >= other.min[d] && this.max[d] <= other.max[d]) == false) {
          // not within:
          return false;
        }
      }
      return true;
    }

    @Override
    protected boolean contains(Range o) {
      DoubleRange other = (DoubleRange) o;
      for (int d=0; d<this.min.length; ++d) {
        if ((this.min[d] <= other.min[d] && this.max[d] >= other.max[d]) == false) {
          // not contains:
          return false;
        }
      }
      return true;
    }

    @Override
    public String toString() {
      StringBuilder b = new StringBuilder();
      b.append("Box(");
      b.append(min[0]);
      b.append(" TO ");
      b.append(max[0]);
      for (int d=1; d<min.length; ++d) {
        b.append(", ");
        b.append(min[d]);
        b.append(" TO ");
        b.append(max[d]);
      }
      b.append(")");

      return b.toString();
    }
  }
}

@ -0,0 +1,240 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.search;

import java.util.Arrays;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.FloatRangeField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.store.Directory;

/**
 * Random testing for FloatRangeField Queries.
 */
public class TestFloatRangeFieldQueries extends BaseRangeFieldQueryTestCase {
  private static final String FIELD_NAME = "floatRangeField";

  private float nextFloatInternal() {
    if (rarely()) {
      return random().nextBoolean() ? Float.NEGATIVE_INFINITY : Float.POSITIVE_INFINITY;
    }
    float max = Float.MAX_VALUE / 2;
    return (max + max) * random().nextFloat() - max;
  }

  @Override
  protected Range nextRange(int dimensions) {
    float[] min = new float[dimensions];
    float[] max = new float[dimensions];

    for (int d=0; d<dimensions; ++d) {
      min[d] = nextFloatInternal();
      max[d] = nextFloatInternal();
    }

    return new FloatRange(min, max);
  }

  @Override
  protected FloatRangeField newRangeField(Range r) {
    return new FloatRangeField(FIELD_NAME, ((FloatRange)r).min, ((FloatRange)r).max);
  }

  @Override
  protected Query newIntersectsQuery(Range r) {
    return FloatRangeField.newIntersectsQuery(FIELD_NAME, ((FloatRange)r).min, ((FloatRange)r).max);
  }

  @Override
  protected Query newContainsQuery(Range r) {
    return FloatRangeField.newContainsQuery(FIELD_NAME, ((FloatRange)r).min, ((FloatRange)r).max);
  }

  @Override
  protected Query newWithinQuery(Range r) {
    return FloatRangeField.newWithinQuery(FIELD_NAME, ((FloatRange)r).min, ((FloatRange)r).max);
  }

  /** Basic test */
  public void testBasics() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);

    // intersects (within)
    Document document = new Document();
    document.add(new FloatRangeField(FIELD_NAME, new float[] {-10.0f, -10.0f}, new float[] {9.1f, 10.1f}));
    writer.addDocument(document);

    // intersects (crosses)
    document = new Document();
    document.add(new FloatRangeField(FIELD_NAME, new float[] {10.0f, -10.0f}, new float[] {20.0f, 10.0f}));
    writer.addDocument(document);

    // intersects (contains)
    document = new Document();
    document.add(new FloatRangeField(FIELD_NAME, new float[] {-20.0f, -20.0f}, new float[] {30.0f, 30.1f}));
    writer.addDocument(document);

    // intersects (crosses)
    document = new Document();
    document.add(new FloatRangeField(FIELD_NAME, new float[] {-11.1f, -11.2f}, new float[] {1.23f, 11.5f}));
    writer.addDocument(document);

    // intersects (crosses)
    document = new Document();
    document.add(new FloatRangeField(FIELD_NAME, new float[] {12.33f, 1.2f}, new float[] {15.1f, 29.9f}));
    writer.addDocument(document);

    // disjoint
    document = new Document();
    document.add(new FloatRangeField(FIELD_NAME, new float[] {-122.33f, 1.2f}, new float[] {-115.1f, 29.9f}));
    writer.addDocument(document);

    // intersects (crosses)
    document = new Document();
    document.add(new FloatRangeField(FIELD_NAME, new float[] {Float.NEGATIVE_INFINITY, 1.2f}, new float[] {-11.0f, 29.9f}));
    writer.addDocument(document);

    // equal (within, contains, intersects)
    document = new Document();
    document.add(new FloatRangeField(FIELD_NAME, new float[] {-11f, -15f}, new float[] {15f, 20f}));
    writer.addDocument(document);

    // search
    IndexReader reader = writer.getReader();
    IndexSearcher searcher = newSearcher(reader);
    assertEquals(7, searcher.count(FloatRangeField.newIntersectsQuery(FIELD_NAME,
        new float[] {-11.0f, -15.0f}, new float[] {15.0f, 20.0f})));
    assertEquals(2, searcher.count(FloatRangeField.newWithinQuery(FIELD_NAME,
        new float[] {-11.0f, -15.0f}, new float[] {15.0f, 20.0f})));
    assertEquals(2, searcher.count(FloatRangeField.newContainsQuery(FIELD_NAME,
        new float[] {-11.0f, -15.0f}, new float[] {15.0f, 20.0f})));

    reader.close();
    writer.close();
    dir.close();
  }

  /** FloatRange test class implementation - use to validate FloatRangeField */
  private class FloatRange extends Range {
    float[] min;
    float[] max;

    FloatRange(float[] min, float[] max) {
      assert min != null && max != null && min.length > 0 && max.length > 0
          : "test box: min/max cannot be null or empty";
      assert min.length == max.length : "test box: min/max length do not agree";
      this.min = new float[min.length];
      this.max = new float[max.length];
      for (int d=0; d<min.length; ++d) {
        // copy the values, swapping min/max if they were given out of order
        this.min[d] = Math.min(min[d], max[d]);
        this.max[d] = Math.max(min[d], max[d]);
      }
    }

    @Override
    protected int numDimensions() {
      return min.length;
    }

    @Override
    protected Float getMin(int dim) {
      return min[dim];
    }

    @Override
    protected void setMin(int dim, Object val) {
      min[dim] = (Float)val;
    }

    @Override
    protected Float getMax(int dim) {
      return max[dim];
    }

    @Override
    protected void setMax(int dim, Object val) {
      max[dim] = (Float)val;
    }

    @Override
    protected boolean isEqual(Range other) {
      FloatRange o = (FloatRange)other;
      return Arrays.equals(min, o.min) && Arrays.equals(max, o.max);
    }

    @Override
    protected boolean isDisjoint(Range o) {
      FloatRange other = (FloatRange)o;
      for (int d=0; d<this.min.length; ++d) {
        if (this.min[d] > other.max[d] || this.max[d] < other.min[d]) {
          // disjoint:
          return true;
        }
      }
      return false;
    }

    @Override
    protected boolean isWithin(Range o) {
      FloatRange other = (FloatRange)o;
      for (int d=0; d<this.min.length; ++d) {
        if ((this.min[d] >= other.min[d] && this.max[d] <= other.max[d]) == false) {
          // not within:
          return false;
        }
      }
      return true;
    }

    @Override
    protected boolean contains(Range o) {
      FloatRange other = (FloatRange) o;
      for (int d=0; d<this.min.length; ++d) {
        if ((this.min[d] <= other.min[d] && this.max[d] >= other.max[d]) == false) {
          // not contains:
          return false;
        }
      }
      return true;
    }

    @Override
    public String toString() {
      StringBuilder b = new StringBuilder();
      b.append("Box(");
      b.append(min[0]);
      b.append(" TO ");
      b.append(max[0]);
      for (int d=1; d<min.length; ++d) {
        b.append(", ");
        b.append(min[d]);
        b.append(" TO ");
        b.append(max[d]);
      }
      b.append(")");

      return b.toString();
    }
  }
}
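A quick arithmetic check on nextFloatInternal() above: with max = Float.MAX_VALUE / 2
the expression (max + max) * random().nextFloat() - max stays inside
[-Float.MAX_VALUE/2, Float.MAX_VALUE/2) and never overflows to infinity, because
nextFloat() returns values in [0, 1); the infinite endpoints are instead injected
explicitly by the rarely() branch.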

@ -0,0 +1,240 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.search;

import java.util.Arrays;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.IntRangeField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.store.Directory;

/**
 * Random testing for IntRangeField Queries.
 */
public class TestIntRangeFieldQueries extends BaseRangeFieldQueryTestCase {
  private static final String FIELD_NAME = "intRangeField";

  private int nextIntInternal() {
    if (rarely()) {
      return random().nextBoolean() ? Integer.MAX_VALUE : Integer.MIN_VALUE;
    }
    int max = Integer.MAX_VALUE / 2;
    // pick a value in [-max, max); nextInt(bound) keeps the result in range
    // without the int overflow that multiplying by an unbounded nextInt() causes
    return random().nextInt(max + max) - max;
  }

  @Override
  protected Range nextRange(int dimensions) {
    int[] min = new int[dimensions];
    int[] max = new int[dimensions];

    for (int d=0; d<dimensions; ++d) {
      min[d] = nextIntInternal();
      max[d] = nextIntInternal();
    }

    return new IntRange(min, max);
  }

  @Override
  protected IntRangeField newRangeField(Range r) {
    return new IntRangeField(FIELD_NAME, ((IntRange)r).min, ((IntRange)r).max);
  }

  @Override
  protected Query newIntersectsQuery(Range r) {
    return IntRangeField.newIntersectsQuery(FIELD_NAME, ((IntRange)r).min, ((IntRange)r).max);
  }

  @Override
  protected Query newContainsQuery(Range r) {
    return IntRangeField.newContainsQuery(FIELD_NAME, ((IntRange)r).min, ((IntRange)r).max);
  }

  @Override
  protected Query newWithinQuery(Range r) {
    return IntRangeField.newWithinQuery(FIELD_NAME, ((IntRange)r).min, ((IntRange)r).max);
  }

  /** Basic test */
  public void testBasics() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);

    // intersects (within)
    Document document = new Document();
    document.add(new IntRangeField(FIELD_NAME, new int[] {-10, -10}, new int[] {9, 10}));
    writer.addDocument(document);

    // intersects (crosses)
    document = new Document();
    document.add(new IntRangeField(FIELD_NAME, new int[] {10, -10}, new int[] {20, 10}));
    writer.addDocument(document);

    // intersects (contains)
    document = new Document();
    document.add(new IntRangeField(FIELD_NAME, new int[] {-20, -20}, new int[] {30, 30}));
    writer.addDocument(document);

    // intersects (within)
    document = new Document();
    document.add(new IntRangeField(FIELD_NAME, new int[] {-11, -11}, new int[] {1, 11}));
    writer.addDocument(document);

    // intersects (crosses)
    document = new Document();
    document.add(new IntRangeField(FIELD_NAME, new int[] {12, 1}, new int[] {15, 29}));
    writer.addDocument(document);

    // disjoint
    document = new Document();
    document.add(new IntRangeField(FIELD_NAME, new int[] {-122, 1}, new int[] {-115, 29}));
    writer.addDocument(document);

    // intersects (crosses)
    document = new Document();
    document.add(new IntRangeField(FIELD_NAME, new int[] {Integer.MIN_VALUE, 1}, new int[] {-11, 29}));
    writer.addDocument(document);

    // equal (within, contains, intersects)
    document = new Document();
    document.add(new IntRangeField(FIELD_NAME, new int[] {-11, -15}, new int[] {15, 20}));
    writer.addDocument(document);

    // search
    IndexReader reader = writer.getReader();
    IndexSearcher searcher = newSearcher(reader);
    assertEquals(7, searcher.count(IntRangeField.newIntersectsQuery(FIELD_NAME,
        new int[] {-11, -15}, new int[] {15, 20})));
    assertEquals(3, searcher.count(IntRangeField.newWithinQuery(FIELD_NAME,
        new int[] {-11, -15}, new int[] {15, 20})));
    assertEquals(2, searcher.count(IntRangeField.newContainsQuery(FIELD_NAME,
        new int[] {-11, -15}, new int[] {15, 20})));

    reader.close();
    writer.close();
    dir.close();
  }

  /** IntRange test class implementation - use to validate IntRangeField */
  private class IntRange extends Range {
    int[] min;
    int[] max;

    IntRange(int[] min, int[] max) {
      assert min != null && max != null && min.length > 0 && max.length > 0
          : "test box: min/max cannot be null or empty";
      assert min.length == max.length : "test box: min/max length do not agree";
      this.min = new int[min.length];
      this.max = new int[max.length];
      for (int d=0; d<min.length; ++d) {
        // copy the values, swapping min/max if they were given out of order
        this.min[d] = Math.min(min[d], max[d]);
        this.max[d] = Math.max(min[d], max[d]);
      }
    }

    @Override
    protected int numDimensions() {
      return min.length;
    }

    @Override
    protected Integer getMin(int dim) {
      return min[dim];
    }

    @Override
    protected void setMin(int dim, Object val) {
      min[dim] = (Integer)val;
    }

    @Override
    protected Integer getMax(int dim) {
      return max[dim];
    }

    @Override
    protected void setMax(int dim, Object val) {
      max[dim] = (Integer)val;
    }

    @Override
    protected boolean isEqual(Range other) {
      IntRange o = (IntRange)other;
      return Arrays.equals(min, o.min) && Arrays.equals(max, o.max);
    }

    @Override
    protected boolean isDisjoint(Range o) {
      IntRange other = (IntRange)o;
      for (int d=0; d<this.min.length; ++d) {
        if (this.min[d] > other.max[d] || this.max[d] < other.min[d]) {
          // disjoint:
          return true;
        }
      }
      return false;
    }

    @Override
    protected boolean isWithin(Range o) {
      IntRange other = (IntRange)o;
      for (int d=0; d<this.min.length; ++d) {
        if ((this.min[d] >= other.min[d] && this.max[d] <= other.max[d]) == false) {
          // not within:
          return false;
        }
      }
      return true;
    }

    @Override
    protected boolean contains(Range o) {
      IntRange other = (IntRange) o;
      for (int d=0; d<this.min.length; ++d) {
        if ((this.min[d] <= other.min[d] && this.max[d] >= other.max[d]) == false) {
          // not contains:
          return false;
        }
      }
      return true;
    }

    @Override
    public String toString() {
      StringBuilder b = new StringBuilder();
      b.append("Box(");
      b.append(min[0]);
      b.append(" TO ");
      b.append(max[0]);
      for (int d=1; d<min.length; ++d) {
        b.append(", ");
        b.append(min[d]);
        b.append(" TO ");
        b.append(max[d]);
      }
      b.append(")");

      return b.toString();
    }
  }
}
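The expected counts in testBasics() above can be checked by hand against the
query box [-11, 15] x [-15, 20]: every document except the disjoint one overlaps
it, giving 7 for INTERSECTS; the two "within" documents plus the "equal" document
lie entirely inside it, giving 3 for WITHIN; and only the "contains" document and
the "equal" document fully cover it, giving 2 for CONTAINS.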

@ -0,0 +1,240 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.search;

import java.util.Arrays;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.LongRangeField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.store.Directory;

/**
 * Random testing for LongRangeField Queries.
 */
public class TestLongRangeFieldQueries extends BaseRangeFieldQueryTestCase {
  private static final String FIELD_NAME = "longRangeField";

  private long nextLongInternal() {
    if (rarely()) {
      return random().nextBoolean() ? Long.MAX_VALUE : Long.MIN_VALUE;
    }
    long max = Long.MAX_VALUE / 2;
    // pick a value in [-max, max); Math.floorMod keeps the unbounded nextLong()
    // inside [0, max + max) without the overflow of multiplying by it
    return Math.floorMod(random().nextLong(), max + max) - max;
  }

  @Override
  protected Range nextRange(int dimensions) {
    long[] min = new long[dimensions];
    long[] max = new long[dimensions];

    for (int d=0; d<dimensions; ++d) {
      min[d] = nextLongInternal();
      max[d] = nextLongInternal();
    }

    return new LongRange(min, max);
  }

  @Override
  protected LongRangeField newRangeField(Range r) {
    return new LongRangeField(FIELD_NAME, ((LongRange)r).min, ((LongRange)r).max);
  }

  @Override
  protected Query newIntersectsQuery(Range r) {
    return LongRangeField.newIntersectsQuery(FIELD_NAME, ((LongRange)r).min, ((LongRange)r).max);
  }

  @Override
  protected Query newContainsQuery(Range r) {
    return LongRangeField.newContainsQuery(FIELD_NAME, ((LongRange)r).min, ((LongRange)r).max);
  }

  @Override
  protected Query newWithinQuery(Range r) {
    return LongRangeField.newWithinQuery(FIELD_NAME, ((LongRange)r).min, ((LongRange)r).max);
  }

  /** Basic test */
  public void testBasics() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);

    // intersects (within)
    Document document = new Document();
    document.add(new LongRangeField(FIELD_NAME, new long[] {-10, -10}, new long[] {9, 10}));
    writer.addDocument(document);

    // intersects (crosses)
    document = new Document();
    document.add(new LongRangeField(FIELD_NAME, new long[] {10, -10}, new long[] {20, 10}));
    writer.addDocument(document);

    // intersects (contains)
    document = new Document();
    document.add(new LongRangeField(FIELD_NAME, new long[] {-20, -20}, new long[] {30, 30}));
    writer.addDocument(document);

    // intersects (within)
    document = new Document();
    document.add(new LongRangeField(FIELD_NAME, new long[] {-11, -11}, new long[] {1, 11}));
    writer.addDocument(document);

    // intersects (crosses)
    document = new Document();
    document.add(new LongRangeField(FIELD_NAME, new long[] {12, 1}, new long[] {15, 29}));
    writer.addDocument(document);

    // disjoint
    document = new Document();
    document.add(new LongRangeField(FIELD_NAME, new long[] {-122, 1}, new long[] {-115, 29}));
    writer.addDocument(document);

    // intersects (crosses)
    document = new Document();
    document.add(new LongRangeField(FIELD_NAME, new long[] {Long.MIN_VALUE, 1}, new long[] {-11, 29}));
    writer.addDocument(document);

    // equal (within, contains, intersects)
    document = new Document();
    document.add(new LongRangeField(FIELD_NAME, new long[] {-11, -15}, new long[] {15, 20}));
    writer.addDocument(document);

    // search
    IndexReader reader = writer.getReader();
    IndexSearcher searcher = newSearcher(reader);
    assertEquals(7, searcher.count(LongRangeField.newIntersectsQuery(FIELD_NAME,
        new long[] {-11, -15}, new long[] {15, 20})));
    assertEquals(3, searcher.count(LongRangeField.newWithinQuery(FIELD_NAME,
        new long[] {-11, -15}, new long[] {15, 20})));
    assertEquals(2, searcher.count(LongRangeField.newContainsQuery(FIELD_NAME,
        new long[] {-11, -15}, new long[] {15, 20})));

    reader.close();
    writer.close();
    dir.close();
  }

  /** LongRange test class implementation - use to validate LongRangeField */
  private class LongRange extends Range {
    long[] min;
    long[] max;

    LongRange(long[] min, long[] max) {
      assert min != null && max != null && min.length > 0 && max.length > 0
          : "test box: min/max cannot be null or empty";
      assert min.length == max.length : "test box: min/max length do not agree";
      this.min = new long[min.length];
      this.max = new long[max.length];
      for (int d=0; d<min.length; ++d) {
        // copy the values, swapping min/max if they were given out of order
        this.min[d] = Math.min(min[d], max[d]);
        this.max[d] = Math.max(min[d], max[d]);
      }
    }

    @Override
    protected int numDimensions() {
      return min.length;
    }

    @Override
    protected Long getMin(int dim) {
      return min[dim];
    }

    @Override
    protected void setMin(int dim, Object val) {
      min[dim] = (Long)val;
    }

    @Override
    protected Long getMax(int dim) {
      return max[dim];
    }

    @Override
    protected void setMax(int dim, Object val) {
      max[dim] = (Long)val;
    }

    @Override
    protected boolean isEqual(Range other) {
      LongRange o = (LongRange)other;
      return Arrays.equals(min, o.min) && Arrays.equals(max, o.max);
    }

    @Override
    protected boolean isDisjoint(Range o) {
      LongRange other = (LongRange)o;
      for (int d=0; d<this.min.length; ++d) {
        if (this.min[d] > other.max[d] || this.max[d] < other.min[d]) {
          // disjoint:
          return true;
        }
      }
      return false;
    }

    @Override
    protected boolean isWithin(Range o) {
      LongRange other = (LongRange)o;
      for (int d=0; d<this.min.length; ++d) {
        if ((this.min[d] >= other.min[d] && this.max[d] <= other.max[d]) == false) {
          // not within:
          return false;
        }
      }
      return true;
    }

    @Override
    protected boolean contains(Range o) {
      LongRange other = (LongRange) o;
      for (int d=0; d<this.min.length; ++d) {
        if ((this.min[d] <= other.min[d] && this.max[d] >= other.max[d]) == false) {
          // not contains:
          return false;
        }
      }
      return true;
    }

    @Override
    public String toString() {
      StringBuilder b = new StringBuilder();
      b.append("Box(");
      b.append(min[0]);
      b.append(" TO ");
      b.append(max[0]);
      for (int d=1; d<min.length; ++d) {
        b.append(", ");
        b.append(min[d]);
        b.append(" TO ");
        b.append(max[d]);
      }
      b.append(")");

      return b.toString();
    }
  }
}
|
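For context, a minimal sketch of indexing and querying with LongRangeField as exercised by the test above. Only the constructor and the three query factories shown in the test are assumed; the field name, bounds, and surrounding index plumbing are illustrative:

// Minimal sketch, assuming the LongRangeField API used in the test above.
Directory dir = new RAMDirectory();  // any Directory implementation works
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig());

// Index a 1-dimensional range [5, 15] under an illustrative field name.
Document doc = new Document();
doc.add(new LongRangeField("priceRange", new long[] {5}, new long[] {15}));
writer.addDocument(doc);
writer.close();

// Count documents whose stored range intersects the query range [10, 20].
IndexReader reader = DirectoryReader.open(dir);
IndexSearcher searcher = new IndexSearcher(reader);
int hits = searcher.count(LongRangeField.newIntersectsQuery("priceRange", new long[] {10}, new long[] {20}));
reader.close();
dir.close();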
@ -31,6 +31,7 @@
  <path id="classpath">
    <path refid="base.classpath"/>
    <path refid="spatialjar"/>
    <pathelement path="${backward-codecs.jar}" />
    <pathelement path="${queries.jar}" />
    <pathelement path="${misc.jar}" />
    <pathelement path="${spatial3d.jar}" />
@ -42,16 +43,17 @@
    <pathelement path="src/test-files" />
  </path>

  <target name="compile-core" depends="jar-queries,jar-misc,jar-spatial3d,common.compile-core" />
  <target name="compile-core" depends="jar-backward-codecs,jar-queries,jar-misc,jar-spatial3d,common.compile-core" />

  <target name="javadocs" depends="javadocs-queries,javadocs-misc,javadocs-spatial3d,compile-core,check-javadocs-uptodate"
  <target name="javadocs" depends="javadocs-backward-codecs,javadocs-queries,javadocs-misc,javadocs-spatial3d,compile-core,check-javadocs-uptodate"
          unless="javadocs-uptodate-${name}">
    <invoke-module-javadoc>
      <links>
        <link href="../backward-codecs"/>
        <link href="../queries"/>
        <link href="../misc"/>
        <link href="../spatial3d"/>
      </links>
    </invoke-module-javadoc>
  </target>
</project>
@ -20,17 +20,20 @@ import org.apache.lucene.document.DoubleDocValuesField;
import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.LegacyDoubleField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Term;
import org.apache.lucene.legacy.LegacyDoubleField;
import org.apache.lucene.legacy.LegacyFieldType;
import org.apache.lucene.legacy.LegacyNumericRangeQuery;
import org.apache.lucene.legacy.LegacyNumericType;
import org.apache.lucene.legacy.LegacyNumericUtils;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.spatial.SpatialStrategy;
@ -39,7 +42,6 @@ import org.apache.lucene.spatial.query.SpatialOperation;
import org.apache.lucene.spatial.query.UnsupportedSpatialOperation;
import org.apache.lucene.spatial.util.DistanceToShapeValueSource;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.LegacyNumericUtils;
import org.apache.lucene.util.NumericUtils;
import org.locationtech.spatial4j.context.SpatialContext;
import org.locationtech.spatial4j.shape.Point;
@ -87,7 +89,7 @@ public class BBoxStrategy extends SpatialStrategy {
  public static FieldType DEFAULT_FIELDTYPE;

  @Deprecated
  public static FieldType LEGACY_FIELDTYPE;
  public static LegacyFieldType LEGACY_FIELDTYPE;
  static {
    // Default: pointValues + docValues
    FieldType type = new FieldType();
@ -97,14 +99,14 @@ public class BBoxStrategy extends SpatialStrategy {
    type.freeze();
    DEFAULT_FIELDTYPE = type;
    // Legacy default: legacyNumerics + docValues
    type = new FieldType();
    type.setIndexOptions(IndexOptions.DOCS);
    type.setNumericType(FieldType.LegacyNumericType.DOUBLE);
    type.setNumericPrecisionStep(8);// same as solr default
    type.setDocValuesType(DocValuesType.NUMERIC);//docValues
    type.setStored(false);
    type.freeze();
    LEGACY_FIELDTYPE = type;
    LegacyFieldType legacyType = new LegacyFieldType();
    legacyType.setIndexOptions(IndexOptions.DOCS);
    legacyType.setNumericType(LegacyNumericType.DOUBLE);
    legacyType.setNumericPrecisionStep(8);// same as solr default
    legacyType.setDocValuesType(DocValuesType.NUMERIC);//docValues
    legacyType.setStored(false);
    legacyType.freeze();
    LEGACY_FIELDTYPE = legacyType;
  }

  public static final String SUFFIX_MINX = "__minX";
@ -130,7 +132,7 @@ public class BBoxStrategy extends SpatialStrategy {
  private final boolean hasDocVals;
  private final boolean hasPointVals;
  // equiv to "hasLegacyNumerics":
  private final FieldType legacyNumericFieldType; // not stored; holds precision step.
  private final LegacyFieldType legacyNumericFieldType; // not stored; holds precision step.
  private final FieldType xdlFieldType;

  /**
@ -177,16 +179,17 @@ public class BBoxStrategy extends SpatialStrategy {
    if ((this.hasPointVals = fieldType.pointDimensionCount() > 0)) {
      numQuads++;
    }
    if (fieldType.indexOptions() != IndexOptions.NONE && fieldType.numericType() != null) {
    if (fieldType.indexOptions() != IndexOptions.NONE && fieldType instanceof LegacyFieldType && ((LegacyFieldType)fieldType).numericType() != null) {
      if (hasPointVals) {
        throw new IllegalArgumentException("pointValues and LegacyNumericType are mutually exclusive");
      }
      if (fieldType.numericType() != FieldType.LegacyNumericType.DOUBLE) {
        throw new IllegalArgumentException(getClass() + " does not support " + fieldType.numericType());
      final LegacyFieldType legacyType = (LegacyFieldType) fieldType;
      if (legacyType.numericType() != LegacyNumericType.DOUBLE) {
        throw new IllegalArgumentException(getClass() + " does not support " + legacyType.numericType());
      }
      numQuads++;
      legacyNumericFieldType = new FieldType(LegacyDoubleField.TYPE_NOT_STORED);
      legacyNumericFieldType.setNumericPrecisionStep(fieldType.numericPrecisionStep());
      legacyNumericFieldType = new LegacyFieldType(LegacyDoubleField.TYPE_NOT_STORED);
      legacyNumericFieldType.setNumericPrecisionStep(legacyType.numericPrecisionStep());
      legacyNumericFieldType.freeze();
    } else {
      legacyNumericFieldType = null;
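The pattern this hunk introduces — gating legacy-numeric handling on an instanceof check now that numericType() lives on LegacyFieldType rather than FieldType — can be summarized in isolation. A sketch under the assumption, which the diff enforces, that only LegacyFieldType instances carry a numericType(); the helper name is illustrative:

// Illustrative helper mirroring the guard added in BBoxStrategy's constructor:
// a plain FieldType means points/docValues, while a LegacyFieldType may
// additionally request legacy numerics via a non-null numericType().
static boolean usesLegacyNumerics(FieldType fieldType) {
  return fieldType.indexOptions() != IndexOptions.NONE
      && fieldType instanceof LegacyFieldType
      && ((LegacyFieldType) fieldType).numericType() != null;
}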
@ -26,7 +26,7 @@ import org.apache.lucene.util.BytesRefIterator;
/**
 * A TokenStream used internally by {@link org.apache.lucene.spatial.prefix.PrefixTreeStrategy}.
 *
 * This is modelled after {@link org.apache.lucene.analysis.LegacyNumericTokenStream}.
 * This is modelled after {@link org.apache.lucene.legacy.LegacyNumericTokenStream}.
 *
 * @lucene.internal
 */
@ -20,16 +20,18 @@ import org.apache.lucene.document.DoubleDocValuesField;
import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.LegacyDoubleField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.legacy.LegacyDoubleField;
import org.apache.lucene.legacy.LegacyFieldType;
import org.apache.lucene.legacy.LegacyNumericRangeQuery;
import org.apache.lucene.legacy.LegacyNumericType;
import org.apache.lucene.queries.function.FunctionRangeQuery;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.SpatialStrategy;
import org.apache.lucene.spatial.query.SpatialArgs;
@ -85,7 +87,7 @@ public class PointVectorStrategy extends SpatialStrategy {
  public static FieldType DEFAULT_FIELDTYPE;

  @Deprecated
  public static FieldType LEGACY_FIELDTYPE;
  public static LegacyFieldType LEGACY_FIELDTYPE;
  static {
    // Default: pointValues + docValues
    FieldType type = new FieldType();
@ -95,14 +97,14 @@ public class PointVectorStrategy extends SpatialStrategy {
    type.freeze();
    DEFAULT_FIELDTYPE = type;
    // Legacy default: legacyNumerics
    type = new FieldType();
    type.setIndexOptions(IndexOptions.DOCS);
    type.setNumericType(FieldType.LegacyNumericType.DOUBLE);
    type.setNumericPrecisionStep(8);// same as solr default
    type.setDocValuesType(DocValuesType.NONE);//no docValues!
    type.setStored(false);
    type.freeze();
    LEGACY_FIELDTYPE = type;
    LegacyFieldType legacyType = new LegacyFieldType();
    legacyType.setIndexOptions(IndexOptions.DOCS);
    legacyType.setNumericType(LegacyNumericType.DOUBLE);
    legacyType.setNumericPrecisionStep(8);// same as solr default
    legacyType.setDocValuesType(DocValuesType.NONE);//no docValues!
    legacyType.setStored(false);
    legacyType.freeze();
    LEGACY_FIELDTYPE = legacyType;
  }

  public static final String SUFFIX_X = "__x";
@ -116,7 +118,7 @@ public class PointVectorStrategy extends SpatialStrategy {
  private final boolean hasDocVals;
  private final boolean hasPointVals;
  // equiv to "hasLegacyNumerics":
  private final FieldType legacyNumericFieldType; // not stored; holds precision step.
  private final LegacyFieldType legacyNumericFieldType; // not stored; holds precision step.

  /**
   * Create a new {@link PointVectorStrategy} instance that uses {@link DoublePoint} and {@link DoublePoint#newRangeQuery}
@ -157,16 +159,17 @@ public class PointVectorStrategy extends SpatialStrategy {
    if ((this.hasPointVals = fieldType.pointDimensionCount() > 0)) {
      numPairs++;
    }
    if (fieldType.indexOptions() != IndexOptions.NONE && fieldType.numericType() != null) {
    if (fieldType.indexOptions() != IndexOptions.NONE && fieldType instanceof LegacyFieldType && ((LegacyFieldType)fieldType).numericType() != null) {
      if (hasPointVals) {
        throw new IllegalArgumentException("pointValues and LegacyNumericType are mutually exclusive");
      }
      if (fieldType.numericType() != FieldType.LegacyNumericType.DOUBLE) {
        throw new IllegalArgumentException(getClass() + " does not support " + fieldType.numericType());
      final LegacyFieldType legacyType = (LegacyFieldType) fieldType;
      if (legacyType.numericType() != LegacyNumericType.DOUBLE) {
        throw new IllegalArgumentException(getClass() + " does not support " + legacyType.numericType());
      }
      numPairs++;
      legacyNumericFieldType = new FieldType(LegacyDoubleField.TYPE_NOT_STORED);
      legacyNumericFieldType.setNumericPrecisionStep(fieldType.numericPrecisionStep());
      legacyNumericFieldType = new LegacyFieldType(LegacyDoubleField.TYPE_NOT_STORED);
      legacyNumericFieldType.setNumericPrecisionStep(legacyType.numericPrecisionStep());
      legacyNumericFieldType.freeze();
    } else {
      legacyNumericFieldType = null;
@ -22,6 +22,7 @@ import com.carrotsearch.randomizedtesting.annotations.Repeat;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.legacy.LegacyFieldType;
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.SpatialMatchConcern;
import org.apache.lucene.spatial.prefix.RandomSpatialOpStrategyTestCase;
@ -100,7 +101,12 @@ public class TestBBoxStrategy extends RandomSpatialOpStrategyTestCase {
    }
    //test we can disable docValues for predicate tests
    if (random().nextBoolean()) {
      FieldType fieldType = new FieldType(((BBoxStrategy)strategy).getFieldType());
      FieldType fieldType = ((BBoxStrategy)strategy).getFieldType();
      if (fieldType instanceof LegacyFieldType) {
        fieldType = new LegacyFieldType((LegacyFieldType)fieldType);
      } else {
        fieldType = new FieldType(fieldType);
      }
      fieldType.setDocValuesType(DocValuesType.NONE);
      strategy = new BBoxStrategy(ctx, strategy.getFieldName(), fieldType);
    }
@ -838,7 +838,7 @@ public class MockDirectoryWrapper extends BaseDirectoryWrapper {
      }
      // RuntimeException instead of IOException because
      // super() does not throw IOException currently:
      throw new RuntimeException("MockDirectoryWrapper: cannot close: there are still open files: " + openFiles, cause);
      throw new RuntimeException("MockDirectoryWrapper: cannot close: there are still " + openFiles.size() + " open files: " + openFiles, cause);
    }
    if (openLocks.size() > 0) {
      Exception cause = null;
@ -177,11 +177,14 @@ public final class TestUtil {
      assert hasNext;
      T v = iterator.next();
      assert allowNull || v != null;
      try {
        iterator.remove();
        throw new AssertionError("broken iterator (supports remove): " + iterator);
      } catch (UnsupportedOperationException expected) {
        // ok
      // for the first element, check that remove is not supported
      if (i == 0) {
        try {
          iterator.remove();
          throw new AssertionError("broken iterator (supports remove): " + iterator);
        } catch (UnsupportedOperationException expected) {
          // ok
        }
      }
    }
    assert !iterator.hasNext();
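The change above narrows the remove() probe to the first element only, so a read-only iterator is no longer asked to reject removal for every element it yields. The contract being asserted, sketched against a plain java.util collection (the sample data is illustrative):

// Sketch of the contract the test asserts: an unmodifiable iterator
// must throw UnsupportedOperationException from remove().
Iterator<String> it = Collections.unmodifiableList(Arrays.asList("a", "b")).iterator();
it.next();
try {
  it.remove();  // must be rejected by a read-only iterator
  throw new AssertionError("iterator unexpectedly supports remove()");
} catch (UnsupportedOperationException expected) {
  // ok: the iterator behaves as required
}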
@ -28,10 +28,6 @@ grant {
  // should be enclosed within common.dir, but just in case:
  permission java.io.FilePermission "${junit4.childvm.cwd}", "read";

  // jenkins wants to read outside its sandbox, to use a special linedocs file.
  // this is best effort and not really supported.
  permission java.io.FilePermission "/home/jenkins/lucene-data/enwiki.random.lines.txt", "read";

  // write only to sandbox
  permission java.io.FilePermission "${junit4.childvm.cwd}${/}temp", "read,write,delete";
  permission java.io.FilePermission "${junit4.childvm.cwd}${/}temp${/}-", "read,write,delete";
@ -50,6 +50,23 @@ Optimizations
  check on every request and move connection lifecycle management towards the client.
  (Ryan Zezeski, Mark Miller, Shawn Heisey, Steve Davids)

==================  6.3.0 ==================

Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

Versions of Major Components
---------------------
Apache Tika 1.13
Carrot2 3.12.0
Velocity 1.7 and Velocity Tools 2.0
Apache UIMA 2.3.1
Apache ZooKeeper 3.4.6
Jetty 9.3.8.v20160314


(No Changes)


==================  6.2.0 ==================

Versions of Major Components
@ -119,6 +136,11 @@ New Features

* SOLR-6465: CDCR: fall back to whole-index replication when tlogs are insufficient.
  (Noble Paul, Renaud Delbru, shalin)

* SOLR-9320: A REPLACENODE command to decommission an existing node with another new node
  (noble, Nitin Sharma, Varun Thacker)

* SOLR-9318: A DELETENODE command to delete all replicas in that node (noble, Nitin Sharma, Varun Thacker)

Bug Fixes
----------------------

@ -190,10 +212,32 @@ Bug Fixes

* SOLR-8379: UI Cloud->Tree view now shows .txt files correctly (Alexandre Rafalovitch via janhoy)

* SOLR-9003: New Admin UI's Dataimport screen now correctly displays DIH Debug output (Alexandre Rafalovitch)

* SOLR-9308: Fix distributed RTG to forward request params, fixes fq and non-default fl params (hossman)

* SOLR-9179: NPE in IndexSchema using IBM JDK (noble, Colvin Cowie)

* SOLR-9397: Config API does not support adding caches (noble)

* SOLR-9405: ConcurrentModificationException in ZkStateReader.getStateWatchers.
  (Alan Woodward, Edward Ribeiro, shalin)

* SOLR-9232: Admin UI now fully implements Swap Cores interface (Alexandre Rafalovitch)

* SOLR-8715: Admin UI's Schema screen now works for fields with stored=false and some content indexed (Alexandre Rafalovitch)

* SOLR-8911: In Admin UI, enable scrolling for overflowing Versions and JVM property values (Alexandre Rafalovitch)

* SOLR-9002: Admin UI now correctly displays json and text files in the collection/Files screen (Upayavira, Alexandre Rafalovitch)

* SOLR-8993: Admin UI now correctly supports multiple DIH handler end-points (Upayavira, Alexandre Rafalovitch)

* SOLR-9032: Admin UI now correctly implements Create Alias command (Upayavira, Alexandre Rafalovitch)

* SOLR-9391: LBHttpSolrClient.request now correctly returns Rsp.server when
  previously skipped servers were successfully tried. (Christine Poerschke)

Optimizations
----------------------

@ -249,6 +293,21 @@ Other Changes

* SOLR-9367: Improved TestInjection's randomization logic to use LuceneTestCase.random() (hossman)

* SOLR-9331: Remove ReRankQuery's length constructor argument and member. (Christine Poerschke)

* SOLR-9092: For the delete replica command we attempt to send the core admin delete request only
  if that node is actually up. (Jessica Cheng Mallet, Varun Thacker)

* SOLR-9410: Make ReRankQParserPlugin's private ReRankWeight a public class of its own. (Christine Poerschke)

* SOLR-9404: Refactor move/renames in JSON FacetProcessor and FacetFieldProcessor. (David Smiley)

* SOLR-9421: Refactored out OverseerCollectionMessageHandler to smaller classes (noble)

* SOLR-8643: BlockJoinFacetComponent is substituted by BlockJoinFacetDocSetComponent. It doesn't need to change solrconfig.xml (Mikhail Khludnev)

* SOLR-8644: Test asserts that block join facets work with parent level fq exclusions. (Dr. Oleg Savrasov via Mikhail Khludnev)

==================  6.1.0 ==================

Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
@ -20,8 +20,8 @@ import java.io.IOException;
import java.time.Instant;
import java.util.Arrays;

import org.apache.lucene.legacy.LegacyNumericUtils;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LegacyNumericUtils;
import org.apache.lucene.util.NumericUtils;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.TrieDateField;
@ -24,12 +24,12 @@ import java.util.Map;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.legacy.LegacyNumericUtils;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.docvalues.LongDocValues;
import org.apache.lucene.queries.function.valuesource.LongFieldSource;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LegacyNumericUtils;
import org.apache.lucene.util.mutable.MutableValue;
import org.apache.lucene.util.mutable.MutableValueDate;
192 solr/core/src/java/org/apache/solr/cloud/AddReplicaCmd.java Normal file
@ -0,0 +1,192 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.solr.cloud;


import java.lang.invoke.MethodHandles;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.lang.StringUtils;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.Slice;
import org.apache.solr.common.cloud.ZkNodeProps;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.CoreAdminParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.ShardParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.Utils;
import org.apache.solr.handler.component.ShardHandler;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.apache.solr.cloud.Assign.getNodesForNewReplicas;
import static org.apache.solr.cloud.OverseerCollectionMessageHandler.COLL_CONF;
import static org.apache.solr.cloud.OverseerCollectionMessageHandler.SKIP_CREATE_REPLICA_IN_CLUSTER_STATE;
import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
import static org.apache.solr.common.cloud.ZkStateReader.CORE_NAME_PROP;
import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICA;
import static org.apache.solr.common.params.CommonAdminParams.ASYNC;

public class AddReplicaCmd implements OverseerCollectionMessageHandler.Cmd {
  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  private final OverseerCollectionMessageHandler ocmh;

  public AddReplicaCmd(OverseerCollectionMessageHandler ocmh) {
    this.ocmh = ocmh;
  }

  @Override
  public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception {
    addReplica(ocmh.zkStateReader.getClusterState(), message, results, null);
  }

  ZkNodeProps addReplica(ClusterState clusterState, ZkNodeProps message, NamedList results, Runnable onComplete)
      throws KeeperException, InterruptedException {
    log.info("addReplica() : {}", Utils.toJSONString(message));
    String collection = message.getStr(COLLECTION_PROP);
    String node = message.getStr(CoreAdminParams.NODE);
    String shard = message.getStr(SHARD_ID_PROP);
    String coreName = message.getStr(CoreAdminParams.NAME);
    boolean parallel = message.getBool("parallel", false);
    if (StringUtils.isBlank(coreName)) {
      coreName = message.getStr(CoreAdminParams.PROPERTY_PREFIX + CoreAdminParams.NAME);
    }

    final String asyncId = message.getStr(ASYNC);

    DocCollection coll = clusterState.getCollection(collection);
    if (coll == null) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Collection: " + collection + " does not exist");
    }
    if (coll.getSlice(shard) == null) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
          "Collection: " + collection + " shard: " + shard + " does not exist");
    }
    ShardHandler shardHandler = ocmh.shardHandlerFactory.getShardHandler();
    boolean skipCreateReplicaInClusterState = message.getBool(SKIP_CREATE_REPLICA_IN_CLUSTER_STATE, false);

    // Kind of unnecessary, but it does put the logic of whether to override maxShardsPerNode in one place.
    if (!skipCreateReplicaInClusterState) {
      node = getNodesForNewReplicas(clusterState, collection, shard, 1, node,
          ocmh.overseer.getZkController().getCoreContainer()).get(0).nodeName;
    }
    log.info("Node Identified {} for creating new replica", node);

    if (!clusterState.liveNodesContain(node)) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Node: " + node + " is not live");
    }
    if (coreName == null) {
      coreName = Assign.buildCoreName(coll, shard);
    } else if (!skipCreateReplicaInClusterState) {
      //Validate that the core name is unique in that collection
      for (Slice slice : coll.getSlices()) {
        for (Replica replica : slice.getReplicas()) {
          String replicaCoreName = replica.getStr(CORE_NAME_PROP);
          if (coreName.equals(replicaCoreName)) {
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Another replica with the same core name already exists" +
                " for this collection");
          }
        }
      }
    }
    ModifiableSolrParams params = new ModifiableSolrParams();

    ZkStateReader zkStateReader = ocmh.zkStateReader;
    if (!Overseer.isLegacy(zkStateReader)) {
      if (!skipCreateReplicaInClusterState) {
        ZkNodeProps props = new ZkNodeProps(
            Overseer.QUEUE_OPERATION, ADDREPLICA.toLower(),
            ZkStateReader.COLLECTION_PROP, collection,
            ZkStateReader.SHARD_ID_PROP, shard,
            ZkStateReader.CORE_NAME_PROP, coreName,
            ZkStateReader.STATE_PROP, Replica.State.DOWN.toString(),
            ZkStateReader.BASE_URL_PROP, zkStateReader.getBaseUrlForNodeName(node),
            ZkStateReader.NODE_NAME_PROP, node);
        Overseer.getStateUpdateQueue(zkStateReader.getZkClient()).offer(Utils.toJSON(props));
      }
      params.set(CoreAdminParams.CORE_NODE_NAME,
          ocmh.waitToSeeReplicasInState(collection, Collections.singletonList(coreName)).get(coreName).getName());
    }

    String configName = zkStateReader.readConfigName(collection);
    String routeKey = message.getStr(ShardParams._ROUTE_);
    String dataDir = message.getStr(CoreAdminParams.DATA_DIR);
    String instanceDir = message.getStr(CoreAdminParams.INSTANCE_DIR);

    params.set(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.CREATE.toString());
    params.set(CoreAdminParams.NAME, coreName);
    params.set(COLL_CONF, configName);
    params.set(CoreAdminParams.COLLECTION, collection);
    if (shard != null) {
      params.set(CoreAdminParams.SHARD, shard);
    } else if (routeKey != null) {
      Collection<Slice> slices = coll.getRouter().getSearchSlicesSingle(routeKey, null, coll);
      if (slices.isEmpty()) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "No active shard serving _route_=" + routeKey + " found");
      } else {
        params.set(CoreAdminParams.SHARD, slices.iterator().next().getName());
      }
    } else {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Specify either 'shard' or _route_ param");
    }
    if (dataDir != null) {
      params.set(CoreAdminParams.DATA_DIR, dataDir);
    }
    if (instanceDir != null) {
      params.set(CoreAdminParams.INSTANCE_DIR, instanceDir);
    }
    ocmh.addPropertyParams(message, params);

    // For tracking async calls.
    Map<String,String> requestMap = new HashMap<>();
    ocmh.sendShardRequest(node, params, shardHandler, asyncId, requestMap);

    final String fnode = node;
    final String fcoreName = coreName;

    Runnable runnable = () -> {
      ocmh.processResponses(results, shardHandler, true, "ADDREPLICA failed to create replica", asyncId, requestMap);
      ocmh.waitForCoreNodeName(collection, fnode, fcoreName);
      if (onComplete != null) onComplete.run();
    };

    if (!parallel) {
      runnable.run();
    } else {
      ocmh.tpe.submit(runnable);
    }

    return new ZkNodeProps(
        ZkStateReader.COLLECTION_PROP, collection,
        ZkStateReader.SHARD_ID_PROP, shard,
        ZkStateReader.CORE_NAME_PROP, coreName,
        ZkStateReader.NODE_NAME_PROP, node
    );
  }
}
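AddReplicaCmd follows the shape shared by every handler class split out of OverseerCollectionMessageHandler in this commit: a constructor that captures the handler, and a call(...) entry point. A skeletal, hypothetical command illustrating just that contract (only the Cmd signature visible in this diff is assumed):

// Hypothetical skeleton of an OverseerCollectionMessageHandler.Cmd,
// mirroring the structure of AddReplicaCmd and BackupCmd above.
public class NoOpCmd implements OverseerCollectionMessageHandler.Cmd {
  private final OverseerCollectionMessageHandler ocmh;  // shared services: zkStateReader, shardHandlerFactory, ...

  public NoOpCmd(OverseerCollectionMessageHandler ocmh) {
    this.ocmh = ocmh;
  }

  @Override
  public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception {
    // A real command reads its parameters from 'message', acts through
    // 'ocmh', and reports its outcome into 'results'.
  }
}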
132 solr/core/src/java/org/apache/solr/cloud/BackupCmd.java Normal file
@ -0,0 +1,132 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.cloud;

import java.lang.invoke.MethodHandles;
import java.net.URI;
import java.time.Instant;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;

import org.apache.solr.common.SolrException;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.Slice;
import org.apache.solr.common.cloud.ZkNodeProps;
import org.apache.solr.common.params.CoreAdminParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.backup.BackupManager;
import org.apache.solr.core.backup.repository.BackupRepository;
import org.apache.solr.handler.component.ShardHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.apache.solr.cloud.OverseerCollectionMessageHandler.COLL_CONF;
import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
import static org.apache.solr.common.cloud.ZkStateReader.CORE_NAME_PROP;
import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
import static org.apache.solr.common.params.CommonParams.NAME;

public class BackupCmd implements OverseerCollectionMessageHandler.Cmd {
  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  private final OverseerCollectionMessageHandler ocmh;

  public BackupCmd(OverseerCollectionMessageHandler ocmh) {
    this.ocmh = ocmh;
  }

  @Override
  public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception {
    String collectionName = message.getStr(COLLECTION_PROP);
    String backupName = message.getStr(NAME);
    ShardHandler shardHandler = ocmh.shardHandlerFactory.getShardHandler();
    String asyncId = message.getStr(ASYNC);
    String repo = message.getStr(CoreAdminParams.BACKUP_REPOSITORY);
    String location = message.getStr(CoreAdminParams.BACKUP_LOCATION);

    Map<String, String> requestMap = new HashMap<>();
    Instant startTime = Instant.now();

    CoreContainer cc = ocmh.overseer.getZkController().getCoreContainer();
    BackupRepository repository = cc.newBackupRepository(Optional.ofNullable(repo));
    BackupManager backupMgr = new BackupManager(repository, ocmh.zkStateReader, collectionName);

    // Backup location
    URI backupPath = repository.createURI(location, backupName);

    //Validating if the directory already exists.
    if (repository.exists(backupPath)) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "The backup directory already exists: " + backupPath);
    }

    // Create a directory to store backup details.
    repository.createDirectory(backupPath);

    log.info("Starting backup of collection={} with backupName={} at location={}", collectionName, backupName,
        backupPath);

    for (Slice slice : ocmh.zkStateReader.getClusterState().getCollection(collectionName).getActiveSlices()) {
      Replica replica = slice.getLeader();

      String coreName = replica.getStr(CORE_NAME_PROP);

      ModifiableSolrParams params = new ModifiableSolrParams();
      params.set(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString());
      params.set(NAME, slice.getName());
      params.set(CoreAdminParams.BACKUP_REPOSITORY, repo);
      params.set(CoreAdminParams.BACKUP_LOCATION, backupPath.getPath()); // note: index dir will be here then the "snapshot." + slice name
      params.set(CORE_NAME_PROP, coreName);

      ocmh.sendShardRequest(replica.getNodeName(), params, shardHandler, asyncId, requestMap);
      log.debug("Sent backup request to core={} for backupName={}", coreName, backupName);
    }
    log.debug("Sent backup requests to all shard leaders for backupName={}", backupName);

    ocmh.processResponses(results, shardHandler, true, "Could not backup all replicas", asyncId, requestMap);

    log.info("Starting to backup ZK data for backupName={}", backupName);

    //Download the configs
    String configName = ocmh.zkStateReader.readConfigName(collectionName);
    backupMgr.downloadConfigDir(location, backupName, configName);

    //Save the collection's state. Can be part of the monolithic clusterstate.json or a individual state.json
    //Since we don't want to distinguish we extract the state and back it up as a separate json
    DocCollection collectionState = ocmh.zkStateReader.getClusterState().getCollection(collectionName);
    backupMgr.writeCollectionState(location, backupName, collectionName, collectionState);

    Properties properties = new Properties();

    properties.put(BackupManager.BACKUP_NAME_PROP, backupName);
    properties.put(BackupManager.COLLECTION_NAME_PROP, collectionName);
    properties.put(COLL_CONF, configName);
    properties.put(BackupManager.START_TIME_PROP, startTime.toString());
    //TODO: Add MD5 of the configset. If during restore the same name configset exists then we can compare checksums to see if they are the same.
    //if they are not the same then we can throw an error or have an 'overwriteConfig' flag
    //TODO save numDocs for the shardLeader. We can use it to sanity check the restore.

    backupMgr.writeBackupProperties(location, backupName, properties);

    log.info("Completed backing up ZK data for backupName={}", backupName);
  }
}
101 solr/core/src/java/org/apache/solr/cloud/CreateAliasCmd.java Normal file
@ -0,0 +1,101 @@

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.cloud;

import java.lang.invoke.MethodHandles;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import org.apache.solr.cloud.OverseerCollectionMessageHandler.Cmd;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.cloud.Aliases;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.ZkNodeProps;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.Utils;
import org.apache.solr.util.TimeOut;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.apache.solr.common.params.CommonParams.NAME;


public class CreateAliasCmd implements Cmd {
  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
  private final OverseerCollectionMessageHandler ocmh;

  public CreateAliasCmd(OverseerCollectionMessageHandler ocmh) {
    this.ocmh = ocmh;
  }

  @Override
  public void call(ClusterState state, ZkNodeProps message, NamedList results)
      throws Exception {
    String aliasName = message.getStr(NAME);
    String collections = message.getStr("collections");

    Map<String, Map<String, String>> newAliasesMap = new HashMap<>();
    Map<String, String> newCollectionAliasesMap = new HashMap<>();
    ZkStateReader zkStateReader = ocmh.zkStateReader;
    Map<String, String> prevColAliases = zkStateReader.getAliases().getCollectionAliasMap();
    if (prevColAliases != null) {
      newCollectionAliasesMap.putAll(prevColAliases);
    }
    newCollectionAliasesMap.put(aliasName, collections);
    newAliasesMap.put("collection", newCollectionAliasesMap);
    Aliases newAliases = new Aliases(newAliasesMap);
    byte[] jsonBytes = null;
    if (newAliases.collectionAliasSize() > 0) { // only sub map right now
      jsonBytes = Utils.toJSON(newAliases.getAliasMap());
    }
    try {
      zkStateReader.getZkClient().setData(ZkStateReader.ALIASES, jsonBytes, true);

      checkForAlias(aliasName, collections);
      // some fudge for other nodes
      Thread.sleep(100);
    } catch (KeeperException e) {
      log.error("", e);
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
    } catch (InterruptedException e) {
      log.warn("", e);
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
    }
  }

  private void checkForAlias(String name, String value) {

    TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS);
    boolean success = false;
    Aliases aliases;
    while (!timeout.hasTimedOut()) {
      aliases = ocmh.zkStateReader.getAliases();
      String collections = aliases.getCollectionAlias(name);
      if (collections != null && collections.equals(value)) {
        success = true;
        break;
      }
    }
    if (!success) {
      log.warn("Timeout waiting to be notified of Alias change...");
    }
  }
}
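checkForAlias above uses Solr's TimeOut utility to poll for a condition with an upper bound. The same wait-until pattern, reduced to its essentials; the predicate is illustrative, and the short sleep between polls (which CreateCollectionCmd below does use) avoids a busy loop:

// Sketch of the bounded-wait pattern used by checkForAlias, with a
// hypothetical condition and a small sleep between polls.
TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS);
boolean success = false;
while (!timeout.hasTimedOut()) {
  if (conditionHolds()) {  // hypothetical predicate, e.g. alias visible in ZK
    success = true;
    break;
  }
  Thread.sleep(100);       // CreateCollectionCmd polls on this same cadence
}
if (!success) {
  log.warn("Timed out waiting for condition");
}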
291 solr/core/src/java/org/apache/solr/cloud/CreateCollectionCmd.java Normal file
@ -0,0 +1,291 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.solr.cloud;


import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import org.apache.solr.cloud.OverseerCollectionMessageHandler.Cmd;
import org.apache.solr.cloud.overseer.ClusterStateMutator;
import org.apache.solr.cloud.rule.ReplicaAssigner;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocRouter;
import org.apache.solr.common.cloud.ImplicitDocRouter;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.ZkConfigManager;
import org.apache.solr.common.cloud.ZkNodeProps;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.CoreAdminParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.common.util.Utils;
import org.apache.solr.handler.component.ShardHandler;
import org.apache.solr.handler.component.ShardRequest;
import org.apache.solr.util.TimeOut;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.apache.solr.cloud.OverseerCollectionMessageHandler.COLL_CONF;
import static org.apache.solr.cloud.OverseerCollectionMessageHandler.CREATE_NODE_SET;
import static org.apache.solr.cloud.OverseerCollectionMessageHandler.NUM_SLICES;
import static org.apache.solr.cloud.OverseerCollectionMessageHandler.RANDOM;
import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE;
import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;
import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICA;
import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
import static org.apache.solr.common.params.CommonParams.NAME;
import static org.apache.solr.common.util.StrUtils.formatString;

public class CreateCollectionCmd implements Cmd {
  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
  private final OverseerCollectionMessageHandler ocmh;

  public CreateCollectionCmd(OverseerCollectionMessageHandler ocmh) {
    this.ocmh = ocmh;
  }

  @Override
  public void call(ClusterState clusterState, ZkNodeProps message, NamedList results) throws Exception {
    final String collectionName = message.getStr(NAME);
    log.info("Create collection {}", collectionName);
    if (clusterState.hasCollection(collectionName)) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "collection already exists: " + collectionName);
    }

    String configName = getConfigName(collectionName, message);
    if (configName == null) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "No config set found to associate with the collection.");
    }

    ocmh.validateConfigOrThrowSolrException(configName);


    try {
      // look at the replication factor and see if it matches reality
      // if it does not, find best nodes to create more cores

      int repFactor = message.getInt(REPLICATION_FACTOR, 1);

      ShardHandler shardHandler = ocmh.shardHandlerFactory.getShardHandler();
      final String async = message.getStr(ASYNC);

      Integer numSlices = message.getInt(NUM_SLICES, null);
      String router = message.getStr("router.name", DocRouter.DEFAULT_NAME);
      List<String> shardNames = new ArrayList<>();
      if(ImplicitDocRouter.NAME.equals(router)){
        ClusterStateMutator.getShardNames(shardNames, message.getStr("shards", null));
        numSlices = shardNames.size();
      } else {
        if (numSlices == null ) {
          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, NUM_SLICES + " is a required param (when using CompositeId router).");
        }
        ClusterStateMutator.getShardNames(numSlices, shardNames);
      }

      int maxShardsPerNode = message.getInt(MAX_SHARDS_PER_NODE, 1);

      if (repFactor <= 0) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, REPLICATION_FACTOR + " must be greater than 0");
      }

      if (numSlices <= 0) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, NUM_SLICES + " must be > 0");
      }

      // we need to look at every node and see how many cores it serves
      // add our new cores to existing nodes serving the least number of cores
      // but (for now) require that each core goes on a distinct node.

      final List<String> nodeList = OverseerCollectionMessageHandler.getLiveOrLiveAndCreateNodeSetList(clusterState.getLiveNodes(), message, RANDOM);
      Map<ReplicaAssigner.Position, String> positionVsNodes;
      if (nodeList.isEmpty()) {
        log.warn("It is unusual to create a collection ("+collectionName+") without cores.");

        positionVsNodes = new HashMap<>();
      } else {
        if (repFactor > nodeList.size()) {
          log.warn("Specified "
              + REPLICATION_FACTOR
              + " of "
              + repFactor
              + " on collection "
              + collectionName
              + " is higher than or equal to the number of Solr instances currently live or live and part of your " + CREATE_NODE_SET + "("
              + nodeList.size()
              + "). It's unusual to run two replica of the same slice on the same Solr-instance.");
        }

        int maxShardsAllowedToCreate = maxShardsPerNode * nodeList.size();
        int requestedShardsToCreate = numSlices * repFactor;
        if (maxShardsAllowedToCreate < requestedShardsToCreate) {
          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Cannot create collection " + collectionName + ". Value of "
              + MAX_SHARDS_PER_NODE + " is " + maxShardsPerNode
              + ", and the number of nodes currently live or live and part of your "+CREATE_NODE_SET+" is " + nodeList.size()
              + ". This allows a maximum of " + maxShardsAllowedToCreate
              + " to be created. Value of " + NUM_SLICES + " is " + numSlices
              + " and value of " + REPLICATION_FACTOR + " is " + repFactor
              + ". This requires " + requestedShardsToCreate
              + " shards to be created (higher than the allowed number)");
        }

        positionVsNodes = ocmh.identifyNodes(clusterState, nodeList, message, shardNames, repFactor);
      }

      ZkStateReader zkStateReader = ocmh.zkStateReader;
      boolean isLegacyCloud = Overseer.isLegacy(zkStateReader);

      ocmh.createConfNode(configName, collectionName, isLegacyCloud);

      Overseer.getStateUpdateQueue(zkStateReader.getZkClient()).offer(Utils.toJSON(message));

      // wait for a while until we don't see the collection
      TimeOut waitUntil = new TimeOut(30, TimeUnit.SECONDS);
      boolean created = false;
      while (! waitUntil.hasTimedOut()) {
        Thread.sleep(100);
        created = zkStateReader.getClusterState().hasCollection(collectionName);
        if(created) break;
      }
      if (!created)
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Could not fully create collection: " + collectionName);

      if (nodeList.isEmpty()) {
        log.info("Finished create command for collection: {}", collectionName);
        return;
      }

      // For tracking async calls.
      Map<String, String> requestMap = new HashMap<>();


      log.info(formatString("Creating SolrCores for new collection {0}, shardNames {1} , replicationFactor : {2}",
          collectionName, shardNames, repFactor));
      Map<String,ShardRequest> coresToCreate = new LinkedHashMap<>();
      for (Map.Entry<ReplicaAssigner.Position, String> e : positionVsNodes.entrySet()) {
        ReplicaAssigner.Position position = e.getKey();
        String nodeName = e.getValue();
        String coreName = collectionName + "_" + position.shard + "_replica" + (position.index + 1);
        log.info(formatString("Creating core {0} as part of shard {1} of collection {2} on {3}"
            , coreName, position.shard, collectionName, nodeName));


        String baseUrl = zkStateReader.getBaseUrlForNodeName(nodeName);
        //in the new mode, create the replica in clusterstate prior to creating the core.
        // Otherwise the core creation fails
        if (!isLegacyCloud) {
          ZkNodeProps props = new ZkNodeProps(
              Overseer.QUEUE_OPERATION, ADDREPLICA.toString(),
              ZkStateReader.COLLECTION_PROP, collectionName,
              ZkStateReader.SHARD_ID_PROP, position.shard,
              ZkStateReader.CORE_NAME_PROP, coreName,
              ZkStateReader.STATE_PROP, Replica.State.DOWN.toString(),
              ZkStateReader.BASE_URL_PROP, baseUrl);
          Overseer.getStateUpdateQueue(zkStateReader.getZkClient()).offer(Utils.toJSON(props));
        }

        // Need to create new params for each request
        ModifiableSolrParams params = new ModifiableSolrParams();
        params.set(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.CREATE.toString());

        params.set(CoreAdminParams.NAME, coreName);
        params.set(COLL_CONF, configName);
        params.set(CoreAdminParams.COLLECTION, collectionName);
        params.set(CoreAdminParams.SHARD, position.shard);
        params.set(ZkStateReader.NUM_SHARDS_PROP, numSlices);

        if (async != null) {
          String coreAdminAsyncId = async + Math.abs(System.nanoTime());
          params.add(ASYNC, coreAdminAsyncId);
          requestMap.put(nodeName, coreAdminAsyncId);
        }
        ocmh.addPropertyParams(message, params);

        ShardRequest sreq = new ShardRequest();
        sreq.nodeName = nodeName;
        params.set("qt", ocmh.adminPath);
        sreq.purpose = 1;
        sreq.shards = new String[]{baseUrl};
        sreq.actualShards = sreq.shards;
        sreq.params = params;

        if (isLegacyCloud) {
          shardHandler.submit(sreq, sreq.shards[0], sreq.params);
        } else {
          coresToCreate.put(coreName, sreq);
        }
      }

      if(!isLegacyCloud) {
        // wait for all replica entries to be created
        Map<String, Replica> replicas = ocmh.waitToSeeReplicasInState(collectionName, coresToCreate.keySet());
        for (Map.Entry<String, ShardRequest> e : coresToCreate.entrySet()) {
          ShardRequest sreq = e.getValue();
          sreq.params.set(CoreAdminParams.CORE_NODE_NAME, replicas.get(e.getKey()).getName());
          shardHandler.submit(sreq, sreq.shards[0], sreq.params);
        }
      }

      ocmh.processResponses(results, shardHandler, false, null, async, requestMap, Collections.emptySet());
      if(results.get("failure") != null && ((SimpleOrderedMap)results.get("failure")).size() > 0) {
        // Let's cleanup as we hit an exception
        // We shouldn't be passing 'results' here for the cleanup as the response would then contain 'success'
        // element, which may be interpreted by the user as a positive ack
        ocmh.cleanupCollection(collectionName, new NamedList());
        log.info("Cleaned up artifacts for failed create collection for [" + collectionName + "]");
      } else {
        log.debug("Finished create command on all shards for collection: "
            + collectionName);
      }
    } catch (SolrException ex) {
      throw ex;
    } catch (Exception ex) {
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, null, ex);
    }
  }
  String getConfigName(String coll, ZkNodeProps message) throws KeeperException, InterruptedException {
    String configName = message.getStr(COLL_CONF);

    if (configName == null) {
      // if there is only one conf, use that
      List<String> configNames = null;
      try {
        configNames = ocmh.zkStateReader.getZkClient().getChildren(ZkConfigManager.CONFIGS_ZKNODE, null, true);
        if (configNames != null && configNames.size() == 1) {
          configName = configNames.get(0);
          // no config set named, but there is only 1 - use it
          log.info("Only one config set found in zk - using it:" + configName);
        } else if (configNames.contains(coll)) {
          configName = coll;
        }
      } catch (KeeperException.NoNodeException e) {

      }
    }
    return configName;
  }
}
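The capacity check in CreateCollectionCmd above is plain arithmetic: the cluster can host at most maxShardsPerNode * liveNodes cores, while the request needs numSlices * repFactor. A worked example with illustrative values:

// Worked example of the capacity check, using illustrative numbers:
int maxShardsPerNode = 2;
int liveNodes = 3;   // nodeList.size()
int numSlices = 4;   // requested shards
int repFactor = 2;   // replicas per shard

int maxShardsAllowedToCreate = maxShardsPerNode * liveNodes;  // 2 * 3 = 6
int requestedShardsToCreate  = numSlices * repFactor;         // 4 * 2 = 8
// 6 < 8, so this request is rejected with BAD_REQUEST; raising
// maxShardsPerNode to 3 (allowing 9) would let it proceed.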
120
solr/core/src/java/org/apache/solr/cloud/CreateShardCmd.java
Normal file
120
solr/core/src/java/org/apache/solr/cloud/CreateShardCmd.java
Normal file
@ -0,0 +1,120 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.cloud;

import java.lang.invoke.MethodHandles;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import org.apache.solr.cloud.OverseerCollectionMessageHandler.Cmd;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.ZkNodeProps;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.CoreAdminParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.Utils;
import org.apache.solr.handler.component.ShardHandler;
import org.apache.solr.util.TimeOut;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.apache.solr.cloud.Assign.getNodesForNewReplicas;
import static org.apache.solr.cloud.OverseerCollectionMessageHandler.COLL_CONF;
import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;
import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
import static org.apache.solr.common.params.CommonParams.NAME;

public class CreateShardCmd implements Cmd {
  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
  private final OverseerCollectionMessageHandler ocmh;

  public CreateShardCmd(OverseerCollectionMessageHandler ocmh) {
    this.ocmh = ocmh;
  }

  @Override
  public void call(ClusterState clusterState, ZkNodeProps message, NamedList results) throws Exception {
    String collectionName = message.getStr(COLLECTION_PROP);
    String sliceName = message.getStr(SHARD_ID_PROP);

    log.info("Create shard invoked: {}", message);
    if (collectionName == null || sliceName == null)
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "'collection' and 'shard' are required parameters");
    int numSlices = 1;

    ShardHandler shardHandler = ocmh.shardHandlerFactory.getShardHandler();
    DocCollection collection = clusterState.getCollection(collectionName);
    int repFactor = message.getInt(REPLICATION_FACTOR, collection.getInt(REPLICATION_FACTOR, 1));
    String createNodeSetStr = message.getStr(OverseerCollectionMessageHandler.CREATE_NODE_SET);
    List<Assign.ReplicaCount> sortedNodeList = getNodesForNewReplicas(clusterState, collectionName, sliceName, repFactor,
        createNodeSetStr, ocmh.overseer.getZkController().getCoreContainer());

    ZkStateReader zkStateReader = ocmh.zkStateReader;
    Overseer.getStateUpdateQueue(zkStateReader.getZkClient()).offer(Utils.toJSON(message));
    // wait for a while until we see the shard
    TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS);
    boolean created = false;
    while (!timeout.hasTimedOut()) {
      Thread.sleep(100);
      created = zkStateReader.getClusterState().getCollection(collectionName).getSlice(sliceName) != null;
      if (created) break;
    }
    if (!created)
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Could not fully create shard: " + message.getStr(NAME));

    String configName = message.getStr(COLL_CONF);

    String async = message.getStr(ASYNC);
    Map<String, String> requestMap = null;
    if (async != null) {
      requestMap = new HashMap<>(repFactor, 1.0f);
    }

    for (int j = 1; j <= repFactor; j++) {
      String nodeName = sortedNodeList.get((j - 1) % sortedNodeList.size()).nodeName;
      String shardName = collectionName + "_" + sliceName + "_replica" + j;
      log.info("Creating shard " + shardName + " as part of slice " + sliceName + " of collection " + collectionName
          + " on " + nodeName);

      // Need to create new params for each request
      ModifiableSolrParams params = new ModifiableSolrParams();
      params.set(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.CREATE.toString());
      params.set(CoreAdminParams.NAME, shardName);
      params.set(COLL_CONF, configName);
      params.set(CoreAdminParams.COLLECTION, collectionName);
      params.set(CoreAdminParams.SHARD, sliceName);
      params.set(ZkStateReader.NUM_SHARDS_PROP, numSlices);
      ocmh.addPropertyParams(message, params);

      ocmh.sendShardRequest(nodeName, params, shardHandler, async, requestMap);
    }

    ocmh.processResponses(results, shardHandler, true, "Failed to create shard", async, requestMap, Collections.emptySet());

    log.info("Finished create command on all shards for collection: " + collectionName);
  }
}
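CreateShardCmd gets no direct acknowledgement from the state update queue: it publishes the message and then polls its cluster-state view until the new slice appears or 30 seconds pass. A JDK-only sketch of that wait loop (Solr's TimeOut helper is replaced here by a plain nanoTime deadline; names are illustrative):

import java.util.concurrent.TimeUnit;
import java.util.function.BooleanSupplier;

// Poll-until-visible, as in the loop above: sleep briefly, re-check, give up at the deadline.
class PollUntilVisible {
  static boolean waitFor(BooleanSupplier condition, long maxWait, TimeUnit unit)
      throws InterruptedException {
    final long deadline = System.nanoTime() + unit.toNanos(maxWait);
    while (System.nanoTime() < deadline) {
      Thread.sleep(100); // matches the 100ms poll interval in the command
      if (condition.getAsBoolean()) return true;
    }
    return false; // the command turns this into a SERVER_ERROR SolrException
  }
}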
solr/core/src/java/org/apache/solr/cloud/DeleteAliasCmd.java (new file, 95 lines)
@ -0,0 +1,95 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.cloud;

import java.lang.invoke.MethodHandles;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import org.apache.solr.common.SolrException;
import org.apache.solr.common.cloud.Aliases;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.ZkNodeProps;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.Utils;
import org.apache.solr.util.TimeOut;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.apache.solr.common.params.CommonParams.NAME;

public class DeleteAliasCmd implements OverseerCollectionMessageHandler.Cmd {
  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
  private final OverseerCollectionMessageHandler ocmh;

  public DeleteAliasCmd(OverseerCollectionMessageHandler ocmh) {
    this.ocmh = ocmh;
  }

  @Override
  public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception {
    String aliasName = message.getStr(NAME);

    Map<String,Map<String,String>> newAliasesMap = new HashMap<>();
    Map<String,String> newCollectionAliasesMap = new HashMap<>();
    ZkStateReader zkStateReader = ocmh.zkStateReader;
    newCollectionAliasesMap.putAll(zkStateReader.getAliases().getCollectionAliasMap());
    newCollectionAliasesMap.remove(aliasName);
    newAliasesMap.put("collection", newCollectionAliasesMap);
    Aliases newAliases = new Aliases(newAliasesMap);
    byte[] jsonBytes = null;
    if (newAliases.collectionAliasSize() > 0) { // only sub map right now
      jsonBytes = Utils.toJSON(newAliases.getAliasMap());
    }
    try {
      zkStateReader.getZkClient().setData(ZkStateReader.ALIASES, jsonBytes, true);
      checkForAliasAbsence(aliasName);
      // some fudge for other nodes
      Thread.sleep(100);
    } catch (KeeperException e) {
      log.error("", e);
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
    } catch (InterruptedException e) {
      log.warn("", e);
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
    }
  }

  private void checkForAliasAbsence(String name) throws InterruptedException {
    TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS);
    boolean success = false;
    while (!timeout.hasTimedOut()) {
      Aliases aliases = ocmh.zkStateReader.getAliases();
      String collections = aliases.getCollectionAlias(name);
      if (collections == null) {
        success = true;
        break;
      }
      Thread.sleep(100); // back off briefly between checks instead of busy-waiting
    }
    if (!success) {
      log.warn("Timeout waiting to be notified of Alias change...");
    }
  }
}
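The aliases node in ZooKeeper holds a one-level map keyed by alias type, and "collection" is the only type at this point, so deleting an alias is a read-modify-write of that sub-map. A JDK-only sketch of the rewrite step (the real command then serializes the result with Utils.toJSON and writes it back with setData; names here are illustrative):

import java.util.HashMap;
import java.util.Map;

// Rebuild the aliases map without the alias being deleted, as above.
class AliasRewrite {
  static Map<String, Map<String, String>> withoutAlias(
      Map<String, String> currentCollectionAliases, String aliasName) {
    Map<String, String> updated = new HashMap<>(currentCollectionAliases);
    updated.remove(aliasName); // drop only this alias; others are preserved
    Map<String, Map<String, String>> newAliasesMap = new HashMap<>();
    newAliasesMap.put("collection", updated); // only sub-map right now
    return newAliasesMap;
  }
}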
solr/core/src/java/org/apache/solr/cloud/DeleteCollectionCmd.java (new file, 121 lines)
@ -0,0 +1,121 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.cloud;

import java.lang.invoke.MethodHandles;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;

import org.apache.solr.common.NonExistentCoreException;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.ZkNodeProps;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.CoreAdminParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.Utils;
import org.apache.solr.util.TimeOut;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETE;
import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
import static org.apache.solr.common.params.CommonParams.NAME;

public class DeleteCollectionCmd implements OverseerCollectionMessageHandler.Cmd {
  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
  private final OverseerCollectionMessageHandler ocmh;

  public DeleteCollectionCmd(OverseerCollectionMessageHandler ocmh) {
    this.ocmh = ocmh;
  }

  @Override
  public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception {
    ZkStateReader zkStateReader = ocmh.zkStateReader;
    final String collection = message.getStr(NAME);
    try {
      if (zkStateReader.getClusterState().getCollectionOrNull(collection) == null) {
        if (zkStateReader.getZkClient().exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection, true)) {
          // if the collection is not in the clusterstate, but is listed in zk, do nothing; it will just
          // be removed in the finally. We cannot continue, because the code below will error if the
          // collection is not in the clusterstate.
          return;
        }
      }
      ModifiableSolrParams params = new ModifiableSolrParams();
      params.set(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.UNLOAD.toString());
      params.set(CoreAdminParams.DELETE_INSTANCE_DIR, true);
      params.set(CoreAdminParams.DELETE_DATA_DIR, true);

      String asyncId = message.getStr(ASYNC);
      Map<String, String> requestMap = null;
      if (asyncId != null) {
        requestMap = new HashMap<>();
      }

      Set<String> okayExceptions = new HashSet<>(1);
      okayExceptions.add(NonExistentCoreException.class.getName());

      ocmh.collectionCmd(message, params, results, null, asyncId, requestMap, okayExceptions);

      ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, DELETE.toLower(), NAME, collection);
      Overseer.getStateUpdateQueue(zkStateReader.getZkClient()).offer(Utils.toJSON(m));

      // wait for a while until we don't see the collection
      TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS);
      boolean removed = false;
      while (!timeout.hasTimedOut()) {
        Thread.sleep(100);
        removed = !zkStateReader.getClusterState().hasCollection(collection);
        if (removed) {
          Thread.sleep(500); // just a bit of time so it's more likely other readers see on return
          break;
        }
      }
      if (!removed) {
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
            "Could not fully remove collection: " + collection);
      }
    } finally {
      try {
        if (zkStateReader.getZkClient().exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection, true)) {
          zkStateReader.getZkClient().clean(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection);
        }
      } catch (InterruptedException e) {
        SolrException.log(log, "Cleaning up collection in zk was interrupted: " + collection, e);
        Thread.currentThread().interrupt();
      } catch (KeeperException e) {
        SolrException.log(log, "Problem cleaning up collection in zk: " + collection, e);
      }
    }
  }
}
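Unloading cores during a collection delete can race with cores that are already gone, so NonExistentCoreException is whitelisted rather than treated as a failure. Since the commands exchange exception class names as strings, the check presumably reduces to a set-membership test; a rough, JDK-only sketch (the real comparison happens inside ocmh.collectionCmd/processResponses, and the class below is illustrative only):

import java.util.Collections;
import java.util.Set;

// Sketch of the 'okayExceptions' idea: a failure whose reported exception class
// name is whitelisted is not counted against the overall result.
class BenignFailureCheck {
  static boolean isBenign(String reportedExceptionClass, Set<String> okayExceptions) {
    return okayExceptions.contains(reportedExceptionClass);
  }

  public static void main(String[] args) {
    Set<String> okay = Collections.singleton("org.apache.solr.common.NonExistentCoreException");
    System.out.println(isBenign("org.apache.solr.common.NonExistentCoreException", okay)); // true
  }
}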
solr/core/src/java/org/apache/solr/cloud/DeleteNodeCmd.java (new file, 91 lines)
@ -0,0 +1,91 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.cloud;

import java.lang.invoke.MethodHandles;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import org.apache.solr.common.SolrException;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.ZkNodeProps;
import org.apache.solr.common.util.NamedList;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETEREPLICA;

public class DeleteNodeCmd implements OverseerCollectionMessageHandler.Cmd {
  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  private final OverseerCollectionMessageHandler ocmh;

  public DeleteNodeCmd(OverseerCollectionMessageHandler ocmh) {
    this.ocmh = ocmh;
  }

  @Override
  public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception {
    ocmh.checkRequired(message, "node");
    String node = message.getStr("node");
    if (!state.liveNodesContain(node)) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Source Node: " + node + " is not live");
    }
    List<ZkNodeProps> sourceReplicas = ReplaceNodeCmd.getReplicasOfNode(node, state);
    cleanupReplicas(results, state, sourceReplicas, ocmh, node);
  }

  static void cleanupReplicas(NamedList results,
                              ClusterState clusterState,
                              List<ZkNodeProps> sourceReplicas,
                              OverseerCollectionMessageHandler ocmh, String node) throws InterruptedException {
    CountDownLatch cleanupLatch = new CountDownLatch(sourceReplicas.size());
    for (ZkNodeProps sourceReplica : sourceReplicas) {
      log.info("Deleting replica for collection={} shard={} on node={}", sourceReplica.getStr(COLLECTION_PROP), sourceReplica.getStr(SHARD_ID_PROP), node);
      NamedList deleteResult = new NamedList();
      try {
        ((DeleteReplicaCmd) ocmh.commandMap.get(DELETEREPLICA)).deleteReplica(clusterState, sourceReplica.plus("parallel", "true"), deleteResult, () -> {
          cleanupLatch.countDown();
          if (deleteResult.get("failure") != null) {
            synchronized (results) {
              results.add("failure", String.format(Locale.ROOT, "Failed to delete replica for collection=%s shard=%s" +
                  " on node=%s", sourceReplica.getStr(COLLECTION_PROP), sourceReplica.getStr(SHARD_ID_PROP), node));
            }
          }
        });
      } catch (KeeperException e) {
        log.warn("Error deleting replica", e);
        cleanupLatch.countDown();
      } catch (Exception e) {
        log.warn("Error deleting replica", e);
        cleanupLatch.countDown();
        throw e;
      }
    }
    log.debug("Waiting for delete node action to complete");
    if (!cleanupLatch.await(5, TimeUnit.MINUTES)) {
      log.warn("Timed out waiting for delete node action to complete");
    }
  }
}
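DeleteNodeCmd deletes replicas in parallel and joins on a CountDownLatch: every completion callback counts down, a failed submission counts down immediately so the latch can never hang, and the final wait is bounded at five minutes. A JDK-only sketch of that shape ('deleteAsync' is a hypothetical stand-in for DeleteReplicaCmd.deleteReplica with its onComplete callback):

import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

// Latch-based parallel cleanup, as in cleanupReplicas(...) above.
class ParallelCleanupSketch {
  interface AsyncDelete {
    void deleteAsync(String replica, Runnable onComplete) throws Exception;
  }

  static void cleanup(AsyncDelete cmd, List<String> replicas) throws InterruptedException {
    CountDownLatch cleanupLatch = new CountDownLatch(replicas.size());
    for (String replica : replicas) {
      try {
        cmd.deleteAsync(replica, cleanupLatch::countDown);
      } catch (Exception e) {
        cleanupLatch.countDown(); // never let a failed submission hang the latch
      }
    }
    if (!cleanupLatch.await(5, TimeUnit.MINUTES)) { // bounded wait, as in the command
      System.err.println("Timed out waiting for replica deletes to complete");
    }
  }
}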
Some files were not shown because too many files have changed in this diff.