diff --git a/dev-tools/idea/lucene/join/join.iml b/dev-tools/idea/lucene/join/join.iml
index 1f9e80b8adb..6de5e90a06e 100644
--- a/dev-tools/idea/lucene/join/join.iml
+++ b/dev-tools/idea/lucene/join/join.iml
@@ -14,6 +14,7 @@
+
diff --git a/dev-tools/idea/lucene/queryparser/queryparser.iml b/dev-tools/idea/lucene/queryparser/queryparser.iml
index cd2915fe3a9..86a50a58aab 100644
--- a/dev-tools/idea/lucene/queryparser/queryparser.iml
+++ b/dev-tools/idea/lucene/queryparser/queryparser.iml
@@ -17,5 +17,6 @@
+
diff --git a/dev-tools/idea/lucene/spatial-extras/spatial-extras.iml b/dev-tools/idea/lucene/spatial-extras/spatial-extras.iml
index 5694371fd6f..6285d261ee4 100644
--- a/dev-tools/idea/lucene/spatial-extras/spatial-extras.iml
+++ b/dev-tools/idea/lucene/spatial-extras/spatial-extras.iml
@@ -27,6 +27,7 @@
+
\ No newline at end of file
diff --git a/dev-tools/idea/solr/contrib/analytics/analytics.iml b/dev-tools/idea/solr/contrib/analytics/analytics.iml
index 2ff93365d37..10f51a7ef25 100644
--- a/dev-tools/idea/solr/contrib/analytics/analytics.iml
+++ b/dev-tools/idea/solr/contrib/analytics/analytics.iml
@@ -20,6 +20,7 @@
+
diff --git a/dev-tools/idea/solr/core/src/java/solr-core.iml b/dev-tools/idea/solr/core/src/java/solr-core.iml
index 822b24f6cab..6cf1ab175f4 100644
--- a/dev-tools/idea/solr/core/src/java/solr-core.iml
+++ b/dev-tools/idea/solr/core/src/java/solr-core.iml
@@ -31,5 +31,6 @@
+
diff --git a/dev-tools/idea/solr/core/src/solr-core-tests.iml b/dev-tools/idea/solr/core/src/solr-core-tests.iml
index 56f768b49f3..99297d0a70d 100644
--- a/dev-tools/idea/solr/core/src/solr-core-tests.iml
+++ b/dev-tools/idea/solr/core/src/solr-core-tests.iml
@@ -32,5 +32,6 @@
+
diff --git a/dev-tools/scripts/addVersion.py b/dev-tools/scripts/addVersion.py
index 262e099a166..e95a51f4181 100644
--- a/dev-tools/scripts/addVersion.py
+++ b/dev-tools/scripts/addVersion.py
@@ -217,7 +217,9 @@ def main():
update_changes('lucene/CHANGES.txt', c.version)
update_changes('solr/CHANGES.txt', c.version, get_solr_init_changes())
- if current_version.is_back_compat_with(c.version):
+ is_back_compat = current_version.major == c.version.major or current_version.is_back_compat_with(c.version)
+
+ if is_back_compat:
add_constant(c.version, not c.is_latest_version)
else:
print('\nNot adding constant for version %s because it is no longer supported' % c.version)
@@ -232,7 +234,7 @@ def main():
print('\nTODO: ')
print(' - Move backcompat oldIndexes to unsupportedIndexes in TestBackwardsCompatibility')
print(' - Update IndexFormatTooOldException throw cases')
- elif current_version.is_back_compat_with(c.version):
+ elif is_back_compat:
print('\nTesting changes')
check_lucene_version_tests()
check_solr_version_tests()
diff --git a/dev-tools/scripts/buildAndPushRelease.py b/dev-tools/scripts/buildAndPushRelease.py
index 1deb7985de5..e34c94316d9 100644
--- a/dev-tools/scripts/buildAndPushRelease.py
+++ b/dev-tools/scripts/buildAndPushRelease.py
@@ -218,7 +218,7 @@ def check_cmdline_tools(): # Fail fast if there are cmdline tool problems
if os.system('git --version >/dev/null 2>/dev/null'):
raise RuntimeError('"git --version" returned a non-zero exit code.')
antVersion = os.popen('ant -version').read().strip()
- if not antVersion.startswith('Apache Ant(TM) version 1.8'):
+ if not antVersion.startswith('Apache Ant(TM) version 1.8') and not antVersion.startswith('Apache Ant(TM) version 1.9'):
-  raise RuntimeError('ant version is not 1.8.X: "%s"' % antVersion)
+  raise RuntimeError('ant version is not 1.8.X or 1.9.X: "%s"' % antVersion)
def main():
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index c5db1438d8b..fbe016bedec 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -30,6 +30,9 @@ Other
* LUCENE-7360: Remove Explanation.toHtml() (Alan Woodward)
+======================= Lucene 6.3.0 =======================
+(No Changes)
+
======================= Lucene 6.2.0 =======================
API Changes
@@ -38,6 +41,9 @@ API Changes
New Features
+* LUCENE-7388: Add point based IntRangeField, FloatRangeField, LongRangeField along with
+ supporting queries and tests (Nick Knize)
+
* LUCENE-7381: Add point based DoubleRangeField and RangeFieldQuery for
indexing and querying on Ranges up to 4 dimensions (Nick Knize)
@@ -85,6 +91,12 @@ Bug Fixes
* LUCENE-7391: Fix performance regression in MemoryIndex's fields() introduced
in Lucene 6. (Steve Mason via David Smiley)
+* SOLR-9413: Fix analysis/kuromoji's CSVUtil.quoteEscape logic, add TestCSVUtil test.
+ (AppChecker, Christine Poerschke)
+
+* LUCENE-7419: Fix performance bug with TokenStream.end(), where it would lookup
+ PositionIncrementAttribute every time. (Mike McCandless, Robert Muir)
+
Improvements
* LUCENE-7323: Compound file writing now verifies the incoming
@@ -142,6 +154,13 @@ Improvements
because the ICU word-breaking algorithm has some issues. This allows for the previous
tokenization used before Lucene 5. (AM, Robert Muir)
+* LUCENE-7409: Changed MMapDirectory's unmapping to work safer, but still with
+ no guarantees. This uses a store-store barrier and yields the current thread
+ before unmapping to allow in-flight requests to finish. The new code no longer
+ uses WeakIdentityMap as it delegates all ByteBuffer reads through a new
+ ByteBufferGuard wrapper that is shared between all ByteBufferIndexInput clones.
+ (Robert Muir, Uwe Schindler)
+
Optimizations
* LUCENE-7330, LUCENE-7339: Speed up conjunction queries. (Adrien Grand)
diff --git a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/util/CSVUtil.java b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/util/CSVUtil.java
index 6301d2c05ca..04f86038d4a 100644
--- a/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/util/CSVUtil.java
+++ b/lucene/analysis/kuromoji/src/java/org/apache/lucene/analysis/ja/util/CSVUtil.java
@@ -101,7 +101,7 @@ public final class CSVUtil {
String result = original;
if (result.indexOf('\"') >= 0) {
- result.replace("\"", ESCAPED_QUOTE);
+ result = result.replace("\"", ESCAPED_QUOTE);
}
if(result.indexOf(COMMA) >= 0) {
result = "\"" + result + "\"";
diff --git a/lucene/analysis/kuromoji/src/test/org/apache/lucene/analysis/ja/TestCSVUtil.java b/lucene/analysis/kuromoji/src/test/org/apache/lucene/analysis/ja/TestCSVUtil.java
new file mode 100644
index 00000000000..01545dbfe36
--- /dev/null
+++ b/lucene/analysis/kuromoji/src/test/org/apache/lucene/analysis/ja/TestCSVUtil.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.analysis.ja;
+
+import java.io.IOException;
+
+import org.apache.lucene.analysis.ja.util.CSVUtil;
+import org.apache.lucene.util.LuceneTestCase;
+
+/*
+ * Tests for the CSVUtil class.
+ */
+public class TestCSVUtil extends LuceneTestCase {
+
+ public void testQuoteEscapeQuotes() throws IOException {
+ final String input = "\"Let It Be\" is a song and album by the The Beatles.";
+ final String expectedOutput = input.replace("\"", "\"\"");
+ implTestQuoteEscape(input, expectedOutput);
+ }
+
+ public void testQuoteEscapeComma() throws IOException {
+ final String input = "To be, or not to be ...";
+ final String expectedOutput = '"'+input+'"';
+ implTestQuoteEscape(input, expectedOutput);
+ }
+
+ public void testQuoteEscapeQuotesAndComma() throws IOException {
+ final String input = "\"To be, or not to be ...\" is a well-known phrase from Shakespeare's Hamlet.";
+ final String expectedOutput = '"'+input.replace("\"", "\"\"")+'"';
+ implTestQuoteEscape(input, expectedOutput);
+ }
+
+ private void implTestQuoteEscape(String input, String expectedOutput) throws IOException {
+ final String actualOutput = CSVUtil.quoteEscape(input);
+ assertEquals(expectedOutput, actualOutput);
+ }
+
+}
diff --git a/lucene/core/src/java/org/apache/lucene/document/LegacyDoubleField.java b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyDoubleField.java
similarity index 81%
rename from lucene/core/src/java/org/apache/lucene/document/LegacyDoubleField.java
rename to lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyDoubleField.java
index 55ba81cb120..e98a4f0f567 100644
--- a/lucene/core/src/java/org/apache/lucene/document/LegacyDoubleField.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyDoubleField.java
@@ -14,9 +14,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.lucene.document;
+package org.apache.lucene.legacy;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.index.IndexOptions;
@@ -49,7 +51,7 @@ import org.apache.lucene.index.IndexOptions;
* LegacyFloatField}.
*
*
To perform range querying or filtering against a
- * LegacyDoubleField, use {@link org.apache.lucene.search.LegacyNumericRangeQuery}.
+ * LegacyDoubleField, use {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}.
* To sort according to a
* LegacyDoubleField, use the normal numeric sort types, eg
* {@link org.apache.lucene.search.SortField.Type#DOUBLE}. LegacyDoubleField
@@ -79,11 +81,11 @@ import org.apache.lucene.index.IndexOptions;
* but may result in faster range search performance. The
* default value, 16, was selected for a reasonable tradeoff
* of disk space consumption versus performance. You can
- * create a custom {@link FieldType} and invoke the {@link
- * FieldType#setNumericPrecisionStep} method if you'd
+ * create a custom {@link LegacyFieldType} and invoke the {@link
+ * LegacyFieldType#setNumericPrecisionStep} method if you'd
* like to change the value. Note that you must also
* specify a congruent value when creating {@link
- * org.apache.lucene.search.LegacyNumericRangeQuery}.
+ * org.apache.lucene.legacy.LegacyNumericRangeQuery}.
* For low cardinality fields larger precision steps are good.
* If the cardinality is < 100, it is fair
* to use {@link Integer#MAX_VALUE}, which produces one
@@ -91,9 +93,9 @@ import org.apache.lucene.index.IndexOptions;
*
*
For more information on the internals of numeric trie
* indexing, including the precisionStep
- * configuration, see {@link org.apache.lucene.search.LegacyNumericRangeQuery}. The format of
- * indexed values is described in {@link org.apache.lucene.util.LegacyNumericUtils}.
+ * href="LegacyNumericRangeQuery.html#precisionStepDesc">precisionStep
+ * configuration, see {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}. The format of
+ * indexed values is described in {@link org.apache.lucene.legacy.LegacyNumericUtils}.
*
*
If you only need to sort by numeric value, and never
* run range querying/filtering, you can index using a
@@ -101,7 +103,7 @@ import org.apache.lucene.index.IndexOptions;
* This will minimize disk space consumed.
*
*
More advanced users can instead use {@link
- * org.apache.lucene.analysis.LegacyNumericTokenStream} directly, when indexing numbers. This
+ * org.apache.lucene.legacy.LegacyNumericTokenStream} directly, when indexing numbers. This
* class is a wrapper around this token stream type for
* easier, more intuitive usage.
*
@@ -111,18 +113,18 @@ import org.apache.lucene.index.IndexOptions;
*/
@Deprecated
-public final class LegacyDoubleField extends Field {
+public final class LegacyDoubleField extends LegacyField {
/**
* Type for a LegacyDoubleField that is not stored:
* normalization factors, frequencies, and positions are omitted.
*/
- public static final FieldType TYPE_NOT_STORED = new FieldType();
+ public static final LegacyFieldType TYPE_NOT_STORED = new LegacyFieldType();
static {
TYPE_NOT_STORED.setTokenized(true);
TYPE_NOT_STORED.setOmitNorms(true);
TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS);
- TYPE_NOT_STORED.setNumericType(FieldType.LegacyNumericType.DOUBLE);
+ TYPE_NOT_STORED.setNumericType(LegacyNumericType.DOUBLE);
TYPE_NOT_STORED.freeze();
}
@@ -130,19 +132,19 @@ public final class LegacyDoubleField extends Field {
* Type for a stored LegacyDoubleField:
* normalization factors, frequencies, and positions are omitted.
*/
- public static final FieldType TYPE_STORED = new FieldType();
+ public static final LegacyFieldType TYPE_STORED = new LegacyFieldType();
static {
TYPE_STORED.setTokenized(true);
TYPE_STORED.setOmitNorms(true);
TYPE_STORED.setIndexOptions(IndexOptions.DOCS);
- TYPE_STORED.setNumericType(FieldType.LegacyNumericType.DOUBLE);
+ TYPE_STORED.setNumericType(LegacyNumericType.DOUBLE);
TYPE_STORED.setStored(true);
TYPE_STORED.freeze();
}
/** Creates a stored or un-stored LegacyDoubleField with the provided value
* and default precisionStep {@link
- * org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16).
+ * org.apache.lucene.legacy.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16).
* @param name field name
* @param value 64-bit double value
* @param stored Store.YES if the content should also be stored
@@ -154,17 +156,17 @@ public final class LegacyDoubleField extends Field {
}
/** Expert: allows you to customize the {@link
- * FieldType}.
+ * LegacyFieldType}.
* @param name field name
* @param value 64-bit double value
- * @param type customized field type: must have {@link FieldType#numericType()}
- * of {@link org.apache.lucene.document.FieldType.LegacyNumericType#DOUBLE}.
+ * @param type customized field type: must have {@link LegacyFieldType#numericType()}
+ * of {@link LegacyNumericType#DOUBLE}.
* @throws IllegalArgumentException if the field name or type is null, or
* if the field type does not have a DOUBLE numericType()
*/
- public LegacyDoubleField(String name, double value, FieldType type) {
+ public LegacyDoubleField(String name, double value, LegacyFieldType type) {
super(name, type);
- if (type.numericType() != FieldType.LegacyNumericType.DOUBLE) {
+ if (type.numericType() != LegacyNumericType.DOUBLE) {
throw new IllegalArgumentException("type.numericType() must be DOUBLE but got " + type.numericType());
}
fieldsData = Double.valueOf(value);
diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyField.java b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyField.java
new file mode 100644
index 00000000000..87ac0e566cf
--- /dev/null
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyField.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.legacy;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexOptions;
+
+/**
+ * Field extension with support for legacy numerics
+ * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
+ */
+@Deprecated
+public class LegacyField extends Field {
+
+ /**
+ * Expert: creates a field with no initial value.
+ * Intended only for custom LegacyField subclasses.
+ * @param name field name
+ * @param type field type
+ * @throws IllegalArgumentException if either the name or type
+ * is null.
+ */
+ public LegacyField(String name, LegacyFieldType type) {
+ super(name, type);
+ }
+
+ @Override
+ public TokenStream tokenStream(Analyzer analyzer, TokenStream reuse) {
+ if (fieldType().indexOptions() == IndexOptions.NONE) {
+ // Not indexed
+ return null;
+ }
+ final LegacyFieldType fieldType = (LegacyFieldType) fieldType();
+ final LegacyNumericType numericType = fieldType.numericType();
+ if (numericType != null) {
+ if (!(reuse instanceof LegacyNumericTokenStream && ((LegacyNumericTokenStream)reuse).getPrecisionStep() == fieldType.numericPrecisionStep())) {
+ // lazy init the TokenStream as it is heavy to instantiate
+ // (attributes,...) if not needed (stored field loading)
+ reuse = new LegacyNumericTokenStream(fieldType.numericPrecisionStep());
+ }
+ final LegacyNumericTokenStream nts = (LegacyNumericTokenStream) reuse;
+ // initialize value in TokenStream
+ final Number val = (Number) fieldsData;
+ switch (numericType) {
+ case INT:
+ nts.setIntValue(val.intValue());
+ break;
+ case LONG:
+ nts.setLongValue(val.longValue());
+ break;
+ case FLOAT:
+ nts.setFloatValue(val.floatValue());
+ break;
+ case DOUBLE:
+ nts.setDoubleValue(val.doubleValue());
+ break;
+ default:
+ throw new AssertionError("Should never get here");
+ }
+ return reuse;
+ }
+ return super.tokenStream(analyzer, reuse);
+ }
+
+ @Override
+ public void setTokenStream(TokenStream tokenStream) {
+ final LegacyFieldType fieldType = (LegacyFieldType) fieldType();
+ if (fieldType.numericType() != null) {
+ throw new IllegalArgumentException("cannot set private TokenStream on numeric fields");
+ }
+ super.setTokenStream(tokenStream);
+ }
+
+}
diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyFieldType.java b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyFieldType.java
new file mode 100644
index 00000000000..1f4b0af4768
--- /dev/null
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyFieldType.java
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.legacy;
+
+import org.apache.lucene.document.FieldType;
+import org.apache.lucene.index.IndexOptions;
+
+/**
+ * FieldType extension with support for legacy numerics
+ * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
+ */
+@Deprecated
+public final class LegacyFieldType extends FieldType {
+ private LegacyNumericType numericType;
+ private int numericPrecisionStep = LegacyNumericUtils.PRECISION_STEP_DEFAULT;
+
+ /**
+ * Create a new mutable LegacyFieldType with all of the properties from ref
+ */
+ public LegacyFieldType(LegacyFieldType ref) {
+ super(ref);
+ this.numericType = ref.numericType;
+ this.numericPrecisionStep = ref.numericPrecisionStep;
+ }
+
+ /**
+ * Create a new FieldType with default properties.
+ */
+ public LegacyFieldType() {
+ }
+
+ /**
+ * Specifies the field's numeric type.
+ * @param type numeric type, or null if the field has no numeric type.
+ * @throws IllegalStateException if this FieldType is frozen against
+ * future modifications.
+ * @see #numericType()
+ *
+ * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
+ */
+ @Deprecated
+ public void setNumericType(LegacyNumericType type) {
+ checkIfFrozen();
+ numericType = type;
+ }
+
+ /**
+ * LegacyNumericType: if non-null then the field's value will be indexed
+ * numerically so that {@link org.apache.lucene.legacy.LegacyNumericRangeQuery} can be used at
+ * search time.
+ *
+ * The default is null (no numeric type)
+ * @see #setNumericType(LegacyNumericType)
+ *
+ * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
+ */
+ @Deprecated
+ public LegacyNumericType numericType() {
+ return numericType;
+ }
+
+ /**
+ * Sets the numeric precision step for the field.
+ * @param precisionStep numeric precision step for the field
+ * @throws IllegalArgumentException if precisionStep is less than 1.
+ * @throws IllegalStateException if this FieldType is frozen against
+ * future modifications.
+ * @see #numericPrecisionStep()
+ *
+ * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
+ */
+ @Deprecated
+ public void setNumericPrecisionStep(int precisionStep) {
+ checkIfFrozen();
+ if (precisionStep < 1) {
+ throw new IllegalArgumentException("precisionStep must be >= 1 (got " + precisionStep + ")");
+ }
+ this.numericPrecisionStep = precisionStep;
+ }
+
+ /**
+ * Precision step for numeric field.
+ *
+ * This has no effect if {@link #numericType()} returns null.
+ *
+ * The default is {@link org.apache.lucene.legacy.LegacyNumericUtils#PRECISION_STEP_DEFAULT}
+ * @see #setNumericPrecisionStep(int)
+ *
+ * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
+ */
+ @Deprecated
+ public int numericPrecisionStep() {
+ return numericPrecisionStep;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = super.hashCode();
+ result = prime * result + numericPrecisionStep;
+ result = prime * result + ((numericType == null) ? 0 : numericType.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (!super.equals(obj)) {
+ return false;
+ }
+ if (getClass() != obj.getClass()) return false;
+ LegacyFieldType other = (LegacyFieldType) obj;
+ if (numericPrecisionStep != other.numericPrecisionStep) return false;
+ if (numericType != other.numericType) return false;
+ return true;
+ }
+
+ /** Prints a Field for human consumption. */
+ @Override
+ public String toString() {
+ StringBuilder result = new StringBuilder();
+ result.append(super.toString());
+ if (indexOptions() != IndexOptions.NONE) {
+ if (result.length() > 0) {
+ result.append(",");
+ }
+ if (numericType != null) {
+ result.append(",numericType=");
+ result.append(numericType);
+ result.append(",numericPrecisionStep=");
+ result.append(numericPrecisionStep);
+ }
+ }
+ return result.toString();
+ }
+}
diff --git a/lucene/core/src/java/org/apache/lucene/document/LegacyFloatField.java b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyFloatField.java
similarity index 80%
rename from lucene/core/src/java/org/apache/lucene/document/LegacyFloatField.java
rename to lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyFloatField.java
index e24bf30fa54..ea3b84ab65f 100644
--- a/lucene/core/src/java/org/apache/lucene/document/LegacyFloatField.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyFloatField.java
@@ -14,11 +14,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.lucene.document;
-
+package org.apache.lucene.legacy;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.util.LegacyNumericUtils;
/**
*
To perform range querying or filtering against a
- * LegacyFloatField, use {@link org.apache.lucene.search.LegacyNumericRangeQuery}.
+ * LegacyFloatField, use {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}.
* To sort according to a
* LegacyFloatField, use the normal numeric sort types, eg
* {@link org.apache.lucene.search.SortField.Type#FLOAT}. LegacyFloatField
@@ -79,11 +79,11 @@ import org.apache.lucene.util.LegacyNumericUtils;
* but may result in faster range search performance. The
* default value, 8, was selected for a reasonable tradeoff
* of disk space consumption versus performance. You can
- * create a custom {@link FieldType} and invoke the {@link
- * FieldType#setNumericPrecisionStep} method if you'd
+ * create a custom {@link LegacyFieldType} and invoke the {@link
+ * LegacyFieldType#setNumericPrecisionStep} method if you'd
* like to change the value. Note that you must also
* specify a congruent value when creating {@link
- * org.apache.lucene.search.LegacyNumericRangeQuery}.
+ * org.apache.lucene.legacy.LegacyNumericRangeQuery}.
* For low cardinality fields larger precision steps are good.
* If the cardinality is < 100, it is fair
* to use {@link Integer#MAX_VALUE}, which produces one
@@ -91,9 +91,9 @@ import org.apache.lucene.util.LegacyNumericUtils;
*
*
For more information on the internals of numeric trie
* indexing, including the precisionStep
- * configuration, see {@link org.apache.lucene.search.LegacyNumericRangeQuery}. The format of
- * indexed values is described in {@link org.apache.lucene.util.LegacyNumericUtils}.
+ * href="LegacyNumericRangeQuery.html#precisionStepDesc">precisionStep
+ * configuration, see {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}. The format of
+ * indexed values is described in {@link org.apache.lucene.legacy.LegacyNumericUtils}.
*
*
If you only need to sort by numeric value, and never
* run range querying/filtering, you can index using a
@@ -101,7 +101,7 @@ import org.apache.lucene.util.LegacyNumericUtils;
* This will minimize disk space consumed.
*
*
More advanced users can instead use {@link
- * org.apache.lucene.analysis.LegacyNumericTokenStream} directly, when indexing numbers. This
+ * org.apache.lucene.legacy.LegacyNumericTokenStream} directly, when indexing numbers. This
* class is a wrapper around this token stream type for
* easier, more intuitive usage.
*
@@ -111,18 +111,18 @@ import org.apache.lucene.util.LegacyNumericUtils;
*/
@Deprecated
-public final class LegacyFloatField extends Field {
+public final class LegacyFloatField extends LegacyField {
/**
* Type for a LegacyFloatField that is not stored:
* normalization factors, frequencies, and positions are omitted.
*/
- public static final FieldType TYPE_NOT_STORED = new FieldType();
+ public static final LegacyFieldType TYPE_NOT_STORED = new LegacyFieldType();
static {
TYPE_NOT_STORED.setTokenized(true);
TYPE_NOT_STORED.setOmitNorms(true);
TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS);
- TYPE_NOT_STORED.setNumericType(FieldType.LegacyNumericType.FLOAT);
+ TYPE_NOT_STORED.setNumericType(LegacyNumericType.FLOAT);
TYPE_NOT_STORED.setNumericPrecisionStep(LegacyNumericUtils.PRECISION_STEP_DEFAULT_32);
TYPE_NOT_STORED.freeze();
}
@@ -131,12 +131,12 @@ public final class LegacyFloatField extends Field {
* Type for a stored LegacyFloatField:
* normalization factors, frequencies, and positions are omitted.
*/
- public static final FieldType TYPE_STORED = new FieldType();
+ public static final LegacyFieldType TYPE_STORED = new LegacyFieldType();
static {
TYPE_STORED.setTokenized(true);
TYPE_STORED.setOmitNorms(true);
TYPE_STORED.setIndexOptions(IndexOptions.DOCS);
- TYPE_STORED.setNumericType(FieldType.LegacyNumericType.FLOAT);
+ TYPE_STORED.setNumericType(LegacyNumericType.FLOAT);
TYPE_STORED.setNumericPrecisionStep(LegacyNumericUtils.PRECISION_STEP_DEFAULT_32);
TYPE_STORED.setStored(true);
TYPE_STORED.freeze();
@@ -144,7 +144,7 @@ public final class LegacyFloatField extends Field {
/** Creates a stored or un-stored LegacyFloatField with the provided value
* and default precisionStep {@link
- * org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT_32} (8).
+ * org.apache.lucene.legacy.LegacyNumericUtils#PRECISION_STEP_DEFAULT_32} (8).
* @param name field name
* @param value 32-bit double value
* @param stored Store.YES if the content should also be stored
@@ -156,17 +156,17 @@ public final class LegacyFloatField extends Field {
}
/** Expert: allows you to customize the {@link
- * FieldType}.
+ * LegacyFieldType}.
* @param name field name
* @param value 32-bit float value
- * @param type customized field type: must have {@link FieldType#numericType()}
- * of {@link org.apache.lucene.document.FieldType.LegacyNumericType#FLOAT}.
+ * @param type customized field type: must have {@link LegacyFieldType#numericType()}
+ * of {@link LegacyNumericType#FLOAT}.
* @throws IllegalArgumentException if the field name or type is null, or
* if the field type does not have a FLOAT numericType()
*/
- public LegacyFloatField(String name, float value, FieldType type) {
+ public LegacyFloatField(String name, float value, LegacyFieldType type) {
super(name, type);
- if (type.numericType() != FieldType.LegacyNumericType.FLOAT) {
+ if (type.numericType() != LegacyNumericType.FLOAT) {
throw new IllegalArgumentException("type.numericType() must be FLOAT but got " + type.numericType());
}
fieldsData = Float.valueOf(value);
diff --git a/lucene/core/src/java/org/apache/lucene/document/LegacyIntField.java b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyIntField.java
similarity index 80%
rename from lucene/core/src/java/org/apache/lucene/document/LegacyIntField.java
rename to lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyIntField.java
index 6eb0376ee64..e3ae9658b1d 100644
--- a/lucene/core/src/java/org/apache/lucene/document/LegacyIntField.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyIntField.java
@@ -14,11 +14,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.lucene.document;
+package org.apache.lucene.legacy;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.IntPoint;
import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.util.LegacyNumericUtils;
/**
*
To perform range querying or filtering against a
- * LegacyIntField, use {@link org.apache.lucene.search.LegacyNumericRangeQuery}.
+ * LegacyIntField, use {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}.
* To sort according to a
* LegacyIntField, use the normal numeric sort types, eg
* {@link org.apache.lucene.search.SortField.Type#INT}. LegacyIntField
@@ -79,11 +80,11 @@ import org.apache.lucene.util.LegacyNumericUtils;
* but may result in faster range search performance. The
* default value, 8, was selected for a reasonable tradeoff
* of disk space consumption versus performance. You can
- * create a custom {@link FieldType} and invoke the {@link
- * FieldType#setNumericPrecisionStep} method if you'd
+ * create a custom {@link LegacyFieldType} and invoke the {@link
+ * LegacyFieldType#setNumericPrecisionStep} method if you'd
* like to change the value. Note that you must also
* specify a congruent value when creating {@link
- * org.apache.lucene.search.LegacyNumericRangeQuery}.
+ * org.apache.lucene.legacy.LegacyNumericRangeQuery}.
* For low cardinality fields larger precision steps are good.
* If the cardinality is < 100, it is fair
* to use {@link Integer#MAX_VALUE}, which produces one
@@ -91,9 +92,9 @@ import org.apache.lucene.util.LegacyNumericUtils;
*
*
For more information on the internals of numeric trie
* indexing, including the precisionStep
- * configuration, see {@link org.apache.lucene.search.LegacyNumericRangeQuery}. The format of
- * indexed values is described in {@link org.apache.lucene.util.LegacyNumericUtils}.
+ * href="LegacyNumericRangeQuery.html#precisionStepDesc">precisionStep
+ * configuration, see {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}. The format of
+ * indexed values is described in {@link org.apache.lucene.legacy.LegacyNumericUtils}.
*
*
If you only need to sort by numeric value, and never
* run range querying/filtering, you can index using a
@@ -101,7 +102,7 @@ import org.apache.lucene.util.LegacyNumericUtils;
* This will minimize disk space consumed.
*
*
More advanced users can instead use {@link
- * org.apache.lucene.analysis.LegacyNumericTokenStream} directly, when indexing numbers. This
+ * org.apache.lucene.legacy.LegacyNumericTokenStream} directly, when indexing numbers. This
* class is a wrapper around this token stream type for
* easier, more intuitive usage.
*
@@ -111,18 +112,18 @@ import org.apache.lucene.util.LegacyNumericUtils;
*/
@Deprecated
-public final class LegacyIntField extends Field {
+public final class LegacyIntField extends LegacyField {
/**
* Type for an LegacyIntField that is not stored:
* normalization factors, frequencies, and positions are omitted.
*/
- public static final FieldType TYPE_NOT_STORED = new FieldType();
+ public static final LegacyFieldType TYPE_NOT_STORED = new LegacyFieldType();
static {
TYPE_NOT_STORED.setTokenized(true);
TYPE_NOT_STORED.setOmitNorms(true);
TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS);
- TYPE_NOT_STORED.setNumericType(FieldType.LegacyNumericType.INT);
+ TYPE_NOT_STORED.setNumericType(LegacyNumericType.INT);
TYPE_NOT_STORED.setNumericPrecisionStep(LegacyNumericUtils.PRECISION_STEP_DEFAULT_32);
TYPE_NOT_STORED.freeze();
}
@@ -131,12 +132,12 @@ public final class LegacyIntField extends Field {
* Type for a stored LegacyIntField:
* normalization factors, frequencies, and positions are omitted.
*/
- public static final FieldType TYPE_STORED = new FieldType();
+ public static final LegacyFieldType TYPE_STORED = new LegacyFieldType();
static {
TYPE_STORED.setTokenized(true);
TYPE_STORED.setOmitNorms(true);
TYPE_STORED.setIndexOptions(IndexOptions.DOCS);
- TYPE_STORED.setNumericType(FieldType.LegacyNumericType.INT);
+ TYPE_STORED.setNumericType(LegacyNumericType.INT);
TYPE_STORED.setNumericPrecisionStep(LegacyNumericUtils.PRECISION_STEP_DEFAULT_32);
TYPE_STORED.setStored(true);
TYPE_STORED.freeze();
@@ -144,7 +145,7 @@ public final class LegacyIntField extends Field {
/** Creates a stored or un-stored LegacyIntField with the provided value
* and default precisionStep {@link
- * org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT_32} (8).
+ * org.apache.lucene.legacy.LegacyNumericUtils#PRECISION_STEP_DEFAULT_32} (8).
* @param name field name
* @param value 32-bit integer value
* @param stored Store.YES if the content should also be stored
@@ -156,17 +157,17 @@ public final class LegacyIntField extends Field {
}
/** Expert: allows you to customize the {@link
- * FieldType}.
+ * LegacyFieldType}.
* @param name field name
* @param value 32-bit integer value
- * @param type customized field type: must have {@link FieldType#numericType()}
- * of {@link org.apache.lucene.document.FieldType.LegacyNumericType#INT}.
+ * @param type customized field type: must have {@link LegacyFieldType#numericType()}
+ * of {@link LegacyNumericType#INT}.
* @throws IllegalArgumentException if the field name or type is null, or
* if the field type does not have a INT numericType()
*/
- public LegacyIntField(String name, int value, FieldType type) {
+ public LegacyIntField(String name, int value, LegacyFieldType type) {
super(name, type);
- if (type.numericType() != FieldType.LegacyNumericType.INT) {
+ if (type.numericType() != LegacyNumericType.INT) {
throw new IllegalArgumentException("type.numericType() must be INT but got " + type.numericType());
}
fieldsData = Integer.valueOf(value);
diff --git a/lucene/core/src/java/org/apache/lucene/document/LegacyLongField.java b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyLongField.java
similarity index 81%
rename from lucene/core/src/java/org/apache/lucene/document/LegacyLongField.java
rename to lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyLongField.java
index fa1851fe7e6..3e20b448b96 100644
--- a/lucene/core/src/java/org/apache/lucene/document/LegacyLongField.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyLongField.java
@@ -14,9 +14,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.lucene.document;
+package org.apache.lucene.legacy;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.IndexOptions;
@@ -59,7 +61,7 @@ import org.apache.lucene.index.IndexOptions;
* long value.
*
*
To perform range querying or filtering against a
- * LegacyLongField, use {@link org.apache.lucene.search.LegacyNumericRangeQuery}.
+ * LegacyLongField, use {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}.
* To sort according to a
* LegacyLongField, use the normal numeric sort types, eg
* {@link org.apache.lucene.search.SortField.Type#LONG}. LegacyLongField
@@ -89,11 +91,11 @@ import org.apache.lucene.index.IndexOptions;
* but may result in faster range search performance. The
* default value, 16, was selected for a reasonable tradeoff
* of disk space consumption versus performance. You can
- * create a custom {@link FieldType} and invoke the {@link
- * FieldType#setNumericPrecisionStep} method if you'd
+ * create a custom {@link LegacyFieldType} and invoke the {@link
+ * LegacyFieldType#setNumericPrecisionStep} method if you'd
* like to change the value. Note that you must also
* specify a congruent value when creating {@link
- * org.apache.lucene.search.LegacyNumericRangeQuery}.
+ * org.apache.lucene.legacy.LegacyNumericRangeQuery}.
* For low cardinality fields larger precision steps are good.
* If the cardinality is < 100, it is fair
* to use {@link Integer#MAX_VALUE}, which produces one
@@ -101,9 +103,9 @@ import org.apache.lucene.index.IndexOptions;
*
*
For more information on the internals of numeric trie
* indexing, including the precisionStep
- * configuration, see {@link org.apache.lucene.search.LegacyNumericRangeQuery}. The format of
- * indexed values is described in {@link org.apache.lucene.util.LegacyNumericUtils}.
+ * href="LegacyNumericRangeQuery.html#precisionStepDesc">precisionStep
+ * configuration, see {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}. The format of
+ * indexed values is described in {@link org.apache.lucene.legacy.LegacyNumericUtils}.
*
*
If you only need to sort by numeric value, and never
* run range querying/filtering, you can index using a
@@ -111,7 +113,7 @@ import org.apache.lucene.index.IndexOptions;
* This will minimize disk space consumed.
*
*
More advanced users can instead use {@link
- * org.apache.lucene.analysis.LegacyNumericTokenStream} directly, when indexing numbers. This
+ * org.apache.lucene.legacy.LegacyNumericTokenStream} directly, when indexing numbers. This
* class is a wrapper around this token stream type for
* easier, more intuitive usage.
*
@@ -121,18 +123,18 @@ import org.apache.lucene.index.IndexOptions;
*/
@Deprecated
-public final class LegacyLongField extends Field {
+public final class LegacyLongField extends LegacyField {
/**
* Type for a LegacyLongField that is not stored:
* normalization factors, frequencies, and positions are omitted.
*/
- public static final FieldType TYPE_NOT_STORED = new FieldType();
+ public static final LegacyFieldType TYPE_NOT_STORED = new LegacyFieldType();
static {
TYPE_NOT_STORED.setTokenized(true);
TYPE_NOT_STORED.setOmitNorms(true);
TYPE_NOT_STORED.setIndexOptions(IndexOptions.DOCS);
- TYPE_NOT_STORED.setNumericType(FieldType.LegacyNumericType.LONG);
+ TYPE_NOT_STORED.setNumericType(LegacyNumericType.LONG);
TYPE_NOT_STORED.freeze();
}
@@ -140,19 +142,19 @@ public final class LegacyLongField extends Field {
* Type for a stored LegacyLongField:
* normalization factors, frequencies, and positions are omitted.
*/
- public static final FieldType TYPE_STORED = new FieldType();
+ public static final LegacyFieldType TYPE_STORED = new LegacyFieldType();
static {
TYPE_STORED.setTokenized(true);
TYPE_STORED.setOmitNorms(true);
TYPE_STORED.setIndexOptions(IndexOptions.DOCS);
- TYPE_STORED.setNumericType(FieldType.LegacyNumericType.LONG);
+ TYPE_STORED.setNumericType(LegacyNumericType.LONG);
TYPE_STORED.setStored(true);
TYPE_STORED.freeze();
}
/** Creates a stored or un-stored LegacyLongField with the provided value
* and default precisionStep {@link
- * org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16).
+ * org.apache.lucene.legacy.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16).
* @param name field name
* @param value 64-bit long value
* @param stored Store.YES if the content should also be stored
@@ -164,17 +166,17 @@ public final class LegacyLongField extends Field {
}
/** Expert: allows you to customize the {@link
- * FieldType}.
+ * LegacyFieldType}.
* @param name field name
* @param value 64-bit long value
- * @param type customized field type: must have {@link FieldType#numericType()}
- * of {@link org.apache.lucene.document.FieldType.LegacyNumericType#LONG}.
+ * @param type customized field type: must have {@link LegacyFieldType#numericType()}
+ * of {@link LegacyNumericType#LONG}.
* @throws IllegalArgumentException if the field name or type is null, or
* if the field type does not have a LONG numericType()
*/
- public LegacyLongField(String name, long value, FieldType type) {
+ public LegacyLongField(String name, long value, LegacyFieldType type) {
super(name, type);
- if (type.numericType() != FieldType.LegacyNumericType.LONG) {
+ if (type.numericType() != LegacyNumericType.LONG) {
throw new IllegalArgumentException("type.numericType() must be LONG but got " + type.numericType());
}
fieldsData = Long.valueOf(value);
diff --git a/lucene/core/src/java/org/apache/lucene/search/LegacyNumericRangeQuery.java b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyNumericRangeQuery.java
similarity index 89%
rename from lucene/core/src/java/org/apache/lucene/search/LegacyNumericRangeQuery.java
rename to lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyNumericRangeQuery.java
index fe6c9e24864..f172a200779 100644
--- a/lucene/core/src/java/org/apache/lucene/search/LegacyNumericRangeQuery.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyNumericRangeQuery.java
@@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.lucene.search;
+package org.apache.lucene.legacy;
import java.io.IOException;
@@ -22,8 +22,6 @@ import java.util.LinkedList;
import java.util.Objects;
import org.apache.lucene.document.DoublePoint;
-import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.FieldType.LegacyNumericType;
import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LongPoint;
@@ -31,18 +29,21 @@ import org.apache.lucene.index.FilteredTermsEnum;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.MultiTermQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.LegacyNumericUtils;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.index.Term; // for javadocs
/**
*
A {@link Query} that matches numeric values within a
* specified range. To use this, you must first index the
- * numeric values using {@link org.apache.lucene.document.LegacyIntField}, {@link
- * org.apache.lucene.document.LegacyFloatField}, {@link org.apache.lucene.document.LegacyLongField} or {@link org.apache.lucene.document.LegacyDoubleField} (expert: {@link
- * org.apache.lucene.analysis.LegacyNumericTokenStream}). If your terms are instead textual,
+ * numeric values using {@link org.apache.lucene.legacy.LegacyIntField}, {@link
+ * org.apache.lucene.legacy.LegacyFloatField}, {@link org.apache.lucene.legacy.LegacyLongField} or {@link org.apache.lucene.legacy.LegacyDoubleField} (expert: {@link
+ * org.apache.lucene.legacy.LegacyNumericTokenStream}). If your terms are instead textual,
* you should use {@link TermRangeQuery}.
*
*
You create a new LegacyNumericRangeQuery with the static
@@ -96,7 +97,7 @@ import org.apache.lucene.index.Term; // for javadocs
* (all numerical values like doubles, longs, floats, and ints are converted to
* lexicographic sortable string representations and stored with different precisions
* (for a more detailed description of how the values are stored,
- * see {@link org.apache.lucene.util.LegacyNumericUtils}). A range is then divided recursively into multiple intervals for searching:
+ * see {@link org.apache.lucene.legacy.LegacyNumericUtils}). A range is then divided recursively into multiple intervals for searching:
* The center of the range is searched only with the lowest possible precision in the trie,
* while the boundaries are matched more exactly. This reduces the number of terms dramatically.
You can choose any precisionStep when encoding values.
* Lower step values mean more precisions and so more terms in index (and index gets larger). The number
- * of indexed terms per value is (those are generated by {@link org.apache.lucene.analysis.LegacyNumericTokenStream}):
+ * of indexed terms per value is (those are generated by {@link org.apache.lucene.legacy.LegacyNumericTokenStream}):
*
@@ -148,8 +149,8 @@ import org.apache.lucene.index.Term; // for javadocs
*
Steps ≥64 for long/double and ≥32 for int/float produces one token
* per value in the index and querying is as slow as a conventional {@link TermRangeQuery}. But it can be used
* to produce fields, that are solely used for sorting (in this case simply use {@link Integer#MAX_VALUE} as
- * precisionStep). Using {@link org.apache.lucene.document.LegacyIntField},
- * {@link org.apache.lucene.document.LegacyLongField}, {@link org.apache.lucene.document.LegacyFloatField} or {@link org.apache.lucene.document.LegacyDoubleField} for sorting
+ * precisionStep). Using {@link org.apache.lucene.legacy.LegacyIntField},
+ * {@link org.apache.lucene.legacy.LegacyLongField}, {@link org.apache.lucene.legacy.LegacyFloatField} or {@link org.apache.lucene.legacy.LegacyDoubleField} for sorting
* is ideal, because building the field cache is much faster than with text-only numbers.
* These fields have one term per value and therefore also work with term enumeration for building distinct lists
* (e.g. facets / preselected values to search for).
@@ -199,12 +200,12 @@ public final class LegacyNumericRangeQuery extends MultiTermQu
public static LegacyNumericRangeQuery newLongRange(final String field, final int precisionStep,
Long min, Long max, final boolean minInclusive, final boolean maxInclusive
) {
- return new LegacyNumericRangeQuery<>(field, precisionStep, FieldType.LegacyNumericType.LONG, min, max, minInclusive, maxInclusive);
+ return new LegacyNumericRangeQuery<>(field, precisionStep, LegacyNumericType.LONG, min, max, minInclusive, maxInclusive);
}
/**
* Factory that creates a LegacyNumericRangeQuery, that queries a long
- * range using the default precisionStep {@link org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16).
+ * range using the default precisionStep {@link org.apache.lucene.legacy.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16).
* You can have half-open ranges (which are in fact </≤ or >/≥ queries)
* by setting the min or max value to null. By setting inclusive to false, it will
* match all documents excluding the bounds, with inclusive on, the boundaries are hits, too.
@@ -212,7 +213,7 @@ public final class LegacyNumericRangeQuery extends MultiTermQu
public static LegacyNumericRangeQuery newLongRange(final String field,
Long min, Long max, final boolean minInclusive, final boolean maxInclusive
) {
- return new LegacyNumericRangeQuery<>(field, LegacyNumericUtils.PRECISION_STEP_DEFAULT, FieldType.LegacyNumericType.LONG, min, max, minInclusive, maxInclusive);
+ return new LegacyNumericRangeQuery<>(field, LegacyNumericUtils.PRECISION_STEP_DEFAULT, LegacyNumericType.LONG, min, max, minInclusive, maxInclusive);
}
/**
@@ -225,12 +226,12 @@ public final class LegacyNumericRangeQuery extends MultiTermQu
public static LegacyNumericRangeQuery newIntRange(final String field, final int precisionStep,
Integer min, Integer max, final boolean minInclusive, final boolean maxInclusive
) {
- return new LegacyNumericRangeQuery<>(field, precisionStep, FieldType.LegacyNumericType.INT, min, max, minInclusive, maxInclusive);
+ return new LegacyNumericRangeQuery<>(field, precisionStep, LegacyNumericType.INT, min, max, minInclusive, maxInclusive);
}
/**
* Factory that creates a LegacyNumericRangeQuery, that queries a int
- * range using the default precisionStep {@link org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT_32} (8).
+ * range using the default precisionStep {@link org.apache.lucene.legacy.LegacyNumericUtils#PRECISION_STEP_DEFAULT_32} (8).
* You can have half-open ranges (which are in fact </≤ or >/≥ queries)
* by setting the min or max value to null. By setting inclusive to false, it will
* match all documents excluding the bounds, with inclusive on, the boundaries are hits, too.
@@ -238,7 +239,7 @@ public final class LegacyNumericRangeQuery extends MultiTermQu
public static LegacyNumericRangeQuery newIntRange(final String field,
Integer min, Integer max, final boolean minInclusive, final boolean maxInclusive
) {
- return new LegacyNumericRangeQuery<>(field, LegacyNumericUtils.PRECISION_STEP_DEFAULT_32, FieldType.LegacyNumericType.INT, min, max, minInclusive, maxInclusive);
+ return new LegacyNumericRangeQuery<>(field, LegacyNumericUtils.PRECISION_STEP_DEFAULT_32, LegacyNumericType.INT, min, max, minInclusive, maxInclusive);
}
/**
@@ -253,12 +254,12 @@ public final class LegacyNumericRangeQuery extends MultiTermQu
public static LegacyNumericRangeQuery newDoubleRange(final String field, final int precisionStep,
Double min, Double max, final boolean minInclusive, final boolean maxInclusive
) {
- return new LegacyNumericRangeQuery<>(field, precisionStep, FieldType.LegacyNumericType.DOUBLE, min, max, minInclusive, maxInclusive);
+ return new LegacyNumericRangeQuery<>(field, precisionStep, LegacyNumericType.DOUBLE, min, max, minInclusive, maxInclusive);
}
/**
* Factory that creates a LegacyNumericRangeQuery, that queries a double
- * range using the default precisionStep {@link org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16).
+ * range using the default precisionStep {@link org.apache.lucene.legacy.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16).
* You can have half-open ranges (which are in fact </≤ or >/≥ queries)
* by setting the min or max value to null.
* {@link Double#NaN} will never match a half-open range, to hit {@code NaN} use a query
@@ -268,7 +269,7 @@ public final class LegacyNumericRangeQuery extends MultiTermQu
public static LegacyNumericRangeQuery newDoubleRange(final String field,
Double min, Double max, final boolean minInclusive, final boolean maxInclusive
) {
- return new LegacyNumericRangeQuery<>(field, LegacyNumericUtils.PRECISION_STEP_DEFAULT, FieldType.LegacyNumericType.DOUBLE, min, max, minInclusive, maxInclusive);
+ return new LegacyNumericRangeQuery<>(field, LegacyNumericUtils.PRECISION_STEP_DEFAULT, LegacyNumericType.DOUBLE, min, max, minInclusive, maxInclusive);
}
/**
@@ -283,12 +284,12 @@ public final class LegacyNumericRangeQuery extends MultiTermQu
public static LegacyNumericRangeQuery newFloatRange(final String field, final int precisionStep,
Float min, Float max, final boolean minInclusive, final boolean maxInclusive
) {
- return new LegacyNumericRangeQuery<>(field, precisionStep, FieldType.LegacyNumericType.FLOAT, min, max, minInclusive, maxInclusive);
+ return new LegacyNumericRangeQuery<>(field, precisionStep, LegacyNumericType.FLOAT, min, max, minInclusive, maxInclusive);
}
/**
* Factory that creates a LegacyNumericRangeQuery, that queries a float
- * range using the default precisionStep {@link org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT_32} (8).
+ * range using the default precisionStep {@link org.apache.lucene.legacy.LegacyNumericUtils#PRECISION_STEP_DEFAULT_32} (8).
* You can have half-open ranges (which are in fact </≤ or >/≥ queries)
* by setting the min or max value to null.
* {@link Float#NaN} will never match a half-open range, to hit {@code NaN} use a query
@@ -298,7 +299,7 @@ public final class LegacyNumericRangeQuery extends MultiTermQu
public static LegacyNumericRangeQuery newFloatRange(final String field,
Float min, Float max, final boolean minInclusive, final boolean maxInclusive
) {
- return new LegacyNumericRangeQuery<>(field, LegacyNumericUtils.PRECISION_STEP_DEFAULT_32, FieldType.LegacyNumericType.FLOAT, min, max, minInclusive, maxInclusive);
+ return new LegacyNumericRangeQuery<>(field, LegacyNumericUtils.PRECISION_STEP_DEFAULT_32, LegacyNumericType.FLOAT, min, max, minInclusive, maxInclusive);
}
@Override @SuppressWarnings("unchecked")
@@ -369,7 +370,7 @@ public final class LegacyNumericRangeQuery extends MultiTermQu
// members (package private, to be also fast accessible by NumericRangeTermEnum)
final int precisionStep;
- final FieldType.LegacyNumericType dataType;
+ final LegacyNumericType dataType;
final T min, max;
final boolean minInclusive,maxInclusive;
@@ -389,8 +390,8 @@ public final class LegacyNumericRangeQuery extends MultiTermQu
*
* WARNING: This term enumeration is not guaranteed to be always ordered by
* {@link Term#compareTo}.
- * The ordering depends on how {@link org.apache.lucene.util.LegacyNumericUtils#splitLongRange} and
- * {@link org.apache.lucene.util.LegacyNumericUtils#splitIntRange} generates the sub-ranges. For
+ * The ordering depends on how {@link org.apache.lucene.legacy.LegacyNumericUtils#splitLongRange} and
+ * {@link org.apache.lucene.legacy.LegacyNumericUtils#splitIntRange} generates the sub-ranges. For
* {@link MultiTermQuery} ordering is not relevant.
*/
private final class NumericRangeTermsEnum extends FilteredTermsEnum {
@@ -406,10 +407,10 @@ public final class LegacyNumericRangeQuery extends MultiTermQu
case DOUBLE: {
// lower
long minBound;
- if (dataType == FieldType.LegacyNumericType.LONG) {
+ if (dataType == LegacyNumericType.LONG) {
minBound = (min == null) ? Long.MIN_VALUE : min.longValue();
} else {
- assert dataType == FieldType.LegacyNumericType.DOUBLE;
+ assert dataType == LegacyNumericType.DOUBLE;
minBound = (min == null) ? LONG_NEGATIVE_INFINITY
: NumericUtils.doubleToSortableLong(min.doubleValue());
}
@@ -420,10 +421,10 @@ public final class LegacyNumericRangeQuery extends MultiTermQu
// upper
long maxBound;
- if (dataType == FieldType.LegacyNumericType.LONG) {
+ if (dataType == LegacyNumericType.LONG) {
maxBound = (max == null) ? Long.MAX_VALUE : max.longValue();
} else {
- assert dataType == FieldType.LegacyNumericType.DOUBLE;
+ assert dataType == LegacyNumericType.DOUBLE;
maxBound = (max == null) ? LONG_POSITIVE_INFINITY
: NumericUtils.doubleToSortableLong(max.doubleValue());
}
@@ -446,10 +447,10 @@ public final class LegacyNumericRangeQuery extends MultiTermQu
case FLOAT: {
// lower
int minBound;
- if (dataType == FieldType.LegacyNumericType.INT) {
+ if (dataType == LegacyNumericType.INT) {
minBound = (min == null) ? Integer.MIN_VALUE : min.intValue();
} else {
- assert dataType == FieldType.LegacyNumericType.FLOAT;
+ assert dataType == LegacyNumericType.FLOAT;
minBound = (min == null) ? INT_NEGATIVE_INFINITY
: NumericUtils.floatToSortableInt(min.floatValue());
}
@@ -463,7 +464,7 @@ public final class LegacyNumericRangeQuery extends MultiTermQu
if (dataType == LegacyNumericType.INT) {
maxBound = (max == null) ? Integer.MAX_VALUE : max.intValue();
} else {
- assert dataType == FieldType.LegacyNumericType.FLOAT;
+ assert dataType == LegacyNumericType.FLOAT;
maxBound = (max == null) ? INT_POSITIVE_INFINITY
: NumericUtils.floatToSortableInt(max.floatValue());
}
diff --git a/lucene/core/src/java/org/apache/lucene/analysis/LegacyNumericTokenStream.java b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyNumericTokenStream.java
similarity index 94%
rename from lucene/core/src/java/org/apache/lucene/analysis/LegacyNumericTokenStream.java
rename to lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyNumericTokenStream.java
index 19f7d37e31f..a2aba19e2ac 100644
--- a/lucene/core/src/java/org/apache/lucene/analysis/LegacyNumericTokenStream.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyNumericTokenStream.java
@@ -14,11 +14,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.lucene.analysis;
+package org.apache.lucene.legacy;
import java.util.Objects;
+import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
@@ -29,16 +30,15 @@ import org.apache.lucene.util.AttributeImpl;
import org.apache.lucene.util.AttributeReflector;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
-import org.apache.lucene.util.LegacyNumericUtils;
import org.apache.lucene.util.NumericUtils;
/**
* Expert: This class provides a {@link TokenStream}
* for indexing numeric values that can be used by {@link
- * org.apache.lucene.search.LegacyNumericRangeQuery}.
+ * org.apache.lucene.legacy.LegacyNumericRangeQuery}.
*
- *
Note that for simple usage, {@link org.apache.lucene.document.LegacyIntField}, {@link
- * org.apache.lucene.document.LegacyLongField}, {@link org.apache.lucene.document.LegacyFloatField} or {@link org.apache.lucene.document.LegacyDoubleField} is
+ *
Note that for simple usage, {@link org.apache.lucene.legacy.LegacyIntField}, {@link
+ * org.apache.lucene.legacy.LegacyLongField}, {@link org.apache.lucene.legacy.LegacyFloatField} or {@link org.apache.lucene.legacy.LegacyDoubleField} is
* recommended. These fields disable norms and
* term freqs, as they are not usually needed during
* searching. If you need to change these settings, you
@@ -81,9 +81,9 @@ import org.apache.lucene.util.NumericUtils;
* than one numeric field, use a separate LegacyNumericTokenStream
* instance for each.
*
- *
See {@link org.apache.lucene.search.LegacyNumericRangeQuery} for more details on the
+ *
See {@link org.apache.lucene.legacy.LegacyNumericRangeQuery} for more details on the
* precisionStep
+ * href="LegacyNumericRangeQuery.html#precisionStepDesc">precisionStep
* parameter as well as how numeric fields work under the hood.
*
* @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
@@ -140,7 +140,7 @@ public final class LegacyNumericTokenStream extends TokenStream {
}
}
- /** Implementation of {@link org.apache.lucene.analysis.LegacyNumericTokenStream.LegacyNumericTermAttribute}.
+ /** Implementation of {@link org.apache.lucene.legacy.LegacyNumericTokenStream.LegacyNumericTermAttribute}.
* @lucene.internal
* @since 4.0
*/
@@ -240,7 +240,7 @@ public final class LegacyNumericTokenStream extends TokenStream {
/**
* Creates a token stream for numeric values using the default precisionStep
- * {@link org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16). The stream is not yet initialized,
+ * {@link org.apache.lucene.legacy.LegacyNumericUtils#PRECISION_STEP_DEFAULT} (16). The stream is not yet initialized,
* before using set a value using the various set???Value() methods.
*/
public LegacyNumericTokenStream() {
diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyNumericType.java b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyNumericType.java
new file mode 100644
index 00000000000..345b4974b02
--- /dev/null
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyNumericType.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.legacy;
+
+/** Data type of the numeric value
+ * @since 3.2
+ *
+ * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
+ */
+@Deprecated
+public enum LegacyNumericType {
+ /** 32-bit integer numeric type */
+ INT,
+ /** 64-bit long numeric type */
+ LONG,
+ /** 32-bit float numeric type */
+ FLOAT,
+ /** 64-bit double numeric type */
+ DOUBLE
+}
diff --git a/lucene/core/src/java/org/apache/lucene/util/LegacyNumericUtils.java b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyNumericUtils.java
similarity index 95%
rename from lucene/core/src/java/org/apache/lucene/util/LegacyNumericUtils.java
rename to lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyNumericUtils.java
index 9a26bfa3f2b..e6659d7e102 100644
--- a/lucene/core/src/java/org/apache/lucene/util/LegacyNumericUtils.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/LegacyNumericUtils.java
@@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.lucene.util;
+package org.apache.lucene.legacy;
import java.io.IOException;
@@ -23,6 +23,8 @@ import org.apache.lucene.index.FilterLeafReader;
import org.apache.lucene.index.FilteredTermsEnum;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.BytesRefBuilder;
/**
* This is a helper class to generate prefix-encoded representations for numerical values
@@ -41,9 +43,9 @@ import org.apache.lucene.index.TermsEnum;
* during encoding.
*
*
For easy usage, the trie algorithm is implemented for indexing inside
- * {@link org.apache.lucene.analysis.LegacyNumericTokenStream} that can index int, long,
+ * {@link org.apache.lucene.legacy.LegacyNumericTokenStream} that can index int, long,
* float, and double. For querying,
- * {@link org.apache.lucene.search.LegacyNumericRangeQuery} implements the query part
+ * {@link org.apache.lucene.legacy.LegacyNumericRangeQuery} implements the query part
* for the same data types.
*
* @lucene.internal
@@ -59,15 +61,15 @@ public final class LegacyNumericUtils {
private LegacyNumericUtils() {} // no instance!
/**
- * The default precision step used by {@link org.apache.lucene.document.LegacyLongField},
- * {@link org.apache.lucene.document.LegacyDoubleField}, {@link org.apache.lucene.analysis.LegacyNumericTokenStream}, {@link
- * org.apache.lucene.search.LegacyNumericRangeQuery}.
+ * The default precision step used by {@link org.apache.lucene.legacy.LegacyLongField},
+ * {@link org.apache.lucene.legacy.LegacyDoubleField}, {@link org.apache.lucene.legacy.LegacyNumericTokenStream}, {@link
+ * org.apache.lucene.legacy.LegacyNumericRangeQuery}.
*/
public static final int PRECISION_STEP_DEFAULT = 16;
/**
- * The default precision step used by {@link org.apache.lucene.document.LegacyIntField} and
- * {@link org.apache.lucene.document.LegacyFloatField}.
+ * The default precision step used by {@link org.apache.lucene.legacy.LegacyIntField} and
+ * {@link org.apache.lucene.legacy.LegacyFloatField}.
*/
public static final int PRECISION_STEP_DEFAULT_32 = 8;
@@ -99,7 +101,7 @@ public final class LegacyNumericUtils {
/**
* Returns prefix coded bits after reducing the precision by shift bits.
- * This is method is used by {@link org.apache.lucene.analysis.LegacyNumericTokenStream}.
+ * This method is used by {@link org.apache.lucene.legacy.LegacyNumericTokenStream}.
* After encoding, {@code bytes.offset} will always be 0.
* @param val the numeric value
* @param shift how many bits to strip from the right
@@ -126,7 +128,7 @@ public final class LegacyNumericUtils {
/**
* Returns prefix coded bits after reducing the precision by shift bits.
- * This is method is used by {@link org.apache.lucene.analysis.LegacyNumericTokenStream}.
+ * This method is used by {@link org.apache.lucene.legacy.LegacyNumericTokenStream}.
* After encoding, {@code bytes.offset} will always be 0.
* @param val the numeric value
* @param shift how many bits to strip from the right
@@ -230,7 +232,7 @@ public final class LegacyNumericUtils {
* {@link org.apache.lucene.search.BooleanQuery} for each call to its
* {@link LongRangeBuilder#addRange(BytesRef,BytesRef)}
* method.
- * <p>This method is used by {@link org.apache.lucene.search.LegacyNumericRangeQuery}.
+ * <p>This method is used by {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}.
*/
public static void splitLongRange(final LongRangeBuilder builder,
final int precisionStep, final long minBound, final long maxBound
@@ -244,7 +246,7 @@ public final class LegacyNumericUtils {
* {@link org.apache.lucene.search.BooleanQuery} for each call to its
* {@link IntRangeBuilder#addRange(BytesRef,BytesRef)}
* method.
- * <p>This method is used by {@link org.apache.lucene.search.LegacyNumericRangeQuery}.
+ * <p>This method is used by {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}.
*/
public static void splitIntRange(final IntRangeBuilder builder,
final int precisionStep, final int minBound, final int maxBound
diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/legacy/package-info.java b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/package-info.java
new file mode 100644
index 00000000000..d0167f80023
--- /dev/null
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/legacy/package-info.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Deprecated numeric field, token stream, query, and encoding classes, retained only for backwards compatibility with older indexes.
+ */
+package org.apache.lucene.legacy;
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
index 8226022e6d8..03480d779ed 100644
--- a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
@@ -47,8 +47,6 @@ import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.FloatDocValuesField;
import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.IntPoint;
-import org.apache.lucene.document.LegacyIntField;
-import org.apache.lucene.document.LegacyLongField;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedDocValuesField;
@@ -57,9 +55,12 @@ import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.legacy.LegacyIntField;
+import org.apache.lucene.legacy.LegacyLongField;
+import org.apache.lucene.legacy.LegacyNumericRangeQuery;
+import org.apache.lucene.legacy.LegacyNumericUtils;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.BaseDirectoryWrapper;
@@ -72,7 +73,6 @@ import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.InfoStream;
-import org.apache.lucene.util.LegacyNumericUtils;
import org.apache.lucene.util.LineFileDocs;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestLegacyField.java b/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestLegacyField.java
new file mode 100644
index 00000000000..65ff0969d9d
--- /dev/null
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestLegacyField.java
@@ -0,0 +1,196 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.legacy;
+
+import java.io.StringReader;
+
+import org.apache.lucene.analysis.CannedTokenStream;
+import org.apache.lucene.analysis.Token;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.LuceneTestCase;
+
+public class TestLegacyField extends LuceneTestCase {
+
+ public void testLegacyDoubleField() throws Exception {
+ Field fields[] = new Field[] {
+ new LegacyDoubleField("foo", 5d, Field.Store.NO),
+ new LegacyDoubleField("foo", 5d, Field.Store.YES)
+ };
+
+ for (Field field : fields) {
+ trySetBoost(field);
+ trySetByteValue(field);
+ trySetBytesValue(field);
+ trySetBytesRefValue(field);
+ field.setDoubleValue(6d); // ok
+ trySetIntValue(field);
+ trySetFloatValue(field);
+ trySetLongValue(field);
+ trySetReaderValue(field);
+ trySetShortValue(field);
+ trySetStringValue(field);
+ trySetTokenStreamValue(field);
+
+ assertEquals(6d, field.numericValue().doubleValue(), 0.0d);
+ }
+ }
+
+ public void testLegacyFloatField() throws Exception {
+ Field fields[] = new Field[] {
+ new LegacyFloatField("foo", 5f, Field.Store.NO),
+ new LegacyFloatField("foo", 5f, Field.Store.YES)
+ };
+
+ for (Field field : fields) {
+ trySetBoost(field);
+ trySetByteValue(field);
+ trySetBytesValue(field);
+ trySetBytesRefValue(field);
+ trySetDoubleValue(field);
+ trySetIntValue(field);
+ field.setFloatValue(6f); // ok
+ trySetLongValue(field);
+ trySetReaderValue(field);
+ trySetShortValue(field);
+ trySetStringValue(field);
+ trySetTokenStreamValue(field);
+
+ assertEquals(6f, field.numericValue().floatValue(), 0.0f);
+ }
+ }
+
+ public void testLegacyIntField() throws Exception {
+ Field fields[] = new Field[] {
+ new LegacyIntField("foo", 5, Field.Store.NO),
+ new LegacyIntField("foo", 5, Field.Store.YES)
+ };
+
+ for (Field field : fields) {
+ trySetBoost(field);
+ trySetByteValue(field);
+ trySetBytesValue(field);
+ trySetBytesRefValue(field);
+ trySetDoubleValue(field);
+ field.setIntValue(6); // ok
+ trySetFloatValue(field);
+ trySetLongValue(field);
+ trySetReaderValue(field);
+ trySetShortValue(field);
+ trySetStringValue(field);
+ trySetTokenStreamValue(field);
+
+ assertEquals(6, field.numericValue().intValue());
+ }
+ }
+
+ public void testLegacyLongField() throws Exception {
+ Field fields[] = new Field[] {
+ new LegacyLongField("foo", 5L, Field.Store.NO),
+ new LegacyLongField("foo", 5L, Field.Store.YES)
+ };
+
+ for (Field field : fields) {
+ trySetBoost(field);
+ trySetByteValue(field);
+ trySetBytesValue(field);
+ trySetBytesRefValue(field);
+ trySetDoubleValue(field);
+ trySetIntValue(field);
+ trySetFloatValue(field);
+ field.setLongValue(6); // ok
+ trySetReaderValue(field);
+ trySetShortValue(field);
+ trySetStringValue(field);
+ trySetTokenStreamValue(field);
+
+ assertEquals(6L, field.numericValue().longValue());
+ }
+ }
+
+ private void trySetByteValue(Field f) {
+ expectThrows(IllegalArgumentException.class, () -> {
+ f.setByteValue((byte) 10);
+ });
+ }
+
+ private void trySetBytesValue(Field f) {
+ expectThrows(IllegalArgumentException.class, () -> {
+ f.setBytesValue(new byte[] { 5, 5 });
+ });
+ }
+
+ private void trySetBytesRefValue(Field f) {
+ expectThrows(IllegalArgumentException.class, () -> {
+ f.setBytesValue(new BytesRef("bogus"));
+ });
+ }
+
+ private void trySetDoubleValue(Field f) {
+ expectThrows(IllegalArgumentException.class, () -> {
+ f.setDoubleValue(Double.MAX_VALUE);
+ });
+ }
+
+ private void trySetIntValue(Field f) {
+ expectThrows(IllegalArgumentException.class, () -> {
+ f.setIntValue(Integer.MAX_VALUE);
+ });
+ }
+
+ private void trySetLongValue(Field f) {
+ expectThrows(IllegalArgumentException.class, () -> {
+ f.setLongValue(Long.MAX_VALUE);
+ });
+ }
+
+ private void trySetFloatValue(Field f) {
+ expectThrows(IllegalArgumentException.class, () -> {
+ f.setFloatValue(Float.MAX_VALUE);
+ });
+ }
+
+ private void trySetReaderValue(Field f) {
+ expectThrows(IllegalArgumentException.class, () -> {
+ f.setReaderValue(new StringReader("BOO!"));
+ });
+ }
+
+ private void trySetShortValue(Field f) {
+ expectThrows(IllegalArgumentException.class, () -> {
+ f.setShortValue(Short.MAX_VALUE);
+ });
+ }
+
+ private void trySetStringValue(Field f) {
+ expectThrows(IllegalArgumentException.class, () -> {
+ f.setStringValue("BOO!");
+ });
+ }
+
+ private void trySetTokenStreamValue(Field f) {
+ expectThrows(IllegalArgumentException.class, () -> {
+ f.setTokenStream(new CannedTokenStream(new Token("foo", 0, 3)));
+ });
+ }
+
+ private void trySetBoost(Field f) {
+ expectThrows(IllegalArgumentException.class, () -> {
+ f.setBoost(5.0f);
+ });
+ }
+}
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestLegacyFieldReuse.java b/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestLegacyFieldReuse.java
new file mode 100644
index 00000000000..9335290247d
--- /dev/null
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestLegacyFieldReuse.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.legacy;
+
+
+import java.io.IOException;
+
+import org.apache.lucene.analysis.BaseTokenStreamTestCase;
+import org.apache.lucene.analysis.CannedTokenStream;
+import org.apache.lucene.analysis.Token;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.legacy.LegacyIntField;
+import org.apache.lucene.legacy.LegacyNumericTokenStream;
+import org.apache.lucene.legacy.LegacyNumericUtils;
+import org.apache.lucene.legacy.LegacyNumericTokenStream.LegacyNumericTermAttribute;
+
+/** test tokenstream reuse by DefaultIndexingChain */
+public class TestLegacyFieldReuse extends BaseTokenStreamTestCase {
+
+ public void testNumericReuse() throws IOException {
+ LegacyIntField legacyIntField = new LegacyIntField("foo", 5, Field.Store.NO);
+
+ // passing null
+ TokenStream ts = legacyIntField.tokenStream(null, null);
+ assertTrue(ts instanceof LegacyNumericTokenStream);
+ assertEquals(LegacyNumericUtils.PRECISION_STEP_DEFAULT_32, ((LegacyNumericTokenStream)ts).getPrecisionStep());
+ assertNumericContents(5, ts);
+
+ // now reuse previous stream
+ legacyIntField = new LegacyIntField("foo", 20, Field.Store.NO);
+ TokenStream ts2 = legacyIntField.tokenStream(null, ts);
+ assertSame(ts, ts2);
+ assertNumericContents(20, ts);
+
+ // pass a bogus stream and ensure it's still ok
+ legacyIntField = new LegacyIntField("foo", 2343, Field.Store.NO);
+ TokenStream bogus = new CannedTokenStream(new Token("bogus", 0, 5));
+ ts = legacyIntField.tokenStream(null, bogus);
+ assertNotSame(bogus, ts);
+ assertNumericContents(2343, ts);
+
+ // pass another bogus stream (numeric, but different precision step!)
+ legacyIntField = new LegacyIntField("foo", 42, Field.Store.NO);
+ assert 3 != LegacyNumericUtils.PRECISION_STEP_DEFAULT;
+ bogus = new LegacyNumericTokenStream(3);
+ ts = legacyIntField.tokenStream(null, bogus);
+ assertNotSame(bogus, ts);
+ assertNumericContents(42, ts);
+ }
+
+ private void assertNumericContents(int value, TokenStream ts) throws IOException {
+ assertTrue(ts instanceof LegacyNumericTokenStream);
+ LegacyNumericTermAttribute numericAtt = ts.getAttribute(LegacyNumericTermAttribute.class);
+ ts.reset();
+ boolean seen = false;
+ while (ts.incrementToken()) {
+ if (numericAtt.getShift() == 0) {
+ assertEquals(value, numericAtt.getRawValue());
+ seen = true;
+ }
+ }
+ ts.end();
+ ts.close();
+ assertTrue(seen);
+ }
+}
diff --git a/lucene/core/src/test/org/apache/lucene/util/TestLegacyNumericUtils.java b/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestLegacyNumericUtils.java
similarity index 98%
rename from lucene/core/src/test/org/apache/lucene/util/TestLegacyNumericUtils.java
rename to lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestLegacyNumericUtils.java
index 2fb20d11db7..8607efdc893 100644
--- a/lucene/core/src/test/org/apache/lucene/util/TestLegacyNumericUtils.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestLegacyNumericUtils.java
@@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.lucene.util;
+package org.apache.lucene.legacy;
import java.util.Arrays;
@@ -22,6 +22,13 @@ import java.util.Collections;
import java.util.Iterator;
import java.util.Random;
+import org.apache.lucene.legacy.LegacyNumericUtils;
+import org.apache.lucene.util.BytesRefBuilder;
+import org.apache.lucene.util.FixedBitSet;
+import org.apache.lucene.util.LongBitSet;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.NumericUtils;
+
public class TestLegacyNumericUtils extends LuceneTestCase {
public void testLongConversionAndOrdering() throws Exception {
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestLegacyTerms.java b/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestLegacyTerms.java
new file mode 100644
index 00000000000..27fae15e916
--- /dev/null
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestLegacyTerms.java
@@ -0,0 +1,164 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.legacy;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.MultiFields;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.legacy.LegacyDoubleField;
+import org.apache.lucene.legacy.LegacyFloatField;
+import org.apache.lucene.legacy.LegacyIntField;
+import org.apache.lucene.legacy.LegacyLongField;
+import org.apache.lucene.legacy.LegacyNumericUtils;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.NumericUtils;
+
+public class TestLegacyTerms extends LuceneTestCase {
+
+ public void testEmptyIntFieldMinMax() throws Exception {
+ assertNull(LegacyNumericUtils.getMinInt(EMPTY_TERMS));
+ assertNull(LegacyNumericUtils.getMaxInt(EMPTY_TERMS));
+ }
+
+ public void testIntFieldMinMax() throws Exception {
+ Directory dir = newDirectory();
+ RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+ int numDocs = atLeast(100);
+ int minValue = Integer.MAX_VALUE;
+ int maxValue = Integer.MIN_VALUE;
+ for(int i=0;iupper) {
- int a=lower; lower=upper; upper=a;
- }
- final BytesRef lowerBytes, upperBytes;
- BytesRefBuilder b = new BytesRefBuilder();
- LegacyNumericUtils.intToPrefixCoded(lower, 0, b);
- lowerBytes = b.toBytesRef();
- LegacyNumericUtils.intToPrefixCoded(upper, 0, b);
- upperBytes = b.toBytesRef();
-
- // test inclusive range
- LegacyNumericRangeQuery tq= LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, true, true);
- TermRangeQuery cq=new TermRangeQuery(field, lowerBytes, upperBytes, true, true);
- TopDocs tTopDocs = searcher.search(tq, 1);
- TopDocs cTopDocs = searcher.search(cq, 1);
- assertEquals("Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal", cTopDocs.totalHits, tTopDocs.totalHits );
- totalTermCountT += termCountT = countTerms(tq);
- totalTermCountC += termCountC = countTerms(cq);
- checkTermCounts(precisionStep, termCountT, termCountC);
- // test exclusive range
- tq= LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, false, false);
- cq=new TermRangeQuery(field, lowerBytes, upperBytes, false, false);
- tTopDocs = searcher.search(tq, 1);
- cTopDocs = searcher.search(cq, 1);
- assertEquals("Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal", cTopDocs.totalHits, tTopDocs.totalHits );
- totalTermCountT += termCountT = countTerms(tq);
- totalTermCountC += termCountC = countTerms(cq);
- checkTermCounts(precisionStep, termCountT, termCountC);
- // test left exclusive range
- tq= LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, false, true);
- cq=new TermRangeQuery(field, lowerBytes, upperBytes, false, true);
- tTopDocs = searcher.search(tq, 1);
- cTopDocs = searcher.search(cq, 1);
- assertEquals("Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal", cTopDocs.totalHits, tTopDocs.totalHits );
- totalTermCountT += termCountT = countTerms(tq);
- totalTermCountC += termCountC = countTerms(cq);
- checkTermCounts(precisionStep, termCountT, termCountC);
- // test right exclusive range
- tq= LegacyNumericRangeQuery.newIntRange(field, precisionStep, lower, upper, true, false);
- cq=new TermRangeQuery(field, lowerBytes, upperBytes, true, false);
- tTopDocs = searcher.search(tq, 1);
- cTopDocs = searcher.search(cq, 1);
- assertEquals("Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal", cTopDocs.totalHits, tTopDocs.totalHits );
- totalTermCountT += termCountT = countTerms(tq);
- totalTermCountC += termCountC = countTerms(cq);
- checkTermCounts(precisionStep, termCountT, termCountC);
- }
-
- checkTermCounts(precisionStep, totalTermCountT, totalTermCountC);
- if (VERBOSE && precisionStep != Integer.MAX_VALUE) {
- System.out.println("Average number of terms during random search on '" + field + "':");
- System.out.println(" Numeric query: " + (((double)totalTermCountT)/(num * 4)));
- System.out.println(" Classical query: " + (((double)totalTermCountC)/(num * 4)));
- }
- }
-
- @Test
- public void testEmptyEnums() throws Exception {
- int count=3000;
- int lower=(distance*3/2)+startOffset, upper=lower + count*distance + (distance/3);
- // test empty enum
- assert lower < upper;
- assertTrue(0 < countTerms(LegacyNumericRangeQuery.newIntRange("field4", 4, lower, upper, true, true)));
- assertEquals(0, countTerms(LegacyNumericRangeQuery.newIntRange("field4", 4, upper, lower, true, true)));
- // test empty enum outside of bounds
- lower = distance*noDocs+startOffset;
- upper = 2 * lower;
- assert lower < upper;
- assertEquals(0, countTerms(LegacyNumericRangeQuery.newIntRange("field4", 4, lower, upper, true, true)));
- }
-
- private int countTerms(MultiTermQuery q) throws Exception {
- final Terms terms = MultiFields.getTerms(reader, q.getField());
- if (terms == null)
- return 0;
- final TermsEnum termEnum = q.getTermsEnum(terms);
- assertNotNull(termEnum);
- int count = 0;
- BytesRef cur, last = null;
- while ((cur = termEnum.next()) != null) {
- count++;
- if (last != null) {
- assertTrue(last.compareTo(cur) < 0);
- }
- last = BytesRef.deepCopyOf(cur);
- }
- // LUCENE-3314: the results after next() already returned null are undefined,
- // assertNull(termEnum.next());
- return count;
- }
-
- private void checkTermCounts(int precisionStep, int termCountT, int termCountC) {
- if (precisionStep == Integer.MAX_VALUE) {
- assertEquals("Number of terms should be equal for unlimited precStep", termCountC, termCountT);
- } else {
- assertTrue("Number of terms for NRQ should be <= compared to classical TRQ", termCountT <= termCountC);
- }
- }
-
- @Test
- public void testRandomTrieAndClassicRangeQuery_8bit() throws Exception {
- testRandomTrieAndClassicRangeQuery(8);
- }
-
- @Test
- public void testRandomTrieAndClassicRangeQuery_4bit() throws Exception {
- testRandomTrieAndClassicRangeQuery(4);
- }
-
- @Test
- public void testRandomTrieAndClassicRangeQuery_2bit() throws Exception {
- testRandomTrieAndClassicRangeQuery(2);
- }
-
- @Test
- public void testRandomTrieAndClassicRangeQuery_NoTrie() throws Exception {
- testRandomTrieAndClassicRangeQuery(Integer.MAX_VALUE);
- }
-
private void testRangeSplit(int precisionStep) throws Exception {
String field="ascfield"+precisionStep;
// 10 random tests
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery64.java b/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestNumericRangeQuery64.java
similarity index 72%
rename from lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery64.java
rename to lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestNumericRangeQuery64.java
index 7f63fbc0136..b3ce55aa66d 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery64.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestNumericRangeQuery64.java
@@ -14,28 +14,26 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.lucene.search;
+package org.apache.lucene.legacy;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
-import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.LegacyDoubleField;
-import org.apache.lucene.document.LegacyLongField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MultiTermQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.QueryUtils;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.BytesRefBuilder;
-import org.apache.lucene.util.LegacyNumericUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.NumericUtils;
-import org.apache.lucene.util.TestLegacyNumericUtils;
import org.apache.lucene.util.TestUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -63,37 +61,37 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
.setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000))
.setMergePolicy(newLogMergePolicy()));
- final FieldType storedLong = new FieldType(LegacyLongField.TYPE_NOT_STORED);
+ final LegacyFieldType storedLong = new LegacyFieldType(LegacyLongField.TYPE_NOT_STORED);
storedLong.setStored(true);
storedLong.freeze();
- final FieldType storedLong8 = new FieldType(storedLong);
+ final LegacyFieldType storedLong8 = new LegacyFieldType(storedLong);
storedLong8.setNumericPrecisionStep(8);
- final FieldType storedLong4 = new FieldType(storedLong);
+ final LegacyFieldType storedLong4 = new LegacyFieldType(storedLong);
storedLong4.setNumericPrecisionStep(4);
- final FieldType storedLong6 = new FieldType(storedLong);
+ final LegacyFieldType storedLong6 = new LegacyFieldType(storedLong);
storedLong6.setNumericPrecisionStep(6);
- final FieldType storedLong2 = new FieldType(storedLong);
+ final LegacyFieldType storedLong2 = new LegacyFieldType(storedLong);
storedLong2.setNumericPrecisionStep(2);
- final FieldType storedLongNone = new FieldType(storedLong);
+ final LegacyFieldType storedLongNone = new LegacyFieldType(storedLong);
storedLongNone.setNumericPrecisionStep(Integer.MAX_VALUE);
- final FieldType unstoredLong = LegacyLongField.TYPE_NOT_STORED;
+ final LegacyFieldType unstoredLong = LegacyLongField.TYPE_NOT_STORED;
- final FieldType unstoredLong8 = new FieldType(unstoredLong);
+ final LegacyFieldType unstoredLong8 = new LegacyFieldType(unstoredLong);
unstoredLong8.setNumericPrecisionStep(8);
- final FieldType unstoredLong6 = new FieldType(unstoredLong);
+ final LegacyFieldType unstoredLong6 = new LegacyFieldType(unstoredLong);
unstoredLong6.setNumericPrecisionStep(6);
- final FieldType unstoredLong4 = new FieldType(unstoredLong);
+ final LegacyFieldType unstoredLong4 = new LegacyFieldType(unstoredLong);
unstoredLong4.setNumericPrecisionStep(4);
- final FieldType unstoredLong2 = new FieldType(unstoredLong);
+ final LegacyFieldType unstoredLong2 = new LegacyFieldType(unstoredLong);
unstoredLong2.setNumericPrecisionStep(2);
LegacyLongField
@@ -374,137 +372,6 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
dir.close();
}
- private void testRandomTrieAndClassicRangeQuery(int precisionStep) throws Exception {
- String field="field"+precisionStep;
- int totalTermCountT=0,totalTermCountC=0,termCountT,termCountC;
- int num = TestUtil.nextInt(random(), 10, 20);
- for (int i = 0; i < num; i++) {
- long lower=(long)(random().nextDouble()*noDocs*distance)+startOffset;
- long upper=(long)(random().nextDouble()*noDocs*distance)+startOffset;
- if (lower>upper) {
- long a=lower; lower=upper; upper=a;
- }
- final BytesRef lowerBytes, upperBytes;
- BytesRefBuilder b = new BytesRefBuilder();
- LegacyNumericUtils.longToPrefixCoded(lower, 0, b);
- lowerBytes = b.toBytesRef();
- LegacyNumericUtils.longToPrefixCoded(upper, 0, b);
- upperBytes = b.toBytesRef();
-
- // test inclusive range
- LegacyNumericRangeQuery tq= LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, true, true);
- TermRangeQuery cq=new TermRangeQuery(field, lowerBytes, upperBytes, true, true);
- TopDocs tTopDocs = searcher.search(tq, 1);
- TopDocs cTopDocs = searcher.search(cq, 1);
- assertEquals("Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal", cTopDocs.totalHits, tTopDocs.totalHits );
- totalTermCountT += termCountT = countTerms(tq);
- totalTermCountC += termCountC = countTerms(cq);
- checkTermCounts(precisionStep, termCountT, termCountC);
- // test exclusive range
- tq= LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, false, false);
- cq=new TermRangeQuery(field, lowerBytes, upperBytes, false, false);
- tTopDocs = searcher.search(tq, 1);
- cTopDocs = searcher.search(cq, 1);
- assertEquals("Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal", cTopDocs.totalHits, tTopDocs.totalHits );
- totalTermCountT += termCountT = countTerms(tq);
- totalTermCountC += termCountC = countTerms(cq);
- checkTermCounts(precisionStep, termCountT, termCountC);
- // test left exclusive range
- tq= LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, false, true);
- cq=new TermRangeQuery(field, lowerBytes, upperBytes, false, true);
- tTopDocs = searcher.search(tq, 1);
- cTopDocs = searcher.search(cq, 1);
- assertEquals("Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal", cTopDocs.totalHits, tTopDocs.totalHits );
- totalTermCountT += termCountT = countTerms(tq);
- totalTermCountC += termCountC = countTerms(cq);
- checkTermCounts(precisionStep, termCountT, termCountC);
- // test right exclusive range
- tq= LegacyNumericRangeQuery.newLongRange(field, precisionStep, lower, upper, true, false);
- cq=new TermRangeQuery(field, lowerBytes, upperBytes, true, false);
- tTopDocs = searcher.search(tq, 1);
- cTopDocs = searcher.search(cq, 1);
- assertEquals("Returned count for LegacyNumericRangeQuery and TermRangeQuery must be equal", cTopDocs.totalHits, tTopDocs.totalHits );
- totalTermCountT += termCountT = countTerms(tq);
- totalTermCountC += termCountC = countTerms(cq);
- checkTermCounts(precisionStep, termCountT, termCountC);
- }
-
- checkTermCounts(precisionStep, totalTermCountT, totalTermCountC);
- if (VERBOSE && precisionStep != Integer.MAX_VALUE) {
- System.out.println("Average number of terms during random search on '" + field + "':");
- System.out.println(" Numeric query: " + (((double)totalTermCountT)/(num * 4)));
- System.out.println(" Classical query: " + (((double)totalTermCountC)/(num * 4)));
- }
- }
-
- @Test
- public void testEmptyEnums() throws Exception {
- int count=3000;
- long lower=(distance*3/2)+startOffset, upper=lower + count*distance + (distance/3);
- // test empty enum
- assert lower < upper;
- assertTrue(0 < countTerms(LegacyNumericRangeQuery.newLongRange("field4", 4, lower, upper, true, true)));
- assertEquals(0, countTerms(LegacyNumericRangeQuery.newLongRange("field4", 4, upper, lower, true, true)));
- // test empty enum outside of bounds
- lower = distance*noDocs+startOffset;
- upper = 2L * lower;
- assert lower < upper;
- assertEquals(0, countTerms(LegacyNumericRangeQuery.newLongRange("field4", 4, lower, upper, true, true)));
- }
-
- private int countTerms(MultiTermQuery q) throws Exception {
- final Terms terms = MultiFields.getTerms(reader, q.getField());
- if (terms == null)
- return 0;
- final TermsEnum termEnum = q.getTermsEnum(terms);
- assertNotNull(termEnum);
- int count = 0;
- BytesRef cur, last = null;
- while ((cur = termEnum.next()) != null) {
- count++;
- if (last != null) {
- assertTrue(last.compareTo(cur) < 0);
- }
- last = BytesRef.deepCopyOf(cur);
- }
- // LUCENE-3314: the results after next() already returned null are undefined,
- // assertNull(termEnum.next());
- return count;
- }
-
- private void checkTermCounts(int precisionStep, int termCountT, int termCountC) {
- if (precisionStep == Integer.MAX_VALUE) {
- assertEquals("Number of terms should be equal for unlimited precStep", termCountC, termCountT);
- } else {
- assertTrue("Number of terms for NRQ should be <= compared to classical TRQ", termCountT <= termCountC);
- }
- }
-
- @Test
- public void testRandomTrieAndClassicRangeQuery_8bit() throws Exception {
- testRandomTrieAndClassicRangeQuery(8);
- }
-
- @Test
- public void testRandomTrieAndClassicRangeQuery_6bit() throws Exception {
- testRandomTrieAndClassicRangeQuery(6);
- }
-
- @Test
- public void testRandomTrieAndClassicRangeQuery_4bit() throws Exception {
- testRandomTrieAndClassicRangeQuery(4);
- }
-
- @Test
- public void testRandomTrieAndClassicRangeQuery_2bit() throws Exception {
- testRandomTrieAndClassicRangeQuery(2);
- }
-
- @Test
- public void testRandomTrieAndClassicRangeQuery_NoTrie() throws Exception {
- testRandomTrieAndClassicRangeQuery(Integer.MAX_VALUE);
- }
-
private void testRangeSplit(int precisionStep) throws Exception {
String field="ascfield"+precisionStep;
// 10 random tests
diff --git a/lucene/core/src/test/org/apache/lucene/analysis/TestNumericTokenStream.java b/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestNumericTokenStream.java
similarity index 85%
rename from lucene/core/src/test/org/apache/lucene/analysis/TestNumericTokenStream.java
rename to lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestNumericTokenStream.java
index dfaa20e5a2e..a507af09e0d 100644
--- a/lucene/core/src/test/org/apache/lucene/analysis/TestNumericTokenStream.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/legacy/TestNumericTokenStream.java
@@ -14,15 +14,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.lucene.analysis;
+package org.apache.lucene.legacy;
+import org.apache.lucene.util.AttributeImpl;
import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.LegacyNumericUtils;
-import org.apache.lucene.analysis.LegacyNumericTokenStream.LegacyNumericTermAttributeImpl;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
-import org.apache.lucene.analysis.tokenattributes.TestCharTermAttributeImpl;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
+import org.apache.lucene.legacy.LegacyNumericTokenStream;
+import org.apache.lucene.legacy.LegacyNumericUtils;
+import org.apache.lucene.legacy.LegacyNumericTokenStream.LegacyNumericTermAttributeImpl;
+import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.CharTermAttributeImpl;
@@ -150,20 +152,37 @@ public class TestNumericTokenStream extends BaseTokenStreamTestCase {
public void testAttributeClone() throws Exception {
LegacyNumericTermAttributeImpl att = new LegacyNumericTermAttributeImpl();
att.init(lvalue, 64, 8, 0); // set some value, to make getBytesRef() work
- LegacyNumericTermAttributeImpl copy = TestCharTermAttributeImpl.assertCloneIsEqual(att);
+ LegacyNumericTermAttributeImpl copy = assertCloneIsEqual(att);
assertNotSame(att.getBytesRef(), copy.getBytesRef());
- LegacyNumericTermAttributeImpl copy2 = TestCharTermAttributeImpl.assertCopyIsEqual(att);
+ LegacyNumericTermAttributeImpl copy2 = assertCopyIsEqual(att);
assertNotSame(att.getBytesRef(), copy2.getBytesRef());
// LUCENE-7027 test
att.init(lvalue, 64, 8, 64); // Exhausted TokenStream -> should return empty BytesRef
assertEquals(new BytesRef(), att.getBytesRef());
- copy = TestCharTermAttributeImpl.assertCloneIsEqual(att);
+ copy = assertCloneIsEqual(att);
assertEquals(new BytesRef(), copy.getBytesRef());
assertNotSame(att.getBytesRef(), copy.getBytesRef());
- copy2 = TestCharTermAttributeImpl.assertCopyIsEqual(att);
+ copy2 = assertCopyIsEqual(att);
assertEquals(new BytesRef(), copy2.getBytesRef());
assertNotSame(att.getBytesRef(), copy2.getBytesRef());
}
+ public static T assertCloneIsEqual(T att) {
+ @SuppressWarnings("unchecked")
+ T clone = (T) att.clone();
+ assertEquals("Clone must be equal", att, clone);
+ assertEquals("Clone's hashcode must be equal", att.hashCode(), clone.hashCode());
+ return clone;
+ }
+
+ public static T assertCopyIsEqual(T att) throws Exception {
+ @SuppressWarnings("unchecked")
+ T copy = (T) att.getClass().newInstance();
+ att.copyTo(copy);
+ assertEquals("Copied instance must be equal", att, copy);
+ assertEquals("Copied instance's hashcode must be equal", att.hashCode(), copy.hashCode());
+ return copy;
+ }
+
}
diff --git a/lucene/core/src/java/org/apache/lucene/analysis/TokenStream.java b/lucene/core/src/java/org/apache/lucene/analysis/TokenStream.java
index 6a78e1c0f38..af1e7bd5e9b 100644
--- a/lucene/core/src/java/org/apache/lucene/analysis/TokenStream.java
+++ b/lucene/core/src/java/org/apache/lucene/analysis/TokenStream.java
@@ -22,7 +22,6 @@ import java.io.Closeable;
import java.lang.reflect.Modifier;
import org.apache.lucene.analysis.tokenattributes.PackedTokenAttributeImpl;
-import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
@@ -176,11 +175,7 @@ public abstract class TokenStream extends AttributeSource implements Closeable {
* @throws IOException If an I/O error occurs
*/
public void end() throws IOException {
- clearAttributes(); // LUCENE-3849: don't consume dirty atts
- PositionIncrementAttribute posIncAtt = getAttribute(PositionIncrementAttribute.class);
- if (posIncAtt != null) {
- posIncAtt.setPositionIncrement(0);
- }
+ endAttributes(); // LUCENE-3849: don't consume dirty atts
}
/**
diff --git a/lucene/core/src/java/org/apache/lucene/analysis/tokenattributes/PackedTokenAttributeImpl.java b/lucene/core/src/java/org/apache/lucene/analysis/tokenattributes/PackedTokenAttributeImpl.java
index a84d7b70c26..aaa3316b576 100644
--- a/lucene/core/src/java/org/apache/lucene/analysis/tokenattributes/PackedTokenAttributeImpl.java
+++ b/lucene/core/src/java/org/apache/lucene/analysis/tokenattributes/PackedTokenAttributeImpl.java
@@ -138,6 +138,17 @@ public class PackedTokenAttributeImpl extends CharTermAttributeImpl
startOffset = endOffset = 0;
type = DEFAULT_TYPE;
}
+
+ /** Resets the attributes at end
+ */
+ @Override
+ public void end() {
+ super.end();
+ positionIncrement = 0;
+ positionLength = 1;
+ startOffset = endOffset = 0;
+ type = DEFAULT_TYPE;
+ }
@Override
public PackedTokenAttributeImpl clone() {
diff --git a/lucene/core/src/java/org/apache/lucene/analysis/tokenattributes/PositionIncrementAttributeImpl.java b/lucene/core/src/java/org/apache/lucene/analysis/tokenattributes/PositionIncrementAttributeImpl.java
index 283f481f760..4d63d6fb36b 100644
--- a/lucene/core/src/java/org/apache/lucene/analysis/tokenattributes/PositionIncrementAttributeImpl.java
+++ b/lucene/core/src/java/org/apache/lucene/analysis/tokenattributes/PositionIncrementAttributeImpl.java
@@ -46,6 +46,11 @@ public class PositionIncrementAttributeImpl extends AttributeImpl implements Pos
this.positionIncrement = 1;
}
+ @Override
+ public void end() {
+ this.positionIncrement = 0;
+ }
+
@Override
public boolean equals(Object other) {
if (other == this) {
diff --git a/lucene/core/src/java/org/apache/lucene/document/Field.java b/lucene/core/src/java/org/apache/lucene/document/Field.java
index 87986101f3b..8f5f8692d66 100644
--- a/lucene/core/src/java/org/apache/lucene/document/Field.java
+++ b/lucene/core/src/java/org/apache/lucene/document/Field.java
@@ -21,7 +21,6 @@ import java.io.IOException;
import java.io.Reader;
import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.LegacyNumericTokenStream;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.BytesTermAttribute;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
@@ -426,9 +425,6 @@ public class Field implements IndexableField {
if (type.indexOptions() == IndexOptions.NONE || !type.tokenized()) {
throw new IllegalArgumentException("TokenStream fields must be indexed and tokenized");
}
- if (type.numericType() != null) {
- throw new IllegalArgumentException("cannot set private TokenStream on numeric fields");
- }
this.tokenStream = tokenStream;
}
@@ -511,35 +507,6 @@ public class Field implements IndexableField {
return null;
}
- final FieldType.LegacyNumericType numericType = fieldType().numericType();
- if (numericType != null) {
- if (!(reuse instanceof LegacyNumericTokenStream && ((LegacyNumericTokenStream)reuse).getPrecisionStep() == type.numericPrecisionStep())) {
- // lazy init the TokenStream as it is heavy to instantiate
- // (attributes,...) if not needed (stored field loading)
- reuse = new LegacyNumericTokenStream(type.numericPrecisionStep());
- }
- final LegacyNumericTokenStream nts = (LegacyNumericTokenStream) reuse;
- // initialize value in TokenStream
- final Number val = (Number) fieldsData;
- switch (numericType) {
- case INT:
- nts.setIntValue(val.intValue());
- break;
- case LONG:
- nts.setLongValue(val.longValue());
- break;
- case FLOAT:
- nts.setFloatValue(val.floatValue());
- break;
- case DOUBLE:
- nts.setDoubleValue(val.doubleValue());
- break;
- default:
- throw new AssertionError("Should never get here");
- }
- return reuse;
- }
-
if (!fieldType().tokenized()) {
if (stringValue() != null) {
if (!(reuse instanceof StringTokenStream)) {
diff --git a/lucene/core/src/java/org/apache/lucene/document/FieldType.java b/lucene/core/src/java/org/apache/lucene/document/FieldType.java
index e0f058f520e..6f206a49ca7 100644
--- a/lucene/core/src/java/org/apache/lucene/document/FieldType.java
+++ b/lucene/core/src/java/org/apache/lucene/document/FieldType.java
@@ -22,30 +22,12 @@ import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.index.PointValues;
-import org.apache.lucene.util.LegacyNumericUtils;
/**
* Describes the properties of a field.
*/
public class FieldType implements IndexableFieldType {
- /** Data type of the numeric value
- * @since 3.2
- *
- * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
- */
- @Deprecated
- public enum LegacyNumericType {
- /** 32-bit integer numeric type */
- INT,
- /** 64-bit long numeric type */
- LONG,
- /** 32-bit float numeric type */
- FLOAT,
- /** 64-bit double numeric type */
- DOUBLE
- }
-
private boolean stored;
private boolean tokenized = true;
private boolean storeTermVectors;
@@ -54,9 +36,7 @@ public class FieldType implements IndexableFieldType {
private boolean storeTermVectorPayloads;
private boolean omitNorms;
private IndexOptions indexOptions = IndexOptions.NONE;
- private LegacyNumericType numericType;
private boolean frozen;
- private int numericPrecisionStep = LegacyNumericUtils.PRECISION_STEP_DEFAULT;
private DocValuesType docValuesType = DocValuesType.NONE;
private int dimensionCount;
private int dimensionNumBytes;
@@ -73,8 +53,6 @@ public class FieldType implements IndexableFieldType {
this.storeTermVectorPayloads = ref.storeTermVectorPayloads();
this.omitNorms = ref.omitNorms();
this.indexOptions = ref.indexOptions();
- this.numericType = ref.numericType();
- this.numericPrecisionStep = ref.numericPrecisionStep();
this.docValuesType = ref.docValuesType();
this.dimensionCount = ref.dimensionCount;
this.dimensionNumBytes = ref.dimensionNumBytes;
@@ -297,70 +275,6 @@ public class FieldType implements IndexableFieldType {
this.indexOptions = value;
}
- /**
- * Specifies the field's numeric type.
- * @param type numeric type, or null if the field has no numeric type.
- * @throws IllegalStateException if this FieldType is frozen against
- * future modifications.
- * @see #numericType()
- *
- * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
- */
- @Deprecated
- public void setNumericType(LegacyNumericType type) {
- checkIfFrozen();
- numericType = type;
- }
-
- /**
- * LegacyNumericType: if non-null then the field's value will be indexed
- * numerically so that {@link org.apache.lucene.search.LegacyNumericRangeQuery} can be used at
- * search time.
- *
- * The default is null (no numeric type)
- * @see #setNumericType(org.apache.lucene.document.FieldType.LegacyNumericType)
- *
- * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
- */
- @Deprecated
- public LegacyNumericType numericType() {
- return numericType;
- }
-
- /**
- * Sets the numeric precision step for the field.
- * @param precisionStep numeric precision step for the field
- * @throws IllegalArgumentException if precisionStep is less than 1.
- * @throws IllegalStateException if this FieldType is frozen against
- * future modifications.
- * @see #numericPrecisionStep()
- *
- * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
- */
- @Deprecated
- public void setNumericPrecisionStep(int precisionStep) {
- checkIfFrozen();
- if (precisionStep < 1) {
- throw new IllegalArgumentException("precisionStep must be >= 1 (got " + precisionStep + ")");
- }
- this.numericPrecisionStep = precisionStep;
- }
-
- /**
- * Precision step for numeric field.
- *
- * This has no effect if {@link #numericType()} returns null.
- *
- * The default is {@link org.apache.lucene.util.LegacyNumericUtils#PRECISION_STEP_DEFAULT}
- * @see #setNumericPrecisionStep(int)
- *
- * @deprecated Please switch to {@link org.apache.lucene.index.PointValues} instead
- */
- @Deprecated
- public int numericPrecisionStep() {
- return numericPrecisionStep;
- }
-
/**
* Enables points indexing.
*/
@@ -403,7 +317,7 @@ public class FieldType implements IndexableFieldType {
/** Prints a Field for human consumption. */
@Override
- public final String toString() {
+ public String toString() {
StringBuilder result = new StringBuilder();
if (stored()) {
result.append("stored");
@@ -434,12 +348,6 @@ public class FieldType implements IndexableFieldType {
result.append(",indexOptions=");
result.append(indexOptions);
}
- if (numericType != null) {
- result.append(",numericType=");
- result.append(numericType);
- result.append(",numericPrecisionStep=");
- result.append(numericPrecisionStep);
- }
}
if (dimensionCount != 0) {
if (result.length() > 0) {
@@ -495,8 +403,6 @@ public class FieldType implements IndexableFieldType {
result = prime * result + dimensionNumBytes;
result = prime * result + ((docValuesType == null) ? 0 : docValuesType.hashCode());
result = prime * result + indexOptions.hashCode();
- result = prime * result + numericPrecisionStep;
- result = prime * result + ((numericType == null) ? 0 : numericType.hashCode());
result = prime * result + (omitNorms ? 1231 : 1237);
result = prime * result + (storeTermVectorOffsets ? 1231 : 1237);
result = prime * result + (storeTermVectorPayloads ? 1231 : 1237);
@@ -517,8 +423,6 @@ public class FieldType implements IndexableFieldType {
if (dimensionNumBytes != other.dimensionNumBytes) return false;
if (docValuesType != other.docValuesType) return false;
if (indexOptions != other.indexOptions) return false;
- if (numericPrecisionStep != other.numericPrecisionStep) return false;
- if (numericType != other.numericType) return false;
if (omitNorms != other.omitNorms) return false;
if (storeTermVectorOffsets != other.storeTermVectorOffsets) return false;
if (storeTermVectorPayloads != other.storeTermVectorPayloads) return false;
diff --git a/lucene/core/src/java/org/apache/lucene/geo/Rectangle.java b/lucene/core/src/java/org/apache/lucene/geo/Rectangle.java
index c8fddf728c7..a8200c6edd3 100644
--- a/lucene/core/src/java/org/apache/lucene/geo/Rectangle.java
+++ b/lucene/core/src/java/org/apache/lucene/geo/Rectangle.java
@@ -186,4 +186,33 @@ public class Rectangle {
return new Rectangle(minLat, maxLat, minLon, maxLon);
}
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ Rectangle rectangle = (Rectangle) o;
+
+ if (Double.compare(rectangle.minLat, minLat) != 0) return false;
+ if (Double.compare(rectangle.minLon, minLon) != 0) return false;
+ if (Double.compare(rectangle.maxLat, maxLat) != 0) return false;
+ return Double.compare(rectangle.maxLon, maxLon) == 0;
+
+ }
+
+ @Override
+ public int hashCode() {
+ int result;
+ long temp;
+ temp = Double.doubleToLongBits(minLat);
+ result = (int) (temp ^ (temp >>> 32));
+ temp = Double.doubleToLongBits(minLon);
+ result = 31 * result + (int) (temp ^ (temp >>> 32));
+ temp = Double.doubleToLongBits(maxLat);
+ result = 31 * result + (int) (temp ^ (temp >>> 32));
+ temp = Double.doubleToLongBits(maxLon);
+ result = 31 * result + (int) (temp ^ (temp >>> 32));
+ return result;
+ }
}
diff --git a/lucene/core/src/java/org/apache/lucene/search/LRUQueryCache.java b/lucene/core/src/java/org/apache/lucene/search/LRUQueryCache.java
index 7ad208ff22f..44c04e54b7c 100644
--- a/lucene/core/src/java/org/apache/lucene/search/LRUQueryCache.java
+++ b/lucene/core/src/java/org/apache/lucene/search/LRUQueryCache.java
@@ -405,6 +405,7 @@ public class LRUQueryCache implements QueryCache, Accountable {
lock.lock();
try {
cache.clear();
+ // Note that this also clears the uniqueQueries map since mostRecentlyUsedQueries is the uniqueQueries.keySet view:
mostRecentlyUsedQueries.clear();
onClear();
} finally {
diff --git a/lucene/core/src/java/org/apache/lucene/store/ByteBufferGuard.java b/lucene/core/src/java/org/apache/lucene/store/ByteBufferGuard.java
new file mode 100644
index 00000000000..95fa17d5ea0
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/store/ByteBufferGuard.java
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.store;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * A guard that is created for every {@link ByteBufferIndexInput} that tries on best effort
+ * to reject any access to the {@link ByteBuffer} behind, once it is unmapped. A single instance
+ * of this is used for the original and all clones, so once the original is closed and unmapped
+ * all clones also throw {@link AlreadyClosedException}, triggered by a {@link NullPointerException}.
+ *
+ * This code tries to hopefully flush any CPU caches using a store-store barrier. It also yields the
+ * current thread to give other threads a chance to finish in-flight requests...
+ */
+final class ByteBufferGuard {
+
+ /**
+ * Pass in an implementation of this interface to cleanup ByteBuffers.
+ * MMapDirectory implements this to allow unmapping of bytebuffers with private Java APIs.
+ */
+ @FunctionalInterface
+ static interface BufferCleaner {
+ void freeBuffer(String resourceDescription, ByteBuffer b) throws IOException;
+ }
+
+ private final String resourceDescription;
+ private final BufferCleaner cleaner;
+
+ /** Not volatile; see comments on visibility below! */
+ private boolean invalidated = false;
+
+ /** Used as a store-store barrier; see comments below! */
+ private final AtomicInteger barrier = new AtomicInteger();
+
+ /**
+ * Creates an instance to be used for a single {@link ByteBufferIndexInput} which
+ * must be shared by all of its clones.
+ */
+ public ByteBufferGuard(String resourceDescription, BufferCleaner cleaner) {
+ this.resourceDescription = resourceDescription;
+ this.cleaner = cleaner;
+ }
+
+ /**
+ * Invalidates this guard and unmaps (if supported).
+ */
+ public void invalidateAndUnmap(ByteBuffer... bufs) throws IOException {
+ if (cleaner != null) {
+ invalidated = true;
+ // This call should hopefully flush any CPU caches and as a result make
+ // the "invalidated" field update visible to other threads. We specifically
+ // don't make "invalidated" field volatile for performance reasons, hoping the
+ // JVM won't optimize away reads of that field and hardware should ensure
+ // caches are in sync after this call. This isn't entirely "fool-proof"
+ // (see LUCENE-7409 discussion), but it has been shown to work in practice
+ // and we count on this behavior.
+ barrier.lazySet(0);
+ // we give other threads a bit of time to finish reads on their ByteBuffer...:
+ Thread.yield();
+ // finally unmap the ByteBuffers:
+ for (ByteBuffer b : bufs) {
+ cleaner.freeBuffer(resourceDescription, b);
+ }
+ }
+ }
+
+ private void ensureValid() {
+ if (invalidated) {
+ // this triggers an AlreadyClosedException in ByteBufferIndexInput:
+ throw new NullPointerException();
+ }
+ }
+
+ public void getBytes(ByteBuffer receiver, byte[] dst, int offset, int length) {
+ ensureValid();
+ receiver.get(dst, offset, length);
+ }
+
+ public byte getByte(ByteBuffer receiver) {
+ ensureValid();
+ return receiver.get();
+ }
+
+ public short getShort(ByteBuffer receiver) {
+ ensureValid();
+ return receiver.getShort();
+ }
+
+ public int getInt(ByteBuffer receiver) {
+ ensureValid();
+ return receiver.getInt();
+ }
+
+ public long getLong(ByteBuffer receiver) {
+ ensureValid();
+ return receiver.getLong();
+ }
+
+ public byte getByte(ByteBuffer receiver, int pos) {
+ ensureValid();
+ return receiver.get(pos);
+ }
+
+ public short getShort(ByteBuffer receiver, int pos) {
+ ensureValid();
+ return receiver.getShort(pos);
+ }
+
+ public int getInt(ByteBuffer receiver, int pos) {
+ ensureValid();
+ return receiver.getInt(pos);
+ }
+
+ public long getLong(ByteBuffer receiver, int pos) {
+ ensureValid();
+ return receiver.getLong(pos);
+ }
+
+}
diff --git a/lucene/core/src/java/org/apache/lucene/store/ByteBufferIndexInput.java b/lucene/core/src/java/org/apache/lucene/store/ByteBufferIndexInput.java
index 8e8ef90655a..0f6c733410b 100644
--- a/lucene/core/src/java/org/apache/lucene/store/ByteBufferIndexInput.java
+++ b/lucene/core/src/java/org/apache/lucene/store/ByteBufferIndexInput.java
@@ -21,9 +21,6 @@ import java.io.EOFException;
import java.io.IOException;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
-import java.util.Iterator;
-
-import org.apache.lucene.util.WeakIdentityMap;
/**
* Base IndexInput implementation that uses an array
@@ -37,35 +34,32 @@ import org.apache.lucene.util.WeakIdentityMap;
* are a power-of-two (chunkSizePower).
*/
abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessInput {
- protected final BufferCleaner cleaner;
protected final long length;
protected final long chunkSizeMask;
protected final int chunkSizePower;
+ protected final ByteBufferGuard guard;
protected ByteBuffer[] buffers;
protected int curBufIndex = -1;
protected ByteBuffer curBuf; // redundant for speed: buffers[curBufIndex]
protected boolean isClone = false;
- protected final WeakIdentityMap clones;
- public static ByteBufferIndexInput newInstance(String resourceDescription, ByteBuffer[] buffers, long length, int chunkSizePower, BufferCleaner cleaner, boolean trackClones) {
- final WeakIdentityMap clones = trackClones ? WeakIdentityMap.newConcurrentHashMap() : null;
+ public static ByteBufferIndexInput newInstance(String resourceDescription, ByteBuffer[] buffers, long length, int chunkSizePower, ByteBufferGuard guard) {
if (buffers.length == 1) {
- return new SingleBufferImpl(resourceDescription, buffers[0], length, chunkSizePower, cleaner, clones);
+ return new SingleBufferImpl(resourceDescription, buffers[0], length, chunkSizePower, guard);
} else {
- return new MultiBufferImpl(resourceDescription, buffers, 0, length, chunkSizePower, cleaner, clones);
+ return new MultiBufferImpl(resourceDescription, buffers, 0, length, chunkSizePower, guard);
}
}
- ByteBufferIndexInput(String resourceDescription, ByteBuffer[] buffers, long length, int chunkSizePower, BufferCleaner cleaner, WeakIdentityMap clones) {
+ ByteBufferIndexInput(String resourceDescription, ByteBuffer[] buffers, long length, int chunkSizePower, ByteBufferGuard guard) {
super(resourceDescription);
this.buffers = buffers;
this.length = length;
this.chunkSizePower = chunkSizePower;
this.chunkSizeMask = (1L << chunkSizePower) - 1L;
- this.clones = clones;
- this.cleaner = cleaner;
+ this.guard = guard;
assert chunkSizePower >= 0 && chunkSizePower <= 30;
assert (length >>> chunkSizePower) < Integer.MAX_VALUE;
}
@@ -73,7 +67,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
@Override
public final byte readByte() throws IOException {
try {
- return curBuf.get();
+ return guard.getByte(curBuf);
} catch (BufferUnderflowException e) {
do {
curBufIndex++;
@@ -83,7 +77,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
curBuf = buffers[curBufIndex];
curBuf.position(0);
} while (!curBuf.hasRemaining());
- return curBuf.get();
+ return guard.getByte(curBuf);
} catch (NullPointerException npe) {
throw new AlreadyClosedException("Already closed: " + this);
}
@@ -92,11 +86,11 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
@Override
public final void readBytes(byte[] b, int offset, int len) throws IOException {
try {
- curBuf.get(b, offset, len);
+ guard.getBytes(curBuf, b, offset, len);
} catch (BufferUnderflowException e) {
int curAvail = curBuf.remaining();
while (len > curAvail) {
- curBuf.get(b, offset, curAvail);
+ guard.getBytes(curBuf, b, offset, curAvail);
len -= curAvail;
offset += curAvail;
curBufIndex++;
@@ -107,7 +101,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
curBuf.position(0);
curAvail = curBuf.remaining();
}
- curBuf.get(b, offset, len);
+ guard.getBytes(curBuf, b, offset, len);
} catch (NullPointerException npe) {
throw new AlreadyClosedException("Already closed: " + this);
}
@@ -116,7 +110,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
@Override
public final short readShort() throws IOException {
try {
- return curBuf.getShort();
+ return guard.getShort(curBuf);
} catch (BufferUnderflowException e) {
return super.readShort();
} catch (NullPointerException npe) {
@@ -127,7 +121,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
@Override
public final int readInt() throws IOException {
try {
- return curBuf.getInt();
+ return guard.getInt(curBuf);
} catch (BufferUnderflowException e) {
return super.readInt();
} catch (NullPointerException npe) {
@@ -138,7 +132,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
@Override
public final long readLong() throws IOException {
try {
- return curBuf.getLong();
+ return guard.getLong(curBuf);
} catch (BufferUnderflowException e) {
return super.readLong();
} catch (NullPointerException npe) {
@@ -181,7 +175,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
public byte readByte(long pos) throws IOException {
try {
final int bi = (int) (pos >> chunkSizePower);
- return buffers[bi].get((int) (pos & chunkSizeMask));
+ return guard.getByte(buffers[bi], (int) (pos & chunkSizeMask));
} catch (IndexOutOfBoundsException ioobe) {
throw new EOFException("seek past EOF: " + this);
} catch (NullPointerException npe) {
@@ -207,7 +201,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
public short readShort(long pos) throws IOException {
final int bi = (int) (pos >> chunkSizePower);
try {
- return buffers[bi].getShort((int) (pos & chunkSizeMask));
+ return guard.getShort(buffers[bi], (int) (pos & chunkSizeMask));
} catch (IndexOutOfBoundsException ioobe) {
// either it's a boundary, or read past EOF, fall back:
setPos(pos, bi);
@@ -221,7 +215,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
public int readInt(long pos) throws IOException {
final int bi = (int) (pos >> chunkSizePower);
try {
- return buffers[bi].getInt((int) (pos & chunkSizeMask));
+ return guard.getInt(buffers[bi], (int) (pos & chunkSizeMask));
} catch (IndexOutOfBoundsException ioobe) {
// either it's a boundary, or read past EOF, fall back:
setPos(pos, bi);
@@ -235,7 +229,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
public long readLong(long pos) throws IOException {
final int bi = (int) (pos >> chunkSizePower);
try {
- return buffers[bi].getLong((int) (pos & chunkSizeMask));
+ return guard.getLong(buffers[bi], (int) (pos & chunkSizeMask));
} catch (IndexOutOfBoundsException ioobe) {
// either it's a boundary, or read past EOF, fall back:
setPos(pos, bi);
@@ -285,11 +279,6 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
final ByteBufferIndexInput clone = newCloneInstance(getFullSliceDescription(sliceDescription), newBuffers, ofs, length);
clone.isClone = true;
-
- // register the new clone in our clone list to clean it up on closing:
- if (clones != null) {
- this.clones.put(clone, Boolean.TRUE);
- }
return clone;
}
@@ -299,9 +288,9 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
protected ByteBufferIndexInput newCloneInstance(String newResourceDescription, ByteBuffer[] newBuffers, int offset, long length) {
if (newBuffers.length == 1) {
newBuffers[0].position(offset);
- return new SingleBufferImpl(newResourceDescription, newBuffers[0].slice(), length, chunkSizePower, this.cleaner, this.clones);
+ return new SingleBufferImpl(newResourceDescription, newBuffers[0].slice(), length, chunkSizePower, this.guard);
} else {
- return new MultiBufferImpl(newResourceDescription, newBuffers, offset, length, chunkSizePower, cleaner, clones);
+ return new MultiBufferImpl(newResourceDescription, newBuffers, offset, length, chunkSizePower, guard);
}
}
@@ -335,25 +324,11 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
// make local copy, then un-set early
final ByteBuffer[] bufs = buffers;
unsetBuffers();
- if (clones != null) {
- clones.remove(this);
- }
if (isClone) return;
- // for extra safety unset also all clones' buffers:
- if (clones != null) {
- for (Iterator it = this.clones.keyIterator(); it.hasNext();) {
- final ByteBufferIndexInput clone = it.next();
- assert clone.isClone;
- clone.unsetBuffers();
- }
- this.clones.clear();
- }
-
- for (final ByteBuffer b : bufs) {
- freeBuffer(b);
- }
+ // tell the guard to invalidate and later unmap the bytebuffers (if supported):
+ guard.invalidateAndUnmap(bufs);
} finally {
unsetBuffers();
}
@@ -367,31 +342,12 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
curBuf = null;
curBufIndex = 0;
}
-
- /**
- * Called when the contents of a buffer will be no longer needed.
- */
- private void freeBuffer(ByteBuffer b) throws IOException {
- if (cleaner != null) {
- cleaner.freeBuffer(this, b);
- }
- }
-
- /**
- * Pass in an implementation of this interface to cleanup ByteBuffers.
- * MMapDirectory implements this to allow unmapping of bytebuffers with private Java APIs.
- */
- @FunctionalInterface
- static interface BufferCleaner {
- void freeBuffer(ByteBufferIndexInput parent, ByteBuffer b) throws IOException;
- }
/** Optimization of ByteBufferIndexInput for when there is only one buffer */
static final class SingleBufferImpl extends ByteBufferIndexInput {
- SingleBufferImpl(String resourceDescription, ByteBuffer buffer, long length, int chunkSizePower,
- BufferCleaner cleaner, WeakIdentityMap clones) {
- super(resourceDescription, new ByteBuffer[] { buffer }, length, chunkSizePower, cleaner, clones);
+ SingleBufferImpl(String resourceDescription, ByteBuffer buffer, long length, int chunkSizePower, ByteBufferGuard guard) {
+ super(resourceDescription, new ByteBuffer[] { buffer }, length, chunkSizePower, guard);
this.curBufIndex = 0;
this.curBuf = buffer;
buffer.position(0);
@@ -426,7 +382,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
@Override
public byte readByte(long pos) throws IOException {
try {
- return curBuf.get((int) pos);
+ return guard.getByte(curBuf, (int) pos);
} catch (IllegalArgumentException e) {
if (pos < 0) {
throw new IllegalArgumentException("Seeking to negative position: " + this, e);
@@ -441,7 +397,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
@Override
public short readShort(long pos) throws IOException {
try {
- return curBuf.getShort((int) pos);
+ return guard.getShort(curBuf, (int) pos);
} catch (IllegalArgumentException e) {
if (pos < 0) {
throw new IllegalArgumentException("Seeking to negative position: " + this, e);
@@ -456,7 +412,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
@Override
public int readInt(long pos) throws IOException {
try {
- return curBuf.getInt((int) pos);
+ return guard.getInt(curBuf, (int) pos);
} catch (IllegalArgumentException e) {
if (pos < 0) {
throw new IllegalArgumentException("Seeking to negative position: " + this, e);
@@ -471,7 +427,7 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
@Override
public long readLong(long pos) throws IOException {
try {
- return curBuf.getLong((int) pos);
+ return guard.getLong(curBuf, (int) pos);
} catch (IllegalArgumentException e) {
if (pos < 0) {
throw new IllegalArgumentException("Seeking to negative position: " + this, e);
@@ -489,8 +445,8 @@ abstract class ByteBufferIndexInput extends IndexInput implements RandomAccessIn
private final int offset;
MultiBufferImpl(String resourceDescription, ByteBuffer[] buffers, int offset, long length, int chunkSizePower,
- BufferCleaner cleaner, WeakIdentityMap clones) {
- super(resourceDescription, buffers, length, chunkSizePower, cleaner, clones);
+ ByteBufferGuard guard) {
+ super(resourceDescription, buffers, length, chunkSizePower, guard);
this.offset = offset;
try {
seek(0L);
diff --git a/lucene/core/src/java/org/apache/lucene/store/MMapDirectory.java b/lucene/core/src/java/org/apache/lucene/store/MMapDirectory.java
index 60ca103a047..c0e35197f0e 100644
--- a/lucene/core/src/java/org/apache/lucene/store/MMapDirectory.java
+++ b/lucene/core/src/java/org/apache/lucene/store/MMapDirectory.java
@@ -36,7 +36,7 @@ import java.util.concurrent.Future;
import java.lang.invoke.MethodHandle;
import java.lang.reflect.Method;
-import org.apache.lucene.store.ByteBufferIndexInput.BufferCleaner;
+import org.apache.lucene.store.ByteBufferGuard.BufferCleaner;
import org.apache.lucene.util.Constants;
import org.apache.lucene.util.SuppressForbidden;
@@ -240,7 +240,7 @@ public class MMapDirectory extends FSDirectory {
final boolean useUnmap = getUseUnmap();
return ByteBufferIndexInput.newInstance(resourceDescription,
map(resourceDescription, c, 0, c.size()),
- c.size(), chunkSizePower, useUnmap ? CLEANER : null, useUnmap);
+ c.size(), chunkSizePower, new ByteBufferGuard(resourceDescription, useUnmap ? CLEANER : null));
}
}
@@ -370,7 +370,7 @@ public class MMapDirectory extends FSDirectory {
final MethodHandle unmapper = filterReturnValue(directBufferCleanerMethod, guardWithTest(nonNullTest, cleanMethod, noop))
.asType(methodType(void.class, ByteBuffer.class));
- return (BufferCleaner) (ByteBufferIndexInput parent, ByteBuffer buffer) -> {
+ return (BufferCleaner) (String resourceDescription, ByteBuffer buffer) -> {
if (directBufferClass.isInstance(buffer)) {
final Throwable error = AccessController.doPrivileged((PrivilegedAction) () -> {
try {
@@ -381,7 +381,7 @@ public class MMapDirectory extends FSDirectory {
}
});
if (error != null) {
- throw new IOException("Unable to unmap the mapped buffer: " + parent.toString(), error);
+ throw new IOException("Unable to unmap the mapped buffer: " + resourceDescription, error);
}
}
};
diff --git a/lucene/core/src/java/org/apache/lucene/util/AttributeImpl.java b/lucene/core/src/java/org/apache/lucene/util/AttributeImpl.java
index b7226612ef7..8a7c5429d05 100644
--- a/lucene/core/src/java/org/apache/lucene/util/AttributeImpl.java
+++ b/lucene/core/src/java/org/apache/lucene/util/AttributeImpl.java
@@ -32,6 +32,17 @@ public abstract class AttributeImpl implements Cloneable, Attribute {
*/
public abstract void clear();
+ /**
+ * Clears the values in this AttributeImpl and resets it to its value
+ * at the end of the field. If this implementation implements more than one Attribute interface
+ * it clears all.
+ *
+ * The default implementation simply calls {@link #clear()}
+ */
+ public void end() {
+ clear();
+ }
+
/**
* This method returns the current attribute values as a string in the following format
* by calling the {@link #reflectWith(AttributeReflector)} method:
diff --git a/lucene/core/src/java/org/apache/lucene/util/AttributeSource.java b/lucene/core/src/java/org/apache/lucene/util/AttributeSource.java
index 6c39025ba4a..e962fedc1de 100644
--- a/lucene/core/src/java/org/apache/lucene/util/AttributeSource.java
+++ b/lucene/core/src/java/org/apache/lucene/util/AttributeSource.java
@@ -270,6 +270,16 @@ public class AttributeSource {
state.attribute.clear();
}
}
+
+ /**
+ * Resets all Attributes in this AttributeSource by calling
+ * {@link AttributeImpl#end()} on each Attribute implementation.
+ */
+ public final void endAttributes() {
+ for (State state = getCurrentState(); state != null; state = state.next) {
+ state.attribute.end();
+ }
+ }
/**
* Removes all attributes and their implementations from this AttributeSource.
diff --git a/lucene/core/src/java/org/apache/lucene/util/Version.java b/lucene/core/src/java/org/apache/lucene/util/Version.java
index 3bd817ea8a5..478f41f16ff 100644
--- a/lucene/core/src/java/org/apache/lucene/util/Version.java
+++ b/lucene/core/src/java/org/apache/lucene/util/Version.java
@@ -59,6 +59,13 @@ public final class Version {
@Deprecated
public static final Version LUCENE_6_2_0 = new Version(6, 2, 0);
+ /**
+ * Match settings and bugs in Lucene's 6.3.0 release.
+ * @deprecated Use latest
+ */
+ @Deprecated
+ public static final Version LUCENE_6_3_0 = new Version(6, 3, 0);
+
/**
* Match settings and bugs in Lucene's 7.0.0 release.
*
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestGrowableByteArrayDataOutput.java b/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestGrowableByteArrayDataOutput.java
index fb90d925eb9..3820733ecca 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestGrowableByteArrayDataOutput.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestGrowableByteArrayDataOutput.java
@@ -58,7 +58,7 @@ public class TestGrowableByteArrayDataOutput extends LuceneTestCase {
public void testWriteLargeStrings() throws Exception {
int minSizeForDoublePass = GrowableByteArrayDataOutput.MIN_UTF8_SIZE_TO_ENABLE_DOUBLE_PASS_ENCODING;
- int num = atLeast(1000);
+ int num = atLeast(100);
for (int i = 0; i < num; i++) {
String unicode = TestUtil.randomRealisticUnicodeString(random(), minSizeForDoublePass, 10 * minSizeForDoublePass);
byte[] utf8 = new byte[unicode.length() * UnicodeUtil.MAX_UTF8_BYTES_PER_CHAR];
diff --git a/lucene/core/src/test/org/apache/lucene/document/TestField.java b/lucene/core/src/test/org/apache/lucene/document/TestField.java
index 92d6a832141..4ef7ffbcdcc 100644
--- a/lucene/core/src/test/org/apache/lucene/document/TestField.java
+++ b/lucene/core/src/test/org/apache/lucene/document/TestField.java
@@ -79,29 +79,7 @@ public class TestField extends LuceneTestCase {
assertEquals("DoublePoint ", field.toString());
}
- public void testLegacyDoubleField() throws Exception {
- Field fields[] = new Field[] {
- new LegacyDoubleField("foo", 5d, Field.Store.NO),
- new LegacyDoubleField("foo", 5d, Field.Store.YES)
- };
- for (Field field : fields) {
- trySetBoost(field);
- trySetByteValue(field);
- trySetBytesValue(field);
- trySetBytesRefValue(field);
- field.setDoubleValue(6d); // ok
- trySetIntValue(field);
- trySetFloatValue(field);
- trySetLongValue(field);
- trySetReaderValue(field);
- trySetShortValue(field);
- trySetStringValue(field);
- trySetTokenStreamValue(field);
-
- assertEquals(6d, field.numericValue().doubleValue(), 0.0d);
- }
- }
public void testDoubleDocValuesField() throws Exception {
DoubleDocValuesField field = new DoubleDocValuesField("foo", 5d);
@@ -185,30 +163,6 @@ public class TestField extends LuceneTestCase {
assertEquals("FloatPoint ", field.toString());
}
- public void testLegacyFloatField() throws Exception {
- Field fields[] = new Field[] {
- new LegacyFloatField("foo", 5f, Field.Store.NO),
- new LegacyFloatField("foo", 5f, Field.Store.YES)
- };
-
- for (Field field : fields) {
- trySetBoost(field);
- trySetByteValue(field);
- trySetBytesValue(field);
- trySetBytesRefValue(field);
- trySetDoubleValue(field);
- trySetIntValue(field);
- field.setFloatValue(6f); // ok
- trySetLongValue(field);
- trySetReaderValue(field);
- trySetShortValue(field);
- trySetStringValue(field);
- trySetTokenStreamValue(field);
-
- assertEquals(6f, field.numericValue().floatValue(), 0.0f);
- }
- }
-
public void testIntPoint() throws Exception {
Field field = new IntPoint("foo", 5);
@@ -253,30 +207,6 @@ public class TestField extends LuceneTestCase {
assertEquals("IntPoint ", field.toString());
}
- public void testLegacyIntField() throws Exception {
- Field fields[] = new Field[] {
- new LegacyIntField("foo", 5, Field.Store.NO),
- new LegacyIntField("foo", 5, Field.Store.YES)
- };
-
- for (Field field : fields) {
- trySetBoost(field);
- trySetByteValue(field);
- trySetBytesValue(field);
- trySetBytesRefValue(field);
- trySetDoubleValue(field);
- field.setIntValue(6); // ok
- trySetFloatValue(field);
- trySetLongValue(field);
- trySetReaderValue(field);
- trySetShortValue(field);
- trySetStringValue(field);
- trySetTokenStreamValue(field);
-
- assertEquals(6, field.numericValue().intValue());
- }
- }
-
public void testNumericDocValuesField() throws Exception {
NumericDocValuesField field = new NumericDocValuesField("foo", 5L);
@@ -340,30 +270,6 @@ public class TestField extends LuceneTestCase {
assertEquals("LongPoint ", field.toString());
}
- public void testLegacyLongField() throws Exception {
- Field fields[] = new Field[] {
- new LegacyLongField("foo", 5L, Field.Store.NO),
- new LegacyLongField("foo", 5L, Field.Store.YES)
- };
-
- for (Field field : fields) {
- trySetBoost(field);
- trySetByteValue(field);
- trySetBytesValue(field);
- trySetBytesRefValue(field);
- trySetDoubleValue(field);
- trySetIntValue(field);
- trySetFloatValue(field);
- field.setLongValue(6); // ok
- trySetReaderValue(field);
- trySetShortValue(field);
- trySetStringValue(field);
- trySetTokenStreamValue(field);
-
- assertEquals(6L, field.numericValue().longValue());
- }
- }
-
public void testSortedBytesDocValuesField() throws Exception {
SortedDocValuesField field = new SortedDocValuesField("foo", new BytesRef("bar"));
diff --git a/lucene/core/src/test/org/apache/lucene/document/TestFieldType.java b/lucene/core/src/test/org/apache/lucene/document/TestFieldType.java
index da76f40fe71..9214cb9f844 100644
--- a/lucene/core/src/test/org/apache/lucene/document/TestFieldType.java
+++ b/lucene/core/src/test/org/apache/lucene/document/TestFieldType.java
@@ -18,7 +18,6 @@ package org.apache.lucene.document;
import java.lang.reflect.Method;
-import org.apache.lucene.document.FieldType.LegacyNumericType;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.PointValues;
@@ -58,14 +57,6 @@ public class TestFieldType extends LuceneTestCase {
ft7.setOmitNorms(true);
assertFalse(ft7.equals(ft));
- FieldType ft8 = new FieldType();
- ft8.setNumericType(LegacyNumericType.DOUBLE);
- assertFalse(ft8.equals(ft));
-
- FieldType ft9 = new FieldType();
- ft9.setNumericPrecisionStep(3);
- assertFalse(ft9.equals(ft));
-
FieldType ft10 = new FieldType();
ft10.setStoreTermVectors(true);
assertFalse(ft10.equals(ft));
diff --git a/lucene/core/src/test/org/apache/lucene/geo/TestGeoUtils.java b/lucene/core/src/test/org/apache/lucene/geo/TestGeoUtils.java
index e75ae85b8a6..2cfb2f84002 100644
--- a/lucene/core/src/test/org/apache/lucene/geo/TestGeoUtils.java
+++ b/lucene/core/src/test/org/apache/lucene/geo/TestGeoUtils.java
@@ -30,7 +30,7 @@ public class TestGeoUtils extends LuceneTestCase {
// We rely heavily on GeoUtils.circleToBBox so we test it here:
public void testRandomCircleToBBox() throws Exception {
- int iters = atLeast(1000);
+ int iters = atLeast(100);
for(int iter=0;iter 0) {
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestFieldReuse.java b/lucene/core/src/test/org/apache/lucene/index/TestFieldReuse.java
index b36cfefa9ff..977df3da618 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestFieldReuse.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestFieldReuse.java
@@ -24,16 +24,12 @@ import java.util.Collections;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.CannedTokenStream;
-import org.apache.lucene.analysis.LegacyNumericTokenStream.LegacyNumericTermAttribute;
-import org.apache.lucene.analysis.LegacyNumericTokenStream;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
-import org.apache.lucene.document.LegacyIntField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.LegacyNumericUtils;
/** test tokenstream reuse by DefaultIndexingChain */
public class TestFieldReuse extends BaseTokenStreamTestCase {
@@ -61,7 +57,7 @@ public class TestFieldReuse extends BaseTokenStreamTestCase {
// pass a bogus stream and ensure it's still ok
stringField = new StringField("foo", "beer", Field.Store.NO);
- TokenStream bogus = new LegacyNumericTokenStream();
+ TokenStream bogus = new CannedTokenStream();
ts = stringField.tokenStream(null, bogus);
assertNotSame(ts, bogus);
assertTokenStreamContents(ts,
@@ -71,37 +67,6 @@ public class TestFieldReuse extends BaseTokenStreamTestCase {
);
}
- public void testNumericReuse() throws IOException {
- LegacyIntField legacyIntField = new LegacyIntField("foo", 5, Field.Store.NO);
-
- // passing null
- TokenStream ts = legacyIntField.tokenStream(null, null);
- assertTrue(ts instanceof LegacyNumericTokenStream);
- assertEquals(LegacyNumericUtils.PRECISION_STEP_DEFAULT_32, ((LegacyNumericTokenStream)ts).getPrecisionStep());
- assertNumericContents(5, ts);
-
- // now reuse previous stream
- legacyIntField = new LegacyIntField("foo", 20, Field.Store.NO);
- TokenStream ts2 = legacyIntField.tokenStream(null, ts);
- assertSame(ts, ts2);
- assertNumericContents(20, ts);
-
- // pass a bogus stream and ensure it's still ok
- legacyIntField = new LegacyIntField("foo", 2343, Field.Store.NO);
- TokenStream bogus = new CannedTokenStream(new Token("bogus", 0, 5));
- ts = legacyIntField.tokenStream(null, bogus);
- assertNotSame(bogus, ts);
- assertNumericContents(2343, ts);
-
- // pass another bogus stream (numeric, but different precision step!)
- legacyIntField = new LegacyIntField("foo", 42, Field.Store.NO);
- assert 3 != LegacyNumericUtils.PRECISION_STEP_DEFAULT;
- bogus = new LegacyNumericTokenStream(3);
- ts = legacyIntField.tokenStream(null, bogus);
- assertNotSame(bogus, ts);
- assertNumericContents(42, ts);
- }
-
static class MyField implements IndexableField {
TokenStream lastSeen;
TokenStream lastReturned;
@@ -163,20 +128,4 @@ public class TestFieldReuse extends BaseTokenStreamTestCase {
iw.close();
dir.close();
}
-
- private void assertNumericContents(int value, TokenStream ts) throws IOException {
- assertTrue(ts instanceof LegacyNumericTokenStream);
- LegacyNumericTermAttribute numericAtt = ts.getAttribute(LegacyNumericTermAttribute.class);
- ts.reset();
- boolean seen = false;
- while (ts.incrementToken()) {
- if (numericAtt.getShift() == 0) {
- assertEquals(value, numericAtt.getRawValue());
- seen = true;
- }
- }
- ts.end();
- ts.close();
- assertTrue(seen);
- }
}
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
index 363ccb2ce59..5045396e43e 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
@@ -1217,7 +1217,7 @@ public class TestIndexSorting extends LuceneTestCase {
if (TEST_NIGHTLY) {
numDocs = atLeast(100000);
} else {
- numDocs = atLeast(10000);
+ numDocs = atLeast(1000);
}
List docs = new ArrayList<>();
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexingSequenceNumbers.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexingSequenceNumbers.java
index 8d4c5c7903f..55aa6e036d8 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexingSequenceNumbers.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexingSequenceNumbers.java
@@ -72,6 +72,7 @@ public class TestIndexingSequenceNumbers extends LuceneTestCase {
dir.close();
}
+ @Slow
public void testStressUpdateSameID() throws Exception {
int iters = atLeast(100);
for(int iter=0;iter writerRef = new AtomicReference<>();
+ writerRef.set(new IndexWriter(dir, newIndexWriterConfig()));
+
+ AtomicReference mgrRef = new AtomicReference<>();
+ mgrRef.set(new SearcherManager(writerRef.get(), null));
+ final AtomicBoolean stop = new AtomicBoolean();
+
+ Thread indexThread = new Thread() {
+ @Override
+ public void run() {
+ try {
+ LineFileDocs docs = new LineFileDocs(random());
+ long runTimeSec = TEST_NIGHTLY ? atLeast(10) : atLeast(2);
+ long endTime = System.nanoTime() + runTimeSec * 1000000000;
+ while (System.nanoTime() < endTime) {
+ IndexWriter w = writerRef.get();
+ w.addDocument(docs.nextDoc());
+ if (random().nextInt(1000) == 17) {
+ if (random().nextBoolean()) {
+ w.close();
+ } else {
+ w.rollback();
+ }
+ writerRef.set(new IndexWriter(dir, newIndexWriterConfig()));
+ }
+ }
+ docs.close();
+ stop.set(true);
+ if (VERBOSE) {
+ System.out.println("TEST: index count=" + writerRef.get().maxDoc());
+ }
+ } catch (IOException ioe) {
+ throw new RuntimeException(ioe);
+ }
+ }
+ };
+
+ Thread searchThread = new Thread() {
+ @Override
+ public void run() {
+ try {
+ long totCount = 0;
+ while (stop.get() == false) {
+ SearcherManager mgr = mgrRef.get();
+ if (mgr != null) {
+ IndexSearcher searcher;
+ try {
+ searcher = mgr.acquire();
+ } catch (AlreadyClosedException ace) {
+ // ok
+ continue;
+ }
+ totCount += searcher.getIndexReader().maxDoc();
+ mgr.release(searcher);
+ }
+ }
+ if (VERBOSE) {
+ System.out.println("TEST: search totCount=" + totCount);
+ }
+ } catch (IOException ioe) {
+ throw new RuntimeException(ioe);
+ }
+ }
+ };
+
+ Thread refreshThread = new Thread() {
+ @Override
+ public void run() {
+ try {
+ int refreshCount = 0;
+ int aceCount = 0;
+ while (stop.get() == false) {
+ SearcherManager mgr = mgrRef.get();
+ if (mgr != null) {
+ refreshCount++;
+ try {
+ mgr.maybeRefreshBlocking();
+ } catch (AlreadyClosedException ace) {
+ // ok
+ aceCount++;
+ continue;
+ }
+ }
+ }
+ if (VERBOSE) {
+ System.out.println("TEST: refresh count=" + refreshCount + " aceCount=" + aceCount);
+ }
+ } catch (IOException ioe) {
+ throw new RuntimeException(ioe);
+ }
+ }
+ };
+
+ Thread closeThread = new Thread() {
+ @Override
+ public void run() {
+ try {
+ int closeCount = 0;
+ int aceCount = 0;
+ while (stop.get() == false) {
+ SearcherManager mgr = mgrRef.get();
+ assert mgr != null;
+ mgr.close();
+ closeCount++;
+ while (stop.get() == false) {
+ try {
+ mgrRef.set(new SearcherManager(writerRef.get(), null));
+ break;
+ } catch (AlreadyClosedException ace) {
+ // ok
+ aceCount++;
+ }
+ }
+ }
+ if (VERBOSE) {
+ System.out.println("TEST: close count=" + closeCount + " aceCount=" + aceCount);
+ }
+ } catch (IOException ioe) {
+ throw new RuntimeException(ioe);
+ }
+ }
+ };
+
+ indexThread.start();
+ searchThread.start();
+ refreshThread.start();
+ closeThread.start();
+
+ indexThread.join();
+ searchThread.join();
+ refreshThread.join();
+ closeThread.join();
+
+ mgrRef.get().close();
+ writerRef.get().close();
+ dir.close();
+ }
}
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSimpleExplanationsWithFillerDocs.java b/lucene/core/src/test/org/apache/lucene/search/TestSimpleExplanationsWithFillerDocs.java
index 0a79ae0f495..9f506688c52 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestSimpleExplanationsWithFillerDocs.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestSimpleExplanationsWithFillerDocs.java
@@ -23,6 +23,7 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.util.TestUtil;
+import org.apache.lucene.util.LuceneTestCase.Slow;
import org.junit.BeforeClass;
import org.junit.Assume;
@@ -34,6 +35,7 @@ import org.junit.Assume;
* all use terms from same set of source data as our regular docs (to emphasis the DocFreq factor in scoring),
* in which case the queries will be wrapped so they can be excluded.
*/
+@Slow // can this be sped up to be non-slow? filler docs make it quite a bit slower and many test methods...
public class TestSimpleExplanationsWithFillerDocs extends TestSimpleExplanations {
/** num of empty docs injected between every doc in the index */
diff --git a/lucene/core/src/test/org/apache/lucene/store/TestMmapDirectory.java b/lucene/core/src/test/org/apache/lucene/store/TestMmapDirectory.java
index 153cc5e6d30..098fd44519b 100644
--- a/lucene/core/src/test/org/apache/lucene/store/TestMmapDirectory.java
+++ b/lucene/core/src/test/org/apache/lucene/store/TestMmapDirectory.java
@@ -19,6 +19,10 @@ package org.apache.lucene.store;
import java.io.IOException;
import java.nio.file.Path;
+import java.util.Random;
+import java.util.concurrent.CountDownLatch;
+
+import org.junit.Ignore;
/**
* Tests MMapDirectory
@@ -39,4 +43,38 @@ public class TestMmapDirectory extends BaseDirectoryTestCase {
MMapDirectory.UNMAP_SUPPORTED);
}
+ @Ignore("This test is for JVM testing purposes. There are no guarantees that it may not fail with SIGSEGV!")
+ public void testAceWithThreads() throws Exception {
+ for (int iter = 0; iter < 10; iter++) {
+ Directory dir = getDirectory(createTempDir("testAceWithThreads"));
+ IndexOutput out = dir.createOutput("test", IOContext.DEFAULT);
+ Random random = random();
+ for (int i = 0; i < 8 * 1024 * 1024; i++) {
+ out.writeInt(random.nextInt());
+ }
+ out.close();
+ IndexInput in = dir.openInput("test", IOContext.DEFAULT);
+ IndexInput clone = in.clone();
+ final byte accum[] = new byte[32 * 1024 * 1024];
+ final CountDownLatch shotgun = new CountDownLatch(1);
+ Thread t1 = new Thread(() -> {
+ try {
+ shotgun.await();
+ for (int i = 0; i < 10; i++) {
+ clone.seek(0);
+ clone.readBytes(accum, 0, accum.length);
+ }
+ } catch (IOException | AlreadyClosedException ok) {
+ // OK
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ });
+ t1.start();
+ shotgun.countDown();
+ in.close();
+ t1.join();
+ dir.close();
+ }
+ }
}
diff --git a/lucene/join/build.xml b/lucene/join/build.xml
index b5360c4c7b9..b6878b809c6 100644
--- a/lucene/join/build.xml
+++ b/lucene/join/build.xml
@@ -26,6 +26,7 @@
+
@@ -34,13 +35,14 @@
-
+
-
+
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/DocValuesTermsCollector.java b/lucene/join/src/java/org/apache/lucene/search/join/DocValuesTermsCollector.java
index a9b11ed7008..4bb692a50da 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/DocValuesTermsCollector.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/DocValuesTermsCollector.java
@@ -19,8 +19,6 @@ package org.apache.lucene.search.join;
import java.io.IOException;
import java.util.function.LongConsumer;
-import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.FieldType.LegacyNumericType;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReader;
@@ -28,10 +26,11 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.legacy.LegacyNumericType;
+import org.apache.lucene.legacy.LegacyNumericUtils;
import org.apache.lucene.search.SimpleCollector;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
-import org.apache.lucene.util.LegacyNumericUtils;
abstract class DocValuesTermsCollector extends SimpleCollector {
@@ -85,13 +84,13 @@ abstract class DocValuesTermsCollector extends SimpleCollector {
return (l) -> LegacyNumericUtils.longToPrefixCoded(l, 0, bytes);
default:
throw new IllegalArgumentException("Unsupported "+type+
- ". Only "+ LegacyNumericType.INT+" and "+ FieldType.LegacyNumericType.LONG+" are supported."
+ ". Only "+ LegacyNumericType.INT+" and "+ LegacyNumericType.LONG+" are supported."
+ "Field "+fieldName );
}
}
/** this adapter is quite weird. ords are per doc index, don't use ords across different docs*/
- static Function sortedNumericAsSortedSetDocValues(String field, FieldType.LegacyNumericType numTyp) {
+ static Function sortedNumericAsSortedSetDocValues(String field, LegacyNumericType numTyp) {
return (ctx) -> {
final SortedNumericDocValues numerics = DocValues.getSortedNumeric(ctx, field);
final BytesRefBuilder bytes = new BytesRefBuilder();
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/JoinUtil.java b/lucene/join/src/java/org/apache/lucene/search/join/JoinUtil.java
index b0133e570da..49423947383 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/JoinUtil.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/JoinUtil.java
@@ -26,7 +26,7 @@ import java.util.function.BiConsumer;
import java.util.function.LongFunction;
import org.apache.lucene.document.DoublePoint;
-import org.apache.lucene.document.FieldType.LegacyNumericType;
+import org.apache.lucene.legacy.LegacyNumericType;
import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LongPoint;
@@ -123,8 +123,8 @@ public final class JoinUtil {
* @param multipleValuesPerDocument Whether the from field has multiple terms per document
* when true fromField might be {@link DocValuesType#SORTED_NUMERIC},
* otherwise fromField should be {@link DocValuesType#NUMERIC}
- * @param toField The to field to join to, should be {@link org.apache.lucene.document.LegacyIntField} or {@link org.apache.lucene.document.LegacyLongField}
- * @param numericType either {@link org.apache.lucene.document.FieldType.LegacyNumericType#INT} or {@link org.apache.lucene.document.FieldType.LegacyNumericType#LONG}, it should correspond to fromField and toField types
+ * @param toField The to field to join to, should be {@link org.apache.lucene.legacy.LegacyIntField} or {@link org.apache.lucene.legacy.LegacyLongField}
+ * @param numericType either {@link LegacyNumericType#INT} or {@link LegacyNumericType#LONG}, it should correspond to fromField and toField types
* @param fromQuery The query to match documents on the from side
* @param fromSearcher The searcher that executed the specified fromQuery
* @param scoreMode Instructs how scores from the fromQuery are mapped to the returned query
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/TermsIncludingScoreQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/TermsIncludingScoreQuery.java
index 3b03bd39c38..a39c25fcc93 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/TermsIncludingScoreQuery.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/TermsIncludingScoreQuery.java
@@ -27,6 +27,7 @@ import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.legacy.LegacyNumericUtils;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
@@ -37,7 +38,6 @@ import org.apache.lucene.util.BitSetIterator;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefHash;
import org.apache.lucene.util.FixedBitSet;
-import org.apache.lucene.util.LegacyNumericUtils;
class TermsIncludingScoreQuery extends Query {
diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java b/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
index b29e9ff0e9e..6d9eb2ab339 100644
--- a/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
+++ b/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
@@ -37,12 +37,9 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoubleDocValuesField;
import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.Field;
-import org.apache.lucene.document.FieldType.LegacyNumericType;
import org.apache.lucene.document.FloatDocValuesField;
import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.IntPoint;
-import org.apache.lucene.document.LegacyIntField;
-import org.apache.lucene.document.LegacyLongField;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedDocValuesField;
@@ -59,6 +56,9 @@ import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.MultiDocValues;
import org.apache.lucene.index.MultiDocValues.OrdinalMap;
+import org.apache.lucene.legacy.LegacyIntField;
+import org.apache.lucene.legacy.LegacyLongField;
+import org.apache.lucene.legacy.LegacyNumericType;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.NumericDocValues;
diff --git a/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java b/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java
index 45e95510dc5..a7857207a39 100644
--- a/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java
+++ b/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java
@@ -45,7 +45,6 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.IntPoint;
-import org.apache.lucene.document.LegacyLongField;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedDocValuesField;
@@ -457,9 +456,6 @@ public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
Document doc = new Document();
long randomLong = random().nextLong();
doc.add(new NumericDocValuesField("numeric", randomLong));
- if (random().nextBoolean()) {
- doc.add(new LegacyLongField("numeric", randomLong, Field.Store.NO));
- }
int numValues = atLeast(5);
for (int i = 0; i < numValues; i++) {
randomLong = random().nextLong();
@@ -468,9 +464,6 @@ public class TestMemoryIndexAgainstRAMDir extends BaseTokenStreamTestCase {
// randomly duplicate field/value
doc.add(new SortedNumericDocValuesField("sorted_numeric", randomLong));
}
- if (random().nextBoolean()) {
- doc.add(new LegacyLongField("numeric", randomLong, Field.Store.NO));
- }
}
BytesRef randomTerm = new BytesRef(randomTerm());
doc.add(new BinaryDocValuesField("binary", randomTerm));
diff --git a/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java b/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
index da9fdc5dc5d..05a3b239959 100644
--- a/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
+++ b/lucene/misc/src/test/org/apache/lucene/search/TestDiversifiedTopDocsCollector.java
@@ -21,11 +21,10 @@ import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FloatDocValuesField;
-import org.apache.lucene.document.LegacyFloatField;
import org.apache.lucene.document.SortedDocValuesField;
+import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.FieldInvertState;
@@ -331,7 +330,7 @@ public class TestDiversifiedTopDocsCollector extends LuceneTestCase {
new BytesRef(""));
Field weeksAtNumberOneField = new FloatDocValuesField("weeksAtNumberOne",
0.0F);
- Field weeksStoredField = new LegacyFloatField("weeks", 0.0F, Store.YES);
+ Field weeksStoredField = new StoredField("weeks", 0.0F);
Field idField = newStringField("id", "", Field.Store.YES);
Field songField = newTextField("song", "", Field.Store.NO);
Field storedArtistField = newTextField("artistName", "", Field.Store.NO);
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/mlt/TestMoreLikeThis.java b/lucene/queries/src/test/org/apache/lucene/queries/mlt/TestMoreLikeThis.java
index 89722db075f..32a610bf8a9 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/mlt/TestMoreLikeThis.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/mlt/TestMoreLikeThis.java
@@ -265,6 +265,7 @@ public class TestMoreLikeThis extends LuceneTestCase {
return writer.numDocs() - 1;
}
+ @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/LUCENE-7161")
public void testMultiFieldShouldReturnPerFieldBooleanQuery() throws Exception {
IndexReader reader = null;
Directory dir = newDirectory();
diff --git a/lucene/queryparser/build.xml b/lucene/queryparser/build.xml
index b6e43c2ce26..f1d59a34a4c 100644
--- a/lucene/queryparser/build.xml
+++ b/lucene/queryparser/build.xml
@@ -25,15 +25,17 @@
+
-
+
-
+
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/LegacyNumericRangeQueryNodeBuilder.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/LegacyNumericRangeQueryNodeBuilder.java
index 8ae7d5e2e43..0781afb533c 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/LegacyNumericRangeQueryNodeBuilder.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/builders/LegacyNumericRangeQueryNodeBuilder.java
@@ -16,7 +16,8 @@
*/
package org.apache.lucene.queryparser.flexible.standard.builders;
-import org.apache.lucene.document.FieldType;
+import org.apache.lucene.legacy.LegacyNumericRangeQuery;
+import org.apache.lucene.legacy.LegacyNumericType;
import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages;
import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;
@@ -25,12 +26,11 @@ import org.apache.lucene.queryparser.flexible.messages.MessageImpl;
import org.apache.lucene.queryparser.flexible.standard.config.LegacyNumericConfig;
import org.apache.lucene.queryparser.flexible.standard.nodes.LegacyNumericQueryNode;
import org.apache.lucene.queryparser.flexible.standard.nodes.LegacyNumericRangeQueryNode;
-import org.apache.lucene.search.LegacyNumericRangeQuery;
/**
- * Builds {@link org.apache.lucene.search.LegacyNumericRangeQuery}s out of {@link LegacyNumericRangeQueryNode}s.
+ * Builds {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}s out of {@link LegacyNumericRangeQueryNode}s.
*
- * @see org.apache.lucene.search.LegacyNumericRangeQuery
+ * @see org.apache.lucene.legacy.LegacyNumericRangeQuery
* @see LegacyNumericRangeQueryNode
* @deprecated Index with points and use {@link PointRangeQueryNodeBuilder} instead.
*/
@@ -56,7 +56,7 @@ public class LegacyNumericRangeQueryNodeBuilder implements StandardQueryBuilder
Number upperNumber = upperNumericNode.getValue();
LegacyNumericConfig numericConfig = numericRangeNode.getNumericConfig();
- FieldType.LegacyNumericType numberType = numericConfig.getType();
+ LegacyNumericType numberType = numericConfig.getType();
String field = StringUtils.toString(numericRangeNode.getField());
boolean minInclusive = numericRangeNode.isLowerInclusive();
boolean maxInclusive = numericRangeNode.isUpperInclusive();
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/LegacyNumericConfig.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/LegacyNumericConfig.java
index 6cd3c490e57..038023e65ae 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/LegacyNumericConfig.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/config/LegacyNumericConfig.java
@@ -19,14 +19,13 @@ package org.apache.lucene.queryparser.flexible.standard.config;
import java.text.NumberFormat;
import java.util.Objects;
-import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.FieldType.LegacyNumericType;
+import org.apache.lucene.legacy.LegacyNumericType;
/**
* This class holds the configuration used to parse numeric queries and create
- * {@link org.apache.lucene.search.LegacyNumericRangeQuery}s.
+ * {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}s.
*
- * @see org.apache.lucene.search.LegacyNumericRangeQuery
+ * @see org.apache.lucene.legacy.LegacyNumericRangeQuery
* @see NumberFormat
* @deprecated Index with Points instead and use {@link PointsConfig}
*/
@@ -37,7 +36,7 @@ public class LegacyNumericConfig {
private NumberFormat format;
- private FieldType.LegacyNumericType type;
+ private LegacyNumericType type;
/**
* Constructs a {@link LegacyNumericConfig} object.
@@ -52,7 +51,7 @@ public class LegacyNumericConfig {
*
* @see LegacyNumericConfig#setPrecisionStep(int)
* @see LegacyNumericConfig#setNumberFormat(NumberFormat)
- * @see #setType(org.apache.lucene.document.FieldType.LegacyNumericType)
+ * @see #setType(LegacyNumericType)
*/
public LegacyNumericConfig(int precisionStep, NumberFormat format,
LegacyNumericType type) {
@@ -67,7 +66,7 @@ public class LegacyNumericConfig {
*
* @return the precision used to index the numeric values
*
- * @see org.apache.lucene.search.LegacyNumericRangeQuery#getPrecisionStep()
+ * @see org.apache.lucene.legacy.LegacyNumericRangeQuery#getPrecisionStep()
*/
public int getPrecisionStep() {
return precisionStep;
@@ -79,7 +78,7 @@ public class LegacyNumericConfig {
* @param precisionStep
* the precision used to index the numeric values
*
- * @see org.apache.lucene.search.LegacyNumericRangeQuery#getPrecisionStep()
+ * @see org.apache.lucene.legacy.LegacyNumericRangeQuery#getPrecisionStep()
*/
public void setPrecisionStep(int precisionStep) {
this.precisionStep = precisionStep;
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/LegacyNumericRangeQueryNode.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/LegacyNumericRangeQueryNode.java
index 27c285eb34c..20cde351ce7 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/LegacyNumericRangeQueryNode.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/nodes/LegacyNumericRangeQueryNode.java
@@ -16,8 +16,7 @@
*/
package org.apache.lucene.queryparser.flexible.standard.nodes;
-import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.FieldType.LegacyNumericType;
+import org.apache.lucene.legacy.LegacyNumericType;
import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
import org.apache.lucene.queryparser.flexible.core.messages.QueryParserMessages;
import org.apache.lucene.queryparser.flexible.messages.MessageImpl;
@@ -57,13 +56,13 @@ public class LegacyNumericRangeQueryNode extends
private static LegacyNumericType getNumericDataType(Number number) throws QueryNodeException {
if (number instanceof Long) {
- return FieldType.LegacyNumericType.LONG;
+ return LegacyNumericType.LONG;
} else if (number instanceof Integer) {
- return FieldType.LegacyNumericType.INT;
+ return LegacyNumericType.INT;
} else if (number instanceof Double) {
return LegacyNumericType.DOUBLE;
} else if (number instanceof Float) {
- return FieldType.LegacyNumericType.FLOAT;
+ return LegacyNumericType.FLOAT;
} else {
throw new QueryNodeException(
new MessageImpl(
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/LegacyNumericRangeQueryBuilder.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/LegacyNumericRangeQueryBuilder.java
index f7aef3f477b..9f4505f056c 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/LegacyNumericRangeQueryBuilder.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/builders/LegacyNumericRangeQueryBuilder.java
@@ -16,19 +16,19 @@
*/
package org.apache.lucene.queryparser.xml.builders;
-import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
-import org.apache.lucene.util.LegacyNumericUtils;
+import org.apache.lucene.legacy.LegacyNumericRangeQuery;
+import org.apache.lucene.legacy.LegacyNumericUtils;
import org.apache.lucene.queryparser.xml.DOMUtils;
import org.apache.lucene.queryparser.xml.ParserException;
import org.apache.lucene.queryparser.xml.QueryBuilder;
import org.w3c.dom.Element;
/**
- * Creates a {@link org.apache.lucene.search.LegacyNumericRangeQuery}. The table below specifies the required
+ * Creates a {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}. The table below specifies the required
* attributes and the defaults if optional attributes are omitted. For more
* detail on what each of the attributes actually do, consult the documentation
- * for {@link org.apache.lucene.search.LegacyNumericRangeQuery}:
+ * for {@link org.apache.lucene.legacy.LegacyNumericRangeQuery}:
*
*
*
Attribute name
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestLegacyNumericQueryParser.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestLegacyNumericQueryParser.java
index c6ab7f5ffff..398923e299f 100644
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestLegacyNumericQueryParser.java
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestLegacyNumericQueryParser.java
@@ -32,15 +32,15 @@ import java.util.TimeZone;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
-import org.apache.lucene.document.LegacyDoubleField;
import org.apache.lucene.document.Field;
-import org.apache.lucene.document.FieldType.LegacyNumericType;
-import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.LegacyFloatField;
-import org.apache.lucene.document.LegacyIntField;
-import org.apache.lucene.document.LegacyLongField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.legacy.LegacyDoubleField;
+import org.apache.lucene.legacy.LegacyFieldType;
+import org.apache.lucene.legacy.LegacyFloatField;
+import org.apache.lucene.legacy.LegacyIntField;
+import org.apache.lucene.legacy.LegacyLongField;
+import org.apache.lucene.legacy.LegacyNumericType;
import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
import org.apache.lucene.queryparser.flexible.core.parser.EscapeQuerySyntax;
import org.apache.lucene.queryparser.flexible.standard.config.NumberDateFormat;
@@ -179,7 +179,7 @@ public class TestLegacyNumericQueryParser extends LuceneTestCase {
;
randomNumberMap.put(LegacyNumericType.LONG.name(), randomLong);
- randomNumberMap.put(FieldType.LegacyNumericType.INT.name(), randomInt);
+ randomNumberMap.put(LegacyNumericType.INT.name(), randomInt);
randomNumberMap.put(LegacyNumericType.FLOAT.name(), randomFloat);
randomNumberMap.put(LegacyNumericType.DOUBLE.name(), randomDouble);
randomNumberMap.put(DATE_FIELD_NAME, randomDate);
@@ -201,7 +201,7 @@ public class TestLegacyNumericQueryParser extends LuceneTestCase {
numericConfigMap.put(type.name(), new LegacyNumericConfig(PRECISION_STEP,
NUMBER_FORMAT, type));
- FieldType ft = new FieldType(LegacyIntField.TYPE_NOT_STORED);
+ LegacyFieldType ft = new LegacyFieldType(LegacyIntField.TYPE_NOT_STORED);
ft.setNumericType(type);
ft.setStored(true);
ft.setNumericPrecisionStep(PRECISION_STEP);
@@ -231,7 +231,7 @@ public class TestLegacyNumericQueryParser extends LuceneTestCase {
numericConfigMap.put(DATE_FIELD_NAME, new LegacyNumericConfig(PRECISION_STEP,
DATE_FORMAT, LegacyNumericType.LONG));
- FieldType ft = new FieldType(LegacyLongField.TYPE_NOT_STORED);
+ LegacyFieldType ft = new LegacyFieldType(LegacyLongField.TYPE_NOT_STORED);
ft.setStored(true);
ft.setNumericPrecisionStep(PRECISION_STEP);
LegacyLongField dateField = new LegacyLongField(DATE_FIELD_NAME, 0l, ft);
@@ -268,10 +268,10 @@ public class TestLegacyNumericQueryParser extends LuceneTestCase {
|| DATE_FIELD_NAME.equals(fieldName)) {
number = -number.longValue();
- } else if (FieldType.LegacyNumericType.DOUBLE.name().equals(fieldName)) {
+ } else if (LegacyNumericType.DOUBLE.name().equals(fieldName)) {
number = -number.doubleValue();
- } else if (FieldType.LegacyNumericType.FLOAT.name().equals(fieldName)) {
+ } else if (LegacyNumericType.FLOAT.name().equals(fieldName)) {
number = -number.floatValue();
} else if (LegacyNumericType.INT.name().equals(fieldName)) {
@@ -299,16 +299,16 @@ public class TestLegacyNumericQueryParser extends LuceneTestCase {
numericFieldMap.get(LegacyNumericType.DOUBLE.name()).setDoubleValue(
number.doubleValue());
- number = getNumberType(numberType, FieldType.LegacyNumericType.INT.name());
- numericFieldMap.get(FieldType.LegacyNumericType.INT.name()).setIntValue(
+ number = getNumberType(numberType, LegacyNumericType.INT.name());
+ numericFieldMap.get(LegacyNumericType.INT.name()).setIntValue(
number.intValue());
number = getNumberType(numberType, LegacyNumericType.LONG.name());
- numericFieldMap.get(FieldType.LegacyNumericType.LONG.name()).setLongValue(
+ numericFieldMap.get(LegacyNumericType.LONG.name()).setLongValue(
number.longValue());
- number = getNumberType(numberType, FieldType.LegacyNumericType.FLOAT.name());
- numericFieldMap.get(FieldType.LegacyNumericType.FLOAT.name()).setFloatValue(
+ number = getNumberType(numberType, LegacyNumericType.FLOAT.name());
+ numericFieldMap.get(LegacyNumericType.FLOAT.name()).setFloatValue(
number.floatValue());
number = getNumberType(numberType, DATE_FIELD_NAME);
@@ -456,7 +456,7 @@ public class TestLegacyNumericQueryParser extends LuceneTestCase {
StringBuilder sb = new StringBuilder();
- for (LegacyNumericType type : FieldType.LegacyNumericType.values()) {
+ for (LegacyNumericType type : LegacyNumericType.values()) {
String boundStr = numberToString(getNumberType(boundType, type.name()));
sb.append("+").append(type.name()).append(operator).append('"').append(boundStr).append('"').append(' ');
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/CoreParserTestIndexData.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/CoreParserTestIndexData.java
index 71b627e74cd..4763005d985 100644
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/CoreParserTestIndexData.java
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/CoreParserTestIndexData.java
@@ -20,10 +20,10 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.IntPoint;
-import org.apache.lucene.document.LegacyIntField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.legacy.LegacyIntField;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/builders/TestNumericRangeQueryBuilder.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/builders/TestNumericRangeQueryBuilder.java
index 8fc0641e4e7..0bc019595be 100644
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/builders/TestNumericRangeQueryBuilder.java
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/builders/TestNumericRangeQueryBuilder.java
@@ -16,9 +16,9 @@
*/
package org.apache.lucene.queryparser.xml.builders;
-import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.legacy.LegacyNumericRangeQuery;
import org.apache.lucene.queryparser.xml.ParserException;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/FloatRangeField.java b/lucene/sandbox/src/java/org/apache/lucene/document/FloatRangeField.java
new file mode 100644
index 00000000000..e138ae2057d
--- /dev/null
+++ b/lucene/sandbox/src/java/org/apache/lucene/document/FloatRangeField.java
@@ -0,0 +1,262 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.document;
+
+import org.apache.lucene.document.RangeFieldQuery.QueryType;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.NumericUtils;
+
+/**
+ * An indexed Float Range field.
+ *
+ * This field indexes dimensional ranges defined as min/max pairs. It supports
+ * up to a maximum of 4 dimensions (indexed as 8 numeric values). With 1 dimension representing a single float range,
+ * 2 dimensions representing a bounding box, 3 dimensions a bounding cube, and 4 dimensions a tesseract.
+ *
+ * Multiple values for the same field in one document is supported, and open ended ranges can be defined using
+ * {@code Float.NEGATIVE_INFINITY} and {@code Float.POSITIVE_INFINITY}.
+ *
+ *
+ * This field defines the following static factory methods for common search operations over float ranges:
+ *
+ * <ul>
+ *   <li>{@link #newIntersectsQuery newIntersectsQuery()} matches ranges that intersect the defined search range.</li>
+ *   <li>{@link #newWithinQuery newWithinQuery()} matches ranges that are within the defined search range.</li>
+ *   <li>{@link #newContainsQuery newContainsQuery()} matches ranges that contain the defined search range.</li>
+ * </ul>
+ */
+public class FloatRangeField extends Field {
+ /** stores float values so number of bytes is 4 */
+ public static final int BYTES = Float.BYTES;
+
+ /**
+ * Create a new FloatRangeField type, from min/max parallel arrays
+ *
+ * @param name field name. must not be null.
+ * @param min range min values; each entry is the min value for the dimension
+ * @param max range max values; each entry is the max value for the dimension
+ */
+ public FloatRangeField(String name, final float[] min, final float[] max) {
+ super(name, getType(min.length));
+ setRangeValues(min, max);
+ }
+
+ /** set the field type */
+ private static FieldType getType(int dimensions) {
+ if (dimensions > 4) {
+ throw new IllegalArgumentException("FloatRangeField does not support greater than 4 dimensions");
+ }
+
+ FieldType ft = new FieldType();
+ // dimensions is set as 2*dimension size (min/max per dimension)
+ ft.setDimensions(dimensions*2, BYTES);
+ ft.freeze();
+ return ft;
+ }
+
+ /**
+ * Changes the values of the field.
+ * @param min array of min values. (accepts {@code Float.NEGATIVE_INFINITY})
+ * @param max array of max values. (accepts {@code Float.POSITIVE_INFINITY})
+ * @throws IllegalArgumentException if {@code min} or {@code max} is invalid
+ */
+ public void setRangeValues(float[] min, float[] max) {
+ checkArgs(min, max);
+ if (min.length*2 != type.pointDimensionCount() || max.length*2 != type.pointDimensionCount()) {
+ throw new IllegalArgumentException("field (name=" + name + ") uses " + type.pointDimensionCount()/2
+ + " dimensions; cannot change to (incoming) " + min.length + " dimensions");
+ }
+
+ final byte[] bytes;
+ if (fieldsData == null) {
+ bytes = new byte[BYTES*2*min.length];
+ fieldsData = new BytesRef(bytes);
+ } else {
+ bytes = ((BytesRef)fieldsData).bytes;
+ }
+ verifyAndEncode(min, max, bytes);
+ }
+
+ /** validate the arguments */
+ private static void checkArgs(final float[] min, final float[] max) {
+ if (min == null || max == null || min.length == 0 || max.length == 0) {
+ throw new IllegalArgumentException("min/max range values cannot be null or empty");
+ }
+ if (min.length != max.length) {
+ throw new IllegalArgumentException("min/max ranges must agree");
+ }
+ if (min.length > 4) {
+ throw new IllegalArgumentException("FloatRangeField does not support greater than 4 dimensions");
+ }
+ }
+
+ /**
+ * Encodes the min, max ranges into a byte array
+ */
+ private static byte[] encode(float[] min, float[] max) {
+ checkArgs(min, max);
+ byte[] b = new byte[BYTES*2*min.length];
+ verifyAndEncode(min, max, b);
+ return b;
+ }
+
+ /**
+ * encode the ranges into a sortable byte array ({@code Float.NaN} not allowed)
+ *
+ * example for 4 dimensions (8 bytes per dimension value):
+ * minD1 ... minD4 | maxD1 ... maxD4
+ */
+ static void verifyAndEncode(float[] min, float[] max, byte[] bytes) {
+    for (int d=0,i=0,j=min.length*BYTES; d<min.length; ++d, i+=BYTES, j+=BYTES) {
+      if (min[d] > max[d]) {
+ throw new IllegalArgumentException("min value (" + min[d] + ") is greater than max value (" + max[d] + ")");
+ }
+ encode(min[d], bytes, i);
+ encode(max[d], bytes, j);
+ }
+ }
+
+ /** encode the given value into the byte array at the defined offset */
+ private static void encode(float val, byte[] bytes, int offset) {
+ NumericUtils.intToSortableBytes(NumericUtils.floatToSortableInt(val), bytes, offset);
+ }
+
+ /**
+ * Get the min value for the given dimension
+ * @param dimension the dimension, always positive
+ * @return the decoded min value
+ */
+ public float getMin(int dimension) {
+ if (dimension < 0 || dimension >= type.pointDimensionCount()/2) {
+ throw new IllegalArgumentException("dimension request (" + dimension +
+ ") out of bounds for field (name=" + name + " dimensions=" + type.pointDimensionCount()/2 + "). ");
+ }
+ return decodeMin(((BytesRef)fieldsData).bytes, dimension);
+ }
+
+ /**
+ * Get the max value for the given dimension
+ * @param dimension the dimension, always positive
+ * @return the decoded max value
+ */
+ public float getMax(int dimension) {
+ if (dimension < 0 || dimension >= type.pointDimensionCount()/2) {
+ throw new IllegalArgumentException("dimension request (" + dimension +
+ ") out of bounds for field (name=" + name + " dimensions=" + type.pointDimensionCount()/2 + "). ");
+ }
+ return decodeMax(((BytesRef)fieldsData).bytes, dimension);
+ }
+
+ /** decodes the min value (for the defined dimension) from the encoded input byte array */
+ static float decodeMin(byte[] b, int dimension) {
+ int offset = dimension*BYTES;
+ return NumericUtils.sortableIntToFloat(NumericUtils.sortableBytesToInt(b, offset));
+ }
+
+ /** decodes the max value (for the defined dimension) from the encoded input byte array */
+ static float decodeMax(byte[] b, int dimension) {
+ int offset = b.length/2 + dimension*BYTES;
+ return NumericUtils.sortableIntToFloat(NumericUtils.sortableBytesToInt(b, offset));
+ }
+
+ /**
+ * Create a query for matching indexed ranges that intersect the defined range.
+ * @param field field name. must not be null.
+ * @param min array of min values. (accepts {@code Float.NEGATIVE_INFINITY})
+ * @param max array of max values. (accepts {@code Float.MAX_VALUE})
+ * @return query for matching intersecting ranges (overlap, within, or contains)
+ * @throws IllegalArgumentException if {@code field} is null, {@code min} or {@code max} is invalid
+ */
+ public static Query newIntersectsQuery(String field, final float[] min, final float[] max) {
+ return new RangeFieldQuery(field, encode(min, max), min.length, QueryType.INTERSECTS) {
+ @Override
+ protected String toString(byte[] ranges, int dimension) {
+ return FloatRangeField.toString(ranges, dimension);
+ }
+ };
+ }
+
+ /**
+ * Create a query for matching indexed float ranges that contain the defined range.
+ * @param field field name. must not be null.
+ * @param min array of min values. (accepts {@code Float.NEGATIVE_INFINITY})
+ * @param max array of max values. (accepts {@code Float.POSITIVE_INFINITY})
+ * @return query for matching ranges that contain the defined range
+ * @throws IllegalArgumentException if {@code field} is null, {@code min} or {@code max} is invalid
+ */
+ public static Query newContainsQuery(String field, final float[] min, final float[] max) {
+ return new RangeFieldQuery(field, encode(min, max), min.length, QueryType.CONTAINS) {
+ @Override
+ protected String toString(byte[] ranges, int dimension) {
+ return FloatRangeField.toString(ranges, dimension);
+ }
+ };
+ }
+
+ /**
+ * Create a query for matching indexed ranges that are within the defined range.
+ * @param field field name. must not be null.
+ * @param min array of min values. (accepts {@code Float.NEGATIVE_INFINITY})
+ * @param max array of max values. (accepts {@code Float.POSITIVE_INFINITY})
+ * @return query for matching ranges within the defined range
+ * @throws IllegalArgumentException if {@code field} is null, {@code min} or {@code max} is invalid
+ */
+ public static Query newWithinQuery(String field, final float[] min, final float[] max) {
+ checkArgs(min, max);
+ return new RangeFieldQuery(field, encode(min, max), min.length, QueryType.WITHIN) {
+ @Override
+ protected String toString(byte[] ranges, int dimension) {
+ return FloatRangeField.toString(ranges, dimension);
+ }
+ };
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder();
+ sb.append(getClass().getSimpleName());
+ sb.append(" <");
+ sb.append(name);
+ sb.append(':');
+ byte[] b = ((BytesRef)fieldsData).bytes;
+ toString(b, 0);
+    for (int d=1; d<type.pointDimensionCount()/2; ++d) {
+      sb.append(' ');
+      sb.append(toString(b, d));
+    }
+    sb.append('>');
+
+ return sb.toString();
+ }
+
+ /**
+ * Returns the String representation for the range at the given dimension
+ * @param ranges the encoded ranges, never null
+ * @param dimension the dimension of interest
+ * @return The string representation for the range at the provided dimension
+ */
+ private static String toString(byte[] ranges, int dimension) {
+ return "[" + Float.toString(decodeMin(ranges, dimension)) + " : "
+ + Float.toString(decodeMax(ranges, dimension)) + "]";
+ }
+}
diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/IntRangeField.java b/lucene/sandbox/src/java/org/apache/lucene/document/IntRangeField.java
new file mode 100644
index 00000000000..c0ce61d85e3
--- /dev/null
+++ b/lucene/sandbox/src/java/org/apache/lucene/document/IntRangeField.java
@@ -0,0 +1,262 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.document;
+
+import org.apache.lucene.document.RangeFieldQuery.QueryType;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.NumericUtils;
+
+/**
+ * An indexed Integer Range field.
+ *
+ * This field indexes dimensional ranges defined as min/max pairs. It supports
+ * up to a maximum of 4 dimensions (indexed as 8 numeric values). With 1 dimension representing a single integer range,
+ * 2 dimensions representing a bounding box, 3 dimensions a bounding cube, and 4 dimensions a tesseract.
+ *
+ * Multiple values for the same field in one document is supported, and open ended ranges can be defined using
+ * {@code Integer.MIN_VALUE} and {@code Integer.MAX_VALUE}.
+ *
+ *
+ * This field defines the following static factory methods for common search operations over integer ranges:
+ * <ul>
+ *   <li>{@link #newIntersectsQuery newIntersectsQuery()} matches ranges that intersect the defined search range.
+ *   <li>{@link #newWithinQuery newWithinQuery()} matches ranges that are within the defined search range.
+ *   <li>{@link #newContainsQuery newContainsQuery()} matches ranges that contain the defined search range.
+ * </ul>
+ */
+public class IntRangeField extends Field {
+ /** stores integer values so number of bytes is 4 */
+ public static final int BYTES = Integer.BYTES;
+
+ /**
+ * Create a new IntRangeField type, from min/max parallel arrays
+ *
+ * @param name field name. must not be null.
+ * @param min range min values; each entry is the min value for the dimension
+ * @param max range max values; each entry is the max value for the dimension
+ */
+ public IntRangeField(String name, final int[] min, final int[] max) {
+ super(name, getType(min.length));
+ setRangeValues(min, max);
+ }
+
+ /** set the field type */
+ private static FieldType getType(int dimensions) {
+ if (dimensions > 4) {
+ throw new IllegalArgumentException("IntRangeField does not support greater than 4 dimensions");
+ }
+
+ FieldType ft = new FieldType();
+ // dimensions is set as 2*dimension size (min/max per dimension)
+ ft.setDimensions(dimensions*2, BYTES);
+ ft.freeze();
+ return ft;
+ }
+
+ /**
+ * Changes the values of the field.
+ * @param min array of min values. (accepts {@code Integer.MIN_VALUE})
+ * @param max array of max values. (accepts {@code Integer.MAX_VALUE})
+ * @throws IllegalArgumentException if {@code min} or {@code max} is invalid
+ */
+ public void setRangeValues(int[] min, int[] max) {
+ checkArgs(min, max);
+ if (min.length*2 != type.pointDimensionCount() || max.length*2 != type.pointDimensionCount()) {
+ throw new IllegalArgumentException("field (name=" + name + ") uses " + type.pointDimensionCount()/2
+ + " dimensions; cannot change to (incoming) " + min.length + " dimensions");
+ }
+
+ final byte[] bytes;
+ if (fieldsData == null) {
+ bytes = new byte[BYTES*2*min.length];
+ fieldsData = new BytesRef(bytes);
+ } else {
+ bytes = ((BytesRef)fieldsData).bytes;
+ }
+ verifyAndEncode(min, max, bytes);
+ }
+
+ /** validate the arguments */
+ private static void checkArgs(final int[] min, final int[] max) {
+ if (min == null || max == null || min.length == 0 || max.length == 0) {
+ throw new IllegalArgumentException("min/max range values cannot be null or empty");
+ }
+ if (min.length != max.length) {
+ throw new IllegalArgumentException("min/max ranges must agree");
+ }
+ if (min.length > 4) {
+ throw new IllegalArgumentException("IntRangeField does not support greater than 4 dimensions");
+ }
+ }
+
+ /**
+ * Encodes the min, max ranges into a byte array
+ */
+ private static byte[] encode(int[] min, int[] max) {
+ checkArgs(min, max);
+ byte[] b = new byte[BYTES*2*min.length];
+ verifyAndEncode(min, max, b);
+ return b;
+ }
+
+ /**
+ * encode the ranges into a sortable byte array ({@code Double.NaN} not allowed)
+ *
+ * example for 4 dimensions (8 bytes per dimension value):
+ * minD1 ... minD4 | maxD1 ... maxD4
+ */
+ static void verifyAndEncode(int[] min, int[] max, byte[] bytes) {
+ for (int d=0,i=0,j=min.length*BYTES; d<min.length; ++d, i+=BYTES, j+=BYTES) {
+ if (min[d] > max[d]) {
+ throw new IllegalArgumentException("min value (" + min[d] + ") is greater than max value (" + max[d] + ")");
+ }
+ encode(min[d], bytes, i);
+ encode(max[d], bytes, j);
+ }
+ }
+
+ /** encode the given value into the byte array at the defined offset */
+ private static void encode(int val, byte[] bytes, int offset) {
+ NumericUtils.intToSortableBytes(val, bytes, offset);
+ }
+
+ /**
+ * Get the min value for the given dimension
+ * @param dimension the dimension, always positive
+ * @return the decoded min value
+ */
+ public int getMin(int dimension) {
+ if (dimension < 0 || dimension >= type.pointDimensionCount()/2) {
+ throw new IllegalArgumentException("dimension request (" + dimension +
+ ") out of bounds for field (name=" + name + " dimensions=" + type.pointDimensionCount()/2 + "). ");
+ }
+ return decodeMin(((BytesRef)fieldsData).bytes, dimension);
+ }
+
+ /**
+ * Get the max value for the given dimension
+ * @param dimension the dimension, always positive
+ * @return the decoded max value
+ */
+ public int getMax(int dimension) {
+ if (dimension < 0 || dimension >= type.pointDimensionCount()/2) {
+ throw new IllegalArgumentException("dimension request (" + dimension +
+ ") out of bounds for field (name=" + name + " dimensions=" + type.pointDimensionCount()/2 + "). ");
+ }
+ return decodeMax(((BytesRef)fieldsData).bytes, dimension);
+ }
+
+ /** decodes the min value (for the defined dimension) from the encoded input byte array */
+ static int decodeMin(byte[] b, int dimension) {
+ int offset = dimension*BYTES;
+ return NumericUtils.sortableBytesToInt(b, offset);
+ }
+
+ /** decodes the max value (for the defined dimension) from the encoded input byte array */
+ static int decodeMax(byte[] b, int dimension) {
+ int offset = b.length/2 + dimension*BYTES;
+ return NumericUtils.sortableBytesToInt(b, offset);
+ }
+
+ /**
+ * Create a query for matching indexed ranges that intersect the defined range.
+ * @param field field name. must not be null.
+ * @param min array of min values. (accepts {@code Integer.MIN_VALUE})
+ * @param max array of max values. (accepts {@code Integer.MAX_VALUE})
+ * @return query for matching intersecting ranges (overlap, within, or contains)
+ * @throws IllegalArgumentException if {@code field} is null, {@code min} or {@code max} is invalid
+ */
+ public static Query newIntersectsQuery(String field, final int[] min, final int[] max) {
+ return new RangeFieldQuery(field, encode(min, max), min.length, QueryType.INTERSECTS) {
+ @Override
+ protected String toString(byte[] ranges, int dimension) {
+ return IntRangeField.toString(ranges, dimension);
+ }
+ };
+ }
+
+ /**
+ * Create a query for matching indexed ranges that contain the defined range.
+ * @param field field name. must not be null.
+ * @param min array of min values. (accepts {@code Integer.MIN_VALUE})
+ * @param max array of max values. (accepts {@code Integer.MAX_VALUE})
+ * @return query for matching ranges that contain the defined range
+ * @throws IllegalArgumentException if {@code field} is null, {@code min} or {@code max} is invalid
+ */
+ public static Query newContainsQuery(String field, final int[] min, final int[] max) {
+ return new RangeFieldQuery(field, encode(min, max), min.length, QueryType.CONTAINS) {
+ @Override
+ protected String toString(byte[] ranges, int dimension) {
+ return IntRangeField.toString(ranges, dimension);
+ }
+ };
+ }
+
+ /**
+ * Create a query for matching indexed ranges that are within the defined range.
+ * @param field field name. must not be null.
+ * @param min array of min values. (accepts {@code Integer.MIN_VALUE})
+ * @param max array of max values. (accepts {@code Integer.MAX_VALUE})
+ * @return query for matching ranges within the defined range
+ * @throws IllegalArgumentException if {@code field} is null, {@code min} or {@code max} is invalid
+ */
+ public static Query newWithinQuery(String field, final int[] min, final int[] max) {
+ checkArgs(min, max);
+ return new RangeFieldQuery(field, encode(min, max), min.length, QueryType.WITHIN) {
+ @Override
+ protected String toString(byte[] ranges, int dimension) {
+ return IntRangeField.toString(ranges, dimension);
+ }
+ };
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder();
+ sb.append(getClass().getSimpleName());
+ sb.append(" <");
+ sb.append(name);
+ sb.append(':');
+ byte[] b = ((BytesRef)fieldsData).bytes;
+ toString(b, 0);
+ for (int d=1; d<type.pointDimensionCount()/2; ++d) {
+ sb.append(' ');
+ toString(b, d);
+ }
+ sb.append('>');
+
+ return sb.toString();
+ }
+
+ /**
+ * Returns the String representation for the range at the given dimension
+ * @param ranges the encoded ranges, never null
+ * @param dimension the dimension of interest
+ * @return The string representation for the range at the provided dimension
+ */
+ private static String toString(byte[] ranges, int dimension) {
+ return "[" + Integer.toString(decodeMin(ranges, dimension)) + " : "
+ + Integer.toString(decodeMax(ranges, dimension)) + "]";
+ }
+}
diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/LongRangeField.java b/lucene/sandbox/src/java/org/apache/lucene/document/LongRangeField.java
new file mode 100644
index 00000000000..b9298b9d8d3
--- /dev/null
+++ b/lucene/sandbox/src/java/org/apache/lucene/document/LongRangeField.java
@@ -0,0 +1,260 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.document;
+
+import org.apache.lucene.document.RangeFieldQuery.QueryType;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.NumericUtils;
+
+/**
+ * An indexed Long Range field.
+ *
+ * This field indexes dimensional ranges defined as min/max pairs. It supports
+ * up to a maximum of 4 dimensions (indexed as 8 numeric values). With 1 dimension representing a single long range,
+ * 2 dimensions representing a bounding box, 3 dimensions a bounding cube, and 4 dimensions a tesseract.
+ *
+ * Multiple values for the same field in one document is supported, and open ended ranges can be defined using
+ * {@code Long.MIN_VALUE} and {@code Long.MAX_VALUE}.
+ *
+ *
+ * This field defines the following static factory methods for common search operations over long ranges:
+ * <ul>
+ *   <li>{@link #newIntersectsQuery newIntersectsQuery()} matches ranges that intersect the defined search range.
+ *   <li>{@link #newWithinQuery newWithinQuery()} matches ranges that are within the defined search range.
+ *   <li>{@link #newContainsQuery newContainsQuery()} matches ranges that contain the defined search range.
+ * </ul>
+ */
+public class LongRangeField extends Field {
+ /** stores long values so number of bytes is 8 */
+ public static final int BYTES = Long.BYTES;
+
+ /**
+ * Create a new LongRangeField type, from min/max parallel arrays
+ *
+ * @param name field name. must not be null.
+ * @param min range min values; each entry is the min value for the dimension
+ * @param max range max values; each entry is the max value for the dimension
+ */
+ public LongRangeField(String name, final long[] min, final long[] max) {
+ super(name, getType(min.length));
+ setRangeValues(min, max);
+ }
+
+ /** set the field type */
+ private static FieldType getType(int dimensions) {
+ if (dimensions > 4) {
+ throw new IllegalArgumentException("LongRangeField does not support greater than 4 dimensions");
+ }
+
+ FieldType ft = new FieldType();
+ // dimensions is set as 2*dimension size (min/max per dimension)
+ ft.setDimensions(dimensions*2, BYTES);
+ ft.freeze();
+ return ft;
+ }
+
+ /**
+ * Changes the values of the field.
+ * @param min array of min values. (accepts {@code Long.MIN_VALUE})
+ * @param max array of max values. (accepts {@code Long.MAX_VALUE})
+ * @throws IllegalArgumentException if {@code min} or {@code max} is invalid
+ */
+ public void setRangeValues(long[] min, long[] max) {
+ checkArgs(min, max);
+ if (min.length*2 != type.pointDimensionCount() || max.length*2 != type.pointDimensionCount()) {
+ throw new IllegalArgumentException("field (name=" + name + ") uses " + type.pointDimensionCount()/2
+ + " dimensions; cannot change to (incoming) " + min.length + " dimensions");
+ }
+
+ final byte[] bytes;
+ if (fieldsData == null) {
+ bytes = new byte[BYTES*2*min.length];
+ fieldsData = new BytesRef(bytes);
+ } else {
+ bytes = ((BytesRef)fieldsData).bytes;
+ }
+ verifyAndEncode(min, max, bytes);
+ }
+
+ /** validate the arguments */
+ private static void checkArgs(final long[] min, final long[] max) {
+ if (min == null || max == null || min.length == 0 || max.length == 0) {
+ throw new IllegalArgumentException("min/max range values cannot be null or empty");
+ }
+ if (min.length != max.length) {
+ throw new IllegalArgumentException("min/max ranges must agree");
+ }
+ if (min.length > 4) {
+ throw new IllegalArgumentException("LongRangeField does not support greater than 4 dimensions");
+ }
+ }
+
+ /** Encodes the min, max ranges into a byte array */
+ private static byte[] encode(long[] min, long[] max) {
+ checkArgs(min, max);
+ byte[] b = new byte[BYTES*2*min.length];
+ verifyAndEncode(min, max, b);
+ return b;
+ }
+
+ /**
+ * encode the ranges into a sortable byte array ({@code Double.NaN} not allowed)
+ *
+ * example for 4 dimensions (8 bytes per dimension value):
+ * minD1 ... minD4 | maxD1 ... maxD4
+ */
+ static void verifyAndEncode(long[] min, long[] max, byte[] bytes) {
+ for (int d=0,i=0,j=min.length*BYTES; d<min.length; ++d, i+=BYTES, j+=BYTES) {
+ if (min[d] > max[d]) {
+ throw new IllegalArgumentException("min value (" + min[d] + ") is greater than max value (" + max[d] + ")");
+ }
+ encode(min[d], bytes, i);
+ encode(max[d], bytes, j);
+ }
+ }
+
+ /** encode the given value into the byte array at the defined offset */
+ private static void encode(long val, byte[] bytes, int offset) {
+ NumericUtils.longToSortableBytes(val, bytes, offset);
+ }
+
+ /**
+ * Get the min value for the given dimension
+ * @param dimension the dimension, always positive
+ * @return the decoded min value
+ */
+ public long getMin(int dimension) {
+ if (dimension < 0 || dimension >= type.pointDimensionCount()/2) {
+ throw new IllegalArgumentException("dimension request (" + dimension +
+ ") out of bounds for field (name=" + name + " dimensions=" + type.pointDimensionCount()/2 + "). ");
+ }
+ return decodeMin(((BytesRef)fieldsData).bytes, dimension);
+ }
+
+ /**
+ * Get the max value for the given dimension
+ * @param dimension the dimension, always positive
+ * @return the decoded max value
+ */
+ public long getMax(int dimension) {
+ if (dimension < 0 || dimension >= type.pointDimensionCount()/2) {
+ throw new IllegalArgumentException("dimension request (" + dimension +
+ ") out of bounds for field (name=" + name + " dimensions=" + type.pointDimensionCount()/2 + "). ");
+ }
+ return decodeMax(((BytesRef)fieldsData).bytes, dimension);
+ }
+
+ /** decodes the min value (for the defined dimension) from the encoded input byte array */
+ static long decodeMin(byte[] b, int dimension) {
+ int offset = dimension*BYTES;
+ return NumericUtils.sortableBytesToLong(b, offset);
+ }
+
+ /** decodes the max value (for the defined dimension) from the encoded input byte array */
+ static long decodeMax(byte[] b, int dimension) {
+ int offset = b.length/2 + dimension*BYTES;
+ return NumericUtils.sortableBytesToLong(b, offset);
+ }
+
+ /**
+ * Create a query for matching indexed ranges that intersect the defined range.
+ * @param field field name. must not be null.
+ * @param min array of min values. (accepts {@code Long.MIN_VALUE})
+ * @param max array of max values. (accepts {@code Long.MAX_VALUE})
+ * @return query for matching intersecting ranges (overlap, within, or contains)
+ * @throws IllegalArgumentException if {@code field} is null, {@code min} or {@code max} is invalid
+ */
+ public static Query newIntersectsQuery(String field, final long[] min, final long[] max) {
+ return new RangeFieldQuery(field, encode(min, max), min.length, QueryType.INTERSECTS) {
+ @Override
+ protected String toString(byte[] ranges, int dimension) {
+ return LongRangeField.toString(ranges, dimension);
+ }
+ };
+ }
+
+ /**
+ * Create a query for matching indexed ranges that contain the defined range.
+ * @param field field name. must not be null.
+ * @param min array of min values. (accepts {@code Long.MIN_VALUE})
+ * @param max array of max values. (accepts {@code Long.MAX_VALUE})
+ * @return query for matching ranges that contain the defined range
+ * @throws IllegalArgumentException if {@code field} is null, {@code min} or {@code max} is invalid
+ */
+ public static Query newContainsQuery(String field, final long[] min, final long[] max) {
+ return new RangeFieldQuery(field, encode(min, max), min.length, QueryType.CONTAINS) {
+ @Override
+ protected String toString(byte[] ranges, int dimension) {
+ return LongRangeField.toString(ranges, dimension);
+ }
+ };
+ }
+
+ /**
+ * Create a query for matching indexed ranges that are within the defined range.
+ * @param field field name. must not be null.
+ * @param min array of min values. (accepts {@code Long.MIN_VALUE})
+ * @param max array of max values. (accepts {@code Long.MAX_VALUE})
+ * @return query for matching ranges within the defined range
+ * @throws IllegalArgumentException if {@code field} is null, {@code min} or {@code max} is invalid
+ */
+ public static Query newWithinQuery(String field, final long[] min, final long[] max) {
+ checkArgs(min, max);
+ return new RangeFieldQuery(field, encode(min, max), min.length, QueryType.WITHIN) {
+ @Override
+ protected String toString(byte[] ranges, int dimension) {
+ return LongRangeField.toString(ranges, dimension);
+ }
+ };
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder();
+ sb.append(getClass().getSimpleName());
+ sb.append(" <");
+ sb.append(name);
+ sb.append(':');
+ byte[] b = ((BytesRef)fieldsData).bytes;
+ toString(b, 0);
+ for (int d=1; d<type.pointDimensionCount()/2; ++d) {
+ sb.append(' ');
+ toString(b, d);
+ }
+ sb.append('>');
+
+ return sb.toString();
+ }
+
+ /**
+ * Returns the String representation for the range at the given dimension
+ * @param ranges the encoded ranges, never null
+ * @param dimension the dimension of interest
+ * @return The string representation for the range at the provided dimension
+ */
+ private static String toString(byte[] ranges, int dimension) {
+ return "[" + Long.toString(decodeMin(ranges, dimension)) + " : "
+ + Long.toString(decodeMax(ranges, dimension)) + "]";
+ }
+}
diff --git a/lucene/sandbox/src/test/org/apache/lucene/search/BaseRangeFieldQueryTestCase.java b/lucene/sandbox/src/test/org/apache/lucene/search/BaseRangeFieldQueryTestCase.java
index d9cb830c120..9d293305c70 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/search/BaseRangeFieldQueryTestCase.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/search/BaseRangeFieldQueryTestCase.java
@@ -17,7 +17,6 @@
package org.apache.lucene.search;
import java.io.IOException;
-import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
@@ -41,16 +40,18 @@ import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
/**
- * Abstract class to do basic tests for a RangeField query.
+ * Abstract class to do basic tests for a RangeField query. Testing rigor inspired by {@code BaseGeoPointTestCase}
*/
public abstract class BaseRangeFieldQueryTestCase extends LuceneTestCase {
- protected abstract Field newRangeField(double[] min, double[] max);
+ protected abstract Field newRangeField(Range box);
- protected abstract Query newIntersectsQuery(double[] min, double[] max);
+ protected abstract Query newIntersectsQuery(Range box);
- protected abstract Query newContainsQuery(double[] min, double[] max);
+ protected abstract Query newContainsQuery(Range box);
- protected abstract Query newWithinQuery(double[] min, double[] max);
+ protected abstract Query newWithinQuery(Range box);
+
+ protected abstract Range nextRange(int dimensions);
protected int dimension() {
return random().nextInt(4) + 1;
@@ -82,18 +83,18 @@ public abstract class BaseRangeFieldQueryTestCase extends LuceneTestCase {
System.out.println("TEST: numDocs=" + numDocs);
}
- Box[][] boxes = new Box[numDocs][];
+ Range[][] ranges = new Range[numDocs][];
boolean haveRealDoc = true;
nextdoc: for (int id=0; id 0 && x < 9 && haveRealDoc) {
int oldID;
int i=0;
- // don't step on missing boxes:
+ // don't step on missing ranges:
while (true) {
oldID = random().nextInt(id);
- if (Double.isNaN(boxes[oldID][0].min[0]) == false) {
+ if (ranges[oldID][0].isMissing == false) {
break;
} else if (++i > id) {
continue nextdoc;
@@ -125,11 +126,11 @@ public abstract class BaseRangeFieldQueryTestCase extends LuceneTestCase {
if (x == dimensions*2) {
// Fully identical box (use first box in case current is multivalued but old is not)
for (int d=0; d 50000) {
+ if (ranges.length > 50000) {
dir = newFSDirectory(createTempDir(getClass().getSimpleName()));
} else {
dir = newDirectory();
@@ -173,13 +174,13 @@ public abstract class BaseRangeFieldQueryTestCase extends LuceneTestCase {
Set deleted = new HashSet<>();
IndexWriter w = new IndexWriter(dir, iwc);
- for (int id=0; id < boxes.length; ++id) {
+ for (int id=0; id < ranges.length; ++id) {
Document doc = new Document();
doc.add(newStringField("id", ""+id, Field.Store.NO));
doc.add(new NumericDocValuesField("id", id));
- if (Double.isNaN(boxes[id][0].min[0]) == false) {
- for (int n=0; n 1 ? " (MultiValue) " : " ") + "should match but did not\n");
+ b.append("id=" + id + (ranges[id].length > 1 ? " (MultiValue) " : " ") + "should match but did not\n");
} else {
b.append("id=" + id + " should not match but did\n");
}
- b.append(" queryBox=" + queryBox + "\n");
- b.append(" box" + ((boxes[id].length > 1) ? "es=" : "=" ) + boxes[id][0]);
- for (int n=1; n 1) ? "es=" : "=" ) + ranges[id][0]);
+ for (int n=1; n 0 && max.length > 0
- : "test box: min/max cannot be null or empty";
- assert min.length == max.length : "test box: min/max length do not agree";
- this.min = new double[min.length];
- this.max = new double[max.length];
- for (int d=0; d other.max[d] || this.max[d] < other.min[d]) {
- // disjoint:
- return null;
- }
- }
-
- // check within
- boolean within = true;
- for (int d=0; d= other.min[d] && this.max[d] <= other.max[d]) == false) {
- // not within:
- within = false;
- break;
- }
- }
- if (within == true) {
+ protected QueryType relate(Range other) {
+ if (isDisjoint(other)) {
+ // if disjoint; return null:
+ return null;
+ } else if (isWithin(other)) {
return QueryType.WITHIN;
- }
-
- // check contains
- boolean contains = true;
- for (int d=0; d= other.max[d]) == false) {
- // not contains:
- contains = false;
- break;
- }
- }
- if (contains == true) {
+ } else if (contains(other)) {
return QueryType.CONTAINS;
}
return QueryType.INTERSECTS;
}
-
- @Override
- public String toString() {
- StringBuilder b = new StringBuilder();
- b.append("Box(");
- b.append(min[0]);
- b.append(" TO ");
- b.append(max[0]);
- for (int d=1; d 0 && max.length > 0
+ : "test box: min/max cannot be null or empty";
+ assert min.length == max.length : "test box: min/max length do not agree";
+ this.min = new double[min.length];
+ this.max = new double[max.length];
+ for (int d=0; d max[d]) {
+ // swap if max < min:
+ double temp = min[d];
+ min[d] = max[d];
+ max[d] = temp;
+ }
+ }
+ }
+
+ @Override
+ protected int numDimensions() {
+ return min.length;
+ }
+
+ @Override
+ protected Double getMin(int dim) {
+ return min[dim];
+ }
+
+ @Override
+ protected void setMin(int dim, Object val) {
+ min[dim] = (Double)val;
+ }
+
+ @Override
+ protected Double getMax(int dim) {
+ return max[dim];
+ }
+
+ @Override
+ protected void setMax(int dim, Object val) {
+ max[dim] = (Double)val;
+ }
+
+ @Override
+ protected boolean isEqual(Range other) {
+ DoubleRange o = (DoubleRange)other;
+ return Arrays.equals(min, o.min) && Arrays.equals(max, o.max);
+ }
+
+ @Override
+ protected boolean isDisjoint(Range o) {
+ DoubleRange other = (DoubleRange)o;
+ for (int d=0; d<this.min.length; ++d) {
+ if (this.min[d] > other.max[d] || this.max[d] < other.min[d]) {
+ // disjoint:
+ return true;
+ }
+ }
+ return false;
+ }
+
+ @Override
+ protected boolean isWithin(Range o) {
+ DoubleRange other = (DoubleRange)o;
+ for (int d=0; d<this.min.length; ++d) {
+ if ((this.min[d] >= other.min[d] && this.max[d] <= other.max[d]) == false) {
+ // not within:
+ return false;
+ }
+ }
+ return true;
+ }
+
+ @Override
+ protected boolean contains(Range o) {
+ DoubleRange other = (DoubleRange) o;
+ for (int d=0; d<this.min.length; ++d) {
+ if ((this.min[d] <= other.min[d] && this.max[d] >= other.max[d]) == false) {
+ // not contains:
+ return false;
+ }
+ }
+ return true;
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder b = new StringBuilder();
+ b.append("Box(");
+ b.append(min[0]);
+ b.append(" TO ");
+ b.append(max[0]);
+ for (int d=1; d 0 && max.length > 0
+ : "test box: min/max cannot be null or empty";
+ assert min.length == max.length : "test box: min/max length do not agree";
+ this.min = new float[min.length];
+ this.max = new float[max.length];
+ for (int d=0; d max[d]) {
+ // swap if max < min:
+ float temp = min[d];
+ min[d] = max[d];
+ max[d] = temp;
+ }
+ }
+ }
+
+ @Override
+ protected int numDimensions() {
+ return min.length;
+ }
+
+ @Override
+ protected Float getMin(int dim) {
+ return min[dim];
+ }
+
+ @Override
+ protected void setMin(int dim, Object val) {
+ min[dim] = (Float)val;
+ }
+
+ @Override
+ protected Float getMax(int dim) {
+ return max[dim];
+ }
+
+ @Override
+ protected void setMax(int dim, Object val) {
+ max[dim] = (Float)val;
+ }
+
+ @Override
+ protected boolean isEqual(Range other) {
+ FloatRange o = (FloatRange)other;
+ return Arrays.equals(min, o.min) && Arrays.equals(max, o.max);
+ }
+
+ @Override
+ protected boolean isDisjoint(Range o) {
+ FloatRange other = (FloatRange)o;
+ for (int d=0; d<this.min.length; ++d) {
+ if (this.min[d] > other.max[d] || this.max[d] < other.min[d]) {
+ // disjoint:
+ return true;
+ }
+ }
+ return false;
+ }
+
+ @Override
+ protected boolean isWithin(Range o) {
+ FloatRange other = (FloatRange)o;
+ for (int d=0; d<this.min.length; ++d) {
+ if ((this.min[d] >= other.min[d] && this.max[d] <= other.max[d]) == false) {
+ // not within:
+ return false;
+ }
+ }
+ return true;
+ }
+
+ @Override
+ protected boolean contains(Range o) {
+ FloatRange other = (FloatRange) o;
+ for (int d=0; d<this.min.length; ++d) {
+ if ((this.min[d] <= other.min[d] && this.max[d] >= other.max[d]) == false) {
+ // not contains:
+ return false;
+ }
+ }
+ return true;
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder b = new StringBuilder();
+ b.append("Box(");
+ b.append(min[0]);
+ b.append(" TO ");
+ b.append(max[0]);
+ for (int d=1; d 0 && max.length > 0
+ : "test box: min/max cannot be null or empty";
+ assert min.length == max.length : "test box: min/max length do not agree";
+ this.min = new int[min.length];
+ this.max = new int[max.length];
+ for (int d=0; d max[d]) {
+ // swap if max < min:
+ int temp = min[d];
+ min[d] = max[d];
+ max[d] = temp;
+ }
+ }
+ }
+
+ @Override
+ protected int numDimensions() {
+ return min.length;
+ }
+
+ @Override
+ protected Integer getMin(int dim) {
+ return min[dim];
+ }
+
+ @Override
+ protected void setMin(int dim, Object val) {
+ min[dim] = (Integer)val;
+ }
+
+ @Override
+ protected Integer getMax(int dim) {
+ return max[dim];
+ }
+
+ @Override
+ protected void setMax(int dim, Object val) {
+ max[dim] = (Integer)val;
+ }
+
+ @Override
+ protected boolean isEqual(Range other) {
+ IntRange o = (IntRange)other;
+ return Arrays.equals(min, o.min) && Arrays.equals(max, o.max);
+ }
+
+ @Override
+ protected boolean isDisjoint(Range o) {
+ IntRange other = (IntRange)o;
+ for (int d=0; d<this.min.length; ++d) {
+ if (this.min[d] > other.max[d] || this.max[d] < other.min[d]) {
+ // disjoint:
+ return true;
+ }
+ }
+ return false;
+ }
+
+ @Override
+ protected boolean isWithin(Range o) {
+ IntRange other = (IntRange)o;
+ for (int d=0; d<this.min.length; ++d) {
+ if ((this.min[d] >= other.min[d] && this.max[d] <= other.max[d]) == false) {
+ // not within:
+ return false;
+ }
+ }
+ return true;
+ }
+
+ @Override
+ protected boolean contains(Range o) {
+ IntRange other = (IntRange) o;
+ for (int d=0; d<this.min.length; ++d) {
+ if ((this.min[d] <= other.min[d] && this.max[d] >= other.max[d]) == false) {
+ // not contains:
+ return false;
+ }
+ }
+ return true;
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder b = new StringBuilder();
+ b.append("Box(");
+ b.append(min[0]);
+ b.append(" TO ");
+ b.append(max[0]);
+ for (int d=1; d 0 && max.length > 0
+ : "test box: min/max cannot be null or empty";
+ assert min.length == max.length : "test box: min/max length do not agree";
+ this.min = new long[min.length];
+ this.max = new long[max.length];
+ for (int d=0; d max[d]) {
+ // swap if max < min:
+ long temp = min[d];
+ min[d] = max[d];
+ max[d] = temp;
+ }
+ }
+ }
+
+ @Override
+ protected int numDimensions() {
+ return min.length;
+ }
+
+ @Override
+ protected Long getMin(int dim) {
+ return min[dim];
+ }
+
+ @Override
+ protected void setMin(int dim, Object val) {
+ min[dim] = (Long)val;
+ }
+
+ @Override
+ protected Long getMax(int dim) {
+ return max[dim];
+ }
+
+ @Override
+ protected void setMax(int dim, Object val) {
+ max[dim] = (Long)val;
+ }
+
+ @Override
+ protected boolean isEqual(Range other) {
+ LongRange o = (LongRange)other;
+ return Arrays.equals(min, o.min) && Arrays.equals(max, o.max);
+ }
+
+ @Override
+ protected boolean isDisjoint(Range o) {
+ LongRange other = (LongRange)o;
+ for (int d=0; d<this.min.length; ++d) {
+ if (this.min[d] > other.max[d] || this.max[d] < other.min[d]) {
+ // disjoint:
+ return true;
+ }
+ }
+ return false;
+ }
+
+ @Override
+ protected boolean isWithin(Range o) {
+ LongRange other = (LongRange)o;
+ for (int d=0; d<this.min.length; ++d) {
+ if ((this.min[d] >= other.min[d] && this.max[d] <= other.max[d]) == false) {
+ // not within:
+ return false;
+ }
+ }
+ return true;
+ }
+
+ @Override
+ protected boolean contains(Range o) {
+ LongRange other = (LongRange) o;
+ for (int d=0; d<this.min.length; ++d) {
+ if ((this.min[d] <= other.min[d] && this.max[d] >= other.max[d]) == false) {
+ // not contains:
+ return false;
+ }
+ }
+ return true;
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder b = new StringBuilder();
+ b.append("Box(");
+ b.append(min[0]);
+ b.append(" TO ");
+ b.append(max[0]);
+ for (int d=1; d
+
@@ -42,16 +43,17 @@
-
+
-
+
-
\ No newline at end of file
+
diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxStrategy.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxStrategy.java
index 63a113839b5..90e36d835db 100644
--- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxStrategy.java
+++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/bbox/BBoxStrategy.java
@@ -20,17 +20,20 @@ import org.apache.lucene.document.DoubleDocValuesField;
import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.LegacyDoubleField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Term;
+import org.apache.lucene.legacy.LegacyDoubleField;
+import org.apache.lucene.legacy.LegacyFieldType;
+import org.apache.lucene.legacy.LegacyNumericRangeQuery;
+import org.apache.lucene.legacy.LegacyNumericType;
+import org.apache.lucene.legacy.LegacyNumericUtils;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
-import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.spatial.SpatialStrategy;
@@ -39,7 +42,6 @@ import org.apache.lucene.spatial.query.SpatialOperation;
import org.apache.lucene.spatial.query.UnsupportedSpatialOperation;
import org.apache.lucene.spatial.util.DistanceToShapeValueSource;
import org.apache.lucene.util.BytesRefBuilder;
-import org.apache.lucene.util.LegacyNumericUtils;
import org.apache.lucene.util.NumericUtils;
import org.locationtech.spatial4j.context.SpatialContext;
import org.locationtech.spatial4j.shape.Point;
@@ -87,7 +89,7 @@ public class BBoxStrategy extends SpatialStrategy {
public static FieldType DEFAULT_FIELDTYPE;
@Deprecated
- public static FieldType LEGACY_FIELDTYPE;
+ public static LegacyFieldType LEGACY_FIELDTYPE;
static {
// Default: pointValues + docValues
FieldType type = new FieldType();
@@ -97,14 +99,14 @@ public class BBoxStrategy extends SpatialStrategy {
type.freeze();
DEFAULT_FIELDTYPE = type;
// Legacy default: legacyNumerics + docValues
- type = new FieldType();
- type.setIndexOptions(IndexOptions.DOCS);
- type.setNumericType(FieldType.LegacyNumericType.DOUBLE);
- type.setNumericPrecisionStep(8);// same as solr default
- type.setDocValuesType(DocValuesType.NUMERIC);//docValues
- type.setStored(false);
- type.freeze();
- LEGACY_FIELDTYPE = type;
+ LegacyFieldType legacyType = new LegacyFieldType();
+ legacyType.setIndexOptions(IndexOptions.DOCS);
+ legacyType.setNumericType(LegacyNumericType.DOUBLE);
+ legacyType.setNumericPrecisionStep(8);// same as solr default
+ legacyType.setDocValuesType(DocValuesType.NUMERIC);//docValues
+ legacyType.setStored(false);
+ legacyType.freeze();
+ LEGACY_FIELDTYPE = legacyType;
}
public static final String SUFFIX_MINX = "__minX";
@@ -130,7 +132,7 @@ public class BBoxStrategy extends SpatialStrategy {
private final boolean hasDocVals;
private final boolean hasPointVals;
// equiv to "hasLegacyNumerics":
- private final FieldType legacyNumericFieldType; // not stored; holds precision step.
+ private final LegacyFieldType legacyNumericFieldType; // not stored; holds precision step.
private final FieldType xdlFieldType;
/**
@@ -177,16 +179,17 @@ public class BBoxStrategy extends SpatialStrategy {
if ((this.hasPointVals = fieldType.pointDimensionCount() > 0)) {
numQuads++;
}
- if (fieldType.indexOptions() != IndexOptions.NONE && fieldType.numericType() != null) {
+ if (fieldType.indexOptions() != IndexOptions.NONE && fieldType instanceof LegacyFieldType && ((LegacyFieldType)fieldType).numericType() != null) {
if (hasPointVals) {
throw new IllegalArgumentException("pointValues and LegacyNumericType are mutually exclusive");
}
- if (fieldType.numericType() != FieldType.LegacyNumericType.DOUBLE) {
- throw new IllegalArgumentException(getClass() + " does not support " + fieldType.numericType());
+ final LegacyFieldType legacyType = (LegacyFieldType) fieldType;
+ if (legacyType.numericType() != LegacyNumericType.DOUBLE) {
+ throw new IllegalArgumentException(getClass() + " does not support " + legacyType.numericType());
}
numQuads++;
- legacyNumericFieldType = new FieldType(LegacyDoubleField.TYPE_NOT_STORED);
- legacyNumericFieldType.setNumericPrecisionStep(fieldType.numericPrecisionStep());
+ legacyNumericFieldType = new LegacyFieldType(LegacyDoubleField.TYPE_NOT_STORED);
+ legacyNumericFieldType.setNumericPrecisionStep(legacyType.numericPrecisionStep());
legacyNumericFieldType.freeze();
} else {
legacyNumericFieldType = null;
diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/BytesRefIteratorTokenStream.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/BytesRefIteratorTokenStream.java
index e724ab05fe6..757e2bd38f7 100644
--- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/BytesRefIteratorTokenStream.java
+++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/BytesRefIteratorTokenStream.java
@@ -26,7 +26,7 @@ import org.apache.lucene.util.BytesRefIterator;
/**
* A TokenStream used internally by {@link org.apache.lucene.spatial.prefix.PrefixTreeStrategy}.
*
- * This is modelled after {@link org.apache.lucene.analysis.LegacyNumericTokenStream}.
+ * This is modelled after {@link org.apache.lucene.legacy.LegacyNumericTokenStream}.
*
* @lucene.internal
*/
diff --git a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/vector/PointVectorStrategy.java b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/vector/PointVectorStrategy.java
index 197547c1d56..59aff490916 100644
--- a/lucene/spatial-extras/src/java/org/apache/lucene/spatial/vector/PointVectorStrategy.java
+++ b/lucene/spatial-extras/src/java/org/apache/lucene/spatial/vector/PointVectorStrategy.java
@@ -20,16 +20,18 @@ import org.apache.lucene.document.DoubleDocValuesField;
import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.LegacyDoubleField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.legacy.LegacyDoubleField;
+import org.apache.lucene.legacy.LegacyFieldType;
+import org.apache.lucene.legacy.LegacyNumericRangeQuery;
+import org.apache.lucene.legacy.LegacyNumericType;
import org.apache.lucene.queries.function.FunctionRangeQuery;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
-import org.apache.lucene.search.LegacyNumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.SpatialStrategy;
import org.apache.lucene.spatial.query.SpatialArgs;
@@ -85,7 +87,7 @@ public class PointVectorStrategy extends SpatialStrategy {
public static FieldType DEFAULT_FIELDTYPE;
@Deprecated
- public static FieldType LEGACY_FIELDTYPE;
+ public static LegacyFieldType LEGACY_FIELDTYPE;
static {
// Default: pointValues + docValues
FieldType type = new FieldType();
@@ -95,14 +97,14 @@ public class PointVectorStrategy extends SpatialStrategy {
type.freeze();
DEFAULT_FIELDTYPE = type;
// Legacy default: legacyNumerics
- type = new FieldType();
- type.setIndexOptions(IndexOptions.DOCS);
- type.setNumericType(FieldType.LegacyNumericType.DOUBLE);
- type.setNumericPrecisionStep(8);// same as solr default
- type.setDocValuesType(DocValuesType.NONE);//no docValues!
- type.setStored(false);
- type.freeze();
- LEGACY_FIELDTYPE = type;
+ LegacyFieldType legacyType = new LegacyFieldType();
+ legacyType.setIndexOptions(IndexOptions.DOCS);
+ legacyType.setNumericType(LegacyNumericType.DOUBLE);
+ legacyType.setNumericPrecisionStep(8);// same as solr default
+ legacyType.setDocValuesType(DocValuesType.NONE);//no docValues!
+ legacyType.setStored(false);
+ legacyType.freeze();
+ LEGACY_FIELDTYPE = legacyType;
}
public static final String SUFFIX_X = "__x";
@@ -116,7 +118,7 @@ public class PointVectorStrategy extends SpatialStrategy {
private final boolean hasDocVals;
private final boolean hasPointVals;
// equiv to "hasLegacyNumerics":
- private final FieldType legacyNumericFieldType; // not stored; holds precision step.
+ private final LegacyFieldType legacyNumericFieldType; // not stored; holds precision step.
/**
* Create a new {@link PointVectorStrategy} instance that uses {@link DoublePoint} and {@link DoublePoint#newRangeQuery}
@@ -157,16 +159,17 @@ public class PointVectorStrategy extends SpatialStrategy {
if ((this.hasPointVals = fieldType.pointDimensionCount() > 0)) {
numPairs++;
}
- if (fieldType.indexOptions() != IndexOptions.NONE && fieldType.numericType() != null) {
+ if (fieldType.indexOptions() != IndexOptions.NONE && fieldType instanceof LegacyFieldType && ((LegacyFieldType)fieldType).numericType() != null) {
if (hasPointVals) {
throw new IllegalArgumentException("pointValues and LegacyNumericType are mutually exclusive");
}
- if (fieldType.numericType() != FieldType.LegacyNumericType.DOUBLE) {
- throw new IllegalArgumentException(getClass() + " does not support " + fieldType.numericType());
+ final LegacyFieldType legacyType = (LegacyFieldType) fieldType;
+ if (legacyType.numericType() != LegacyNumericType.DOUBLE) {
+ throw new IllegalArgumentException(getClass() + " does not support " + legacyType.numericType());
}
numPairs++;
- legacyNumericFieldType = new FieldType(LegacyDoubleField.TYPE_NOT_STORED);
- legacyNumericFieldType.setNumericPrecisionStep(fieldType.numericPrecisionStep());
+ legacyNumericFieldType = new LegacyFieldType(LegacyDoubleField.TYPE_NOT_STORED);
+ legacyNumericFieldType.setNumericPrecisionStep(legacyType.numericPrecisionStep());
legacyNumericFieldType.freeze();
} else {
legacyNumericFieldType = null;
diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/bbox/TestBBoxStrategy.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/bbox/TestBBoxStrategy.java
index 01e925926d2..20df7305cbe 100644
--- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/bbox/TestBBoxStrategy.java
+++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/bbox/TestBBoxStrategy.java
@@ -22,6 +22,7 @@ import com.carrotsearch.randomizedtesting.annotations.Repeat;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.legacy.LegacyFieldType;
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.SpatialMatchConcern;
import org.apache.lucene.spatial.prefix.RandomSpatialOpStrategyTestCase;
@@ -100,7 +101,12 @@ public class TestBBoxStrategy extends RandomSpatialOpStrategyTestCase {
}
//test we can disable docValues for predicate tests
if (random().nextBoolean()) {
- FieldType fieldType = new FieldType(((BBoxStrategy)strategy).getFieldType());
+ FieldType fieldType = ((BBoxStrategy)strategy).getFieldType();
+ if (fieldType instanceof LegacyFieldType) {
+ fieldType = new LegacyFieldType((LegacyFieldType)fieldType);
+ } else {
+ fieldType = new FieldType(fieldType);
+ }
fieldType.setDocValuesType(DocValuesType.NONE);
strategy = new BBoxStrategy(ctx, strategy.getFieldName(), fieldType);
}
diff --git a/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java b/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java
index 1ff9470fb97..d15476ae7f3 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java
@@ -838,7 +838,7 @@ public class MockDirectoryWrapper extends BaseDirectoryWrapper {
}
// RuntimeException instead of IOException because
// super() does not throw IOException currently:
- throw new RuntimeException("MockDirectoryWrapper: cannot close: there are still open files: " + openFiles, cause);
+ throw new RuntimeException("MockDirectoryWrapper: cannot close: there are still " + openFiles.size() + " open files: " + openFiles, cause);
}
if (openLocks.size() > 0) {
Exception cause = null;
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
index b63216085b3..19fcb3bfffb 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
@@ -177,11 +177,14 @@ public final class TestUtil {
assert hasNext;
T v = iterator.next();
assert allowNull || v != null;
- try {
- iterator.remove();
- throw new AssertionError("broken iterator (supports remove): " + iterator);
- } catch (UnsupportedOperationException expected) {
- // ok
+ // for the first element, check that remove is not supported
+ if (i == 0) {
+ try {
+ iterator.remove();
+ throw new AssertionError("broken iterator (supports remove): " + iterator);
+ } catch (UnsupportedOperationException expected) {
+ // ok
+ }
}
}
assert !iterator.hasNext();
diff --git a/lucene/tools/junit4/tests.policy b/lucene/tools/junit4/tests.policy
index f1d8f106dc2..2a623b70cea 100644
--- a/lucene/tools/junit4/tests.policy
+++ b/lucene/tools/junit4/tests.policy
@@ -28,10 +28,6 @@ grant {
// should be enclosed within common.dir, but just in case:
permission java.io.FilePermission "${junit4.childvm.cwd}", "read";
- // jenkins wants to read outside its sandbox, to use a special linedocs file.
- // this is best effort and not really supported.
- permission java.io.FilePermission "/home/jenkins/lucene-data/enwiki.random.lines.txt", "read";
-
// write only to sandbox
permission java.io.FilePermission "${junit4.childvm.cwd}${/}temp", "read,write,delete";
permission java.io.FilePermission "${junit4.childvm.cwd}${/}temp${/}-", "read,write,delete";
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 68834576ad4..519472616db 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -50,6 +50,23 @@ Optimizations
check on every request and move connection lifecycle management towards the client.
(Ryan Zezeski, Mark Miller, Shawn Heisey, Steve Davids)
+================== 6.3.0 ==================
+
+Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
+
+Versions of Major Components
+---------------------
+Apache Tika 1.13
+Carrot2 3.12.0
+Velocity 1.7 and Velocity Tools 2.0
+Apache UIMA 2.3.1
+Apache ZooKeeper 3.4.6
+Jetty 9.3.8.v20160314
+
+
+(No Changes)
+
+
================== 6.2.0 ==================
Versions of Major Components
@@ -119,6 +136,11 @@ New Features
* SOLR-6465: CDCR: fall back to whole-index replication when tlogs are insufficient.
(Noble Paul, Renaud Delbru, shalin)
+* SOLR-9320: A REPLACENODE command to decommission an existing node with another new node
+ (noble, Nitin Sharma, Varun Thacker)
+
+* SOLR-9318: A DELETENODE command to delete all replicas in that node (noble, Nitin Sharma, Varun Thacker)
+
Bug Fixes
----------------------
@@ -190,10 +212,32 @@ Bug Fixes
* SOLR-8379: UI Cloud->Tree view now shows .txt files correctly (Alexandre Rafalovitch via janhoy)
+* SOLR-9003: New Admin UI's Dataimport screen now correctly displays DIH Debug output (Alexandre Rafalovitch)
+
* SOLR-9308: Fix distributed RTG to forward request params, fixes fq and non-default fl params (hossman)
* SOLR-9179: NPE in IndexSchema using IBM JDK (noble, Colvin Cowie)
+* SOLR-9397: Config API does not support adding caches (noble)
+
+* SOLR-9405: ConcurrentModificationException in ZkStateReader.getStateWatchers.
+ (Alan Woodward, Edward Ribeiro, shalin)
+
+* SOLR-9232: Admin UI now fully implements Swap Cores interface (Alexandre Rafalovitch)
+
+* SOLR-8715: Admin UI's Schema screen now works for fields with stored=false and some content indexed (Alexandre Rafalovitch)
+
+* SOLR-8911: In Admin UI, enable scrolling for overflowing Versions and JVM property values (Alexandre Rafalovitch)
+
+* SOLR-9002: Admin UI now correctly displays json and text files in the collection/Files screen (Upayavira, Alexandre Rafalovitch)
+
+* SOLR-8993: Admin UI now correctly supports multiple DIH handler end-points (Upayavira, Alexandre Rafalovitch)
+
+* SOLR-9032: Admin UI now correctly implements Create Alias command (Upayavira, Alexandre Rafalovitch)
+
+* SOLR-9391: LBHttpSolrClient.request now correctly returns Rsp.server when
+ previously skipped servers were successfully tried. (Christine Poerschke)
+
Optimizations
----------------------
@@ -249,6 +293,21 @@ Other Changes
* SOLR-9367: Improved TestInjection's randomization logic to use LuceneTestCase.random() (hossman)
+* SOLR-9331: Remove ReRankQuery's length constructor argument and member. (Christine Poerschke)
+
+* SOLR-9092: For the delete replica command we attempt to send the core admin delete request only
+ if that node is actually up. (Jessica Cheng Mallet, Varun Thacker)
+
+* SOLR-9410: Make ReRankQParserPlugin's private ReRankWeight a public class of its own. (Christine Poerschke)
+
+* SOLR-9404: Refactor move/renames in JSON FacetProcessor and FacetFieldProcessor. (David Smiley)
+
+* SOLR-9421: Refactored out OverseerCollectionMessageHandler to smaller classes (noble)
+
+* SOLR-8643: BlockJoinFacetComponent is substituted by BlockJoinFacetDocSetComponent. It doesn't need to change solrconfig.xml (Mikhail Khludnev)
+
+* SOLR-8644: Test asserts that block join facets work with parent level fq exclusions. (Dr. Oleg Savrasov via Mikhail Khludnev)
+
================== 6.1.0 ==================
Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/AnalyticsParsers.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/AnalyticsParsers.java
index 7a7e697d059..aadb9e2d4ce 100644
--- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/AnalyticsParsers.java
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/AnalyticsParsers.java
@@ -20,8 +20,8 @@ import java.io.IOException;
import java.time.Instant;
import java.util.Arrays;
+import org.apache.lucene.legacy.LegacyNumericUtils;
import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.LegacyNumericUtils;
import org.apache.lucene.util.NumericUtils;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.TrieDateField;
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/valuesource/DateFieldSource.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/valuesource/DateFieldSource.java
index 4d66e0025bc..22dde4c00ab 100644
--- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/valuesource/DateFieldSource.java
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/valuesource/DateFieldSource.java
@@ -24,12 +24,12 @@ import java.util.Map;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.legacy.LegacyNumericUtils;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.docvalues.LongDocValues;
import org.apache.lucene.queries.function.valuesource.LongFieldSource;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.LegacyNumericUtils;
import org.apache.lucene.util.mutable.MutableValue;
import org.apache.lucene.util.mutable.MutableValueDate;
diff --git a/solr/core/src/java/org/apache/solr/cloud/AddReplicaCmd.java b/solr/core/src/java/org/apache/solr/cloud/AddReplicaCmd.java
new file mode 100644
index 00000000000..6bb33508edb
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/cloud/AddReplicaCmd.java
@@ -0,0 +1,192 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud;
+
+
+import java.lang.invoke.MethodHandles;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.params.CoreAdminParams;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.ShardParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.handler.component.ShardHandler;
+import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.solr.cloud.Assign.getNodesForNewReplicas;
+import static org.apache.solr.cloud.OverseerCollectionMessageHandler.COLL_CONF;
+import static org.apache.solr.cloud.OverseerCollectionMessageHandler.SKIP_CREATE_REPLICA_IN_CLUSTER_STATE;
+import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.CORE_NAME_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICA;
+import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
+
+public class AddReplicaCmd implements OverseerCollectionMessageHandler.Cmd {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+ private final OverseerCollectionMessageHandler ocmh;
+
+ public AddReplicaCmd(OverseerCollectionMessageHandler ocmh) {
+ this.ocmh = ocmh;
+ }
+
+ @Override
+ public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception {
+ addReplica(ocmh.zkStateReader.getClusterState(), message, results, null);
+ }
+
+ ZkNodeProps addReplica(ClusterState clusterState, ZkNodeProps message, NamedList results, Runnable onComplete)
+ throws KeeperException, InterruptedException {
+ log.info("addReplica() : {}", Utils.toJSONString(message));
+ String collection = message.getStr(COLLECTION_PROP);
+ String node = message.getStr(CoreAdminParams.NODE);
+ String shard = message.getStr(SHARD_ID_PROP);
+ String coreName = message.getStr(CoreAdminParams.NAME);
+ boolean parallel = message.getBool("parallel", false);
+ if (StringUtils.isBlank(coreName)) {
+ coreName = message.getStr(CoreAdminParams.PROPERTY_PREFIX + CoreAdminParams.NAME);
+ }
+
+ final String asyncId = message.getStr(ASYNC);
+
+ DocCollection coll = clusterState.getCollection(collection);
+ if (coll == null) {
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Collection: " + collection + " does not exist");
+ }
+ if (coll.getSlice(shard) == null) {
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+ "Collection: " + collection + " shard: " + shard + " does not exist");
+ }
+ ShardHandler shardHandler = ocmh.shardHandlerFactory.getShardHandler();
+ boolean skipCreateReplicaInClusterState = message.getBool(SKIP_CREATE_REPLICA_IN_CLUSTER_STATE, false);
+
+ // Kind of unnecessary, but it does put the logic of whether to override maxShardsPerNode in one place.
+ if (!skipCreateReplicaInClusterState) {
+ node = getNodesForNewReplicas(clusterState, collection, shard, 1, node,
+ ocmh.overseer.getZkController().getCoreContainer()).get(0).nodeName;
+ }
+ log.info("Node Identified {} for creating new replica", node);
+
+ if (!clusterState.liveNodesContain(node)) {
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Node: " + node + " is not live");
+ }
+ if (coreName == null) {
+ coreName = Assign.buildCoreName(coll, shard);
+ } else if (!skipCreateReplicaInClusterState) {
+ //Validate that the core name is unique in that collection
+ for (Slice slice : coll.getSlices()) {
+ for (Replica replica : slice.getReplicas()) {
+ String replicaCoreName = replica.getStr(CORE_NAME_PROP);
+ if (coreName.equals(replicaCoreName)) {
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Another replica with the same core name already exists" +
+ " for this collection");
+ }
+ }
+ }
+ }
+ ModifiableSolrParams params = new ModifiableSolrParams();
+
+ ZkStateReader zkStateReader = ocmh.zkStateReader;
+ if (!Overseer.isLegacy(zkStateReader)) {
+ if (!skipCreateReplicaInClusterState) {
+ ZkNodeProps props = new ZkNodeProps(
+ Overseer.QUEUE_OPERATION, ADDREPLICA.toLower(),
+ ZkStateReader.COLLECTION_PROP, collection,
+ ZkStateReader.SHARD_ID_PROP, shard,
+ ZkStateReader.CORE_NAME_PROP, coreName,
+ ZkStateReader.STATE_PROP, Replica.State.DOWN.toString(),
+ ZkStateReader.BASE_URL_PROP, zkStateReader.getBaseUrlForNodeName(node),
+ ZkStateReader.NODE_NAME_PROP, node);
+ Overseer.getStateUpdateQueue(zkStateReader.getZkClient()).offer(Utils.toJSON(props));
+ }
+ params.set(CoreAdminParams.CORE_NODE_NAME,
+ ocmh.waitToSeeReplicasInState(collection, Collections.singletonList(coreName)).get(coreName).getName());
+ }
+
+ String configName = zkStateReader.readConfigName(collection);
+ String routeKey = message.getStr(ShardParams._ROUTE_);
+ String dataDir = message.getStr(CoreAdminParams.DATA_DIR);
+ String instanceDir = message.getStr(CoreAdminParams.INSTANCE_DIR);
+
+ params.set(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.CREATE.toString());
+ params.set(CoreAdminParams.NAME, coreName);
+ params.set(COLL_CONF, configName);
+ params.set(CoreAdminParams.COLLECTION, collection);
+ if (shard != null) {
+ params.set(CoreAdminParams.SHARD, shard);
+ } else if (routeKey != null) {
+ Collection slices = coll.getRouter().getSearchSlicesSingle(routeKey, null, coll);
+ if (slices.isEmpty()) {
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "No active shard serving _route_=" + routeKey + " found");
+ } else {
+ params.set(CoreAdminParams.SHARD, slices.iterator().next().getName());
+ }
+ } else {
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Specify either 'shard' or _route_ param");
+ }
+ if (dataDir != null) {
+ params.set(CoreAdminParams.DATA_DIR, dataDir);
+ }
+ if (instanceDir != null) {
+ params.set(CoreAdminParams.INSTANCE_DIR, instanceDir);
+ }
+ ocmh.addPropertyParams(message, params);
+
+ // For tracking async calls.
+ Map requestMap = new HashMap<>();
+ ocmh.sendShardRequest(node, params, shardHandler, asyncId, requestMap);
+
+ final String fnode = node;
+ final String fcoreName = coreName;
+
+ Runnable runnable = () -> {
+ ocmh.processResponses(results, shardHandler, true, "ADDREPLICA failed to create replica", asyncId, requestMap);
+ ocmh.waitForCoreNodeName(collection, fnode, fcoreName);
+ if (onComplete != null) onComplete.run();
+ };
+
+ if (!parallel) {
+ runnable.run();
+ } else {
+ ocmh.tpe.submit(runnable);
+ }
+
+
+ return new ZkNodeProps(
+ ZkStateReader.COLLECTION_PROP, collection,
+ ZkStateReader.SHARD_ID_PROP, shard,
+ ZkStateReader.CORE_NAME_PROP, coreName,
+ ZkStateReader.NODE_NAME_PROP, node
+ );
+ }
+}
diff --git a/solr/core/src/java/org/apache/solr/cloud/BackupCmd.java b/solr/core/src/java/org/apache/solr/cloud/BackupCmd.java
new file mode 100644
index 00000000000..679cb07ac2e
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/cloud/BackupCmd.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.cloud;
+
+import java.lang.invoke.MethodHandles;
+import java.net.URI;
+import java.time.Instant;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Properties;
+
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.params.CoreAdminParams;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.backup.BackupManager;
+import org.apache.solr.core.backup.repository.BackupRepository;
+import org.apache.solr.handler.component.ShardHandler;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.solr.cloud.OverseerCollectionMessageHandler.COLL_CONF;
+import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.CORE_NAME_PROP;
+import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
+import static org.apache.solr.common.params.CommonParams.NAME;
+
+public class BackupCmd implements OverseerCollectionMessageHandler.Cmd {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+ private final OverseerCollectionMessageHandler ocmh;
+
+ public BackupCmd(OverseerCollectionMessageHandler ocmh) {
+ this.ocmh = ocmh;
+ }
+
+ @Override
+ public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception {
+ String collectionName = message.getStr(COLLECTION_PROP);
+ String backupName = message.getStr(NAME);
+ ShardHandler shardHandler = ocmh.shardHandlerFactory.getShardHandler();
+ String asyncId = message.getStr(ASYNC);
+ String repo = message.getStr(CoreAdminParams.BACKUP_REPOSITORY);
+ String location = message.getStr(CoreAdminParams.BACKUP_LOCATION);
+
+ Map requestMap = new HashMap<>();
+ Instant startTime = Instant.now();
+
+ CoreContainer cc = ocmh.overseer.getZkController().getCoreContainer();
+ BackupRepository repository = cc.newBackupRepository(Optional.ofNullable(repo));
+ BackupManager backupMgr = new BackupManager(repository, ocmh.zkStateReader, collectionName);
+
+ // Backup location
+ URI backupPath = repository.createURI(location, backupName);
+
+ //Validating if the directory already exists.
+ if (repository.exists(backupPath)) {
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "The backup directory already exists: " + backupPath);
+ }
+
+ // Create a directory to store backup details.
+ repository.createDirectory(backupPath);
+
+ log.info("Starting backup of collection={} with backupName={} at location={}", collectionName, backupName,
+ backupPath);
+
+ for (Slice slice : ocmh.zkStateReader.getClusterState().getCollection(collectionName).getActiveSlices()) {
+ Replica replica = slice.getLeader();
+
+ String coreName = replica.getStr(CORE_NAME_PROP);
+
+ ModifiableSolrParams params = new ModifiableSolrParams();
+ params.set(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.BACKUPCORE.toString());
+ params.set(NAME, slice.getName());
+ params.set(CoreAdminParams.BACKUP_REPOSITORY, repo);
+ params.set(CoreAdminParams.BACKUP_LOCATION, backupPath.getPath()); // note: index dir will be here then the "snapshot." + slice name
+ params.set(CORE_NAME_PROP, coreName);
+
+ ocmh.sendShardRequest(replica.getNodeName(), params, shardHandler, asyncId, requestMap);
+ log.debug("Sent backup request to core={} for backupName={}", coreName, backupName);
+ }
+ log.debug("Sent backup requests to all shard leaders for backupName={}", backupName);
+
+ ocmh.processResponses(results, shardHandler, true, "Could not backup all replicas", asyncId, requestMap);
+
+ log.info("Starting to backup ZK data for backupName={}", backupName);
+
+ //Download the configs
+ String configName = ocmh.zkStateReader.readConfigName(collectionName);
+ backupMgr.downloadConfigDir(location, backupName, configName);
+
+ //Save the collection's state. Can be part of the monolithic clusterstate.json or a individual state.json
+ //Since we don't want to distinguish we extract the state and back it up as a separate json
+ DocCollection collectionState = ocmh.zkStateReader.getClusterState().getCollection(collectionName);
+ backupMgr.writeCollectionState(location, backupName, collectionName, collectionState);
+
+ Properties properties = new Properties();
+
+ properties.put(BackupManager.BACKUP_NAME_PROP, backupName);
+ properties.put(BackupManager.COLLECTION_NAME_PROP, collectionName);
+ properties.put(COLL_CONF, configName);
+ properties.put(BackupManager.START_TIME_PROP, startTime.toString());
+ //TODO: Add MD5 of the configset. If during restore the same name configset exists then we can compare checksums to see if they are the same.
+ //if they are not the same then we can throw an error or have an 'overwriteConfig' flag
+ //TODO save numDocs for the shardLeader. We can use it to sanity check the restore.
+
+ backupMgr.writeBackupProperties(location, backupName, properties);
+
+ log.info("Completed backing up ZK data for backupName={}", backupName);
+ }
+}
diff --git a/solr/core/src/java/org/apache/solr/cloud/CreateAliasCmd.java b/solr/core/src/java/org/apache/solr/cloud/CreateAliasCmd.java
new file mode 100644
index 00000000000..b966ebdd769
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/cloud/CreateAliasCmd.java
@@ -0,0 +1,101 @@
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.cloud;
+
+import java.lang.invoke.MethodHandles;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.solr.cloud.OverseerCollectionMessageHandler.Cmd;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.Aliases;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.util.TimeOut;
+import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.solr.common.params.CommonParams.NAME;
+
+
+public class CreateAliasCmd implements Cmd {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private final OverseerCollectionMessageHandler ocmh;
+
+ public CreateAliasCmd(OverseerCollectionMessageHandler ocmh) {
+ this.ocmh = ocmh;
+ }
+
+ @Override
+ public void call(ClusterState state, ZkNodeProps message, NamedList results)
+ throws Exception {
+ String aliasName = message.getStr(NAME);
+ String collections = message.getStr("collections");
+
+ Map<String, Map<String, String>> newAliasesMap = new HashMap<>();
+ Map<String, String> newCollectionAliasesMap = new HashMap<>();
+ ZkStateReader zkStateReader = ocmh.zkStateReader;
+ Map<String, String> prevColAliases = zkStateReader.getAliases().getCollectionAliasMap();
+ if (prevColAliases != null) {
+ newCollectionAliasesMap.putAll(prevColAliases);
+ }
+ newCollectionAliasesMap.put(aliasName, collections);
+ newAliasesMap.put("collection", newCollectionAliasesMap);
+ Aliases newAliases = new Aliases(newAliasesMap);
+ byte[] jsonBytes = null;
+ if (newAliases.collectionAliasSize() > 0) { // only sub map right now
+ jsonBytes = Utils.toJSON(newAliases.getAliasMap());
+ }
+ try {
+ zkStateReader.getZkClient().setData(ZkStateReader.ALIASES, jsonBytes, true);
+
+ checkForAlias(aliasName, collections);
+ // some fudge for other nodes
+ Thread.sleep(100);
+ } catch (KeeperException e) {
+ log.error("", e);
+ throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
+ } catch (InterruptedException e) {
+ log.warn("", e);
+ throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
+ }
+ }
+
+ private void checkForAlias(String name, String value) {
+
+ TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS);
+ boolean success = false;
+ Aliases aliases;
+ while (!timeout.hasTimedOut()) {
+ aliases = ocmh.zkStateReader.getAliases();
+ String collections = aliases.getCollectionAlias(name);
+ if (collections != null && collections.equals(value)) {
+ success = true;
+ break;
+ }
+ }
+ if (!success) {
+ log.warn("Timeout waiting to be notified of Alias change...");
+ }
+ }
+}
diff --git a/solr/core/src/java/org/apache/solr/cloud/CreateCollectionCmd.java b/solr/core/src/java/org/apache/solr/cloud/CreateCollectionCmd.java
new file mode 100644
index 00000000000..7f28600498f
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/cloud/CreateCollectionCmd.java
@@ -0,0 +1,291 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud;
+
+
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.solr.cloud.OverseerCollectionMessageHandler.Cmd;
+import org.apache.solr.cloud.overseer.ClusterStateMutator;
+import org.apache.solr.cloud.rule.ReplicaAssigner;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocRouter;
+import org.apache.solr.common.cloud.ImplicitDocRouter;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.ZkConfigManager;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.params.CoreAdminParams;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.SimpleOrderedMap;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.handler.component.ShardHandler;
+import org.apache.solr.handler.component.ShardRequest;
+import org.apache.solr.util.TimeOut;
+import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.solr.cloud.OverseerCollectionMessageHandler.COLL_CONF;
+import static org.apache.solr.cloud.OverseerCollectionMessageHandler.CREATE_NODE_SET;
+import static org.apache.solr.cloud.OverseerCollectionMessageHandler.NUM_SLICES;
+import static org.apache.solr.cloud.OverseerCollectionMessageHandler.RANDOM;
+import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE;
+import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICA;
+import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
+import static org.apache.solr.common.params.CommonParams.NAME;
+import static org.apache.solr.common.util.StrUtils.formatString;
+
+public class CreateCollectionCmd implements Cmd {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private final OverseerCollectionMessageHandler ocmh;
+
+ public CreateCollectionCmd(OverseerCollectionMessageHandler ocmh) {
+ this.ocmh = ocmh;
+ }
+
+ @Override
+ public void call(ClusterState clusterState, ZkNodeProps message, NamedList results) throws Exception {
+ final String collectionName = message.getStr(NAME);
+ log.info("Create collection {}", collectionName);
+ if (clusterState.hasCollection(collectionName)) {
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "collection already exists: " + collectionName);
+ }
+
+ String configName = getConfigName(collectionName, message);
+ if (configName == null) {
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "No config set found to associate with the collection.");
+ }
+
+ ocmh.validateConfigOrThrowSolrException(configName);
+
+
+ try {
+ // look at the replication factor and see if it matches reality
+ // if it does not, find best nodes to create more cores
+
+ int repFactor = message.getInt(REPLICATION_FACTOR, 1);
+
+ ShardHandler shardHandler = ocmh.shardHandlerFactory.getShardHandler();
+ final String async = message.getStr(ASYNC);
+
+ Integer numSlices = message.getInt(NUM_SLICES, null);
+ String router = message.getStr("router.name", DocRouter.DEFAULT_NAME);
+ List<String> shardNames = new ArrayList<>();
+ if(ImplicitDocRouter.NAME.equals(router)){
+ ClusterStateMutator.getShardNames(shardNames, message.getStr("shards", null));
+ numSlices = shardNames.size();
+ } else {
+ if (numSlices == null ) {
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, NUM_SLICES + " is a required param (when using CompositeId router).");
+ }
+ ClusterStateMutator.getShardNames(numSlices, shardNames);
+ }
+
+ int maxShardsPerNode = message.getInt(MAX_SHARDS_PER_NODE, 1);
+
+ if (repFactor <= 0) {
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, REPLICATION_FACTOR + " must be greater than 0");
+ }
+
+ if (numSlices <= 0) {
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, NUM_SLICES + " must be > 0");
+ }
+
+ // we need to look at every node and see how many cores it serves
+ // add our new cores to existing nodes serving the least number of cores
+ // but (for now) require that each core goes on a distinct node.
+
+ final List<String> nodeList = OverseerCollectionMessageHandler.getLiveOrLiveAndCreateNodeSetList(clusterState.getLiveNodes(), message, RANDOM);
+ Map<ReplicaAssigner.Position, String> positionVsNodes;
+ if (nodeList.isEmpty()) {
+ log.warn("It is unusual to create a collection ("+collectionName+") without cores.");
+
+ positionVsNodes = new HashMap<>();
+ } else {
+ if (repFactor > nodeList.size()) {
+ log.warn("Specified "
+ + REPLICATION_FACTOR
+ + " of "
+ + repFactor
+ + " on collection "
+ + collectionName
+ + " is higher than or equal to the number of Solr instances currently live or live and part of your " + CREATE_NODE_SET + "("
+ + nodeList.size()
+ + "). It's unusual to run two replica of the same slice on the same Solr-instance.");
+ }
+
+ int maxShardsAllowedToCreate = maxShardsPerNode * nodeList.size();
+ int requestedShardsToCreate = numSlices * repFactor;
+ if (maxShardsAllowedToCreate < requestedShardsToCreate) {
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Cannot create collection " + collectionName + ". Value of "
+ + MAX_SHARDS_PER_NODE + " is " + maxShardsPerNode
+ + ", and the number of nodes currently live or live and part of your "+CREATE_NODE_SET+" is " + nodeList.size()
+ + ". This allows a maximum of " + maxShardsAllowedToCreate
+ + " to be created. Value of " + NUM_SLICES + " is " + numSlices
+ + " and value of " + REPLICATION_FACTOR + " is " + repFactor
+ + ". This requires " + requestedShardsToCreate
+ + " shards to be created (higher than the allowed number)");
+ }
+
+ positionVsNodes = ocmh.identifyNodes(clusterState, nodeList, message, shardNames, repFactor);
+ }
+
+ ZkStateReader zkStateReader = ocmh.zkStateReader;
+ boolean isLegacyCloud = Overseer.isLegacy(zkStateReader);
+
+ ocmh.createConfNode(configName, collectionName, isLegacyCloud);
+
+ Overseer.getStateUpdateQueue(zkStateReader.getZkClient()).offer(Utils.toJSON(message));
+
+ // wait for a while until we don't see the collection
+ TimeOut waitUntil = new TimeOut(30, TimeUnit.SECONDS);
+ boolean created = false;
+ while (! waitUntil.hasTimedOut()) {
+ Thread.sleep(100);
+ created = zkStateReader.getClusterState().hasCollection(collectionName);
+ if(created) break;
+ }
+ if (!created)
+ throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Could not fully create collection: " + collectionName);
+
+ if (nodeList.isEmpty()) {
+ log.info("Finished create command for collection: {}", collectionName);
+ return;
+ }
+
+ // For tracking async calls.
+ Map<String, String> requestMap = new HashMap<>();
+
+
+ log.info(formatString("Creating SolrCores for new collection {0}, shardNames {1} , replicationFactor : {2}",
+ collectionName, shardNames, repFactor));
+ Map<String, ShardRequest> coresToCreate = new LinkedHashMap<>();
+ for (Map.Entry<ReplicaAssigner.Position, String> e : positionVsNodes.entrySet()) {
+ ReplicaAssigner.Position position = e.getKey();
+ String nodeName = e.getValue();
+ String coreName = collectionName + "_" + position.shard + "_replica" + (position.index + 1);
+ log.info(formatString("Creating core {0} as part of shard {1} of collection {2} on {3}"
+ , coreName, position.shard, collectionName, nodeName));
+
+
+ String baseUrl = zkStateReader.getBaseUrlForNodeName(nodeName);
+ //in the new mode, create the replica in clusterstate prior to creating the core.
+ // Otherwise the core creation fails
+ if (!isLegacyCloud) {
+ ZkNodeProps props = new ZkNodeProps(
+ Overseer.QUEUE_OPERATION, ADDREPLICA.toString(),
+ ZkStateReader.COLLECTION_PROP, collectionName,
+ ZkStateReader.SHARD_ID_PROP, position.shard,
+ ZkStateReader.CORE_NAME_PROP, coreName,
+ ZkStateReader.STATE_PROP, Replica.State.DOWN.toString(),
+ ZkStateReader.BASE_URL_PROP, baseUrl);
+ Overseer.getStateUpdateQueue(zkStateReader.getZkClient()).offer(Utils.toJSON(props));
+ }
+
+ // Need to create new params for each request
+ ModifiableSolrParams params = new ModifiableSolrParams();
+ params.set(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.CREATE.toString());
+
+ params.set(CoreAdminParams.NAME, coreName);
+ params.set(COLL_CONF, configName);
+ params.set(CoreAdminParams.COLLECTION, collectionName);
+ params.set(CoreAdminParams.SHARD, position.shard);
+ params.set(ZkStateReader.NUM_SHARDS_PROP, numSlices);
+
+ if (async != null) {
+ String coreAdminAsyncId = async + Math.abs(System.nanoTime());
+ params.add(ASYNC, coreAdminAsyncId);
+ requestMap.put(nodeName, coreAdminAsyncId);
+ }
+ ocmh.addPropertyParams(message, params);
+
+ ShardRequest sreq = new ShardRequest();
+ sreq.nodeName = nodeName;
+ params.set("qt", ocmh.adminPath);
+ sreq.purpose = 1;
+ sreq.shards = new String[]{baseUrl};
+ sreq.actualShards = sreq.shards;
+ sreq.params = params;
+
+ if (isLegacyCloud) {
+ shardHandler.submit(sreq, sreq.shards[0], sreq.params);
+ } else {
+ coresToCreate.put(coreName, sreq);
+ }
+ }
+
+ if(!isLegacyCloud) {
+ // wait for all replica entries to be created
+ Map<String, Replica> replicas = ocmh.waitToSeeReplicasInState(collectionName, coresToCreate.keySet());
+ for (Map.Entry<String, ShardRequest> e : coresToCreate.entrySet()) {
+ ShardRequest sreq = e.getValue();
+ sreq.params.set(CoreAdminParams.CORE_NODE_NAME, replicas.get(e.getKey()).getName());
+ shardHandler.submit(sreq, sreq.shards[0], sreq.params);
+ }
+ }
+
+ ocmh.processResponses(results, shardHandler, false, null, async, requestMap, Collections.emptySet());
+ if(results.get("failure") != null && ((SimpleOrderedMap)results.get("failure")).size() > 0) {
+ // Let's cleanup as we hit an exception
+ // We shouldn't be passing 'results' here for the cleanup as the response would then contain 'success'
+ // element, which may be interpreted by the user as a positive ack
+ ocmh.cleanupCollection(collectionName, new NamedList());
+ log.info("Cleaned up artifacts for failed create collection for [" + collectionName + "]");
+ } else {
+ log.debug("Finished create command on all shards for collection: "
+ + collectionName);
+ }
+ } catch (SolrException ex) {
+ throw ex;
+ } catch (Exception ex) {
+ throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, null, ex);
+ }
+ }
+ String getConfigName(String coll, ZkNodeProps message) throws KeeperException, InterruptedException {
+ String configName = message.getStr(COLL_CONF);
+
+ if (configName == null) {
+ // if there is only one conf, use that
+ List<String> configNames = null;
+ try {
+ configNames = ocmh.zkStateReader.getZkClient().getChildren(ZkConfigManager.CONFIGS_ZKNODE, null, true);
+ if (configNames != null && configNames.size() == 1) {
+ configName = configNames.get(0);
+ // no config set named, but there is only 1 - use it
+ log.info("Only one config set found in zk - using it:" + configName);
+ } else if (configNames.contains(coll)) {
+ configName = coll;
+ }
+ } catch (KeeperException.NoNodeException e) {
+
+ }
+ }
+ return configName;
+ }
+}
diff --git a/solr/core/src/java/org/apache/solr/cloud/CreateShardCmd.java b/solr/core/src/java/org/apache/solr/cloud/CreateShardCmd.java
new file mode 100644
index 00000000000..3d5aa4151ba
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/cloud/CreateShardCmd.java
@@ -0,0 +1,120 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.cloud;
+
+
+import java.lang.invoke.MethodHandles;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.solr.cloud.OverseerCollectionMessageHandler.Cmd;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.params.CoreAdminParams;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.handler.component.ShardHandler;
+import org.apache.solr.util.TimeOut;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.solr.cloud.Assign.getNodesForNewReplicas;
+import static org.apache.solr.cloud.OverseerCollectionMessageHandler.COLL_CONF;
+import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;
+import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
+import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
+import static org.apache.solr.common.params.CommonParams.NAME;
+
+public class CreateShardCmd implements Cmd {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private final OverseerCollectionMessageHandler ocmh;
+
+ public CreateShardCmd(OverseerCollectionMessageHandler ocmh) {
+ this.ocmh = ocmh;
+ }
+
+ @Override
+ public void call(ClusterState clusterState, ZkNodeProps message, NamedList results) throws Exception {
+ String collectionName = message.getStr(COLLECTION_PROP);
+ String sliceName = message.getStr(SHARD_ID_PROP);
+
+ log.info("Create shard invoked: {}", message);
+ if (collectionName == null || sliceName == null)
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "'collection' and 'shard' are required parameters");
+ int numSlices = 1;
+
+ ShardHandler shardHandler = ocmh.shardHandlerFactory.getShardHandler();
+ DocCollection collection = clusterState.getCollection(collectionName);
+ int repFactor = message.getInt(REPLICATION_FACTOR, collection.getInt(REPLICATION_FACTOR, 1));
+ String createNodeSetStr = message.getStr(OverseerCollectionMessageHandler.CREATE_NODE_SET);
+ List<Assign.ReplicaCount> sortedNodeList = getNodesForNewReplicas(clusterState, collectionName, sliceName, repFactor,
+ createNodeSetStr, ocmh.overseer.getZkController().getCoreContainer());
+
+ ZkStateReader zkStateReader = ocmh.zkStateReader;
+ Overseer.getStateUpdateQueue(zkStateReader.getZkClient()).offer(Utils.toJSON(message));
+ // wait for a while until we see the shard
+ TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS);
+ boolean created = false;
+ while (!timeout.hasTimedOut()) {
+ Thread.sleep(100);
+ created = zkStateReader.getClusterState().getCollection(collectionName).getSlice(sliceName) != null;
+ if (created) break;
+ }
+ if (!created)
+ throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Could not fully create shard: " + message.getStr(NAME));
+
+ String configName = message.getStr(COLL_CONF);
+
+ String async = message.getStr(ASYNC);
+ Map<String, String> requestMap = null;
+ if (async != null) {
+ requestMap = new HashMap<>(repFactor, 1.0f);
+ }
+
+ for (int j = 1; j <= repFactor; j++) {
+ String nodeName = sortedNodeList.get(((j - 1)) % sortedNodeList.size()).nodeName;
+ String shardName = collectionName + "_" + sliceName + "_replica" + j;
+ log.info("Creating shard " + shardName + " as part of slice " + sliceName + " of collection " + collectionName
+ + " on " + nodeName);
+
+ // Need to create new params for each request
+ ModifiableSolrParams params = new ModifiableSolrParams();
+ params.set(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.CREATE.toString());
+ params.set(CoreAdminParams.NAME, shardName);
+ params.set(COLL_CONF, configName);
+ params.set(CoreAdminParams.COLLECTION, collectionName);
+ params.set(CoreAdminParams.SHARD, sliceName);
+ params.set(ZkStateReader.NUM_SHARDS_PROP, numSlices);
+ ocmh.addPropertyParams(message, params);
+
+ ocmh.sendShardRequest(nodeName, params, shardHandler, async, requestMap);
+ }
+
+ ocmh.processResponses(results, shardHandler, true, "Failed to create shard", async, requestMap, Collections.emptySet());
+
+ log.info("Finished create command on all shards for collection: " + collectionName);
+
+ }
+}
diff --git a/solr/core/src/java/org/apache/solr/cloud/DeleteAliasCmd.java b/solr/core/src/java/org/apache/solr/cloud/DeleteAliasCmd.java
new file mode 100644
index 00000000000..7b1993ce4a0
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/cloud/DeleteAliasCmd.java
@@ -0,0 +1,95 @@
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud;
+
+import java.lang.invoke.MethodHandles;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.Aliases;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.util.TimeOut;
+import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.solr.common.params.CommonParams.NAME;
+
+public class DeleteAliasCmd implements OverseerCollectionMessageHandler.Cmd {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private final OverseerCollectionMessageHandler ocmh;
+
+ public DeleteAliasCmd(OverseerCollectionMessageHandler ocmh) {
+ this.ocmh = ocmh;
+ }
+
+ @Override
+ public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception {
+ String aliasName = message.getStr(NAME);
+
+ Map<String, Map<String, String>> newAliasesMap = new HashMap<>();
+ Map<String, String> newCollectionAliasesMap = new HashMap<>();
+ ZkStateReader zkStateReader = ocmh.zkStateReader;
+ newCollectionAliasesMap.putAll(zkStateReader.getAliases().getCollectionAliasMap());
+ newCollectionAliasesMap.remove(aliasName);
+ newAliasesMap.put("collection", newCollectionAliasesMap);
+ Aliases newAliases = new Aliases(newAliasesMap);
+ byte[] jsonBytes = null;
+ if (newAliases.collectionAliasSize() > 0) { // only sub map right now
+ jsonBytes = Utils.toJSON(newAliases.getAliasMap());
+ }
+ try {
+ zkStateReader.getZkClient().setData(ZkStateReader.ALIASES,
+ jsonBytes, true);
+ checkForAliasAbsence(aliasName);
+ // some fudge for other nodes
+ Thread.sleep(100);
+ } catch (KeeperException e) {
+ log.error("", e);
+ throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
+ } catch (InterruptedException e) {
+ log.warn("", e);
+ throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
+ }
+
+ }
+ private void checkForAliasAbsence(String name) {
+
+ TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS);
+ boolean success = false;
+ Aliases aliases = null;
+ while (! timeout.hasTimedOut()) {
+ aliases = ocmh.zkStateReader.getAliases();
+ String collections = aliases.getCollectionAlias(name);
+ if (collections == null) {
+ success = true;
+ break;
+ }
+ }
+ if (!success) {
+ log.warn("Timeout waiting to be notified of Alias change...");
+ }
+ }
+}
diff --git a/solr/core/src/java/org/apache/solr/cloud/DeleteCollectionCmd.java b/solr/core/src/java/org/apache/solr/cloud/DeleteCollectionCmd.java
new file mode 100644
index 00000000000..4c5ae007794
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/cloud/DeleteCollectionCmd.java
@@ -0,0 +1,121 @@
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud;
+
+import java.lang.invoke.MethodHandles;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.solr.common.NonExistentCoreException;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.params.CoreAdminParams;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.util.TimeOut;
+import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETE;
+import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
+import static org.apache.solr.common.params.CommonParams.NAME;
+
+public class DeleteCollectionCmd implements OverseerCollectionMessageHandler.Cmd {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private final OverseerCollectionMessageHandler ocmh;
+
+ public DeleteCollectionCmd(OverseerCollectionMessageHandler ocmh) {
+ this.ocmh = ocmh;
+ }
+
+ @Override
+ public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception {
+ ZkStateReader zkStateReader = ocmh.zkStateReader;
+ final String collection = message.getStr(NAME);
+ try {
+ if (zkStateReader.getClusterState().getCollectionOrNull(collection) == null) {
+ if (zkStateReader.getZkClient().exists(ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection, true)) {
+ // if the collection is not in the clusterstate, but is listed in zk, do nothing, it will just
+ // be removed in the finally - we cannot continue, because the below code will error if the collection
+ // is not in the clusterstate
+ return;
+ }
+ }
+ ModifiableSolrParams params = new ModifiableSolrParams();
+ params.set(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.UNLOAD.toString());
+ params.set(CoreAdminParams.DELETE_INSTANCE_DIR, true);
+ params.set(CoreAdminParams.DELETE_DATA_DIR, true);
+
+ String asyncId = message.getStr(ASYNC);
+ Map<String, String> requestMap = null;
+ if (asyncId != null) {
+ requestMap = new HashMap<>();
+ }
+
+ Set<String> okayExceptions = new HashSet<>(1);
+ okayExceptions.add(NonExistentCoreException.class.getName());
+
+ ocmh.collectionCmd(message, params, results, null, asyncId, requestMap, okayExceptions);
+
+ ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, DELETE.toLower(), NAME, collection);
+ Overseer.getStateUpdateQueue(zkStateReader.getZkClient()).offer(Utils.toJSON(m));
+
+ // wait for a while until we don't see the collection
+ TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS);
+ boolean removed = false;
+ while (! timeout.hasTimedOut()) {
+ Thread.sleep(100);
+ removed = !zkStateReader.getClusterState().hasCollection(collection);
+ if (removed) {
+ Thread.sleep(500); // just a bit of time so it's more likely other
+ // readers see on return
+ break;
+ }
+ }
+ if (!removed) {
+ throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+ "Could not fully remove collection: " + collection);
+ }
+
+ } finally {
+
+ try {
+ if (zkStateReader.getZkClient().exists(
+ ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection, true)) {
+ zkStateReader.getZkClient().clean(
+ ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection);
+ }
+ } catch (InterruptedException e) {
+ SolrException.log(log, "Cleaning up collection in zk was interrupted:"
+ + collection, e);
+ Thread.currentThread().interrupt();
+ } catch (KeeperException e) {
+ SolrException.log(log, "Problem cleaning up collection in zk:"
+ + collection, e);
+ }
+ }
+ }
+}
diff --git a/solr/core/src/java/org/apache/solr/cloud/DeleteNodeCmd.java b/solr/core/src/java/org/apache/solr/cloud/DeleteNodeCmd.java
new file mode 100644
index 00000000000..b3c505557ca
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/cloud/DeleteNodeCmd.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud;
+
+
+import java.lang.invoke.MethodHandles;
+import java.util.List;
+import java.util.Locale;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.util.NamedList;
+import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETEREPLICA;
+
+/**
+ * Overseer command that deletes every replica hosted on a single node.
+ * The target node must currently be live (the command refuses to run
+ * otherwise); each replica found on it is removed via {@link DeleteReplicaCmd}.
+ */
+public class DeleteNodeCmd implements OverseerCollectionMessageHandler.Cmd {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  // Parent handler: supplies checkRequired() and the per-action command map.
+  private final OverseerCollectionMessageHandler ocmh;
+
+  public DeleteNodeCmd(OverseerCollectionMessageHandler ocmh) {
+    this.ocmh = ocmh;
+  }
+
+  /**
+   * Entry point. Validates that the message names a live node, collects the
+   * replicas assigned to that node, and deletes them.
+   *
+   * @param state   current cluster state snapshot
+   * @param message must contain a "node" property naming a live node
+   * @param results per-replica failures are accumulated here under "failure"
+   * @throws SolrException (BAD_REQUEST) when the named node is not live
+   */
+  @Override
+  public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception {
+    ocmh.checkRequired(message, "node");
+    String node = message.getStr("node");
+    if (!state.liveNodesContain(node)) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Source Node: " + node + " is not live");
+    }
+    // One ZkNodeProps per replica on the node; each carries at least
+    // COLLECTION_PROP and SHARD_ID_PROP (both read in cleanupReplicas).
+    List sourceReplicas = ReplaceNodeCmd.getReplicasOfNode(node, state);
+    cleanupReplicas(results, state, sourceReplicas, ocmh, node);
+  }
+
+  /**
+   * Fires off a parallel delete for each replica and blocks until all
+   * callbacks have run (or 5 minutes elapse). Per-replica failures are
+   * recorded in {@code results} rather than thrown; only unexpected
+   * (non-KeeperException) errors propagate to the caller.
+   */
+  static void cleanupReplicas(NamedList results,
+                              ClusterState clusterState,
+                              List sourceReplicas,
+                              OverseerCollectionMessageHandler ocmh, String node) throws InterruptedException {
+    // Every code path below must count this latch down exactly once per
+    // replica, or await() would block for the full timeout.
+    CountDownLatch cleanupLatch = new CountDownLatch(sourceReplicas.size());
+    for (ZkNodeProps sourceReplica : sourceReplicas) {
+      log.info("Deleting replica for collection={} shard={} on node={}", sourceReplica.getStr(COLLECTION_PROP), sourceReplica.getStr(SHARD_ID_PROP), node);
+      NamedList deleteResult = new NamedList();
+      try {
+        // "parallel=true" makes deleteReplica return immediately; the lambda
+        // runs when the delete attempt completes (success or failure).
+        ((DeleteReplicaCmd)ocmh.commandMap.get(DELETEREPLICA)).deleteReplica(clusterState, sourceReplica.plus("parallel", "true"), deleteResult, () -> {
+          cleanupLatch.countDown();
+          if (deleteResult.get("failure") != null) {
+            // Callbacks may run concurrently, so serialize writes to the
+            // shared results list.
+            synchronized (results) {
+              results.add("failure", String.format(Locale.ROOT, "Failed to delete replica for collection=%s shard=%s" +
+                  " on node=%s", sourceReplica.getStr(COLLECTION_PROP), sourceReplica.getStr(SHARD_ID_PROP), node));
+            }
+          }
+        });
+      } catch (KeeperException e) {
+        // A ZK failure for one replica should not abort the rest: log it,
+        // release this replica's latch slot, and continue the loop.
+        log.warn("Error deleting ", e);
+        cleanupLatch.countDown();
+      } catch (Exception e) {
+        // Unexpected error: still count down so no waiter hangs, then rethrow.
+        log.warn("Error deleting ", e);
+        cleanupLatch.countDown();
+        throw e;
+      }
+    }
+    log.debug("Waiting for delete node action to complete");
+    // NOTE(review): the boolean returned by await() is discarded, so a
+    // 5-minute timeout is indistinguishable from success here — confirm
+    // that silently proceeding on timeout is intended.
+    cleanupLatch.await(5, TimeUnit.MINUTES);
+  }
+
+
+}
diff --git a/solr/core/src/java/org/apache/solr/cloud/DeleteReplicaCmd.java b/solr/core/src/java/org/apache/solr/cloud/DeleteReplicaCmd.java
new file mode 100644
index 00000000000..6f5fc6223db
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/cloud/DeleteReplicaCmd.java
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.cloud;
+
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.Callable;
+import java.util.concurrent.atomic.AtomicReference;
+
+import org.apache.solr.cloud.OverseerCollectionMessageHandler.Cmd;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.params.CoreAdminParams;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.StrUtils;
+import org.apache.solr.handler.component.ShardHandler;
+import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.solr.cloud.OverseerCollectionMessageHandler.ONLY_IF_DOWN;
+import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.REPLICA_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
+import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
+
+
+public class DeleteReplicaCmd implements Cmd {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private final OverseerCollectionMessageHandler ocmh;
+
+ public DeleteReplicaCmd(OverseerCollectionMessageHandler ocmh) {
+ this.ocmh = ocmh;
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+
+ public void call(ClusterState clusterState, ZkNodeProps message, NamedList results) throws Exception {
+ deleteReplica(clusterState, message, results,null);
+ }
+
+ @SuppressWarnings("unchecked")
+ void deleteReplica(ClusterState clusterState, ZkNodeProps message, NamedList results, Runnable onComplete)
+ throws KeeperException, InterruptedException {
+ ocmh.checkRequired(message, COLLECTION_PROP, SHARD_ID_PROP, REPLICA_PROP);
+ String collectionName = message.getStr(COLLECTION_PROP);
+ String shard = message.getStr(SHARD_ID_PROP);
+ String replicaName = message.getStr(REPLICA_PROP);
+ boolean parallel = message.getBool("parallel", false);
+
+ DocCollection coll = clusterState.getCollection(collectionName);
+ Slice slice = coll.getSlice(shard);
+ if (slice == null) {
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+ "Invalid shard name : " + shard + " in collection : " + collectionName);
+ }
+ Replica replica = slice.getReplica(replicaName);
+ if (replica == null) {
+ ArrayList l = new ArrayList<>();
+ for (Replica r : slice.getReplicas())
+ l.add(r.getName());
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Invalid replica : " + replicaName + " in shard/collection : "
+ + shard + "/" + collectionName + " available replicas are " + StrUtils.join(l, ','));
+ }
+
+ // If users are being safe and only want to remove a shard if it is down, they can specify onlyIfDown=true
+ // on the command.
+ if (Boolean.parseBoolean(message.getStr(ONLY_IF_DOWN)) && replica.getState() != Replica.State.DOWN) {
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+ "Attempted to remove replica : " + collectionName + "/" + shard + "/" + replicaName
+ + " with onlyIfDown='true', but state is '" + replica.getStr(ZkStateReader.STATE_PROP) + "'");
+ }
+
+ ShardHandler shardHandler = ocmh.shardHandlerFactory.getShardHandler();
+ String core = replica.getStr(ZkStateReader.CORE_NAME_PROP);
+ String asyncId = message.getStr(ASYNC);
+ AtomicReference