mirror of https://github.com/apache/lucene.git
removed synchronization on value writers, added test for docValues comparators
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/docvalues@1098980 13f79535-47bb-0310-9956-ffa450edef68
parent 9c2a60a0c0
commit 471cb20526
@@ -298,7 +298,10 @@ final class PerFieldCodecWrapper extends Codec {
       IOException err = null;
       while (it.hasNext()) {
         try {
-          it.next().close();
+          PerDocValues next = it.next();
+          if (next != null) {
+            next.close();
+          }
         } catch (IOException ioe) {
           // keep first IOException we hit but keep
           // closing the rest
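Note: the hunk above hardens the producer close loop against null entries while keeping the "remember the first IOException, keep closing the rest" contract. A minimal standalone sketch of that idiom (hypothetical helper class, not the Lucene one):

    import java.io.Closeable;
    import java.io.IOException;
    import java.util.List;

    final class CloseUtil {
      // Close every resource; remember only the first failure so the
      // remaining resources are still released, then rethrow it.
      static void closeAll(List<? extends Closeable> resources) throws IOException {
        IOException err = null;
        for (Closeable next : resources) {
          try {
            if (next != null) {
              next.close();
            }
          } catch (IOException ioe) {
            if (err == null) {
              err = ioe;
            }
          }
        }
        if (err != null) {
          throw err;
        }
      }
    }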
@@ -76,11 +76,7 @@ public class DocValuesCodec extends Codec {

   @Override
   public PerDocValues docsProducer(SegmentReadState state) throws IOException {
-    try {
-      return new DocValuesProducerBase(state.segmentInfo, state.dir, state.fieldInfos, state.codecId);
-    }catch (IOException e) {
-      return new DocValuesProducerBase(state.segmentInfo, state.dir, state.fieldInfos, state.codecId);
-    }
+    return new DocValuesProducerBase(state.segmentInfo, state.dir, state.fieldInfos, state.codecId);
   }

   @Override
@@ -388,7 +388,7 @@ public final class Bytes {
     public abstract void add(int docID, BytesRef bytes) throws IOException;

     @Override
-    public synchronized void finish(int docCount) throws IOException {
+    public void finish(int docCount) throws IOException {
       try {
         if (datOut != null)
           datOut.close();
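Note: this and the hunks through VarStraightBytesImpl below implement the first half of the commit message: every value writer drops its method-level synchronized, presumably because a writer instance is confined to a single indexing thread (the commit message states only that the synchronization was removed). A sketch of the resulting unlocked writer contract; the class and fields are illustrative, not the Lucene types:

    // Hypothetical minimal writer: docIDs arrive in increasing order from one
    // thread, so no per-call locking is needed.
    final class SimpleValuesWriter {
      private int lastDocId = -1;

      public void add(int docID, long value) {
        assert docID > lastDocId : "docIDs must be added in increasing order";
        lastDocId = docID;
        // ... buffer the value for the eventual flush ...
      }

      public void finish(int docCount) {
        // ... pad docs in (lastDocId, docCount) with the default value ...
        lastDocId = docCount - 1;
      }
    }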
@@ -71,7 +71,7 @@ class FixedDerefBytesImpl {
     }

     @Override
-    synchronized public void add(int docID, BytesRef bytes) throws IOException {
+    public void add(int docID, BytesRef bytes) throws IOException {
       if (bytes.length == 0) // default value - skip it
         return;
       if (size == -1) {
@@ -102,7 +102,7 @@ class FixedDerefBytesImpl {
     // Important that we get docCount, in case there were
     // some last docs that we didn't see
     @Override
-    synchronized public void finish(int docCount) throws IOException {
+    public void finish(int docCount) throws IOException {
       try {
         if (size == -1) {
           datOut.writeInt(size);
@@ -78,7 +78,7 @@ class FixedSortedBytesImpl {
     }

     @Override
-    synchronized public void add(int docID, BytesRef bytes) throws IOException {
+    public void add(int docID, BytesRef bytes) throws IOException {
       if (bytes.length == 0)
         return; // default - skip it
       if (size == -1) {
@@ -103,7 +103,7 @@ class FixedSortedBytesImpl {
     // Important that we get docCount, in case there were
     // some last docs that we didn't see
     @Override
-    synchronized public void finish(int docCount) throws IOException {
+    public void finish(int docCount) throws IOException {
       try {
         if (size == -1) {// no data added
           datOut.writeInt(size);
@@ -52,7 +52,7 @@ class FixedStraightBytesImpl {
     // TODO - impl bulk copy here!

     @Override
-    synchronized public void add(int docID, BytesRef bytes) throws IOException {
+    public void add(int docID, BytesRef bytes) throws IOException {
       if (size == -1) {
         size = bytes.length;
         datOut.writeInt(size);
@@ -103,7 +103,7 @@ class FixedStraightBytesImpl {
     }

     @Override
-    synchronized public void finish(int docCount) throws IOException {
+    public void finish(int docCount) throws IOException {
       try {
         if (size == -1) {// no data added
           datOut.writeInt(0);
@@ -153,7 +153,7 @@ public class Floats {
     }

     @Override
-    synchronized public void add(final int docID, final double v)
+    public void add(final int docID, final double v)
         throws IOException {
       assert docID > lastDocId : "docID: " + docID
           + " must be greater than the last added doc id: " + lastDocId;
@@ -167,7 +167,7 @@ public class Floats {
     }

     @Override
-    synchronized public void finish(int docCount) throws IOException {
+    public void finish(int docCount) throws IOException {
       try {
         if (docCount > lastDocId + 1)
           for (int i = lastDocId; i < docCount; i++) {
@@ -196,7 +196,7 @@ public class Floats {
     }

     @Override
-    synchronized public void add(int docID, double v) throws IOException {
+    public void add(int docID, double v) throws IOException {
       assert docID > lastDocId : "docID: " + docID
           + " must be greater than the last added doc id: " + lastDocId;
       if (docID - lastDocId > 1) {
@@ -209,7 +209,7 @@ public class Floats {
     }

     @Override
-    synchronized public void finish(int docCount) throws IOException {
+    public void finish(int docCount) throws IOException {
       try {
         if (docCount > lastDocId + 1)
           for (int i = lastDocId; i < docCount; i++) {
@@ -191,8 +191,9 @@ public class MultiDocValues extends DocValues {
     }

     private final int ensureSource(int docID) {
-      int n = docID - start;
-      if (n >= numDocs) {
+      if (docID >= start && docID < start+numDocs) {
+        return docID - start;
+      } else {
         final int idx = ReaderUtil.subIndex(docID, starts);
         assert idx >= 0 && idx < docValuesIdx.length : "idx was " + idx
             + " for doc id: " + docID + " slices : " + Arrays.toString(starts);
@@ -207,9 +208,8 @@ public class MultiDocValues extends DocValues {

         start = docValuesIdx[idx].start;
         numDocs = docValuesIdx[idx].length;
-        n = docID - start;
+        return docID - start;
       }
-      return n;
     }

     public double getFloat(int docID) {
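Note: ensureSource now takes the fast path only when the docID falls inside the cached slice on both ends, and re-runs the binary search (ReaderUtil.subIndex) otherwise. The old guard checked only n >= numDocs, so a docID before the cached start produced a negative offset without re-running the search. A standalone sketch of the slice-caching lookup, with hypothetical plain arrays standing in for docValuesIdx:

    import java.util.Arrays;

    final class SliceLookup {
      private final int[] starts;   // first global docID of each slice, ascending
      private final int[] lengths;  // doc count per slice
      private int start, numDocs;   // cached current slice

      SliceLookup(int[] starts, int[] lengths) {
        this.starts = starts;
        this.lengths = lengths;
      }

      // Map a global docID to an offset inside its slice.
      int toLocal(int docID) {
        if (docID >= start && docID < start + numDocs) {
          return docID - start;          // fast path: cached slice still matches
        }
        int idx = Arrays.binarySearch(starts, docID);
        if (idx < 0) {
          idx = -idx - 2;                // insertion point -> preceding slice
        }
        start = starts[idx];
        numDocs = lengths[idx];
        return docID - start;
      }
    }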
@@ -70,7 +70,7 @@ class PackedIntsImpl {
     }

     @Override
-    public synchronized void add(int docID, long v) throws IOException {
+    public void add(int docID, long v) throws IOException {
       assert lastDocId < docID;
       if (!started) {
         started = true;
@@ -96,7 +96,7 @@ class PackedIntsImpl {
     }

     @Override
-    public synchronized void finish(int docCount) throws IOException {
+    public void finish(int docCount) throws IOException {
       try {
         if (!started) {
           minValue = maxValue = 0;
@@ -118,8 +118,8 @@ class PackedIntsImpl {

       for (int i = firstDoc; i < lastDocId;) {
         w.add(docToValue[i] - minValue);
-        final int nextValue = defaultValues.nextSetBit(i);
-        for (i++; i < nextValue; i++) {
+        final int nextValue = defaultValues.nextSetBit(++i);
+        for (; i < nextValue; i++) {
           w.add(defaultValue); // fill all gaps
         }
       }
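Note: the one behavioral fix in PackedIntsImpl is the gap-fill loop. If the bit for the doc just written is set, nextSetBit(i) returns i itself, so the inner loop never padded the gap; nextSetBit(++i) searches from the following doc. A self-contained sketch of the corrected pattern under that reading (java.util.BitSet in place of the writer's bit set; names hypothetical):

    import java.util.Arrays;
    import java.util.BitSet;

    final class GapFill {
      // 'present' marks docs that have a real value. After emitting doc i,
      // find the NEXT set bit starting at i + 1 and pad defaults up to it;
      // nextSetBit(i) would just return i again and no gap would be filled.
      static long[] fill(BitSet present, long[] docToValue, long defaultValue, int docCount) {
        final long[] out = new long[docCount];
        int i = present.nextSetBit(0);
        if (i < 0 || i >= docCount) {      // no values at all: everything default
          Arrays.fill(out, defaultValue);
          return out;
        }
        Arrays.fill(out, 0, i, defaultValue);            // leading gap
        while (i < docCount) {
          out[i] = docToValue[i];
          final int next = present.nextSetBit(++i);      // first doc after i with a value
          final int bound = (next < 0 || next > docCount) ? docCount : next;
          for (; i < bound; i++) {
            out[i] = defaultValue;                       // fill all gaps
          }
        }
        return out;
      }
    }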
@@ -198,6 +198,7 @@ class PackedIntsImpl {
       // TODO -- can we somehow avoid 2X method calls
       // on each get? must push minValue down, and make
       // PackedInts implement Ints.Source
+      assert docID >= 0;
       return minValue + values.get(docID);
     }

@@ -130,7 +130,7 @@ class VarDerefBytesImpl {
     }

     @Override
-    synchronized public void add(int docID, BytesRef bytes) throws IOException {
+    public void add(int docID, BytesRef bytes) throws IOException {
       if (bytes.length == 0)
         return; // default
       final int e = hash.add(bytes);
@@ -168,7 +168,7 @@ class VarDerefBytesImpl {
     // Important that we get docCount, in case there were
     // some last docs that we didn't see
     @Override
-    synchronized public void finish(int docCount) throws IOException {
+    public void finish(int docCount) throws IOException {
       try {
         idxOut.writeInt(address - 1);
         // write index
@@ -78,7 +78,7 @@ class VarSortedBytesImpl {
     }

     @Override
-    synchronized public void add(int docID, BytesRef bytes) throws IOException {
+    public void add(int docID, BytesRef bytes) throws IOException {
       if (bytes.length == 0)
         return;// default
       if (docID >= docToEntry.length) {
@@ -97,7 +97,7 @@ class VarSortedBytesImpl {
     // Important that we get docCount, in case there were
     // some last docs that we didn't see
     @Override
-    synchronized public void finish(int docCount) throws IOException {
+    public void finish(int docCount) throws IOException {
       final int count = hash.size();
       try {
         final int[] sortedEntries = hash.sort(comp);
@@ -75,7 +75,7 @@ class VarStraightBytesImpl {
     }

     @Override
-    synchronized public void add(int docID, BytesRef bytes) throws IOException {
+    public void add(int docID, BytesRef bytes) throws IOException {
       if (bytes.length == 0)
         return; // default
       fill(docID);
@@ -85,7 +85,7 @@ class VarStraightBytesImpl {
     }

     @Override
-    synchronized public void finish(int docCount) throws IOException {
+    public void finish(int docCount) throws IOException {
       try {
         if (lastDocID == -1) {
           idxOut.writeVLong(0);
@@ -20,6 +20,8 @@ package org.apache.lucene.search;
 import java.io.IOException;

 import org.apache.lucene.index.IndexReader.AtomicReaderContext;
+import org.apache.lucene.index.values.DocValues;
+import org.apache.lucene.index.values.DocValues.MissingValue;
 import org.apache.lucene.index.values.DocValues.Source;
 import org.apache.lucene.search.FieldCache.DocTerms;
 import org.apache.lucene.search.FieldCache.DocTermsIndex;
@@ -334,10 +336,13 @@ public abstract class FieldComparator {
     private Source currentReaderValues;
     private final String field;
     private double bottom;
+    private final float missingValue;
+    private MissingValue missing;

-    FloatDocValuesComparator(int numHits, String field) {
+    FloatDocValuesComparator(int numHits, String field, Float missingValue) {
       values = new double[numHits];
       this.field = field;
+      this.missingValue = missingValue == null ? 0 : missingValue.floatValue();
     }

     @Override
@@ -367,12 +372,17 @@ public abstract class FieldComparator {

     @Override
     public void copy(int slot, int doc) {
-      values[slot] = currentReaderValues.getFloat(doc);
+      final double value = currentReaderValues.getFloat(doc);
+      values[slot] = value == missing.doubleValue ? missingValue : value;
     }

     @Override
     public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
-      currentReaderValues = context.reader.docValues(field).getSource();
+      final DocValues docValues = context.reader.docValues(field);
+      if (docValues != null) {
+        currentReaderValues = docValues.getSource();
+        missing = currentReaderValues.getMissing();
+      }
       return this;
     }

@@ -601,10 +611,13 @@ public abstract class FieldComparator {
     private Source currentReaderValues;
     private final String field;
     private long bottom;
+    private int missingValue;
+    private MissingValue missing;

-    IntDocValuesComparator(int numHits, String field) {
+    IntDocValuesComparator(int numHits, String field, Integer missingValue) {
       values = new long[numHits];
       this.field = field;
+      this.missingValue = missingValue == null ? 0 : missingValue.intValue();
     }

     @Override
@@ -638,12 +651,17 @@ public abstract class FieldComparator {

     @Override
     public void copy(int slot, int doc) {
-      values[slot] = currentReaderValues.getInt(doc);
+      final long value = currentReaderValues.getInt(doc);
+      values[slot] = value == missing.longValue ? missingValue : value;
     }

     @Override
     public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
-      currentReaderValues = context.reader.docValues(field).getSource();
+      DocValues docValues = context.reader.docValues(field);
+      if (docValues != null) {
+        currentReaderValues = docValues.getSource();
+        missing = currentReaderValues.getMissing();
+      }
       return this;
     }

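Note: both DocValues comparators now substitute a caller-supplied missing value whenever the source reports its per-source sentinel for a document, so absent docs sort predictably instead of carrying the raw sentinel. A sketch of that substitution in copy(), with illustrative names in place of the Lucene API:

    // 'sentinel' plays the role of DocValues.MissingValue; 'missingValue' is
    // what SortField supplies for docs without a value (0 when unset).
    final class MissingAwareCopy {
      private final long[] values;
      private final long sentinel;
      private final long missingValue;

      MissingAwareCopy(int numHits, long sentinel, Long missingValue) {
        this.values = new long[numHits];
        this.sentinel = sentinel;
        this.missingValue = missingValue == null ? 0 : missingValue.longValue();
      }

      void copy(int slot, long rawValueForDoc) {
        // Same test the comparators perform per hit.
        values[slot] = rawValueForDoc == sentinel ? missingValue : rawValueForDoc;
      }
    }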
@@ -442,14 +442,14 @@ public class SortField {

     case SortField.INT:
       if (useIndexValues) {
-        return new FieldComparator.IntDocValuesComparator(numHits, field);
+        return new FieldComparator.IntDocValuesComparator(numHits, field, (Integer) missingValue);
       } else {
         return new FieldComparator.IntComparator(numHits, (IntValuesCreator)creator, (Integer) missingValue);
       }

     case SortField.FLOAT:
       if (useIndexValues) {
-        return new FieldComparator.FloatDocValuesComparator(numHits, field);
+        return new FieldComparator.FloatDocValuesComparator(numHits, field, (Float) missingValue);
       } else {
         return new FieldComparator.FloatComparator(numHits, (FloatValuesCreator) creator, (Float) missingValue);
       }
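Note: with this hunk the missing value reaches the DocValues path too. A hedged usage sketch: setUseIndexValues(true) is exercised by the tests below, but the setter that populates the missingValue member this hunk reads is assumed, and the field name is hypothetical:

    // Assumed usage; setMissingValue(...) may not exist under that exact
    // name on this branch.
    SortField byPrice = new SortField("price", SortField.INT);
    byPrice.setUseIndexValues(true);             // choose the DocValues comparator
    byPrice.setMissingValue(Integer.valueOf(0)); // docs without a value sort as 0
    Sort sort = new Sort(byPrice, SortField.FIELD_DOC);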
@@ -51,7 +51,6 @@ import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.LockObtainFailedException;
-import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.FloatsRef;
 import org.apache.lucene.util.LongsRef;
@@ -73,9 +72,8 @@ public class TestDocValuesIndexing extends LuceneTestCase {
  * - DocValues
  * - Add @lucene.experimental to all necessary classes
  * - add test for unoptimized case with deletes
  * - add multithreaded tests / integrate into stress indexing?
  * - run RAT
- * - add tests for FieldComparator FloatIndexValuesComparator vs. FloatValuesComparator etc.
  *
  */
 private CodecProvider provider;
@@ -87,18 +85,12 @@ public class TestDocValuesIndexing extends LuceneTestCase {
     provider.copyFrom(CodecProvider.getDefault());
   }

-  private Directory newDirectory2() throws IOException {
-    MockDirectoryWrapper newDirectory = newDirectory();
-    newDirectory.setCheckIndexOnClose(false);
-    return newDirectory;
-  }
-
   /*
    * Simple test case to show how to use the API
    */
   public void testDocValuesSimple() throws CorruptIndexException, IOException,
       ParseException {
-    Directory dir = newDirectory2();
+    Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, writerConfig(false));
     for (int i = 0; i < 5; i++) {
       Document doc = new Document();
@@ -109,15 +101,6 @@ public class TestDocValuesIndexing extends LuceneTestCase {
       writer.addDocument(doc);
     }
     writer.commit();
-    for (int i = 0; i < 5; i++) {
-      Document doc = new Document();
-      DocValuesField valuesField = new DocValuesField("docId1");
-      valuesField.setFloat(i);
-      doc.add(valuesField);
-      doc.add(new Field("docId1", "" + i, Store.NO, Index.ANALYZED));
-      writer.addDocument(doc);
-    }
-    writer.commit();
     writer.optimize(true);

     writer.close(true);
@@ -189,7 +172,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
     Type second = values.get(1);
     String msg = "[first=" + first.name() + ", second=" + second.name() + "]";
     // index first index
-    Directory d_1 = newDirectory2();
+    Directory d_1 = newDirectory();
     IndexWriter w_1 = new IndexWriter(d_1, writerConfig(random.nextBoolean()));
     indexValues(w_1, valuesPerIndex, first, values, false, 7);
     w_1.commit();
@@ -197,14 +180,14 @@ public class TestDocValuesIndexing extends LuceneTestCase {
     _TestUtil.checkIndex(d_1, w_1.getConfig().getCodecProvider());

     // index second index
-    Directory d_2 = newDirectory2();
+    Directory d_2 = newDirectory();
     IndexWriter w_2 = new IndexWriter(d_2, writerConfig(random.nextBoolean()));
     indexValues(w_2, valuesPerIndex, second, values, false, 7);
     w_2.commit();
     assertEquals(valuesPerIndex, w_2.maxDoc());
     _TestUtil.checkIndex(d_2, w_2.getConfig().getCodecProvider());

-    Directory target = newDirectory2();
+    Directory target = newDirectory();
     IndexWriter w = new IndexWriter(target, writerConfig(random.nextBoolean()));
     IndexReader r_1 = IndexReader.open(w_1, true);
     IndexReader r_2 = IndexReader.open(w_2, true);
@@ -267,7 +250,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {

   public void runTestNumerics(IndexWriterConfig cfg, boolean withDeletions)
       throws IOException {
-    Directory d = newDirectory2();
+    Directory d = newDirectory();
     IndexWriter w = new IndexWriter(d, cfg);
     final int numValues = 179 + random.nextInt(151);
     final List<Type> numVariantList = new ArrayList<Type>(NUMERICS);
@@ -359,7 +342,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {

   public void runTestIndexBytes(IndexWriterConfig cfg, boolean withDeletions)
       throws CorruptIndexException, LockObtainFailedException, IOException {
-    final Directory d = newDirectory2();
+    final Directory d = newDirectory();
     IndexWriter w = new IndexWriter(d, cfg);
     final List<Type> byteVariantList = new ArrayList<Type>(BYTES);
     // run in random order to test if fill works correctly during merges
@@ -430,7 +413,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
       for (int i = base; i < r.numDocs(); i++) {
         String msg = " field: " + byteIndexValue.name() + " at index: " + i
             + " base: " + base + " numDocs:" + r.numDocs() + " bytesSize: "
-            + bytesSize;
+            + bytesSize + " src: " + bytes;
         while (withDeletions && deleted.get(v++)) {
           upto += bytesSize;
         }
@@ -25,6 +25,7 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;

 import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.document.DocValuesField;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.CorruptIndexException;
@@ -35,6 +36,9 @@ import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.MultiReader;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.index.codecs.CodecProvider;
+import org.apache.lucene.index.codecs.docvalues.DocValuesCodecProvider;
+import org.apache.lucene.index.values.Type;
 import org.apache.lucene.queryParser.ParseException;
 import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.FieldValueHitQueue.Entry;
@@ -118,13 +122,19 @@ public class TestSort extends LuceneTestCase {
       Document doc = new Document();
       doc.add (new Field ("tracer", data[i][0], Field.Store.YES, Field.Index.NO));
       doc.add (new Field ("contents", data[i][1], Field.Store.NO, Field.Index.ANALYZED));
-      if (data[i][2] != null) doc.add (new Field ("int", data[i][2], Field.Store.NO, Field.Index.NOT_ANALYZED));
-      if (data[i][3] != null) doc.add (new Field ("float", data[i][3], Field.Store.NO, Field.Index.NOT_ANALYZED));
+      if (data[i][2] != null) {
+        doc.add (DocValuesField.set(new Field ("int", data[i][2], Field.Store.NO, Field.Index.NOT_ANALYZED), Type.INTS));
+      }
+      if (data[i][3] != null) {
+        doc.add (DocValuesField.set(new Field ("float", data[i][3], Field.Store.NO, Field.Index.NOT_ANALYZED), Type.FLOAT_32));
+      }
       if (data[i][4] != null) doc.add (new Field ("string", data[i][4], Field.Store.NO, Field.Index.NOT_ANALYZED));
       if (data[i][5] != null) doc.add (new Field ("custom", data[i][5], Field.Store.NO, Field.Index.NOT_ANALYZED));
       if (data[i][6] != null) doc.add (new Field ("i18n", data[i][6], Field.Store.NO, Field.Index.NOT_ANALYZED));
       if (data[i][7] != null) doc.add (new Field ("long", data[i][7], Field.Store.NO, Field.Index.NOT_ANALYZED));
-      if (data[i][8] != null) doc.add (new Field ("double", data[i][8], Field.Store.NO, Field.Index.NOT_ANALYZED));
+      if (data[i][8] != null) {
+        doc.add (DocValuesField.set(new Field ("double", data[i][8], Field.Store.NO, Field.Index.NOT_ANALYZED), Type.FLOAT_64));
+      }
       if (data[i][9] != null) doc.add (new Field ("short", data[i][9], Field.Store.NO, Field.Index.NOT_ANALYZED));
       if (data[i][10] != null) doc.add (new Field ("byte", data[i][10], Field.Store.NO, Field.Index.NOT_ANALYZED));
       if (data[i][11] != null) doc.add (new Field ("parser", data[i][11], Field.Store.NO, Field.Index.NOT_ANALYZED));
@@ -217,6 +227,11 @@ public class TestSort extends LuceneTestCase {
   @Override
   public void setUp() throws Exception {
     super.setUp();
+
+    //nocommit - enable doc values by default for all tests
+    DocValuesCodecProvider provider = new DocValuesCodecProvider();
+    provider.copyFrom(CodecProvider.getDefault());
+    CodecProvider.setDefault(provider);
     full = getFullIndex();
     searchX = getXIndex();
     searchY = getYIndex();
@@ -228,6 +243,7 @@ public class TestSort extends LuceneTestCase {
     queryG = new TermQuery (new Term ("contents", "g"));
     queryM = new TermQuery (new Term ("contents", "m"));
     sort = new Sort();
+
   }

   private ArrayList<Directory> dirs = new ArrayList<Directory>();
@@ -256,15 +272,27 @@ public class TestSort extends LuceneTestCase {
     assertMatches (full, queryY, sort, "BDFHJ");
   }

+  private static SortField useDocValues(SortField field) {
+    field.setUseIndexValues(true);
+    return field;
+  }
+
   // test sorts where the type of field is specified
   public void testTypedSort() throws Exception {
     sort.setSort (new SortField ("int", SortField.INT), SortField.FIELD_DOC );
     assertMatches (full, queryX, sort, "IGAEC");
     assertMatches (full, queryY, sort, "DHFJB");

+    sort.setSort (useDocValues(new SortField ("int", SortField.INT)), SortField.FIELD_DOC );
+    assertMatches (full, queryX, sort, "IGAEC");
+    assertMatches (full, queryY, sort, "DHFJB");
+
     sort.setSort (new SortField ("float", SortField.FLOAT), SortField.FIELD_DOC );
     assertMatches (full, queryX, sort, "GCIEA");
     assertMatches (full, queryY, sort, "DHJFB");

+    sort.setSort (useDocValues(new SortField ("float", SortField.FLOAT)), SortField.FIELD_DOC );
+    assertMatches (full, queryX, sort, "GCIEA");
+    assertMatches (full, queryY, sort, "DHJFB");
+
     sort.setSort (new SortField ("long", SortField.LONG), SortField.FIELD_DOC );
     assertMatches (full, queryX, sort, "EACGI");
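Note: from here on the pattern repeats: each FieldCache-backed assertion gains a twin whose SortField is routed through useDocValues, with the identical expected document order. Condensed as a hypothetical helper in the test's own vocabulary (assertMatches, sort, and useDocValues are the existing members; parameter types follow the test's helpers and may differ on this branch):

    // Hypothetical condensation of the repeated pairs below.
    private void assertSameOrderBothPaths(Searcher searcher, Query query,
        SortField sf, String expected) throws Exception {
      sort.setSort(sf, SortField.FIELD_DOC);
      assertMatches(searcher, query, sort, expected);   // FieldCache comparator
      sort.setSort(useDocValues(sf), SortField.FIELD_DOC);
      assertMatches(searcher, query, sort, expected);   // DocValues comparator
    }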
@@ -273,7 +301,11 @@ public class TestSort extends LuceneTestCase {
     sort.setSort (new SortField ("double", SortField.DOUBLE), SortField.FIELD_DOC );
     assertMatches (full, queryX, sort, "AGICE");
     assertMatches (full, queryY, sort, "DJHBF");

+    sort.setSort (useDocValues(new SortField ("double", SortField.DOUBLE)), SortField.FIELD_DOC );
+    assertMatches (full, queryX, sort, "AGICE");
+    assertMatches (full, queryY, sort, "DJHBF");
+
     sort.setSort (new SortField ("byte", SortField.BYTE), SortField.FIELD_DOC );
     assertMatches (full, queryX, sort, "CIGAE");
     assertMatches (full, queryY, sort, "DHFBJ");
@@ -458,12 +490,18 @@ public class TestSort extends LuceneTestCase {

     sort.setSort (new SortField ("int", SortField.INT), SortField.FIELD_DOC );
     assertMatches (empty, queryX, sort, "");

+    sort.setSort (useDocValues(new SortField ("int", SortField.INT)), SortField.FIELD_DOC );
+    assertMatches (empty, queryX, sort, "");
+
     sort.setSort (new SortField ("string", SortField.STRING, true), SortField.FIELD_DOC );
     assertMatches (empty, queryX, sort, "");

     sort.setSort (new SortField ("float", SortField.FLOAT), new SortField ("string", SortField.STRING) );
     assertMatches (empty, queryX, sort, "");
+
+    sort.setSort (useDocValues(new SortField ("float", SortField.FLOAT)), new SortField ("string", SortField.STRING) );
+    assertMatches (empty, queryX, sort, "");
   }

   static class MyFieldComparator extends FieldComparator {
@@ -539,10 +577,18 @@ public class TestSort extends LuceneTestCase {
     sort.setSort (new SortField ("int", SortField.INT, true) );
     assertMatches (full, queryX, sort, "CAEGI");
     assertMatches (full, queryY, sort, "BJFHD");

+    sort.setSort (useDocValues(new SortField ("int", SortField.INT, true)) );
+    assertMatches (full, queryX, sort, "CAEGI");
+    assertMatches (full, queryY, sort, "BJFHD");
+
     sort.setSort (new SortField ("float", SortField.FLOAT, true) );
     assertMatches (full, queryX, sort, "AECIG");
     assertMatches (full, queryY, sort, "BFJHD");

+    sort.setSort (useDocValues(new SortField ("float", SortField.FLOAT, true)) );
+    assertMatches (full, queryX, sort, "AECIG");
+    assertMatches (full, queryY, sort, "BFJHD");
+
     sort.setSort (new SortField ("string", SortField.STRING, true) );
     assertMatches (full, queryX, sort, "CEGIA");
@@ -560,11 +606,17 @@ public class TestSort extends LuceneTestCase {
     sort.setSort (new SortField ("int", SortField.INT) );
     assertMatches (full, queryF, sort, "IZJ");

+    sort.setSort (useDocValues(new SortField ("int", SortField.INT)) );
+    assertMatches (full, queryF, sort, "IZJ");
+
     sort.setSort (new SortField ("int", SortField.INT, true) );
     assertMatches (full, queryF, sort, "JZI");

     sort.setSort (new SortField ("float", SortField.FLOAT) );
     assertMatches (full, queryF, sort, "ZJI");

+    sort.setSort (useDocValues(new SortField ("float", SortField.FLOAT)) );
+    assertMatches (full, queryF, sort, "ZJI");
+
     // using a nonexisting field as first sort key shouldn't make a difference:
     sort.setSort (new SortField ("nosuchfield", SortField.STRING),
@@ -887,12 +939,30 @@ public class TestSort extends LuceneTestCase {
     sort.setSort(new SortField("int", SortField.INT));
     expected = isFull ? "IDHFGJABEC" : "IDHFGJAEBC";
     assertMatches(multi, queryA, sort, expected);

+    sort.setSort(useDocValues(new SortField ("int", SortField.INT)));
+    expected = isFull ? "IDHFGJABEC" : "IDHFGJAEBC";
+    assertMatches(multi, queryA, sort, expected);
+
+    sort.setSort(useDocValues(new SortField ("int", SortField.INT)), SortField.FIELD_DOC);
+    expected = isFull ? "IDHFGJABEC" : "IDHFGJAEBC";
+    assertMatches(multi, queryA, sort, expected);
+
+    sort.setSort(useDocValues(new SortField("int", SortField.INT)));
+    expected = isFull ? "IDHFGJABEC" : "IDHFGJAEBC";
+    assertMatches(multi, queryA, sort, expected);
+
     sort.setSort(new SortField ("float", SortField.FLOAT), SortField.FIELD_DOC);
     assertMatches(multi, queryA, sort, "GDHJCIEFAB");

     sort.setSort(new SortField("float", SortField.FLOAT));
     assertMatches(multi, queryA, sort, "GDHJCIEFAB");

+    sort.setSort(useDocValues(new SortField ("float", SortField.FLOAT)), SortField.FIELD_DOC);
+    assertMatches(multi, queryA, sort, "GDHJCIEFAB");
+
+    sort.setSort(useDocValues(new SortField("float", SortField.FLOAT)));
+    assertMatches(multi, queryA, sort, "GDHJCIEFAB");
+
     sort.setSort(new SortField("string", SortField.STRING));
     assertMatches(multi, queryA, sort, "DJAIHGFEBC");
@@ -900,6 +970,10 @@ public class TestSort extends LuceneTestCase {
     sort.setSort(new SortField("int", SortField.INT, true));
     expected = isFull ? "CABEJGFHDI" : "CAEBJGFHDI";
     assertMatches(multi, queryA, sort, expected);

+    sort.setSort(useDocValues(new SortField("int", SortField.INT, true)));
+    expected = isFull ? "CABEJGFHDI" : "CAEBJGFHDI";
+    assertMatches(multi, queryA, sort, expected);
+
     sort.setSort(new SortField("float", SortField.FLOAT, true));
     assertMatches(multi, queryA, sort, "BAFECIJHDG");
@@ -909,6 +983,9 @@ public class TestSort extends LuceneTestCase {

     sort.setSort(new SortField("int", SortField.INT),new SortField("float", SortField.FLOAT));
     assertMatches(multi, queryA, sort, "IDHFGJEABC");

+    sort.setSort(useDocValues(new SortField("int", SortField.INT)), useDocValues(new SortField("float", SortField.FLOAT)));
+    assertMatches(multi, queryA, sort, "IDHFGJEABC");
+
     sort.setSort(new SortField("float", SortField.FLOAT),new SortField("string", SortField.STRING));
     assertMatches(multi, queryA, sort, "GDHJICEFAB");
@@ -918,6 +995,12 @@ public class TestSort extends LuceneTestCase {

     sort.setSort(new SortField ("int", SortField.INT, true));
     assertMatches(multi, queryF, sort, "JZI");

+    sort.setSort(useDocValues(new SortField ("int", SortField.INT)));
+    assertMatches(multi, queryF, sort, "IZJ");
+
+    sort.setSort(useDocValues(new SortField ("int", SortField.INT, true)));
+    assertMatches(multi, queryF, sort, "JZI");
+
     sort.setSort(new SortField ("float", SortField.FLOAT));
     assertMatches(multi, queryF, sort, "ZJI");