removed synchronization on value writers, added test for docValues comparators

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/docvalues@1098980 13f79535-47bb-0310-9956-ffa450edef68
Simon Willnauer 2011-05-03 09:19:32 +00:00
parent 9c2a60a0c0
commit 471cb20526
16 changed files with 151 additions and 67 deletions
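
For orientation, here is a minimal sketch of how the pieces touched by this commit fit together: values are indexed through DocValuesField, and sorting is switched from the field cache to the doc values source via SortField.setUseIndexValues(true), which now routes through the missing-value-aware IntDocValuesComparator. The sketch only uses calls that appear in the TestSort and SortField diffs below; the class name, the helper methods, and the writer/searcher plumbing are illustrative assumptions, and it presumes a DocValuesCodecProvider has been registered as the default codec provider, as TestSort.setUp() now does.

import java.io.IOException;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.DocValuesField;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.values.Type;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopDocs;

class DocValuesSortSketch {

  // Index a few documents whose "int" field also carries INTS doc values
  // (mirrors the DocValuesField.set(...) usage added to TestSort).
  static void addDocs(IndexWriter writer) throws IOException {
    for (int i = 0; i < 5; i++) {
      Document doc = new Document();
      doc.add(DocValuesField.set(
          new Field("int", Integer.toString(i), Field.Store.NO, Field.Index.NOT_ANALYZED),
          Type.INTS));
      writer.addDocument(doc);
    }
    writer.commit();
  }

  // Sort against the doc values source instead of the field cache; with
  // setUseIndexValues(true) SortField now builds an IntDocValuesComparator,
  // passing along its missing value (see the FieldComparator/SortField diffs).
  static TopDocs sortByDocValues(IndexSearcher searcher) throws IOException {
    SortField byInt = new SortField("int", SortField.INT);
    byInt.setUseIndexValues(true);
    return searcher.search(new MatchAllDocsQuery(), null, 10,
        new Sort(byInt, SortField.FIELD_DOC));
  }
}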

View File

@@ -298,7 +298,10 @@ final class PerFieldCodecWrapper extends Codec {
       IOException err = null;
       while (it.hasNext()) {
         try {
-          it.next().close();
+          PerDocValues next = it.next();
+          if (next != null) {
+            next.close();
+          }
         } catch (IOException ioe) {
           // keep first IOException we hit but keep
           // closing the rest

View File

@@ -76,11 +76,7 @@ public class DocValuesCodec extends Codec {
   @Override
   public PerDocValues docsProducer(SegmentReadState state) throws IOException {
-    try {
       return new DocValuesProducerBase(state.segmentInfo, state.dir, state.fieldInfos, state.codecId);
-    }catch (IOException e) {
-      return new DocValuesProducerBase(state.segmentInfo, state.dir, state.fieldInfos, state.codecId);
-    }
   }

   @Override

View File

@@ -388,7 +388,7 @@ public final class Bytes {
     public abstract void add(int docID, BytesRef bytes) throws IOException;

     @Override
-    public synchronized void finish(int docCount) throws IOException {
+    public void finish(int docCount) throws IOException {
       try {
         if (datOut != null)
           datOut.close();

View File

@@ -71,7 +71,7 @@ class FixedDerefBytesImpl {
     }

     @Override
-    synchronized public void add(int docID, BytesRef bytes) throws IOException {
+    public void add(int docID, BytesRef bytes) throws IOException {
       if (bytes.length == 0) // default value - skip it
         return;
       if (size == -1) {
@@ -102,7 +102,7 @@ class FixedDerefBytesImpl {
     // Important that we get docCount, in case there were
     // some last docs that we didn't see
     @Override
-    synchronized public void finish(int docCount) throws IOException {
+    public void finish(int docCount) throws IOException {
       try {
         if (size == -1) {
           datOut.writeInt(size);

View File

@@ -78,7 +78,7 @@ class FixedSortedBytesImpl {
     }

     @Override
-    synchronized public void add(int docID, BytesRef bytes) throws IOException {
+    public void add(int docID, BytesRef bytes) throws IOException {
       if (bytes.length == 0)
         return; // default - skip it
       if (size == -1) {
@@ -103,7 +103,7 @@ class FixedSortedBytesImpl {
     // Important that we get docCount, in case there were
     // some last docs that we didn't see
     @Override
-    synchronized public void finish(int docCount) throws IOException {
+    public void finish(int docCount) throws IOException {
       try {
         if (size == -1) {// no data added
           datOut.writeInt(size);

View File

@@ -52,7 +52,7 @@ class FixedStraightBytesImpl {

     // TODO - impl bulk copy here!
     @Override
-    synchronized public void add(int docID, BytesRef bytes) throws IOException {
+    public void add(int docID, BytesRef bytes) throws IOException {
       if (size == -1) {
         size = bytes.length;
         datOut.writeInt(size);
@@ -103,7 +103,7 @@ class FixedStraightBytesImpl {
     }

     @Override
-    synchronized public void finish(int docCount) throws IOException {
+    public void finish(int docCount) throws IOException {
       try {
         if (size == -1) {// no data added
           datOut.writeInt(0);

View File

@@ -153,7 +153,7 @@ public class Floats {
     }

     @Override
-    synchronized public void add(final int docID, final double v)
+    public void add(final int docID, final double v)
         throws IOException {
       assert docID > lastDocId : "docID: " + docID
           + " must be greater than the last added doc id: " + lastDocId;
@@ -167,7 +167,7 @@ public class Floats {
     }

     @Override
-    synchronized public void finish(int docCount) throws IOException {
+    public void finish(int docCount) throws IOException {
       try {
         if (docCount > lastDocId + 1)
           for (int i = lastDocId; i < docCount; i++) {
@@ -196,7 +196,7 @@ public class Floats {
     }

     @Override
-    synchronized public void add(int docID, double v) throws IOException {
+    public void add(int docID, double v) throws IOException {
       assert docID > lastDocId : "docID: " + docID
           + " must be greater than the last added doc id: " + lastDocId;
       if (docID - lastDocId > 1) {
@@ -209,7 +209,7 @@ public class Floats {
     }

     @Override
-    synchronized public void finish(int docCount) throws IOException {
+    public void finish(int docCount) throws IOException {
       try {
         if (docCount > lastDocId + 1)
           for (int i = lastDocId; i < docCount; i++) {

View File

@@ -191,8 +191,9 @@ public class MultiDocValues extends DocValues {
     }

     private final int ensureSource(int docID) {
-      int n = docID - start;
-      if (n >= numDocs) {
+      if (docID >= start && docID < start+numDocs) {
+        return docID - start;
+      } else {
         final int idx = ReaderUtil.subIndex(docID, starts);
         assert idx >= 0 && idx < docValuesIdx.length : "idx was " + idx
             + " for doc id: " + docID + " slices : " + Arrays.toString(starts);
@@ -207,9 +208,8 @@ public class MultiDocValues extends DocValues {
         start = docValuesIdx[idx].start;
         numDocs = docValuesIdx[idx].length;
-        n = docID - start;
+        return docID - start;
       }
-      return n;
     }

     public double getFloat(int docID) {

View File

@@ -70,7 +70,7 @@ class PackedIntsImpl {
     }

     @Override
-    public synchronized void add(int docID, long v) throws IOException {
+    public void add(int docID, long v) throws IOException {
       assert lastDocId < docID;
       if (!started) {
         started = true;
@@ -96,7 +96,7 @@ class PackedIntsImpl {
     }

     @Override
-    public synchronized void finish(int docCount) throws IOException {
+    public void finish(int docCount) throws IOException {
       try {
         if (!started) {
           minValue = maxValue = 0;
@@ -118,8 +118,8 @@ class PackedIntsImpl {
         for (int i = firstDoc; i < lastDocId;) {
           w.add(docToValue[i] - minValue);
-          final int nextValue = defaultValues.nextSetBit(i);
-          for (i++; i < nextValue; i++) {
+          final int nextValue = defaultValues.nextSetBit(++i);
+          for (; i < nextValue; i++) {
             w.add(defaultValue); // fill all gaps
           }
         }
@@ -198,6 +198,7 @@ class PackedIntsImpl {

       // TODO -- can we somehow avoid 2X method calls
       // on each get? must push minValue down, and make
       // PackedInts implement Ints.Source
+      assert docID >= 0;
       return minValue + values.get(docID);
     }

View File

@@ -130,7 +130,7 @@ class VarDerefBytesImpl {
     }

     @Override
-    synchronized public void add(int docID, BytesRef bytes) throws IOException {
+    public void add(int docID, BytesRef bytes) throws IOException {
       if (bytes.length == 0)
         return; // default
       final int e = hash.add(bytes);
@@ -168,7 +168,7 @@ class VarDerefBytesImpl {
     // Important that we get docCount, in case there were
     // some last docs that we didn't see
     @Override
-    synchronized public void finish(int docCount) throws IOException {
+    public void finish(int docCount) throws IOException {
       try {
         idxOut.writeInt(address - 1);
         // write index

View File

@@ -78,7 +78,7 @@ class VarSortedBytesImpl {
     }

     @Override
-    synchronized public void add(int docID, BytesRef bytes) throws IOException {
+    public void add(int docID, BytesRef bytes) throws IOException {
       if (bytes.length == 0)
         return;// default
       if (docID >= docToEntry.length) {
@@ -97,7 +97,7 @@ class VarSortedBytesImpl {
     // Important that we get docCount, in case there were
     // some last docs that we didn't see
     @Override
-    synchronized public void finish(int docCount) throws IOException {
+    public void finish(int docCount) throws IOException {
       final int count = hash.size();
       try {
         final int[] sortedEntries = hash.sort(comp);

View File

@@ -75,7 +75,7 @@ class VarStraightBytesImpl {
     }

     @Override
-    synchronized public void add(int docID, BytesRef bytes) throws IOException {
+    public void add(int docID, BytesRef bytes) throws IOException {
       if (bytes.length == 0)
         return; // default
       fill(docID);
@@ -85,7 +85,7 @@ class VarStraightBytesImpl {
     }

     @Override
-    synchronized public void finish(int docCount) throws IOException {
+    public void finish(int docCount) throws IOException {
       try {
         if (lastDocID == -1) {
           idxOut.writeVLong(0);

View File

@@ -20,6 +20,8 @@ package org.apache.lucene.search;
 import java.io.IOException;

 import org.apache.lucene.index.IndexReader.AtomicReaderContext;
+import org.apache.lucene.index.values.DocValues;
+import org.apache.lucene.index.values.DocValues.MissingValue;
 import org.apache.lucene.index.values.DocValues.Source;
 import org.apache.lucene.search.FieldCache.DocTerms;
 import org.apache.lucene.search.FieldCache.DocTermsIndex;
@@ -334,10 +336,13 @@ public abstract class FieldComparator {
     private Source currentReaderValues;
     private final String field;
     private double bottom;
+    private final float missingValue;
+    private MissingValue missing;

-    FloatDocValuesComparator(int numHits, String field) {
+    FloatDocValuesComparator(int numHits, String field, Float missingValue) {
       values = new double[numHits];
       this.field = field;
+      this.missingValue = missingValue == null ? 0 : missingValue.floatValue();
     }

     @Override
@@ -367,12 +372,17 @@ public abstract class FieldComparator {

     @Override
     public void copy(int slot, int doc) {
-      values[slot] = currentReaderValues.getFloat(doc);
+      final double value = currentReaderValues.getFloat(doc);
+      values[slot] = value == missing.doubleValue ? missingValue : value;
     }

     @Override
     public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
-      currentReaderValues = context.reader.docValues(field).getSource();
+      final DocValues docValues = context.reader.docValues(field);
+      if (docValues != null) {
+        currentReaderValues = docValues.getSource();
+        missing = currentReaderValues.getMissing();
+      }
       return this;
     }
@@ -601,10 +611,13 @@ public abstract class FieldComparator {
     private Source currentReaderValues;
     private final String field;
     private long bottom;
+    private int missingValue;
+    private MissingValue missing;

-    IntDocValuesComparator(int numHits, String field) {
+    IntDocValuesComparator(int numHits, String field, Integer missingValue) {
       values = new long[numHits];
       this.field = field;
+      this.missingValue = missingValue == null ? 0 : missingValue.intValue();
     }

     @Override
@@ -638,12 +651,17 @@ public abstract class FieldComparator {

     @Override
     public void copy(int slot, int doc) {
-      values[slot] = currentReaderValues.getInt(doc);
+      final long value = currentReaderValues.getInt(doc);
+      values[slot] = value == missing.longValue ? missingValue : value;
     }

     @Override
     public FieldComparator setNextReader(AtomicReaderContext context) throws IOException {
-      currentReaderValues = context.reader.docValues(field).getSource();
+      DocValues docValues = context.reader.docValues(field);
+      if (docValues != null) {
+        currentReaderValues = docValues.getSource();
+        missing = currentReaderValues.getMissing();
+      }
       return this;
     }

View File

@@ -442,14 +442,14 @@ public class SortField {
       case SortField.INT:
         if (useIndexValues) {
-          return new FieldComparator.IntDocValuesComparator(numHits, field);
+          return new FieldComparator.IntDocValuesComparator(numHits, field, (Integer) missingValue);
         } else {
           return new FieldComparator.IntComparator(numHits, (IntValuesCreator)creator, (Integer) missingValue);
         }

       case SortField.FLOAT:
         if (useIndexValues) {
-          return new FieldComparator.FloatDocValuesComparator(numHits, field);
+          return new FieldComparator.FloatDocValuesComparator(numHits, field, (Float) missingValue);
         } else {
           return new FieldComparator.FloatComparator(numHits, (FloatValuesCreator) creator, (Float) missingValue);
         }

View File

@@ -51,7 +51,6 @@ import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.LockObtainFailedException;
-import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.FloatsRef;
 import org.apache.lucene.util.LongsRef;
@@ -73,9 +72,8 @@ public class TestDocValuesIndexing extends LuceneTestCase {
  *  - DocValues
  *  - Add @lucene.experimental to all necessary classes
  *  - add test for unoptimized case with deletes
+ *  - add multithreaded tests / integrate into stress indexing?
  *  - run RAT
- *  - add tests for FieldComparator FloatIndexValuesComparator vs. FloatValuesComparator etc.
- *
  */

 private CodecProvider provider;
@@ -87,18 +85,12 @@ public class TestDocValuesIndexing extends LuceneTestCase {
     provider.copyFrom(CodecProvider.getDefault());
   }

-  private Directory newDirectory2() throws IOException {
-    MockDirectoryWrapper newDirectory = newDirectory();
-    newDirectory.setCheckIndexOnClose(false);
-    return newDirectory;
-  }
-
   /*
    * Simple test case to show how to use the API
    */
   public void testDocValuesSimple() throws CorruptIndexException, IOException,
       ParseException {
-    Directory dir = newDirectory2();
+    Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, writerConfig(false));
     for (int i = 0; i < 5; i++) {
       Document doc = new Document();
@@ -109,15 +101,6 @@ public class TestDocValuesIndexing extends LuceneTestCase {
       writer.addDocument(doc);
     }
     writer.commit();
-    for (int i = 0; i < 5; i++) {
-      Document doc = new Document();
-      DocValuesField valuesField = new DocValuesField("docId1");
-      valuesField.setFloat(i);
-      doc.add(valuesField);
-      doc.add(new Field("docId1", "" + i, Store.NO, Index.ANALYZED));
-      writer.addDocument(doc);
-    }
-    writer.commit();
     writer.optimize(true);

     writer.close(true);
@@ -189,7 +172,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
     Type second = values.get(1);
     String msg = "[first=" + first.name() + ", second=" + second.name() + "]";
     // index first index
-    Directory d_1 = newDirectory2();
+    Directory d_1 = newDirectory();
     IndexWriter w_1 = new IndexWriter(d_1, writerConfig(random.nextBoolean()));
     indexValues(w_1, valuesPerIndex, first, values, false, 7);
     w_1.commit();
@@ -197,14 +180,14 @@ public class TestDocValuesIndexing extends LuceneTestCase {
     _TestUtil.checkIndex(d_1, w_1.getConfig().getCodecProvider());

     // index second index
-    Directory d_2 = newDirectory2();
+    Directory d_2 = newDirectory();
     IndexWriter w_2 = new IndexWriter(d_2, writerConfig(random.nextBoolean()));
     indexValues(w_2, valuesPerIndex, second, values, false, 7);
     w_2.commit();
     assertEquals(valuesPerIndex, w_2.maxDoc());
     _TestUtil.checkIndex(d_2, w_2.getConfig().getCodecProvider());

-    Directory target = newDirectory2();
+    Directory target = newDirectory();
     IndexWriter w = new IndexWriter(target, writerConfig(random.nextBoolean()));
     IndexReader r_1 = IndexReader.open(w_1, true);
     IndexReader r_2 = IndexReader.open(w_2, true);
@@ -267,7 +250,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
   public void runTestNumerics(IndexWriterConfig cfg, boolean withDeletions)
       throws IOException {
-    Directory d = newDirectory2();
+    Directory d = newDirectory();
     IndexWriter w = new IndexWriter(d, cfg);
     final int numValues = 179 + random.nextInt(151);
     final List<Type> numVariantList = new ArrayList<Type>(NUMERICS);
@@ -359,7 +342,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
   public void runTestIndexBytes(IndexWriterConfig cfg, boolean withDeletions)
       throws CorruptIndexException, LockObtainFailedException, IOException {
-    final Directory d = newDirectory2();
+    final Directory d = newDirectory();
     IndexWriter w = new IndexWriter(d, cfg);
     final List<Type> byteVariantList = new ArrayList<Type>(BYTES);
     // run in random order to test if fill works correctly during merges
@@ -430,7 +413,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
     for (int i = base; i < r.numDocs(); i++) {
       String msg = " field: " + byteIndexValue.name() + " at index: " + i
           + " base: " + base + " numDocs:" + r.numDocs() + " bytesSize: "
-          + bytesSize;
+          + bytesSize + " src: " + bytes;
       while (withDeletions && deleted.get(v++)) {
         upto += bytesSize;
       }

View File

@@ -25,6 +25,7 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;

 import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.document.DocValuesField;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.CorruptIndexException;
@@ -35,6 +36,9 @@ import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.MultiReader;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.index.codecs.CodecProvider;
+import org.apache.lucene.index.codecs.docvalues.DocValuesCodecProvider;
+import org.apache.lucene.index.values.Type;
 import org.apache.lucene.queryParser.ParseException;
 import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.FieldValueHitQueue.Entry;
@@ -118,13 +122,19 @@ public class TestSort extends LuceneTestCase {
       Document doc = new Document();
       doc.add (new Field ("tracer", data[i][0], Field.Store.YES, Field.Index.NO));
       doc.add (new Field ("contents", data[i][1], Field.Store.NO, Field.Index.ANALYZED));
-      if (data[i][2] != null) doc.add (new Field ("int", data[i][2], Field.Store.NO, Field.Index.NOT_ANALYZED));
-      if (data[i][3] != null) doc.add (new Field ("float", data[i][3], Field.Store.NO, Field.Index.NOT_ANALYZED));
+      if (data[i][2] != null) {
+        doc.add (DocValuesField.set(new Field ("int", data[i][2], Field.Store.NO, Field.Index.NOT_ANALYZED), Type.INTS));
+      }
+      if (data[i][3] != null) {
+        doc.add (DocValuesField.set(new Field ("float", data[i][3], Field.Store.NO, Field.Index.NOT_ANALYZED), Type.FLOAT_32));
+      }
       if (data[i][4] != null) doc.add (new Field ("string", data[i][4], Field.Store.NO, Field.Index.NOT_ANALYZED));
       if (data[i][5] != null) doc.add (new Field ("custom", data[i][5], Field.Store.NO, Field.Index.NOT_ANALYZED));
       if (data[i][6] != null) doc.add (new Field ("i18n", data[i][6], Field.Store.NO, Field.Index.NOT_ANALYZED));
       if (data[i][7] != null) doc.add (new Field ("long", data[i][7], Field.Store.NO, Field.Index.NOT_ANALYZED));
-      if (data[i][8] != null) doc.add (new Field ("double", data[i][8], Field.Store.NO, Field.Index.NOT_ANALYZED));
+      if (data[i][8] != null) {
+        doc.add (DocValuesField.set(new Field ("double", data[i][8], Field.Store.NO, Field.Index.NOT_ANALYZED), Type.FLOAT_64));
+      }
       if (data[i][9] != null) doc.add (new Field ("short", data[i][9], Field.Store.NO, Field.Index.NOT_ANALYZED));
       if (data[i][10] != null) doc.add (new Field ("byte", data[i][10], Field.Store.NO, Field.Index.NOT_ANALYZED));
       if (data[i][11] != null) doc.add (new Field ("parser", data[i][11], Field.Store.NO, Field.Index.NOT_ANALYZED));
@@ -217,6 +227,11 @@ public class TestSort extends LuceneTestCase {
   @Override
   public void setUp() throws Exception {
     super.setUp();
+    //nocommit - enable doc values by default for all tests
+    DocValuesCodecProvider provider = new DocValuesCodecProvider();
+    provider.copyFrom(CodecProvider.getDefault());
+    CodecProvider.setDefault(provider);
+
     full = getFullIndex();
     searchX = getXIndex();
     searchY = getYIndex();
@@ -228,6 +243,7 @@ public class TestSort extends LuceneTestCase {
     queryG = new TermQuery (new Term ("contents", "g"));
     queryM = new TermQuery (new Term ("contents", "m"));
     sort = new Sort();
+
   }

   private ArrayList<Directory> dirs = new ArrayList<Directory>();
@@ -256,16 +272,28 @@ public class TestSort extends LuceneTestCase {
     assertMatches (full, queryY, sort, "BDFHJ");
   }

+  private static SortField useDocValues(SortField field) {
+    field.setUseIndexValues(true);
+    return field;
+  }
+
   // test sorts where the type of field is specified
   public void testTypedSort() throws Exception {
     sort.setSort (new SortField ("int", SortField.INT), SortField.FIELD_DOC );
     assertMatches (full, queryX, sort, "IGAEC");
     assertMatches (full, queryY, sort, "DHFJB");

+    sort.setSort (useDocValues(new SortField ("int", SortField.INT)), SortField.FIELD_DOC );
+    assertMatches (full, queryX, sort, "IGAEC");
+    assertMatches (full, queryY, sort, "DHFJB");
+
     sort.setSort (new SortField ("float", SortField.FLOAT), SortField.FIELD_DOC );
     assertMatches (full, queryX, sort, "GCIEA");
     assertMatches (full, queryY, sort, "DHJFB");

+    sort.setSort (useDocValues(new SortField ("float", SortField.FLOAT)), SortField.FIELD_DOC );
+    assertMatches (full, queryX, sort, "GCIEA");
+    assertMatches (full, queryY, sort, "DHJFB");
+
     sort.setSort (new SortField ("long", SortField.LONG), SortField.FIELD_DOC );
     assertMatches (full, queryX, sort, "EACGI");
     assertMatches (full, queryY, sort, "FBJHD");
@@ -274,6 +302,10 @@ public class TestSort extends LuceneTestCase {
     assertMatches (full, queryX, sort, "AGICE");
     assertMatches (full, queryY, sort, "DJHBF");

+    sort.setSort (useDocValues(new SortField ("double", SortField.DOUBLE)), SortField.FIELD_DOC );
+    assertMatches (full, queryX, sort, "AGICE");
+    assertMatches (full, queryY, sort, "DJHBF");
+
     sort.setSort (new SortField ("byte", SortField.BYTE), SortField.FIELD_DOC );
     assertMatches (full, queryX, sort, "CIGAE");
     assertMatches (full, queryY, sort, "DHFBJ");
@@ -459,11 +491,17 @@ public class TestSort extends LuceneTestCase {
     sort.setSort (new SortField ("int", SortField.INT), SortField.FIELD_DOC );
     assertMatches (empty, queryX, sort, "");

+    sort.setSort (useDocValues(new SortField ("int", SortField.INT)), SortField.FIELD_DOC );
+    assertMatches (empty, queryX, sort, "");
+
     sort.setSort (new SortField ("string", SortField.STRING, true), SortField.FIELD_DOC );
     assertMatches (empty, queryX, sort, "");

     sort.setSort (new SortField ("float", SortField.FLOAT), new SortField ("string", SortField.STRING) );
     assertMatches (empty, queryX, sort, "");
+
+    sort.setSort (useDocValues(new SortField ("float", SortField.FLOAT)), new SortField ("string", SortField.STRING) );
+    assertMatches (empty, queryX, sort, "");
   }

   static class MyFieldComparator extends FieldComparator {
@@ -540,10 +578,18 @@ public class TestSort extends LuceneTestCase {
     assertMatches (full, queryX, sort, "CAEGI");
     assertMatches (full, queryY, sort, "BJFHD");

+    sort.setSort (useDocValues(new SortField ("int", SortField.INT, true)) );
+    assertMatches (full, queryX, sort, "CAEGI");
+    assertMatches (full, queryY, sort, "BJFHD");
+
     sort.setSort (new SortField ("float", SortField.FLOAT, true) );
     assertMatches (full, queryX, sort, "AECIG");
     assertMatches (full, queryY, sort, "BFJHD");

+    sort.setSort (useDocValues(new SortField ("float", SortField.FLOAT, true)) );
+    assertMatches (full, queryX, sort, "AECIG");
+    assertMatches (full, queryY, sort, "BFJHD");
+
     sort.setSort (new SortField ("string", SortField.STRING, true) );
     assertMatches (full, queryX, sort, "CEGIA");
     assertMatches (full, queryY, sort, "BFHJD");
@@ -560,12 +606,18 @@ public class TestSort extends LuceneTestCase {
     sort.setSort (new SortField ("int", SortField.INT) );
     assertMatches (full, queryF, sort, "IZJ");

+    sort.setSort (useDocValues(new SortField ("int", SortField.INT)) );
+    assertMatches (full, queryF, sort, "IZJ");
+
     sort.setSort (new SortField ("int", SortField.INT, true) );
     assertMatches (full, queryF, sort, "JZI");

     sort.setSort (new SortField ("float", SortField.FLOAT) );
     assertMatches (full, queryF, sort, "ZJI");

+    sort.setSort (useDocValues(new SortField ("float", SortField.FLOAT)) );
+    assertMatches (full, queryF, sort, "ZJI");
+
     // using a nonexisting field as first sort key shouldn't make a difference:
     sort.setSort (new SortField ("nosuchfield", SortField.STRING),
                   new SortField ("float", SortField.FLOAT) );
@@ -888,12 +940,30 @@ public class TestSort extends LuceneTestCase {
     expected = isFull ? "IDHFGJABEC" : "IDHFGJAEBC";
     assertMatches(multi, queryA, sort, expected);

+    sort.setSort(useDocValues(new SortField ("int", SortField.INT)));
+    expected = isFull ? "IDHFGJABEC" : "IDHFGJAEBC";
+    assertMatches(multi, queryA, sort, expected);
+
+    sort.setSort(useDocValues(new SortField ("int", SortField.INT)), SortField.FIELD_DOC);
+    expected = isFull ? "IDHFGJABEC" : "IDHFGJAEBC";
+    assertMatches(multi, queryA, sort, expected);
+
+    sort.setSort(useDocValues(new SortField("int", SortField.INT)));
+    expected = isFull ? "IDHFGJABEC" : "IDHFGJAEBC";
+    assertMatches(multi, queryA, sort, expected);
+
     sort.setSort(new SortField ("float", SortField.FLOAT), SortField.FIELD_DOC);
     assertMatches(multi, queryA, sort, "GDHJCIEFAB");

     sort.setSort(new SortField("float", SortField.FLOAT));
     assertMatches(multi, queryA, sort, "GDHJCIEFAB");

+    sort.setSort(useDocValues(new SortField ("float", SortField.FLOAT)), SortField.FIELD_DOC);
+    assertMatches(multi, queryA, sort, "GDHJCIEFAB");
+
+    sort.setSort(useDocValues(new SortField("float", SortField.FLOAT)));
+    assertMatches(multi, queryA, sort, "GDHJCIEFAB");
+
     sort.setSort(new SortField("string", SortField.STRING));
     assertMatches(multi, queryA, sort, "DJAIHGFEBC");
@@ -901,6 +971,10 @@ public class TestSort extends LuceneTestCase {
     expected = isFull ? "CABEJGFHDI" : "CAEBJGFHDI";
     assertMatches(multi, queryA, sort, expected);

+    sort.setSort(useDocValues(new SortField("int", SortField.INT, true)));
+    expected = isFull ? "CABEJGFHDI" : "CAEBJGFHDI";
+    assertMatches(multi, queryA, sort, expected);
+
     sort.setSort(new SortField("float", SortField.FLOAT, true));
     assertMatches(multi, queryA, sort, "BAFECIJHDG");
@@ -910,6 +984,9 @@ public class TestSort extends LuceneTestCase {
     sort.setSort(new SortField("int", SortField.INT),new SortField("float", SortField.FLOAT));
     assertMatches(multi, queryA, sort, "IDHFGJEABC");

+    sort.setSort(useDocValues(new SortField("int", SortField.INT)), useDocValues(new SortField("float", SortField.FLOAT)));
+    assertMatches(multi, queryA, sort, "IDHFGJEABC");
+
     sort.setSort(new SortField("float", SortField.FLOAT),new SortField("string", SortField.STRING));
     assertMatches(multi, queryA, sort, "GDHJICEFAB");
@@ -919,6 +996,12 @@ public class TestSort extends LuceneTestCase {
     sort.setSort(new SortField ("int", SortField.INT, true));
     assertMatches(multi, queryF, sort, "JZI");

+    sort.setSort(useDocValues(new SortField ("int", SortField.INT)));
+    assertMatches(multi, queryF, sort, "IZJ");
+
+    sort.setSort(useDocValues(new SortField ("int", SortField.INT, true)));
+    assertMatches(multi, queryF, sort, "JZI");
+
     sort.setSort(new SortField ("float", SortField.FLOAT));
     assertMatches(multi, queryF, sort, "ZJI");