fix MismatchedLeafReader to carry over dimensional count/numBytes for each field; add some toString; improve test debuggability

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1711351 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2015-10-29 19:51:37 +00:00
parent 295b9972f4
commit dc66518401
5 changed files with 47 additions and 4 deletions

@@ -173,4 +173,9 @@ class SimpleTextDimensionalReader extends DimensionalReader {
public void close() throws IOException {
dataIn.close();
}
@Override
public String toString() {
return "SimpleTextDimensionalReader(segment=" + readState.segmentInfo.name + " maxDoc=" + readState.segmentInfo.maxDoc() + ")";
}
}

@@ -40,7 +40,6 @@ class MultiDimensionalValues extends DimensionalValues {
return leaves.get(0).reader().getDimensionalValues();
}
boolean anyReal = false;
List<DimensionalValues> values = new ArrayList<>();
List<Integer> docBases = new ArrayList<>();
for (int i = 0; i < size; i++) {
@@ -76,4 +75,20 @@ class MultiDimensionalValues extends DimensionalValues {
});
}
}
@Override
public String toString() {
StringBuilder b = new StringBuilder();
b.append("MultiDimensionalValues(");
for(int i=0;i<subs.size();i++) {
if (i > 0) {
b.append(", ");
}
b.append("docBase=");
b.append(docBases.get(i));
b.append(" sub=" + subs.get(i));
}
b.append(')');
return b.toString();
}
}
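
A small usage sketch (not part of this commit) of the kind of debugging the new toString() is meant to help with. It assumes a static MultiDimensionalValues.get(IndexReader) accessor, as suggested by the first hunk above, and package-private access from org.apache.lucene.index; the segment names in the sample output are made up.

  // Dump the composite per-segment view of dimensional values for a reader:
  DimensionalValues values = MultiDimensionalValues.get(reader);
  if (values != null) {
    // With the new toString(), this prints one "docBase=... sub=..." entry per
    // sub-reader, e.g.:
    //   MultiDimensionalValues(docBase=0 sub=SimpleTextDimensionalReader(segment=_0 maxDoc=128), docBase=128 sub=...)
    System.out.println(values);
  }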

@@ -929,7 +929,7 @@ public class TestDimensionalValues extends LuceneTestCase {
}
//System.out.println("visit check docID=" + docID);
for(int dim=0;dim<numDims;dim++) {
//System.out.println(" dim=" + dim + " value=" + new BytesRef(packedValue, dim*bytesPerDim, bytesPerDim));
//System.out.println(" dim=" + dim + " value=" + new BytesRef(packedValue, dim*numBytesPerDim, numBytesPerDim));
if (BKDUtil.compare(numBytesPerDim, packedValue, dim, queryMin[dim], 0) < 0 ||
BKDUtil.compare(numBytesPerDim, packedValue, dim, queryMax[dim], 0) > 0) {
//System.out.println(" no");
@@ -990,8 +990,23 @@
}
int limit = Math.max(expected.length(), hits.length());
int failCount = 0;
int successCount = 0;
for(int id=0;id<limit;id++) {
assertEquals("docID=" + id, expected.get(id), hits.get(id));
if (expected.get(id) != hits.get(id)) {
System.out.println("FAIL: id=" + id);
failCount++;
} else {
successCount++;
}
}
if (failCount != 0) {
for(int docID=0;docID<r.maxDoc();docID++) {
System.out.println(" docID=" + docID + " id=" + idValues.get(docID));
}
fail(failCount + " docs failed; " + successCount + " docs succeeded");
}
}
} finally {
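
The change above replaces a single fail-fast assertEquals with a collect-then-report loop. A standalone sketch of the same pattern (a hypothetical helper, not from the commit) for tests that compare two per-document bit sets:

  // Record every mismatching doc, print them all, then fail once with a
  // summary, instead of stopping at the first mismatch:
  static void assertSameBits(java.util.BitSet expected, java.util.BitSet actual, int limit) {
    int failCount = 0;
    int successCount = 0;
    for (int id = 0; id < limit; id++) {
      if (expected.get(id) != actual.get(id)) {
        System.out.println("FAIL: id=" + id);
        failCount++;
      } else {
        successCount++;
      }
    }
    if (failCount != 0) {
      throw new AssertionError(failCount + " docs failed; " + successCount + " docs succeeded");
    }
  }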

@@ -67,7 +67,8 @@ public class MismatchedLeafReader extends FilterLeafReader {
oldInfo.getDocValuesType(), // docValuesType
oldInfo.getDocValuesGen(), // dvGen
oldInfo.attributes(), // attributes
0, 0); // dimensional count,numBytes
oldInfo.getDimensionCount(),
oldInfo.getDimensionNumBytes());
shuffled.set(i, newInfo);
}
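
The hunk above is the core fix: MismatchedLeafReader rebuilds each FieldInfo under a shuffled field number, and previously hard-coded 0, 0 for the dimensional count/numBytes, silently dropping the field's dimensional configuration. Below is a standalone sketch of the corrected copy; the getter names and argument order are taken from the hunk, but the full constructor signature is assumed to match the 2015 trunk FieldInfo and may differ in other versions.

  // Rebuild a FieldInfo under a new field number, carrying over the per-field
  // dimensional settings that used to be passed as 0, 0 (assumes
  // org.apache.lucene.index.FieldInfo is imported):
  static FieldInfo renumber(FieldInfo oldInfo, int newNumber) {
    return new FieldInfo(oldInfo.name,                     // name
                         newNumber,                        // number
                         oldInfo.hasVectors(),             // storeTermVector
                         oldInfo.omitsNorms(),             // omitNorms
                         oldInfo.hasPayloads(),            // storePayloads
                         oldInfo.getIndexOptions(),        // indexOptions
                         oldInfo.getDocValuesType(),       // docValuesType
                         oldInfo.getDocValuesGen(),        // dvGen
                         oldInfo.attributes(),             // attributes
                         oldInfo.getDimensionCount(),      // dimensional count (was 0)
                         oldInfo.getDimensionNumBytes());  // dimensional numBytes (was 0)
  }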

@@ -25,6 +25,7 @@ import java.util.List;
import java.util.Map;
import java.util.Random;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
/**
@@ -156,12 +157,18 @@ public class MockRandomMergePolicy extends MergePolicy {
int thingToDo = r.nextInt(7);
if (thingToDo == 0) {
// simple no-op FilterReader
if (LuceneTestCase.VERBOSE) {
System.out.println("NOTE: MockRandomMergePolicy now swaps in a SlowCodecReaderWrapper for merging reader=" + readers.get(i));
}
readers.set(i, SlowCodecReaderWrapper.wrap(new FilterLeafReader(readers.get(i))));
} else if (thingToDo == 1) {
// renumber fields
// NOTE: currently this only "blocks" bulk merges just by
// being a FilterReader. But it might find bugs elsewhere,
// and maybe the situation can be improved in the future.
if (LuceneTestCase.VERBOSE) {
System.out.println("NOTE: MockRandomMergePolicy now swaps in a MismatchedLeafReader for merging reader=" + readers.get(i));
}
readers.set(i, SlowCodecReaderWrapper.wrap(new MismatchedLeafReader(readers.get(i), r)));
}
// otherwise, reader is unchanged