LUCENE-6766: MultiXXX now refuse to merge if there is an index sort

Mike McCandless 2016-05-06 19:02:41 -04:00
parent 8fe78da23c
commit eb8b1a92d8
11 changed files with 179 additions and 36 deletions
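Taken together, the changes make each of the MultiXXX "slice" APIs fail fast when any leaf reader carries an index sort, rather than merging doc IDs in an order that would silently break the sort. A minimal sketch of the new behavior, mirroring the testNoIndexSort tests added below (newIndexWriterConfig and newDirectory are the LuceneTestCase helpers):

    IndexWriterConfig iwc = newIndexWriterConfig();
    iwc.setIndexSort(new Sort(new SortField("foo", SortField.Type.INT)));
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, iwc);
    w.addDocument(new Document());
    DirectoryReader.open(w).close();  // commit a first segment
    w.addDocument(new Document());
    w.forceMerge(1);                  // merging is what produces the sorted segment here
    w.addDocument(new Document());    // extra segment, so the multi views aren't no-ops
    IndexReader r = DirectoryReader.open(w);

    MultiFields.getFields(r);                   // now throws IllegalArgumentException
    MultiDocValues.getNumericValues(r, "foo");  // now throws IllegalArgumentException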

Lucene50RWSegmentInfoFormat.java

@@ -94,10 +94,8 @@ public class Lucene50RWSegmentInfoFormat extends Lucene50SegmentInfoFormat {
   @Override
   public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
     final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene50SegmentInfoFormat.SI_EXTENSION);
-    // nocommit indexSort
-    if (si.getIndexSort() != null) {
-      throw new IllegalArgumentException("teach me to write indexSort");
-    }
+    assert si.getIndexSort() == null;
     try (IndexOutput output = dir.createOutput(fileName, ioContext)) {
       // Only add the file once we've successfully created it, else IFD assert can trip:

DocIDMerger.java

@@ -84,7 +84,9 @@ public class DocIDMerger<T extends DocIDMerger.Sub> {
   /** Reuse API, currently only used by postings during merge */
   public void reset() {
     if (queue != null) {
-      assert queue.size() == 0;
+      queue.clear();
+      // nocommit why does bloom filter wrapper trip this?
+      // assert queue.size() == 0: "queue.size() = " + queue.size();
       for(T sub : subs) {
         while (true) {
           int docID = sub.nextDoc();

MultiDocValues.java

@@ -78,6 +78,9 @@ public class MultiDocValues {
     final int[] starts = new int[size+1];
     for (int i = 0; i < size; i++) {
       LeafReaderContext context = leaves.get(i);
+      if (context.reader().getIndexSort() != null) {
+        throw new IllegalArgumentException("cannot handle index sort: reader=" + context.reader());
+      }
       NumericDocValues v = context.reader().getNormValues(field);
       if (v == null) {
         v = DocValues.emptyNumeric();

@@ -120,6 +123,9 @@
     final int[] starts = new int[size+1];
     for (int i = 0; i < size; i++) {
       LeafReaderContext context = leaves.get(i);
+      if (context.reader().getIndexSort() != null) {
+        throw new IllegalArgumentException("cannot handle index sort: reader=" + context.reader());
+      }
       NumericDocValues v = context.reader().getNumericDocValues(field);
       if (v == null) {
         v = DocValues.emptyNumeric();

@@ -165,6 +171,9 @@
     final int[] starts = new int[size+1];
     for (int i = 0; i < size; i++) {
       LeafReaderContext context = leaves.get(i);
+      if (context.reader().getIndexSort() != null) {
+        throw new IllegalArgumentException("cannot handle index sort: reader=" + context.reader());
+      }
       Bits v = context.reader().getDocsWithField(field);
       if (v == null) {
         v = new Bits.MatchNoBits(context.reader().maxDoc());

@@ -210,6 +219,9 @@
     final int[] starts = new int[size+1];
     for (int i = 0; i < size; i++) {
       LeafReaderContext context = leaves.get(i);
+      if (context.reader().getIndexSort() != null) {
+        throw new IllegalArgumentException("cannot handle index sort: reader=" + context.reader());
+      }
       BinaryDocValues v = context.reader().getBinaryDocValues(field);
       if (v == null) {
         v = DocValues.emptyBinary();

@@ -254,6 +266,9 @@
     final int[] starts = new int[size+1];
     for (int i = 0; i < size; i++) {
       LeafReaderContext context = leaves.get(i);
+      if (context.reader().getIndexSort() != null) {
+        throw new IllegalArgumentException("cannot handle index sort: reader=" + context.reader());
+      }
       SortedNumericDocValues v = context.reader().getSortedNumericDocValues(field);
       if (v == null) {
         v = DocValues.emptySortedNumeric(context.reader().maxDoc());

@@ -312,6 +327,9 @@
     final int[] starts = new int[size+1];
     for (int i = 0; i < size; i++) {
       LeafReaderContext context = leaves.get(i);
+      if (context.reader().getIndexSort() != null) {
+        throw new IllegalArgumentException("cannot handle index sort: reader=" + context.reader());
+      }
       SortedDocValues v = context.reader().getSortedDocValues(field);
       if (v == null) {
         v = DocValues.emptySorted();

@@ -352,6 +370,9 @@
     final int[] starts = new int[size+1];
     for (int i = 0; i < size; i++) {
       LeafReaderContext context = leaves.get(i);
+      if (context.reader().getIndexSort() != null) {
+        throw new IllegalArgumentException("cannot handle index sort: reader=" + context.reader());
+      }
       SortedSetDocValues v = context.reader().getSortedSetDocValues(field);
       if (v == null) {
         v = DocValues.emptySortedSet();
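Callers that trip the new exception are expected to drop down to per-segment access instead. That is the pattern this commit itself adopts further down (getNumericDocValue in TestIndexSorting, getBinaryDocValue in AnalyzingInfixSuggester); a minimal sketch of the lookup:

    // Per-segment doc-values lookup, safe for index-sorted readers;
    // mirrors the helpers this commit adds below.
    static long numericDocValue(IndexReader reader, String field, int docID) throws IOException {
      List<LeafReaderContext> leaves = reader.leaves();
      int sub = ReaderUtil.subIndex(docID, leaves);  // which leaf holds docID
      LeafReaderContext leaf = leaves.get(sub);
      return leaf.reader().getNumericDocValues(field).get(docID - leaf.docBase);
    }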

MultiFields.java

@@ -51,7 +51,7 @@ public final class MultiFields extends Fields {
   private final ReaderSlice[] subSlices;
   private final Map<String,Terms> terms = new ConcurrentHashMap<>();
 
-  // nocommit should we somehow throw exc if you try to pass in "sorted" Fields?
+  // nocommit make test for sorted fields
 
   /** Returns a single {@link Fields} instance for this
    * reader, merging fields/terms/docs/positions on the

@@ -72,6 +72,9 @@ public final class MultiFields extends Fields {
     final List<ReaderSlice> slices = new ArrayList<>(leaves.size());
     for (final LeafReaderContext ctx : leaves) {
       final LeafReader r = ctx.reader();
+      if (r.getIndexSort() != null) {
+        throw new IllegalArgumentException("cannot handle index sort: reader=" + r);
+      }
       final Fields f = r.fields();
       fields.add(f);
       slices.add(new ReaderSlice(ctx.docBase, r.maxDoc(), fields.size()-1));

@@ -107,6 +110,10 @@ public final class MultiFields extends Fields {
     for (int i = 0; i < size; i++) {
       // record all liveDocs, even if they are null
       final LeafReaderContext ctx = leaves.get(i);
+      if (ctx.reader().getIndexSort() != null) {
+        throw new IllegalArgumentException("cannot handle index sort: reader=" + ctx.reader());
+      }
       liveDocs[i] = ctx.reader().getLiveDocs();
       starts[i] = ctx.docBase;
     }

MultiReader.java

@@ -65,6 +65,8 @@ public class MultiReader extends BaseCompositeReader<IndexReader> {
     }
   }
 
+  // nocommit what if there is an indexSort?
+
   @Override
   protected synchronized void doClose() throws IOException {
     IOException ioe = null;

IndexSearcher.java

@@ -37,7 +37,6 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexReaderContext;
 import org.apache.lucene.index.IndexWriter; // javadocs
 import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.MultiFields;
 import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.index.StoredFieldVisitor;
 import org.apache.lucene.index.Term;

@@ -803,21 +802,34 @@ public class IndexSearcher {
    * @lucene.experimental
    */
   public CollectionStatistics collectionStatistics(String field) throws IOException {
-    final int docCount;
-    final long sumTotalTermFreq;
-    final long sumDocFreq;
+    int docCount = 0;
+    long sumTotalTermFreq = 0;
+    long sumDocFreq = 0;
     assert field != null;
-    Terms terms = MultiFields.getTerms(reader, field);
-    if (terms == null) {
-      docCount = 0;
-      sumTotalTermFreq = 0;
-      sumDocFreq = 0;
-    } else {
-      docCount = terms.getDocCount();
-      sumTotalTermFreq = terms.getSumTotalTermFreq();
-      sumDocFreq = terms.getSumDocFreq();
+    for(LeafReaderContext ctx : reader.leaves()) {
+      Terms terms = ctx.reader().fields().terms(field);
+      if (terms != null) {
+        int subDocCount = terms.getDocCount();
+        if (subDocCount == -1) {
+          docCount = -1;
+        } else if (docCount != -1) {
+          docCount += subDocCount;
+        }
+        long subSumDocFreq = terms.getSumDocFreq();
+        if (subSumDocFreq == -1) {
+          sumDocFreq = -1;
+        } else if (sumDocFreq != -1) {
+          sumDocFreq += subSumDocFreq;
+        }
+        long subSumTotalTermFreq = terms.getSumTotalTermFreq();
+        if (subSumTotalTermFreq == -1) {
+          sumTotalTermFreq = -1;
+        } else if (sumTotalTermFreq != -1) {
+          sumTotalTermFreq += subSumTotalTermFreq;
+        }
+      }
     }
     return new CollectionStatistics(field, reader.maxDoc(), docCount, sumTotalTermFreq, sumDocFreq);
   }
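The accumulation above has to respect the contract that getDocCount, getSumDocFreq, and getSumTotalTermFreq may return -1 for "statistic unavailable": once any leaf reports -1, the aggregate is unknowable and must stay -1. A hypothetical helper (not part of the commit) capturing the rule each of the three blocks applies:

    // -1 means "unavailable" and poisons the running total (hypothetical helper).
    private static long addStat(long total, long sub) {
      if (sub == -1 || total == -1) {
        return -1;
      }
      return total + sub;
    }
    // e.g. sumDocFreq = addStat(sumDocFreq, terms.getSumDocFreq());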

TestIndexSorting.java

@@ -137,7 +137,7 @@ public class TestIndexSorting extends LuceneTestCase {
       assertEquals(0, topDocs.totalHits);
     } else {
       assertEquals(1, topDocs.totalHits);
-      assertEquals(i, MultiDocValues.getNumericValues(reader, "id").get(topDocs.scoreDocs[0].doc));
+      assertEquals(i, getNumericDocValue(reader, "id", topDocs.scoreDocs[0].doc));
       Document document = reader.document(topDocs.scoreDocs[0].doc);
       assertEquals(Integer.toString(i), document.get("id"));
     }

@@ -148,6 +148,14 @@
     dir.close();
   }
 
+  private static long getNumericDocValue(IndexReader reader, String field, int docID) throws IOException {
+    // We can't use MultiDocValues because it gets angry about the sorting:
+    List<LeafReaderContext> leaves = reader.leaves();
+    int sub = ReaderUtil.subIndex(docID, leaves);
+    LeafReaderContext leaf = leaves.get(sub);
+    return leaf.reader().getNumericDocValues(field).get(docID - leaf.docBase);
+  }
+
   public void testSortOnMerge() throws IOException {
     testSortOnMerge(false);
   }

@@ -241,7 +249,7 @@
       assertEquals(0, topDocs.totalHits);
     } else {
       assertEquals(1, topDocs.totalHits);
-      assertEquals(values.get(i).longValue(), MultiDocValues.getNumericValues(reader, "foo").get(topDocs.scoreDocs[0].doc));
+      assertEquals(values.get(i).longValue(), getNumericDocValue(reader, "foo", topDocs.scoreDocs[0].doc));
     }
   }
   reader.close();

@@ -335,7 +343,7 @@
     for (int i = 0; i < numDocs; ++i) {
       final TopDocs topDocs = searcher.search(new TermQuery(new Term("id", Integer.toString(i))), 1);
       assertEquals(1, topDocs.totalHits);
-      assertEquals(values.get(i).longValue(), MultiDocValues.getNumericValues(reader, "foo").get(topDocs.scoreDocs[0].doc));
+      assertEquals(values.get(i).longValue(), getNumericDocValue(reader, "foo", topDocs.scoreDocs[0].doc));
     }
     reader.close();
     w.close();

@@ -380,8 +388,8 @@
       assertEquals(topDocs.totalHits, topDocs2.totalHits);
       if (topDocs.totalHits == 1) {
         assertEquals(
-            MultiDocValues.getNumericValues(reader, "foo").get(topDocs.scoreDocs[0].doc),
-            MultiDocValues.getNumericValues(reader2, "foo").get(topDocs2.scoreDocs[0].doc));
+            getNumericDocValue(reader, "foo", topDocs.scoreDocs[0].doc),
+            getNumericDocValue(reader2, "foo", topDocs2.scoreDocs[0].doc));
       }
     }

TestMultiDocValues.java

@@ -26,6 +26,8 @@ import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.document.SortedNumericDocValuesField;
 import org.apache.lucene.document.SortedSetDocValuesField;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;

@@ -412,4 +414,59 @@ public class TestMultiDocValues extends LuceneTestCase {
     ir2.close();
     dir.close();
   }
+
+  public void testNoIndexSort() throws Exception {
+    IndexWriterConfig iwc = newIndexWriterConfig();
+    iwc.setIndexSort(new Sort(new SortField("foo", SortField.Type.INT)));
+    Directory dir = newDirectory();
+    IndexWriter w = new IndexWriter(dir, iwc);
+    w.addDocument(new Document());
+    DirectoryReader.open(w).close();
+    w.addDocument(new Document());
+    // this makes a sorted segment:
+    w.forceMerge(1);
+    // this makes another segment, so that MultiDocValues isn't just a no-op:
+    w.addDocument(new Document());
+    IndexReader r = DirectoryReader.open(w);
+
+    String message = expectThrows(IllegalArgumentException.class, () -> {
+      MultiDocValues.getDocsWithField(r, "foo");
+    }).getMessage();
+    assertTrue(message.contains("cannot handle index sort"));
+    assertTrue(message.contains("indexSort=<int: \"foo\">"));
+
+    message = expectThrows(IllegalArgumentException.class, () -> {
+      MultiDocValues.getNumericValues(r, "foo");
+    }).getMessage();
+    assertTrue(message.contains("cannot handle index sort"));
+    assertTrue(message.contains("indexSort=<int: \"foo\">"));
+
+    message = expectThrows(IllegalArgumentException.class, () -> {
+      MultiDocValues.getBinaryValues(r, "foo");
+    }).getMessage();
+    assertTrue(message.contains("cannot handle index sort"));
+    assertTrue(message.contains("indexSort=<int: \"foo\">"));
+
+    message = expectThrows(IllegalArgumentException.class, () -> {
+      MultiDocValues.getSortedValues(r, "foo");
+    }).getMessage();
+    assertTrue(message.contains("cannot handle index sort"));
+    assertTrue(message.contains("indexSort=<int: \"foo\">"));
+
+    message = expectThrows(IllegalArgumentException.class, () -> {
+      MultiDocValues.getSortedSetValues(r, "foo");
+    }).getMessage();
+    assertTrue(message.contains("cannot handle index sort"));
+    assertTrue(message.contains("indexSort=<int: \"foo\">"));
+
+    message = expectThrows(IllegalArgumentException.class, () -> {
+      MultiDocValues.getSortedNumericValues(r, "foo");
+    }).getMessage();
+    assertTrue(message.contains("cannot handle index sort"));
+    assertTrue(message.contains("indexSort=<int: \"foo\">"));
+
+    r.close();
+    w.close();
+    dir.close();
+  }
 }

TestMultiFields.java

@@ -29,6 +29,8 @@ import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;

@@ -199,4 +201,28 @@ public class TestMultiFields extends LuceneTestCase {
     r.close();
     dir.close();
   }
+
+  public void testNoIndexSort() throws Exception {
+    IndexWriterConfig iwc = newIndexWriterConfig();
+    iwc.setIndexSort(new Sort(new SortField("foo", SortField.Type.INT)));
+    Directory dir = newDirectory();
+    IndexWriter w = new IndexWriter(dir, iwc);
+    w.addDocument(new Document());
+    DirectoryReader.open(w).close();
+    w.addDocument(new Document());
+    // this makes a sorted segment:
+    w.forceMerge(1);
+    // this makes another segment, so that MultiFields.getFields isn't just a no-op:
+    w.addDocument(new Document());
+    IndexReader r = DirectoryReader.open(w);
+    Exception e = expectThrows(IllegalArgumentException.class, () -> {
+      MultiFields.getFields(r);
+    });
+    assertTrue(e.getMessage().contains("cannot handle index sort"));
+    assertTrue(e.getMessage().contains("indexSort=<int: \"foo\">"));
+    r.close();
+    w.close();
+    dir.close();
+  }
 }

AnalyzingInfixSuggester.java

@@ -48,6 +48,7 @@ import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.FilterLeafReader;
 import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.LeafReader;

@@ -586,8 +587,8 @@ public class AnalyzingInfixSuggester extends Lookup implements Closeable {
     // We sorted postings by weight during indexing, so we
     // only retrieve the first num hits now:
     Collector c2 = new EarlyTerminatingSortingCollector(c, SORT, num);
-    IndexSearcher searcher = searcherMgr.acquire();
     List<LookupResult> results = null;
+    IndexSearcher searcher = searcherMgr.acquire();
     try {
       //System.out.println("got searcher=" + searcher);
       searcher.search(finalQuery, c2);

@@ -607,6 +608,19 @@ public class AnalyzingInfixSuggester extends Lookup implements Closeable {
     return results;
   }
 
+  private static BytesRef getBinaryDocValue(IndexReader reader, String field, int docID) throws IOException {
+    // We can't use MultiDocValues because it gets angry about the sorting:
+    List<LeafReaderContext> leaves = reader.leaves();
+    int sub = ReaderUtil.subIndex(docID, leaves);
+    LeafReaderContext leaf = leaves.get(sub);
+    BinaryDocValues bdv = leaf.reader().getBinaryDocValues(field);
+    if (bdv == null) {
+      return null;
+    } else {
+      return bdv.get(docID - leaf.docBase);
+    }
+  }
+
   /**
    * Create the results based on the search hits.
    * Can be overridden by subclass to add particular behavior (e.g. weight transformation).

@@ -621,24 +635,20 @@ public class AnalyzingInfixSuggester extends Lookup implements Closeable {
                                            boolean doHighlight, Set<String> matchedTokens, String prefixToken)
       throws IOException {
-    BinaryDocValues textDV = MultiDocValues.getBinaryValues(searcher.getIndexReader(), TEXT_FIELD_NAME);
     // This will just be null if app didn't pass payloads to build():
     // TODO: maybe just stored fields? they compress...
-    BinaryDocValues payloadsDV = MultiDocValues.getBinaryValues(searcher.getIndexReader(), "payloads");
     List<LeafReaderContext> leaves = searcher.getIndexReader().leaves();
     List<LookupResult> results = new ArrayList<>();
     for (int i=0;i<hits.scoreDocs.length;i++) {
       FieldDoc fd = (FieldDoc) hits.scoreDocs[i];
-      BytesRef term = textDV.get(fd.doc);
+      BytesRef term = getBinaryDocValue(searcher.getIndexReader(), TEXT_FIELD_NAME, fd.doc);
       String text = term.utf8ToString();
       long score = (Long) fd.fields[0];
-      BytesRef payload;
-      if (payloadsDV != null) {
-        payload = BytesRef.deepCopyOf(payloadsDV.get(fd.doc));
-      } else {
-        payload = null;
+      BytesRef payload = getBinaryDocValue(searcher.getIndexReader(), "payloads", fd.doc);
+      if (payload != null) {
+        payload = BytesRef.deepCopyOf(payload);
       }
 
       // Must look up sorted-set by segment:

AnalyzingInfixSuggesterTest.java

@@ -66,7 +66,7 @@ public class AnalyzingInfixSuggesterTest extends LuceneTestCase {
     assertEquals("a penny saved is a penny earned", results.get(0).key);
     assertEquals("a penny saved is a penny <b>ear</b>ned", results.get(0).highlightKey);
     assertEquals(10, results.get(0).value);
-    assertEquals(new BytesRef("foobaz"), results.get(0).payload);
+    assertEquals("foobaz", results.get(0).payload.utf8ToString());
     assertEquals("lend me your ear", results.get(1).key);
     assertEquals("lend me your <b>ear</b>", results.get(1).highlightKey);