mirror of https://github.com/apache/lucene.git
Merge remote-tracking branch 'origin/master'
commit 53129ba670
@@ -491,7 +491,44 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close
         docsWithField = getLiveBits(entry.missingOffset, maxDoc);
       }
     }
-    return new LegacyNumericDocValuesWrapper(docsWithField, getNumeric(entry));
+    final LongValues values = getNumeric(entry);
+    return new NumericDocValues() {
+
+      int doc = -1;
+      long value;
+
+      @Override
+      public long longValue() throws IOException {
+        return value;
+      }
+
+      @Override
+      public int docID() {
+        return doc;
+      }
+
+      @Override
+      public int nextDoc() throws IOException {
+        return advance(doc + 1);
+      }
+
+      @Override
+      public int advance(int target) throws IOException {
+        for (int doc = target; doc < maxDoc; ++doc) {
+          value = values.get(doc);
+          if (value != 0 || docsWithField.get(doc)) {
+            return this.doc = doc;
+          }
+        }
+        return doc = NO_MORE_DOCS;
+      }
+
+      @Override
+      public long cost() {
+        return maxDoc;
+      }
+
+    };
   }
 
   @Override
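
This hunk replaces the random-access LegacyNumericDocValuesWrapper with an iterator-style NumericDocValues built directly over the codec's LongValues. For context, here is a minimal consumer sketch of the iterator API; the helper class, reader, and field names are assumptions made for illustration, not part of this commit:

import java.io.IOException;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.search.DocIdSetIterator;

final class IterateNumericDocValues {
  // Sum every value of a numeric doc-values field in one segment.
  static long sum(LeafReader reader, String field) throws IOException {
    NumericDocValues values = reader.getNumericDocValues(field);
    if (values == null) {
      return 0; // field has no numeric doc values in this segment
    }
    long total = 0;
    // Docs are visited in increasing doc ID order; docs without a value are skipped.
    for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) {
      total += values.longValue();
    }
    return total;
  }
}
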
@@ -42,7 +42,6 @@ import org.apache.lucene.util.FixedBitSet;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.IntsRef;
 import org.apache.lucene.util.IntsRefBuilder;
-import org.apache.lucene.util.LongValues;
 import org.apache.lucene.util.PagedBytes;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.lucene.util.fst.BytesRefFSTEnum.InputOutput;
@@ -371,7 +370,12 @@ class MemoryDocValuesProducer extends DocValuesProducer {
         ramBytesUsed.addAndGet(reader.ramBytesUsed());
         numericInfo.put(field.name, Accountables.namedAccountable("block compressed", reader));
       }
-      return reader;
+      return new LegacyNumericDocValues() {
+        @Override
+        public long get(int docID) {
+          return reader.get(docID);
+        }
+      };
     case GCD_COMPRESSED:
       final long min = data.readLong();
       final long mult = data.readLong();
@@ -568,51 +572,26 @@ class MemoryDocValuesProducer extends DocValuesProducer {
         }
         addr = res;
       }
-      if (values instanceof LongValues) {
-        // probably not the greatest codec choice for this situation, but we support it
-        final LongValues longValues = (LongValues) values;
-        return new LegacySortedNumericDocValuesWrapper(new LegacySortedNumericDocValues() {
-          long startOffset;
-          long endOffset;
-
-          @Override
-          public void setDocument(int doc) {
-            startOffset = (int) addr.get(doc);
-            endOffset = (int) addr.get(doc+1L);
-          }
-
-          @Override
-          public long valueAt(int index) {
-            return longValues.get(startOffset + index);
-          }
-
-          @Override
-          public int count() {
-            return (int) (endOffset - startOffset);
-          }
-        }, maxDoc);
-      } else {
-        return new LegacySortedNumericDocValuesWrapper(new LegacySortedNumericDocValues() {
-          int startOffset;
-          int endOffset;
-
-          @Override
-          public void setDocument(int doc) {
-            startOffset = (int) addr.get(doc);
-            endOffset = (int) addr.get(doc+1);
-          }
-
-          @Override
-          public long valueAt(int index) {
-            return values.get(startOffset + index);
-          }
-
-          @Override
-          public int count() {
-            return (endOffset - startOffset);
-          }
-        }, maxDoc);
-      }
+      return new LegacySortedNumericDocValuesWrapper(new LegacySortedNumericDocValues() {
+        int startOffset;
+        int endOffset;
+
+        @Override
+        public void setDocument(int doc) {
+          startOffset = (int) addr.get(doc);
+          endOffset = (int) addr.get(doc+1);
+        }
+
+        @Override
+        public long valueAt(int index) {
+          return values.get(startOffset + index);
+        }
+
+        @Override
+        public int count() {
+          return (endOffset - startOffset);
+        }
+      }, maxDoc);
     }
   }
 
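
Both before and after this change, MemoryDocValuesProducer exposes multi-valued numerics through LegacySortedNumericDocValuesWrapper, which adapts the legacy setDocument/count/valueAt API to the iterator-based SortedNumericDocValues. A rough sketch of how such values are read through the iterator API; the reader and field names are illustrative assumptions, not part of this commit:

import java.io.IOException;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.search.DocIdSetIterator;

final class IterateSortedNumericDocValues {
  // Print every value of a multi-valued numeric field, document by document.
  static void dump(LeafReader reader, String field) throws IOException {
    SortedNumericDocValues values = reader.getSortedNumericDocValues(field);
    if (values == null) {
      return; // field is absent in this segment
    }
    for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) {
      int count = values.docValueCount(); // number of values for the current document
      for (int i = 0; i < count; i++) {
        System.out.println(doc + " -> " + values.nextValue());
      }
    }
  }
}
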
@@ -16,21 +16,9 @@
  */
 package org.apache.lucene.util;
 
-
-import org.apache.lucene.index.LegacyNumericDocValues;
-import org.apache.lucene.index.NumericDocValues;
-import org.apache.lucene.util.packed.PackedInts;
-
 /** Abstraction over an array of longs.
- * This class extends NumericDocValues so that we don't need to add another
- * level of abstraction every time we want eg. to use the {@link PackedInts}
- * utility classes to represent a {@link LegacyNumericDocValues} instance.
- * @lucene.internal
- *
- * @deprecated Switch to {@link NumericDocValues} instead. */
-@Deprecated
-// TODO: cutover to iterator once codecs have all cutover?
-public abstract class LongValues extends LegacyNumericDocValues {
+ * @lucene.internal */
+public abstract class LongValues {
 
   /** An instance that returns the provided value. */
   public static final LongValues IDENTITY = new LongValues() {

@@ -45,9 +33,4 @@ public abstract class LongValues extends LegacyNumericDocValues {
   /** Get value at <code>index</code>. */
   public abstract long get(long index);
 
-  @Override
-  public long get(int idx) {
-    return get((long) idx);
-  }
-
 }
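
With the bridge to LegacyNumericDocValues removed, LongValues is now a plain random-access abstraction with a single abstract get(long). A small sketch, under that assumption, of wrapping an array and of adapting a LongValues back to the deprecated API the way the MemoryDocValuesProducer hunk above does; the class and method names are made up for the example:

import org.apache.lucene.index.LegacyNumericDocValues;
import org.apache.lucene.util.LongValues;

final class LongValuesExample {

  // Expose a plain long[] through the LongValues abstraction.
  static LongValues wrap(long[] values) {
    return new LongValues() {
      @Override
      public long get(long index) {
        return values[(int) index];
      }
    };
  }

  // Code that still implements the deprecated API can adapt a LongValues
  // explicitly, mirroring what the diff above does with its block-packed reader.
  static LegacyNumericDocValues asLegacy(LongValues values) {
    return new LegacyNumericDocValues() {
      @Override
      public long get(int docID) {
        return values.get(docID);
      }
    };
  }
}
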
@@ -19,12 +19,12 @@ package org.apache.lucene.util.packed;
 
 import java.util.Random;
 
-import org.apache.lucene.index.LegacyNumericDocValues;
 import org.apache.lucene.store.ByteArrayDataInput;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.util.LongValues;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.packed.DirectReader;

@@ -46,7 +46,7 @@ public class TestDirectPacked extends LuceneTestCase {
     writer.finish();
     output.close();
     IndexInput input = dir.openInput("foo", IOContext.DEFAULT);
-    LegacyNumericDocValues reader = DirectReader.getInstance(input.randomAccessSlice(0, input.length()), bitsPerValue, 0);
+    LongValues reader = DirectReader.getInstance(input.randomAccessSlice(0, input.length()), bitsPerValue, 0);
     assertEquals(1, reader.get(0));
     assertEquals(0, reader.get(1));
     assertEquals(2, reader.get(2));

@@ -110,7 +110,7 @@ public class TestDirectPacked extends LuceneTestCase {
     writer.finish();
     output.close();
     IndexInput input = directory.openInput(name, IOContext.DEFAULT);
-    LegacyNumericDocValues reader = DirectReader.getInstance(input.randomAccessSlice(0, input.length()), bitsRequired, offset);
+    LongValues reader = DirectReader.getInstance(input.randomAccessSlice(0, input.length()), bitsRequired, offset);
     for (int j = 0; j < original.length; j++) {
       assertEquals("bpv=" + bpv, original[j], reader.get(j));
     }
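
DirectReader.getInstance now hands back a LongValues rather than a LegacyNumericDocValues, as the two test hunks above show. A hedged round-trip sketch with DirectWriter and DirectReader under that assumption; the directory type, file name, and sample values are invented for the example:

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LongValues;
import org.apache.lucene.util.packed.DirectReader;
import org.apache.lucene.util.packed.DirectWriter;

public class DirectPackedRoundTrip {
  public static void main(String[] args) throws Exception {
    long[] original = {1, 0, 2, 17, 5};
    // bitsRequired rounds up to a bit width that DirectWriter supports.
    int bitsPerValue = DirectWriter.bitsRequired(17);

    try (Directory dir = new RAMDirectory()) {
      // Write the values with DirectWriter.
      try (IndexOutput output = dir.createOutput("foo", IOContext.DEFAULT)) {
        DirectWriter writer = DirectWriter.getInstance(output, original.length, bitsPerValue);
        for (long v : original) {
          writer.add(v);
        }
        writer.finish();
      }

      // Read them back through the LongValues returned by DirectReader.
      try (IndexInput input = dir.openInput("foo", IOContext.DEFAULT)) {
        LongValues reader = DirectReader.getInstance(input.randomAccessSlice(0, input.length()), bitsPerValue, 0);
        for (int i = 0; i < original.length; i++) {
          assert reader.get(i) == original[i];
        }
      }
    }
  }
}
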
@@ -203,7 +203,7 @@ public class ToParentBlockJoinQuery extends Query {
     public abstract int[] swapChildDocs(int[] other);
   }
 
-  static class BlockJoinScorer extends ChildrenMatchesScorer{
+  static class BlockJoinScorer extends ChildrenMatchesScorer {
     private final Scorer childScorer;
     private final BitSet parentBits;
     private final ScoreMode scoreMode;
@@ -187,7 +187,73 @@ public class TestBlockJoin extends LuceneTestCase {
     dir.close();
   }
 
+  // You must use ToParentBlockJoinSearcher if you want to do BQ SHOULD queries:
+  public void testBQShouldJoinedChild() throws Exception {
+    final Directory dir = newDirectory();
+    final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+
+    final List<Document> docs = new ArrayList<>();
+
+    docs.add(makeJob("java", 2007));
+    docs.add(makeJob("python", 2010));
+    docs.add(makeResume("Lisa", "United Kingdom"));
+    w.addDocuments(docs);
+
+    docs.clear();
+    docs.add(makeJob("ruby", 2005));
+    docs.add(makeJob("java", 2006));
+    docs.add(makeResume("Frank", "United States"));
+    w.addDocuments(docs);
+
+    IndexReader r = w.getReader();
+    w.close();
+    IndexSearcher s = new ToParentBlockJoinIndexSearcher(r);
+    //IndexSearcher s = newSearcher(r, false);
+    //IndexSearcher s = new IndexSearcher(r);
+
+    // Create a filter that defines "parent" documents in the index - in this case resumes
+    BitSetProducer parentsFilter = new QueryBitSetProducer(new TermQuery(new Term("docType", "resume")));
+    CheckJoinIndex.check(r, parentsFilter);
+
+    // Define child document criteria (finds an example of relevant work experience)
+    BooleanQuery.Builder childQuery = new BooleanQuery.Builder();
+    childQuery.add(new BooleanClause(new TermQuery(new Term("skill", "java")), Occur.MUST));
+    childQuery.add(new BooleanClause(IntPoint.newRangeQuery("year", 2006, 2011), Occur.MUST));
+
+    // Define parent document criteria (find a resident in the UK)
+    Query parentQuery = new TermQuery(new Term("country", "United Kingdom"));
+
+    // Wrap the child document query to 'join' any matches
+    // up to corresponding parent:
+    ToParentBlockJoinQuery childJoinQuery = new ToParentBlockJoinQuery(childQuery.build(), parentsFilter, ScoreMode.Avg);
+
+    // Combine the parent and nested child queries into a single query for a candidate
+    BooleanQuery.Builder fullQuery = new BooleanQuery.Builder();
+    fullQuery.add(new BooleanClause(parentQuery, Occur.SHOULD));
+    fullQuery.add(new BooleanClause(childJoinQuery, Occur.SHOULD));
+
+    ToParentBlockJoinCollector c = new ToParentBlockJoinCollector(Sort.RELEVANCE, 1, true, true);
+    s.search(fullQuery.build(), c);
+    TopGroups<Integer> results = c.getTopGroups(childJoinQuery, null, 0, 10, 0, true);
+    assertEquals(1, results.totalGroupedHitCount);
+    assertEquals(1, results.groups.length);
+
+    final GroupDocs<Integer> group = results.groups[0];
+    assertEquals(1, group.totalHits);
+    assertFalse(Float.isNaN(group.score));
+
+    Document childDoc = s.doc(group.scoreDocs[0].doc);
+    //System.out.println(" doc=" + group.scoreDocs[0].doc);
+    assertEquals("java", childDoc.get("skill"));
+    assertNotNull(group.groupValue);
+    Document parentDoc = s.doc(group.groupValue);
+    assertEquals("Lisa", parentDoc.get("name"));
+
+
+    r.close();
+    dir.close();
+  }
+
   public void testSimple() throws Exception {
 
     final Directory dir = newDirectory();