mirror of https://github.com/apache/lucene.git
SOLR-14258: DocList should not extend DocSet
parent 1770797387
commit 25892271e8
solr/CHANGES.txt
@@ -35,6 +35,8 @@ Other Changes
 * SOLR-14272: Remove autoReplicaFailoverBadNodeExpiration and autoReplicaFailoverWorkLoopDelay for 9.0 as it was
   deprecated in 7.1 (Anshum Gupta)
 
+* SOLR-14258: DocList no longer extends DocSet. (David Smiley)
+
 ================== 8.5.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
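Practical implication of the CHANGES entry above: code that used to hand a DocList to APIs expecting a DocSet now has to collect the ids itself, since membership checks are no longer inherited from DocSet. A minimal sketch under that assumption; the helper class and method names are hypothetical, and IntHashSet is the HPPC class this same commit uses in TestRangeQuery further below:

    import com.carrotsearch.hppc.IntHashSet;
    import org.apache.solr.search.DocIterator;
    import org.apache.solr.search.DocList;

    class DocListIds {
      // Gather a DocList's ids into a hash set so membership checks
      // (formerly available via DocSet.exists) stay cheap.
      static IntHashSet toIdSet(DocList docs) {
        IntHashSet ids = new IntHashSet(docs.size());
        DocIterator it = docs.iterator();
        while (it.hasNext()) {
          ids.add(it.nextDoc());
        }
        return ids;
      }
    }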
DocList.java
@@ -25,7 +25,7 @@ package org.apache.solr.search;
  *
  * @since solr 0.9
  */
-public interface DocList extends DocSet {
+public interface DocList {
 
   /**
    * Returns the zero based offset of this list within the total ordered list of matches to the query.
@@ -35,7 +35,6 @@ public interface DocList extends DocSet {
   /**
    * Returns the number of ids in this list.
    */
-  @Override
   public int size();
 
   /**
@@ -76,7 +75,6 @@ public interface DocList extends DocSet {
    * </p>
    * @see #hasScores
    */
-  @Override
   public DocIterator iterator();
 
   /** True if scores were retained */
@@ -87,55 +85,3 @@ public interface DocList extends DocSet {
    */
   public float maxScore();
 }
-
-
-/**** Maybe do this at a higher level (more efficient)
-
-class SmartDocSet implements DocSet {
-  static int INITIAL_SIZE=10;
-  static int TRANSITION_SIZE=10;
-
-  protected BitSet bits;
-  int size;
-
-  protected int[] arr;  // keep small set as an array, or as a hash?
-  protected int arrsize;
-
-  public SmartDocSet() {
-    if (INITIAL_SIZE>0) {
-      arr=new int[INITIAL_SIZE];
-    } else {
-      bits=new BitSet();
-    }
-  }
-
-
-  public void addUnique(int doc) {
-    size++;
-    if (bits != null) {
-      bits.set(doc);
-    }
-    else {
-      if (arrsize<10) {
-        arr[arrsize++]=doc;
-      } else {
-        // TODO: transition to bit set
-      }
-    }
-  };
-
-  public int size() {
-    return size;
-  }
-  public boolean exists(int docid) {
-    return false;
-  }
-  public DocSet intersection(DocSet other) {
-    return null;
-
-  }
-  public DocSet union(DocSet other) {
-    return null;
-  }
-}
-***/
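The interface now stands on its own: the @Override annotations disappear because there is no super-interface anymore, but size(), iterator(), hasScores() and maxScore() are still declared on DocList, so consuming a result list is unchanged. A small sketch of walking a DocList, assuming scores were requested so that hasScores() is true and DocIterator.score() is meaningful:

    import org.apache.solr.search.DocIterator;
    import org.apache.solr.search.DocList;

    class DocListWalker {
      // Walk the list in rank order, reading the score only if it was retained.
      static void printDocs(DocList results) {
        boolean scored = results.hasScores();
        DocIterator it = results.iterator();
        while (it.hasNext()) {
          int docId = it.nextDoc();  // internal Lucene doc id
          if (scored) {
            System.out.println(docId + " score=" + it.score());
          } else {
            System.out.println(docId);
          }
        }
      }
    }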
DocSlice.java
@@ -28,7 +28,7 @@ import org.apache.lucene.util.RamUsageEstimator;
  *
  * @since solr 0.9
  */
-public class DocSlice extends DocSetBase implements DocList {
+public class DocSlice implements DocList, Accountable {
   private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DocSlice.class) + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER;
 
   final int offset;  // starting position of the docs (zero based)
@@ -92,15 +92,6 @@ public class DocSlice extends DocSetBase implements DocList {
   public long matches() { return matches; }
 
 
-  @Override
-  public boolean exists(int doc) {
-    int end = offset+len;
-    for (int i=offset; i<end; i++) {
-      if (docs[i]==doc) return true;
-    }
-    return false;
-  }
-
   // Hmmm, maybe I could have reused the scorer interface here...
   // except that it carries Similarity baggage...
   @Override
@@ -138,39 +129,6 @@ public class DocSlice extends DocSetBase implements DocList {
     };
   }
 
-
-  @Override
-  public DocSet intersection(DocSet other) {
-    if (other instanceof SortedIntDocSet || other instanceof HashDocSet) {
-      return other.intersection(this);
-    }
-    HashDocSet h = new HashDocSet(docs,offset,len);
-    return h.intersection(other);
-  }
-
-  @Override
-  public int intersectionSize(DocSet other) {
-    if (other instanceof SortedIntDocSet || other instanceof HashDocSet) {
-      return other.intersectionSize(this);
-    }
-    HashDocSet h = new HashDocSet(docs,offset,len);
-    return h.intersectionSize(other);
-  }
-
-  @Override
-  public boolean intersects(DocSet other) {
-    if (other instanceof SortedIntDocSet || other instanceof HashDocSet) {
-      return other.intersects(this);
-    }
-    HashDocSet h = new HashDocSet(docs,offset,len);
-    return h.intersects(other);
-  }
-
-  @Override
-  public DocSlice clone() {
-    return (DocSlice) super.clone();
-  }
-
   /** WARNING: this can over-estimate real memory use since backing arrays are shared with other DocSlice instances */
   @Override
   public long ramBytesUsed() {
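With DocSetBase gone, DocSlice also loses exists() and the intersection helpers removed above. Callers that still need an occasional membership test against a DocList can reproduce the removed linear scan through the public iterator; a minimal sketch with a hypothetical helper name (O(n) per lookup, so for repeated checks prefer collecting the ids once, as in the IntHashSet sketch earlier):

    import org.apache.solr.search.DocIterator;
    import org.apache.solr.search.DocList;

    class DocListContains {
      // Equivalent of the removed DocSlice.exists(int): scan the list for the id.
      static boolean contains(DocList docs, int docId) {
        DocIterator it = docs.iterator();
        while (it.hasNext()) {
          if (it.nextDoc() == docId) {
            return true;
          }
        }
        return false;
      }
    }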
TestDocSet.java
@@ -111,7 +111,7 @@ public class TestDocSet extends SolrTestCase {
     return new BitDocSet(bs);
   }
 
-  public DocSet getDocSlice(FixedBitSet bs) {
+  public DocSlice getDocSlice(FixedBitSet bs) {
     int len = bs.cardinality();
     int[] arr = new int[len+5];
     arr[0]=10; arr[1]=20; arr[2]=30; arr[arr.length-1]=1; arr[arr.length-2]=2;
@@ -129,7 +129,7 @@ public class TestDocSet extends SolrTestCase {
 
 
   public DocSet getDocSet(FixedBitSet bs) {
-    switch(rand.nextInt(10)) {
+    switch(rand.nextInt(9)) {
       case 0: return getHashDocSet(bs);
 
       case 1: return getBitDocSet(bs);
@@ -141,8 +141,6 @@ public class TestDocSet extends SolrTestCase {
       case 6: return getIntDocSet(bs);
       case 7: return getIntDocSet(bs);
       case 8: return getIntDocSet(bs);
-
-      case 9: return getDocSlice(bs);
     }
     return null;
   }
TestRangeQuery.java
@@ -25,6 +25,8 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
+
+import com.carrotsearch.hppc.IntHashSet;
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrInputDocument;
@@ -684,14 +686,19 @@ public class TestRangeQuery extends SolrTestCaseJ4 {
     return randomInt(cardinality);
   }
 
-  static boolean sameDocs(String msg, DocSet a, DocSet b) {
-    DocIterator i = a.iterator();
-    // System.out.println("SIZES="+a.size() + "," + b.size());
+  static boolean sameDocs(String msg, DocList a, DocList b) {
     assertEquals(msg, a.size(), b.size());
-    while (i.hasNext()) {
-      int doc = i.nextDoc();
-      assertTrue(msg, b.exists(doc));
-      // System.out.println("MATCH! " + doc);
+    IntHashSet bIds = new IntHashSet(b.size());
+    DocIterator bIter = b.iterator();
+    while (bIter.hasNext()) {
+      bIds.add(bIter.nextDoc());
+    }
+
+    DocIterator aIter = a.iterator();
+    while (aIter.hasNext()) {
+      int doc = aIter.nextDoc();
+      assertTrue(msg, bIds.contains(doc));
     }
     return true;
   }
 
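The rewritten sameDocs above compares the two DocLists purely as id sets, loading b's ids into an IntHashSet once instead of calling the now-removed b.exists(doc). Since DocList is documented as an ordered list of matches, a test that also cares about rank order could instead advance both iterators in lock step; a minimal sketch, not part of this commit and with hypothetical names:

    import org.apache.solr.search.DocIterator;
    import org.apache.solr.search.DocList;

    class DocListOrder {
      // Order-sensitive comparison: same ids in the same sequence.
      static boolean sameOrder(DocList a, DocList b) {
        if (a.size() != b.size()) return false;
        DocIterator aIter = a.iterator();
        DocIterator bIter = b.iterator();
        while (aIter.hasNext() && bIter.hasNext()) {
          if (aIter.nextDoc() != bIter.nextDoc()) return false;
        }
        return true;
      }
    }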