mirror of https://github.com/apache/lucene.git
Reformatted. Do we have any style guides specific to Lucene that differ from the Jakarta standards?
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@150022 13f79535-47bb-0310-9956-ffa450edef68
commit 0b53524a11
parent 8b2bd68e84
Hits.java
@@ -61,133 +61,144 @@ import org.apache.lucene.document.Document;
The reformatted hunk (the change is formatting only: brace placement and spacing):

/** A ranked list of documents, used to hold search results. */
public final class Hits {
  private Query query;
  private Searcher searcher;
  private Filter filter = null;

  private int length;                    // the total number of hits
  private Vector hitDocs = new Vector(); // cache of hits retrieved

  private HitDoc first;        // head of LRU cache
  private HitDoc last;         // tail of LRU cache
  private int numDocs = 0;     // number cached
  private int maxDocs = 200;   // max to cache

  Hits(Searcher s, Query q, Filter f) throws IOException {
    query = q;
    searcher = s;
    filter = f;
    getMoreDocs(50); // retrieve 100 initially
  }

  // Tries to add new documents to hitDocs.
  // Ensures that the hit numbered <code>min</code> has been retrieved.
  private final void getMoreDocs(int min) throws IOException {
    if (hitDocs.size() > min) {
      min = hitDocs.size();
    }

    int n = min * 2; // double # retrieved
    TopDocs topDocs = searcher.search(query, filter, n);
    length = topDocs.totalHits;
    ScoreDoc[] scoreDocs = topDocs.scoreDocs;

    float scoreNorm = 1.0f;
    if (length > 0 && scoreDocs[0].score > 1.0f) {
      scoreNorm = 1.0f / scoreDocs[0].score;
    }

    int end = scoreDocs.length < length ? scoreDocs.length : length;
    for (int i = hitDocs.size(); i < end; i++) {
      hitDocs.addElement(new HitDoc(scoreDocs[i].score * scoreNorm,
                                    scoreDocs[i].doc));
    }
  }

  /** Returns the total number of hits available in this set. */
  public final int length() {
    return length;
  }

  /** Returns the nth document in this set.
    <p>Documents are cached, so that repeated requests for the same element may
    return the same Document object. */
  public final Document doc(int n) throws IOException {
    HitDoc hitDoc = hitDoc(n);

    // Update LRU cache of documents
    remove(hitDoc);               // remove from list, if there
    addToFront(hitDoc);           // add to front of list
    if (numDocs > maxDocs) {      // if cache is full
      HitDoc oldLast = last;
      remove(last);               // flush last
      oldLast.doc = null;         // let doc get gc'd
    }

    if (hitDoc.doc == null) {
      hitDoc.doc = searcher.doc(hitDoc.id); // cache miss: read document
    }

    return hitDoc.doc;
  }

  /** Returns the score for the nth document in this set. */
  public final float score(int n) throws IOException {
    return hitDoc(n).score;
  }

  /** Returns the id for the nth document in this set. */
  public final int id(int n) throws IOException {
    return hitDoc(n).id;
  }

  private final HitDoc hitDoc(int n) throws IOException {
    if (n >= length) {
      throw new IndexOutOfBoundsException("Not a valid hit number: " + n);
    }

    if (n >= hitDocs.size()) {
      getMoreDocs(n);
    }

    return (HitDoc) hitDocs.elementAt(n);
  }

  private final void addToFront(HitDoc hitDoc) { // insert at front of cache
    if (first == null) {
      last = hitDoc;
    } else {
      first.prev = hitDoc;
    }

    hitDoc.next = first;
    first = hitDoc;
    hitDoc.prev = null;

    numDocs++;
  }

  private final void remove(HitDoc hitDoc) { // remove from cache
    if (hitDoc.doc == null) { // it's not in the list
      return;                 // abort
    }

    if (hitDoc.next == null) {
      last = hitDoc.prev;
    } else {
      hitDoc.next.prev = hitDoc.prev;
    }

    if (hitDoc.prev == null) {
      first = hitDoc.next;
    } else {
      hitDoc.prev.next = hitDoc.next;
    }

    numDocs--;
  }
}

final class HitDoc {
  float score;
  int id;
  Document doc = null;

  HitDoc next; // in doubly-linked cache
  HitDoc prev; // in doubly-linked cache

  HitDoc(float s, int i) {
    score = s;
    id = i;
  }
}
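Since this commit only touches formatting, Hits still behaves as before: results are fetched in batches that double in size, and retrieved Documents are kept in a small LRU cache (up to maxDocs = 200). As a reminder of how the class is consumed, here is a minimal, hypothetical sketch against the API shown in this hunk; the index path "/tmp/index" and the field names are assumptions for illustration, not part of this commit.

import org.apache.lucene.document.Document;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Searcher;
import org.apache.lucene.search.TermQuery;

public class HitsUsage {
  public static void main(String[] args) throws Exception {
    Searcher searcher = new IndexSearcher("/tmp/index");          // assumed index location
    Query query = new TermQuery(new Term("contents", "lucene"));  // assumed field and term

    Hits hits = searcher.search(query);        // Searcher builds the Hits object shown above
    for (int i = 0; i < hits.length(); i++) {
      Document doc = hits.doc(i);              // fetched lazily, LRU-cached up to maxDocs
      System.out.println(hits.score(i) + "\t" + doc.get("path"));
    }
    searcher.close();
  }
}

Note that the scores printed here are already capped at 1.0 by the scoreNorm logic in getMoreDocs.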
QueryFilter.java
@@ -70,37 +70,37 @@ import org.apache.lucene.index.IndexReader;
The reformatted hunk (again, formatting only):

 * once per day.
 */
public class QueryFilter extends Filter {
  private Query query;
  private transient WeakHashMap cache = new WeakHashMap();

  /** Constructs a filter which only matches documents matching
   * <code>query</code>.
   */
  public QueryFilter(Query query) {
    this.query = query;
  }

  public BitSet bits(IndexReader reader) throws IOException {

    synchronized (cache) { // check cache
      BitSet cached = (BitSet) cache.get(reader);
      if (cached != null)
        return cached;
    }

    final BitSet bits = new BitSet(reader.maxDoc());

    new IndexSearcher(reader).search(query, new HitCollector() {
      public final void collect(int doc, float score) {
        bits.set(doc); // set bit for hit
      }
    });

    synchronized (cache) { // update cache
      cache.put(reader, bits);
    }

    return bits;
  }
}
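For completeness, a hedged sketch of how QueryFilter is meant to be used: the filter's BitSet is computed once per IndexReader by running the wrapped query, then served from the WeakHashMap cache on later searches against the same reader. The index path and field names below are assumptions for illustration only.

import org.apache.lucene.index.Term;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryFilter;
import org.apache.lucene.search.TermQuery;

public class QueryFilterUsage {
  public static void main(String[] args) throws Exception {
    IndexSearcher searcher = new IndexSearcher("/tmp/index");  // assumed index location

    // Restrict searches to documents matching category:news; the first search
    // computes the BitSet via bits(), later searches hit the per-reader cache.
    QueryFilter filter = new QueryFilter(
        new TermQuery(new Term("category", "news")));

    Query query = new TermQuery(new Term("contents", "lucene"));
    Hits hits = searcher.search(query, filter);

    System.out.println(hits.length() + " filtered hits");
    searcher.close();
  }
}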