use an array to represent the keys in the uid filter

Shay Banon 2012-06-13 16:03:45 +02:00
parent 6eb419649a
commit dfe6e58e37
6 changed files with 67 additions and 48 deletions

View File

@@ -23,6 +23,7 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermDocs;
 import org.apache.lucene.util.FixedBitSet;
+import org.elasticsearch.common.lucene.Lucene;
 
 import java.io.IOException;
 import java.util.Iterator;
@@ -77,8 +78,8 @@ public class PublicTermsFilter extends Filter {
         TermDocs td = reader.termDocs();
         try {
             // batch read, in Lucene 4.0 its no longer needed
-            int[] docs = new int[32];
-            int[] freqs = new int[32];
+            int[] docs = new int[Lucene.BATCH_ENUM_DOCS];
+            int[] freqs = new int[Lucene.BATCH_ENUM_DOCS];
             for (Term term : terms) {
                 td.seek(term);
                 int number = td.read(docs, freqs);
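
The batch read above follows the Lucene 3.x TermDocs contract: seek to a term, then repeatedly fill the docs/freqs arrays until read() returns 0. The commit only replaces the hard-coded 32 with the shared Lucene.BATCH_ENUM_DOCS constant; the surrounding loop is unchanged. A minimal sketch of that full loop, with an illustrative class and helper name that are not part of the commit:

import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.common.lucene.Lucene;

import java.io.IOException;

// Sketch only: the batch-read pattern shared by the filters touched in this commit.
class BatchReadSketch {

    // Marks every document containing `term` in `result`, reading postings in
    // chunks of Lucene.BATCH_ENUM_DOCS (the constant introduced by this commit).
    static void markDocs(TermDocs td, Term term, FixedBitSet result) throws IOException {
        td.seek(term);
        int[] docs = new int[Lucene.BATCH_ENUM_DOCS];
        int[] freqs = new int[Lucene.BATCH_ENUM_DOCS];
        // read() fills both arrays and returns how many entries are valid;
        // 0 means the posting list for this term is exhausted.
        int number = td.read(docs, freqs);
        while (number > 0) {
            for (int i = 0; i < number; i++) {
                result.set(docs[i]);
            }
            number = td.read(docs, freqs);
        }
    }
}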

View File

@@ -23,6 +23,7 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermDocs;
 import org.apache.lucene.util.FixedBitSet;
+import org.elasticsearch.common.lucene.Lucene;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -70,8 +71,8 @@ public class XTermsFilter extends Filter {
         TermDocs td = reader.termDocs();
         try {
             // batch read, in Lucene 4.0 its no longer needed
-            int[] docs = new int[32];
-            int[] freqs = new int[32];
+            int[] docs = new int[Lucene.BATCH_ENUM_DOCS];
+            int[] freqs = new int[Lucene.BATCH_ENUM_DOCS];
             for (Term term : terms) {
                 td.seek(term);
                 int number = td.read(docs, freqs);

View File

@@ -51,6 +51,8 @@ public class Lucene {
     public static ScoreDoc[] EMPTY_SCORE_DOCS = new ScoreDoc[0];
 
+    public static final int BATCH_ENUM_DOCS = 32;
+
     public static Version parseVersion(@Nullable String version, Version defaultVersion, ESLogger logger) {
         if (version == null) {
             return defaultVersion;

View File

@@ -25,6 +25,7 @@ import org.apache.lucene.index.TermDocs;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.util.FixedBitSet;
+import org.elasticsearch.common.lucene.Lucene;
 
 import java.io.IOException;
@@ -50,8 +51,8 @@ public class TermFilter extends Filter {
         try {
             td.seek(term);
             // batch read, in Lucene 4.0 its no longer needed
-            int[] docs = new int[32];
-            int[] freqs = new int[32];
+            int[] docs = new int[Lucene.BATCH_ENUM_DOCS];
+            int[] freqs = new int[Lucene.BATCH_ENUM_DOCS];
             int number = td.read(docs, freqs);
             if (number > 0) {
                 result = new FixedBitSet(reader.maxDoc());

View File

@@ -33,22 +33,28 @@ import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
 
 public class UidFilter extends Filter {
 
-    private final Set<Term> uids;
+    private final Term[] uids;
 
     private final BloomCache bloomCache;
 
     public UidFilter(Collection<String> types, List<String> ids, BloomCache bloomCache) {
         this.bloomCache = bloomCache;
-        this.uids = new TreeSet<Term>();
+        this.uids = new Term[types.size() * ids.size()];
+        int i = 0;
         for (String type : types) {
             for (String id : ids) {
-                uids.add(UidFieldMapper.TERM_FACTORY.createTerm(Uid.createUid(type, id)));
+                uids[i++] = UidFieldMapper.TERM_FACTORY.createTerm(Uid.createUid(type, id));
             }
         }
+        if (this.uids.length > 1) {
+            Arrays.sort(this.uids);
+        }
     }
 
     // TODO Optimizations
@@ -69,6 +75,7 @@ public class UidFilter extends Filter {
                     td = reader.termDocs();
                 }
                 td.seek(uid);
+                // no need for batching, its on the UID, there will be only one doc
                 while (td.next()) {
                     if (set == null) {
                         set = new FixedBitSet(reader.maxDoc());
@@ -94,7 +101,14 @@
     @Override
     public String toString() {
-        return "UidFilter(" + uids + ")";
+        StringBuilder builder = new StringBuilder();
+        for (Term term : uids) {
+            if (builder.length() > 0) {
+                builder.append(' ');
+            }
+            builder.append(term);
+        }
+        return builder.toString();
     }
 
     @Override
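
To make the UidFilter change concrete: the filter now holds one _uid term per type/id combination in a plain array, sorted to preserve the ordered iteration the TreeSet used to provide for the TermDocs seeks above. A rough sketch of what the constructor produces (plain Term construction stands in for UidFieldMapper.TERM_FACTORY; the class and method names are illustrative, not part of the commit):

import org.apache.lucene.index.Term;

import java.util.Arrays;

// Illustrative only: mirrors what the new UidFilter constructor builds.
class UidTermsSketch {

    static Term[] buildSortedUidTerms(String[] types, String[] ids) {
        Term[] uids = new Term[types.length * ids.length];
        int i = 0;
        for (String type : types) {
            for (String id : ids) {
                // a _uid value is "type#id", e.g. "type1#1"
                uids[i++] = new Term("_uid", type + "#" + id);
            }
        }
        if (uids.length > 1) {
            Arrays.sort(uids); // Term is Comparable: by field, then by text
        }
        return uids;
    }

    public static void main(String[] args) {
        // Prints: _uid:type1#1 _uid:type1#2
        for (Term term : buildSortedUidTerms(new String[]{"type1"}, new String[]{"1", "2"})) {
            System.out.print(term + " ");
        }
    }
}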

View File

@@ -116,10 +116,10 @@ public class SimpleValidateQueryTests extends AbstractNodesTests {
         assertThat(response.queryExplanations().get(0).error(), containsString("Failed to parse"));
         assertThat(response.queryExplanations().get(0).explanation(), nullValue());
 
-        assertExplanation(QueryBuilders.queryString("_id:1"), equalTo("ConstantScore(UidFilter([_uid:type1#1]))"));
+        assertExplanation(QueryBuilders.queryString("_id:1"), equalTo("ConstantScore(_uid:type1#1)"));
 
         assertExplanation(QueryBuilders.idsQuery("type1").addIds("1").addIds("2"),
-                equalTo("ConstantScore(UidFilter([_uid:type1#1, _uid:type1#2]))"));
+                equalTo("ConstantScore(_uid:type1#1 _uid:type1#2)"));
 
         assertExplanation(QueryBuilders.queryString("foo"), equalTo("_all:foo"));
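
The updated assertions simply reflect the new UidFilter.toString() above: instead of "UidFilter([...])", the filter now renders its sorted terms separated by spaces, and the query explanation wraps that in ConstantScore(...). A tiny sketch reproducing the joined string (names illustrative, not part of the commit):

import org.apache.lucene.index.Term;

// Illustrative only: reproduces the space-separated term string asserted above.
class UidFilterToStringSketch {
    public static void main(String[] args) {
        Term[] uids = {new Term("_uid", "type1#1"), new Term("_uid", "type1#2")};
        StringBuilder builder = new StringBuilder();
        for (Term term : uids) {
            if (builder.length() > 0) {
                builder.append(' ');
            }
            builder.append(term);
        }
        // Prints: ConstantScore(_uid:type1#1 _uid:type1#2)
        System.out.println("ConstantScore(" + builder + ")");
    }
}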