Remove use of deprecated Lucene methods

git-svn-id: https://svn.apache.org/repos/asf/incubator/solr/trunk@385619 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Yonik Seeley 2006-03-13 19:02:21 +00:00
parent 286b985709
commit 3cf1cfb107
10 changed files with 24 additions and 35 deletions

View File

@@ -1,2 +1,3 @@
Solr Change Log
hi

View File

@@ -146,7 +146,10 @@ public abstract class FieldType extends FieldProperties {
   public Field createField(SchemaField field, String externalVal, float boost) {
     String val = toInternal(externalVal);
     if (val==null) return null;
-    Field f = new Field(field.getName(), val, field.stored(), field.indexed(), isTokenized());
+    Field f = new Field(field.getName(),val,
+                        field.stored() ? Field.Store.YES : Field.Store.NO,
+                        field.indexed() ? (isTokenized() ? Field.Index.TOKENIZED : Field.Index.UN_TOKENIZED)
+                                        : Field.Index.NO);
     f.setOmitNorms(field.omitNorms());
     f.setBoost(boost);
     return f;
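
For background, the five-argument Field constructor taking (stored, indexed, tokenized) booleans was deprecated in Lucene 1.9 in favor of the Field.Store and Field.Index enums used above. A minimal before/after sketch (the field name and value are placeholders):

import org.apache.lucene.document.Field;

// Deprecated form: new Field(name, value, stored, indexed, tokenized)
Field before = new Field("title", "some value", true, true, true);

// Replacement: the same settings expressed with the enums
Field after = new Field("title", "some value",
                        Field.Store.YES,         // stored == true
                        Field.Index.TOKENIZED);  // indexed && tokenized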

View File

@@ -76,14 +76,15 @@ if (c.query instanceof TermQuery) {
       System.out.println("docFreq="+searcher.docFreq(((TermQuery)c.query).getTerm()));
     }
     ***/
-    if (c.required  // required
-        && c.query.getBoost() == 0.0f  // boost is zero
-        && c.query instanceof TermQuery  // TermQuery
-        && (searcher.docFreq(((TermQuery)c.query).getTerm())
+    Query q = c.getQuery();
+    if (c.isRequired()  // required
+        && q.getBoost() == 0.0f  // boost is zero
+        && q instanceof TermQuery  // TermQuery
+        && (searcher.docFreq(((TermQuery)q).getTerm())
            / (float)searcher.maxDoc()) >= threshold) {  // check threshold
       if (filterQuery == null)
         filterQuery = new BooleanQuery();
-      filterQuery.add(c.query, true, false);  // filter it
+      filterQuery.add(q, BooleanClause.Occur.MUST);  // filter it
       //System.out.println("WooHoo... qualified to be hoisted to a filter!");
     } else {
       query.add(c);  // query it
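
The two-boolean BooleanQuery.add(query, required, prohibited) call and the public required/prohibited/query fields on BooleanClause were likewise deprecated in Lucene 1.9; the replacements are the BooleanClause.Occur enum and accessor methods. A rough mapping sketch, using a placeholder term query:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

BooleanQuery bq = new BooleanQuery();
Query tq = new TermQuery(new Term("field", "value"));  // placeholder term

bq.add(tq, BooleanClause.Occur.MUST);      // was add(tq, true,  false) -- required
bq.add(tq, BooleanClause.Occur.SHOULD);    // was add(tq, false, false) -- optional
bq.add(tq, BooleanClause.Occur.MUST_NOT);  // was add(tq, false, true)  -- prohibited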

View File

@@ -203,7 +203,7 @@ public class QueryParsing {
   static void writeFieldVal(String val, FieldType ft, Appendable out, int flags) throws IOException {
     if (ft!=null) {
-      out.append(ft.toExternal(new Field("",val,true,true,false)));
+      out.append(ft.toExternal(new Field("",val, Field.Store.YES, Field.Index.UN_TOKENIZED)));
     } else {
       out.append(val);
     }
@@ -281,12 +281,12 @@ public class QueryParsing {
       first=false;
     }
-    if (c.prohibited) {
+    if (c.isProhibited()) {
       out.append('-');
-    } else if (c.required) {
+    } else if (c.isRequired()) {
       out.append('+');
     }
-    Query subQuery = c.query;
+    Query subQuery = c.getQuery();
     boolean wrapQuery=false;
     // TODO: may need to put parens around other types

View File

@@ -342,22 +342,10 @@ public class SolrIndexSearcher extends Searcher implements SolrInfoMBean {
     return searcher.search(weight, filter, i);
   }
-  public TopDocs search(Query query, Filter filter, int nDocs) throws IOException {
-    return searcher.search(query, filter, nDocs);
-  }
-  public TopFieldDocs search(Query query, Filter filter, int nDocs, Sort sort) throws IOException {
-    return searcher.search(query, filter, nDocs, sort);
-  }
   public void search(Weight weight, Filter filter, HitCollector hitCollector) throws IOException {
     searcher.search(weight, filter, hitCollector);
   }
-  public void search(Query query, Filter filter, HitCollector results) throws IOException {
-    searcher.search(query, filter, results);
-  }
   public Query rewrite(Query original) throws IOException {
     return searcher.rewrite(original);
   }
@@ -366,10 +354,6 @@ public class SolrIndexSearcher extends Searcher implements SolrInfoMBean {
     return searcher.explain(weight, i);
   }
-  public Explanation explain(Query query, int doc) throws IOException {
-    return searcher.explain(query, doc);
-  }
   public TopFieldDocs search(Weight weight, Filter filter, int i, Sort sort) throws IOException {
     return searcher.search(weight, filter, i, sort);
   }
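
The dropped overrides are the Query-based convenience signatures that Lucene 1.9 deprecated on the Searchable interface; the Weight-based delegations stay. Callers should be unaffected, since the abstract Searcher base class still exposes the Query-based entry points and converts them to Weights internally. A hypothetical usage sketch (searcher and query are stand-ins for real instances):

import org.apache.lucene.search.TopDocs;

// searcher: a SolrIndexSearcher (or any Lucene Searcher); query: any Query.
// The base class routes this through the Weight-based search kept above,
// roughly search(createWeight(query), null, 10).
TopDocs top = searcher.search(query, null, 10);  // no filter, top 10 hits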

View File

@@ -148,7 +148,7 @@ public class TestRequestHandler implements SolrRequestHandler {
     nl.add("myLong",999999999999L);
     Document doc = new Document();
-    doc.add(new Field("id","55",true,true,false));
+    doc.add(new Field("id","55",Field.Store.YES, Field.Index.UN_TOKENIZED));
     nl.add("myDoc",doc);
     nl.add("myResult",results);

View File

@@ -130,7 +130,7 @@ public class DirectUpdateHandler extends UpdateHandler {
     TermDocs tdocs = null;
     int num=0;
     try {
-      num = ir.delete(idTerm(id));
+      num = ir.deleteDocuments(idTerm(id));
       if (SolrCore.log.isLoggable(Level.FINEST)) {
         SolrCore.log.finest("deleted " + num + " docs matching id " + id);
       }
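
IndexReader.delete(Term) and delete(int) were deprecated in Lucene 1.9 and renamed to deleteDocuments(Term) and deleteDocument(int) with the same behaviour; the Term variant still returns how many documents were marked deleted. A standalone sketch (class name and index path are hypothetical):

import java.io.IOException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;

public class DeleteByIdSketch {
  public static void main(String[] args) throws IOException {
    IndexReader ir = IndexReader.open("/path/to/index");  // hypothetical path
    try {
      // was: int num = ir.delete(new Term("id", "55"));
      int num = ir.deleteDocuments(new Term("id", "55"));
      System.out.println("deleted " + num + " docs matching id 55");
    } finally {
      ir.close();
    }
  }
}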

View File

@@ -408,7 +408,7 @@ public class DirectUpdateHandler2 extends UpdateHandler {
     while (tdocs.next()) {
       if (saveLast==0) {
         // special case - delete all the docs as we see them.
-        reader.delete(tdocs.doc());
+        reader.deleteDocument(tdocs.doc());
         numDeletes++;
         continue;
       }
@@ -416,7 +416,7 @@ public class DirectUpdateHandler2 extends UpdateHandler {
       int prev=docnums[pos];
       docnums[pos]=tdocs.doc();
       if (prev != -1) {
-        reader.delete(prev);
+        reader.deleteDocument(prev);
         numDeletes++;
       }

View File

@@ -46,10 +46,10 @@ public class SolrIndexWriter extends IndexWriter {
     if (config != null) {
       setUseCompoundFile(config.useCompoundFile);
-      if (config.maxBufferedDocs != -1) minMergeDocs=config.maxBufferedDocs;
-      if (config.maxMergeDocs != -1) maxMergeDocs=config.maxMergeDocs;
-      if (config.mergeFactor != -1) mergeFactor=config.mergeFactor;
-      if (config.maxFieldLength != -1) maxFieldLength=config.maxFieldLength;
+      if (config.maxBufferedDocs != -1) setMaxBufferedDocs(config.maxBufferedDocs);
+      if (config.maxMergeDocs != -1) setMaxMergeDocs(config.maxMergeDocs);
+      if (config.mergeFactor != -1) setMergeFactor(config.mergeFactor);
+      if (config.maxFieldLength != -1) setMaxFieldLength(config.maxFieldLength);
     }
   }
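
On the writer side, IndexWriter's public tuning fields (minMergeDocs, maxMergeDocs, mergeFactor, maxFieldLength) were deprecated in Lucene 1.9 in favor of setter methods, with minMergeDocs superseded by maxBufferedDocs. A standalone sketch of equivalent tuning (class name, path, and values are hypothetical):

import java.io.IOException;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;

public class WriterTuningSketch {
  public static void main(String[] args) throws IOException {
    IndexWriter writer = new IndexWriter("/path/to/index", new StandardAnalyzer(), true);
    // was: writer.minMergeDocs = 1000; writer.mergeFactor = 10; ...
    writer.setMaxBufferedDocs(1000);   // replaces minMergeDocs
    writer.setMaxMergeDocs(100000);
    writer.setMergeFactor(10);
    writer.setMaxFieldLength(10000);
    writer.close();
  }
}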

View File

@@ -133,7 +133,7 @@ public abstract class UpdateHandler implements SolrInfoMBean {
     public void collect(int doc, float score) {
       try {
-        searcher.getReader().delete(doc);
+        searcher.getReader().deleteDocument(doc);
         deleted++;
       } catch (IOException e) {
         // don't try to close the searcher on failure for now...