LUCENE-5666: get solr compiling

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene5666@1593856 13f79535-47bb-0310-9956-ffa450edef68
Robert Muir 2014-05-11 20:38:21 +00:00
parent 639f03475a
commit 9714faa314
21 changed files with 76 additions and 95 deletions
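
Nearly every hunk below makes the same substitution: lookups through FieldCache.DEFAULT become calls to the static DocValues accessors, and the NUMERIC_UTILS_* parser arguments drop out of field sources and sort fields. The following is only a sketch of that recurring pattern, reusing the calls the diff itself relies on (DocValues.getSorted, DocValues.getNumeric, DocValues.getDocsWithField); the helper class and the field names "cat" and "price" are invented for illustration.

import java.io.IOException;

import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.util.Bits;

// Hypothetical helper, not part of the commit: shows the before/after shape of the API change.
class DocValuesAccessSketch {
  static void demo(AtomicReader reader) throws IOException {
    // was: FieldCache.DEFAULT.getTermsIndex(reader, "cat")
    SortedDocValues ords = DocValues.getSorted(reader, "cat");
    // was: FieldCache.DEFAULT.getNumerics(reader, "price", FieldCache.NUMERIC_UTILS_LONG_PARSER, true)
    NumericDocValues longs = DocValues.getNumeric(reader, "price");
    // was: FieldCache.DEFAULT.getDocsWithField(reader, "price")
    Bits hasValue = DocValues.getDocsWithField(reader, "price");

    int ord = ords.getOrd(0);          // ordinal of doc 0's value, -1 if the doc has none
    long price = longs.get(0);         // raw long value for doc 0; no parser involved
    boolean present = hasValue.get(0); // distinguishes a real 0 from a missing value
  }
}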

View File

@@ -33,7 +33,6 @@ import org.apache.lucene.queries.function.valuesource.DoubleFieldSource;
 import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
 import org.apache.lucene.queries.function.valuesource.IntFieldSource;
 import org.apache.lucene.queries.function.valuesource.LongFieldSource;
-import org.apache.lucene.search.FieldCache;
 import org.apache.solr.analytics.expression.ExpressionFactory;
 import org.apache.solr.analytics.request.ExpressionRequest;
 import org.apache.solr.analytics.util.AnalyticsParams;
@@ -357,7 +356,7 @@ public class StatsCollectorSupplierFactory {
     if (sourceType!=NUMBER_TYPE&&sourceType!=FIELD_TYPE) {
       return null;
     }
-    return new IntFieldSource(expressionString, FieldCache.NUMERIC_UTILS_INT_PARSER) {
+    return new IntFieldSource(expressionString) {
       public String description() {
         return field;
       }
@@ -366,7 +365,7 @@ public class StatsCollectorSupplierFactory {
     if (sourceType!=NUMBER_TYPE&&sourceType!=FIELD_TYPE) {
       return null;
     }
-    return new LongFieldSource(expressionString, FieldCache.NUMERIC_UTILS_LONG_PARSER) {
+    return new LongFieldSource(expressionString) {
       public String description() {
         return field;
       }
@@ -375,7 +374,7 @@ public class StatsCollectorSupplierFactory {
     if (sourceType!=NUMBER_TYPE&&sourceType!=FIELD_TYPE) {
       return null;
     }
-    return new FloatFieldSource(expressionString, FieldCache.NUMERIC_UTILS_FLOAT_PARSER) {
+    return new FloatFieldSource(expressionString) {
       public String description() {
         return field;
       }
@@ -384,7 +383,7 @@ public class StatsCollectorSupplierFactory {
     if (sourceType!=NUMBER_TYPE&&sourceType!=FIELD_TYPE) {
       return null;
     }
-    return new DoubleFieldSource(expressionString, FieldCache.NUMERIC_UTILS_DOUBLE_PARSER) {
+    return new DoubleFieldSource(expressionString) {
       public String description() {
         return field;
       }

View File

@@ -18,13 +18,9 @@
 package org.apache.solr.analytics.util;
 import java.io.IOException;
-import java.text.ParseException;
 import java.util.Arrays;
 import java.util.Date;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.NumericUtils;
 import org.apache.solr.schema.FieldType;

View File

@@ -18,18 +18,18 @@
 package org.apache.solr.analytics.util.valuesource;
 import java.io.IOException;
-import java.text.ParseException;
 import java.util.Date;
 import java.util.Map;
 import org.apache.lucene.index.AtomicReaderContext;
+import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.queries.function.FunctionValues;
 import org.apache.lucene.queries.function.docvalues.LongDocValues;
 import org.apache.lucene.queries.function.valuesource.LongFieldSource;
-import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.NumericUtils;
 import org.apache.lucene.util.mutable.MutableValue;
 import org.apache.lucene.util.mutable.MutableValueDate;
 import org.apache.solr.schema.TrieDateField;
@@ -41,15 +41,11 @@ import org.apache.solr.schema.TrieDateField;
 public class DateFieldSource extends LongFieldSource {
   public DateFieldSource(String field) {
-    super(field, FieldCache.NUMERIC_UTILS_LONG_PARSER);
-  }
-  public DateFieldSource(String field, FieldCache.Parser parser) {
-    super(field, parser);
+    super(field);
   }
   public long externalToLong(String extVal) {
-    return parser.parseValue(new BytesRef(extVal));
+    return NumericUtils.prefixCodedToLong(new BytesRef(extVal));
   }
   public Object longToObject(long val) {
@@ -63,8 +59,8 @@ public class DateFieldSource extends LongFieldSource {
   @Override
   public FunctionValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
-    final NumericDocValues arr = cache.getNumerics(readerContext.reader(), field, parser, true);
-    final Bits valid = cache.getDocsWithField(readerContext.reader(), field);
+    final NumericDocValues arr = DocValues.getNumeric(readerContext.reader(), field);
+    final Bits valid = DocValues.getDocsWithField(readerContext.reader(), field);
     return new LongDocValues(this) {
       @Override
       public long longVal(int doc) {
@@ -111,16 +107,12 @@ public class DateFieldSource extends LongFieldSource {
   public boolean equals(Object o) {
     if (o.getClass() != this.getClass()) return false;
     DateFieldSource other = (DateFieldSource) o;
-    if (parser==null) {
-      return field.equals(other.field);
-    } else {
-      return field.equals(other.field) && parser.equals(other.parser);
-    }
+    return field.equals(other.field);
   }
   @Override
   public int hashCode() {
-    int h = parser == null ? this.getClass().hashCode() : parser.getClass().hashCode();
+    int h = this.getClass().hashCode();
     h += super.hashCode();
     return h;
   }

View File

@@ -19,6 +19,7 @@ package org.apache.solr.handler.component;
 import org.apache.lucene.index.AtomicReader;
 import org.apache.lucene.index.AtomicReaderContext;
+import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.search.LeafCollector;
 import org.apache.lucene.search.DocIdSetIterator;
@@ -27,7 +28,6 @@ import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.SimpleCollector;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.TopDocsCollector;
@@ -188,7 +188,7 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia
     SolrIndexSearcher searcher = req.getSearcher();
     AtomicReader reader = searcher.getAtomicReader();
-    SortedDocValues values = FieldCache.DEFAULT.getTermsIndex(reader, field);
+    SortedDocValues values = DocValues.getSorted(reader, field);
     FixedBitSet groupBits = new FixedBitSet(values.getValueCount());
     DocList docList = rb.getResults().docList;
     IntOpenHashSet collapsedSet = new IntOpenHashSet(docList.size()*2);

View File

@@ -25,10 +25,10 @@ import java.util.Map;
 import org.apache.lucene.index.AtomicReader;
 import org.apache.lucene.index.AtomicReaderContext;
+import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.queries.function.FunctionValues;
 import org.apache.lucene.queries.function.ValueSource;
-import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.util.BytesRef;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.search.SolrIndexSearcher;
@@ -100,7 +100,7 @@ public class FieldFacetStats {
   // Currently only used by UnInvertedField stats
   public boolean facetTermNum(int docID, int statsTermNum) throws IOException {
     if (topLevelSortedValues == null) {
-      topLevelSortedValues = FieldCache.DEFAULT.getTermsIndex(topLevelReader, name);
+      topLevelSortedValues = DocValues.getSorted(topLevelReader, name);
     }
     int term = topLevelSortedValues.getOrd(docID);

View File

@@ -30,13 +30,13 @@ import java.util.Set;
 import org.apache.lucene.document.FieldType.NumericType;
 import org.apache.lucene.index.AtomicReaderContext;
+import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.queries.function.FunctionValues;
 import org.apache.lucene.queries.function.ValueSource;
-import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CharsRef;
@@ -156,13 +156,13 @@ final class NumericFacets {
         assert doc >= ctx.docBase;
         switch (numericType) {
           case LONG:
-            longs = FieldCache.DEFAULT.getNumerics(ctx.reader(), fieldName, FieldCache.NUMERIC_UTILS_LONG_PARSER, true);
+            longs = DocValues.getNumeric(ctx.reader(), fieldName);
             break;
           case INT:
-            longs = FieldCache.DEFAULT.getNumerics(ctx.reader(), fieldName, FieldCache.NUMERIC_UTILS_INT_PARSER, true);
+            longs = DocValues.getNumeric(ctx.reader(), fieldName);
            break;
          case FLOAT:
-            final NumericDocValues floats = FieldCache.DEFAULT.getNumerics(ctx.reader(), fieldName, FieldCache.NUMERIC_UTILS_FLOAT_PARSER, true);
+            final NumericDocValues floats = DocValues.getNumeric(ctx.reader(), fieldName);
             // TODO: this bit flipping should probably be moved to tie-break in the PQ comparator
             longs = new NumericDocValues() {
               @Override
@@ -174,7 +174,7 @@ final class NumericFacets {
             };
             break;
           case DOUBLE:
-            final NumericDocValues doubles = FieldCache.DEFAULT.getNumerics(ctx.reader(), fieldName, FieldCache.NUMERIC_UTILS_DOUBLE_PARSER, true);
+            final NumericDocValues doubles = DocValues.getNumeric(ctx.reader(), fieldName);
             // TODO: this bit flipping should probably be moved to tie-break in the PQ comparator
             longs = new NumericDocValues() {
               @Override
@@ -188,7 +188,7 @@ final class NumericFacets {
           default:
             throw new AssertionError();
         }
-        docsWithField = FieldCache.DEFAULT.getDocsWithField(ctx.reader(), fieldName);
+        docsWithField = DocValues.getDocsWithField(ctx.reader(), fieldName);
       }
       long v = longs.get(doc - ctx.docBase);
       if (v != 0 || docsWithField.get(doc - ctx.docBase)) {

View File

@@ -22,11 +22,11 @@ import java.util.*;
 import java.util.concurrent.*;
 import org.apache.lucene.index.AtomicReaderContext;
+import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CharsRef;
@@ -236,7 +236,7 @@ class PerSegmentSingleValuedFaceting {
   BytesRef tempBR = new BytesRef();
   void countTerms() throws IOException {
-    si = FieldCache.DEFAULT.getTermsIndex(context.reader(), fieldName);
+    si = DocValues.getSorted(context.reader(), fieldName);
     // SolrCore.log.info("reader= " + reader + " FC=" + System.identityHashCode(si));
     if (prefix!=null) {

View File

@@ -38,6 +38,7 @@ import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 import org.apache.lucene.index.AtomicReader;
+import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.index.Fields;
 import org.apache.lucene.index.MultiDocsEnum;
@@ -46,7 +47,6 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
@@ -644,7 +644,7 @@ public class SimpleFacets {
     FieldType ft = searcher.getSchema().getFieldType(fieldName);
     NamedList<Integer> res = new NamedList<>();
-    SortedDocValues si = FieldCache.DEFAULT.getTermsIndex(searcher.getAtomicReader(), fieldName);
+    SortedDocValues si = DocValues.getSorted(searcher.getAtomicReader(), fieldName);
     final BytesRef br = new BytesRef();

View File

@@ -23,12 +23,12 @@ import java.util.Map;
 import java.util.concurrent.atomic.AtomicLong;
 import org.apache.lucene.index.AtomicReader;
-import org.apache.lucene.index.DocTermOrds;
 import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TermRangeQuery;
+import org.apache.lucene.uninverting.DocTermOrds;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CharsRef;
 import org.apache.lucene.util.FixedBitSet;

View File

@@ -25,6 +25,7 @@ import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.index.AtomicReaderContext;
+import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.GeneralField;
 import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.index.StorableField;
@@ -32,7 +33,6 @@ import org.apache.lucene.queries.function.FunctionValues;
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.queries.function.docvalues.BoolDocValues;
 import org.apache.lucene.queries.function.valuesource.OrdFieldSource;
-import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CharsRef;
@@ -179,7 +179,7 @@ class BoolFieldSource extends ValueSource {
   @Override
   public FunctionValues getValues(Map context, AtomicReaderContext readerContext) throws IOException {
-    final SortedDocValues sindex = FieldCache.DEFAULT.getTermsIndex(readerContext.reader(), field);
+    final SortedDocValues sindex = DocValues.getSorted(readerContext.reader(), field);
     // figure out what ord maps to true
     int nord = sindex.getValueCount();

View File

@@ -178,7 +178,7 @@ public class EnumField extends PrimitiveFieldType {
   public SortField getSortField(SchemaField field, boolean top) {
     field.checkSortability();
     final Object missingValue = Integer.MIN_VALUE;
-    SortField sf = new SortField(field.getName(), SortField.Type.INT, top, FieldCache.NUMERIC_UTILS_INT_PARSER);
+    SortField sf = new SortField(field.getName(), SortField.Type.INT, top);
     sf.setMissingValue(missingValue);
     return sf;
   }
@@ -189,7 +189,7 @@ public class EnumField extends PrimitiveFieldType {
   @Override
   public ValueSource getValueSource(SchemaField field, QParser qparser) {
     field.checkFieldCacheSource(qparser);
-    return new EnumFieldSource(field.getName(), FieldCache.NUMERIC_UTILS_INT_PARSER, enumIntToStringMap, enumStringToIntMap);
+    return new EnumFieldSource(field.getName(), enumIntToStringMap, enumStringToIntMap);
   }
   /**

View File

@@ -39,7 +39,6 @@ import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
 import org.apache.lucene.queries.function.valuesource.IntFieldSource;
 import org.apache.lucene.queries.function.valuesource.LongFieldSource;
 import org.apache.lucene.search.ConstantScoreQuery;
-import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.search.FieldCacheRangeFilter;
 import org.apache.lucene.search.NumericRangeQuery;
 import org.apache.lucene.search.Query;
@@ -153,7 +152,7 @@ public class TrieField extends PrimitiveFieldType {
       else if( sortMissingFirst ) {
         missingValue = top ? Integer.MAX_VALUE : Integer.MIN_VALUE;
       }
-      sf = new SortField( field.getName(), SortField.Type.INT, top, FieldCache.NUMERIC_UTILS_INT_PARSER);
+      sf = new SortField( field.getName(), SortField.Type.INT, top);
       sf.setMissingValue(missingValue);
       return sf;
@@ -164,7 +163,7 @@ public class TrieField extends PrimitiveFieldType {
       else if( sortMissingFirst ) {
         missingValue = top ? Float.POSITIVE_INFINITY : Float.NEGATIVE_INFINITY;
       }
-      sf = new SortField( field.getName(), SortField.Type.FLOAT, top, FieldCache.NUMERIC_UTILS_FLOAT_PARSER);
+      sf = new SortField( field.getName(), SortField.Type.FLOAT, top);
       sf.setMissingValue(missingValue);
       return sf;
@@ -176,7 +175,7 @@ public class TrieField extends PrimitiveFieldType {
       else if( sortMissingFirst ) {
         missingValue = top ? Long.MAX_VALUE : Long.MIN_VALUE;
       }
-      sf = new SortField( field.getName(), SortField.Type.LONG, top, FieldCache.NUMERIC_UTILS_LONG_PARSER);
+      sf = new SortField( field.getName(), SortField.Type.LONG, top);
       sf.setMissingValue(missingValue);
       return sf;
@@ -187,7 +186,7 @@ public class TrieField extends PrimitiveFieldType {
       else if( sortMissingFirst ) {
         missingValue = top ? Double.POSITIVE_INFINITY : Double.NEGATIVE_INFINITY;
       }
-      sf = new SortField( field.getName(), SortField.Type.DOUBLE, top, FieldCache.NUMERIC_UTILS_DOUBLE_PARSER);
+      sf = new SortField( field.getName(), SortField.Type.DOUBLE, top);
       sf.setMissingValue(missingValue);
       return sf;
@@ -201,15 +200,15 @@
     field.checkFieldCacheSource(qparser);
     switch (type) {
       case INTEGER:
-        return new IntFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_INT_PARSER );
+        return new IntFieldSource( field.getName());
       case FLOAT:
-        return new FloatFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_FLOAT_PARSER );
+        return new FloatFieldSource( field.getName());
       case DATE:
-        return new TrieDateFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_LONG_PARSER );
+        return new TrieDateFieldSource( field.getName());
      case LONG:
-        return new LongFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_LONG_PARSER );
+        return new LongFieldSource( field.getName());
      case DOUBLE:
-        return new DoubleFieldSource( field.getName(), FieldCache.NUMERIC_UTILS_DOUBLE_PARSER );
+        return new DoubleFieldSource( field.getName());
      default:
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field: " + field.name);
     }
@@ -706,8 +705,8 @@ public class TrieField extends PrimitiveFieldType {
 class TrieDateFieldSource extends LongFieldSource {
-  public TrieDateFieldSource(String field, FieldCache.Parser parser) {
-    super(field, parser);
+  public TrieDateFieldSource(String field) {
+    super(field);
   }
   @Override
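
Both the EnumField and TrieField hunks above now build their numeric SortFields from just the field name, type, and direction; the missing-value handling is unchanged. A minimal sketch of that construction follows; the helper class, field name, and missing-value choice are invented for illustration.

import org.apache.lucene.search.SortField;

// Hypothetical helper, not from the commit: a parser-free numeric sort as EnumField/TrieField now return it.
class SortFieldSketch {
  static SortField intSort(String fieldName, boolean reverse) {
    SortField sf = new SortField(fieldName, SortField.Type.INT, reverse); // no FieldCache parser argument
    sf.setMissingValue(Integer.MIN_VALUE); // docs without a value get the sentinel, as before
    return sf;
  }
}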

View File

@@ -27,6 +27,7 @@ import java.util.Set;
 import org.apache.lucene.index.AtomicReader;
 import org.apache.lucene.index.AtomicReaderContext;
+import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.SortedDocValues;
@@ -38,7 +39,6 @@ import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.search.LeafCollector;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.search.FilterCollector;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
@@ -289,11 +289,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
     SortedDocValues docValues = null;
     FunctionQuery funcQuery = null;
-    if(schemaField.hasDocValues()) {
-      docValues = searcher.getAtomicReader().getSortedDocValues(this.field);
-    } else {
-      docValues = FieldCache.DEFAULT.getTermsIndex(searcher.getAtomicReader(), this.field);
-    }
+    docValues = DocValues.getSorted(searcher.getAtomicReader(), this.field);
     FieldType fieldType = null;
@@ -830,7 +826,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
     }
     public void setNextReader(AtomicReaderContext context) throws IOException {
-      this.vals = FieldCache.DEFAULT.getNumerics(context.reader(), this.field, FieldCache.NUMERIC_UTILS_INT_PARSER, false);
+      this.vals = DocValues.getNumeric(context.reader(), this.field);
     }
     public void collapse(int ord, int contextDoc, int globalDoc) throws IOException {
@@ -898,7 +894,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
     }
     public void setNextReader(AtomicReaderContext context) throws IOException {
-      this.vals = FieldCache.DEFAULT.getNumerics(context.reader(), this.field, FieldCache.NUMERIC_UTILS_LONG_PARSER, false);
+      this.vals = DocValues.getNumeric(context.reader(), this.field);
     }
     public void collapse(int ord, int contextDoc, int globalDoc) throws IOException {
@@ -967,7 +963,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
     }
     public void setNextReader(AtomicReaderContext context) throws IOException {
-      this.vals = FieldCache.DEFAULT.getNumerics(context.reader(), this.field, FieldCache.NUMERIC_UTILS_FLOAT_PARSER, false);
+      this.vals = DocValues.getNumeric(context.reader(), this.field);
     }
     public void collapse(int ord, int contextDoc, int globalDoc) throws IOException {
View File

@@ -25,18 +25,21 @@ import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrInfoMBean;
+/*
 import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.search.FieldCache.CacheEntry;
 import org.apache.lucene.util.FieldCacheSanityChecker;
 import org.apache.lucene.util.FieldCacheSanityChecker.Insanity;
+*/
+// nocommit: maybe provide something useful here instead.
 /**
- * A SolrInfoMBean that provides introspection of the Lucene FieldCache, this is <b>NOT</b> a cache that is managed by Solr.
+ * A SolrInfoMBean that provides introspection of the Solr FieldCache
  *
  */
 public class SolrFieldCacheMBean implements SolrInfoMBean {
-  protected FieldCacheSanityChecker checker = new FieldCacheSanityChecker();
+  //protected FieldCacheSanityChecker checker = new FieldCacheSanityChecker();
   @Override
   public String getName() { return this.getClass().getName(); }
@@ -44,8 +47,7 @@ public class SolrFieldCacheMBean implements SolrInfoMBean {
   public String getVersion() { return SolrCore.version; }
   @Override
   public String getDescription() {
-    return "Provides introspection of the Lucene FieldCache, "
-        + "this is **NOT** a cache that is managed by Solr.";
+    return "Provides introspection of the Solr FieldCache ";
   }
   @Override
   public Category getCategory() { return Category.CACHE; }
@@ -60,6 +62,7 @@ public class SolrFieldCacheMBean implements SolrInfoMBean {
   @Override
   public NamedList getStatistics() {
     NamedList stats = new SimpleOrderedMap();
+    /*
     CacheEntry[] entries = FieldCache.DEFAULT.getCacheEntries();
     stats.add("entries_count", entries.length);
     for (int i = 0; i < entries.length; i++) {
@@ -72,16 +75,8 @@ public class SolrFieldCacheMBean implements SolrInfoMBean {
     stats.add("insanity_count", insanity.length);
     for (int i = 0; i < insanity.length; i++) {
-      /** RAM estimation is both CPU and memory intensive... we don't want to do it unless asked.
-      // we only estimate the size of insane entries
-      for (CacheEntry e : insanity[i].getCacheEntries()) {
-        // don't re-estimate if we've already done it.
-        if (null == e.getEstimatedSize()) e.estimateSize();
-      }
-      **/
       stats.add("insanity#" + i, insanity[i].toString());
-    }
+    }*/
     return stats;
   }

View File

@@ -23,7 +23,6 @@ import java.util.List;
 import java.util.Map;
 import org.apache.commons.lang.StringUtils;
-import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.SolrServer;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -422,7 +421,8 @@ public class TestDistributedSearch extends BaseDistributedSearchTestCase {
     // Thread.sleep(10000000000L);
-    FieldCache.DEFAULT.purgeAllCaches();   // avoid FC insanity
+    // nocommit: split test if needed
+    // FieldCache.DEFAULT.purgeAllCaches();   // hide FC insanity
     del("*:*"); // delete all docs and test stats request
     commit();

View File

@@ -17,7 +17,6 @@
 package org.apache.solr;
-import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.index.LogDocMergePolicy;
 import org.noggit.JSONUtil;
 import org.noggit.ObjectBuilder;
@@ -518,7 +517,8 @@ public class TestGroupingSearch extends SolrTestCaseJ4 {
         ,"/grouped/"+f+"/matches==10"
         ,"/facet_counts/facet_fields/"+f+"==['1',3, '2',3, '3',2, '4',1, '5',1]"
     );
-    FieldCache.DEFAULT.purgeAllCaches();   // avoid FC insanity
+    // nocommit: split test if needed
+    // FieldCache.DEFAULT.purgeAllCaches(); // hide FC insanity
     // test that grouping works with highlighting
     assertJQ(req("fq",filt, "q","{!func}"+f2, "group","true", "group.field",f, "fl","id"

View File

@@ -23,7 +23,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Random;
-import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
@@ -134,7 +133,8 @@ public class TestRandomDVFaceting extends SolrTestCaseJ4 {
         }
       }
     } finally {
-      FieldCache.DEFAULT.purgeAllCaches();   // avoid FC insanity
+      // nocommit: split test if needed
+      // FieldCache.DEFAULT.purgeAllCaches(); // hide FC insanity
     }
   }

View File

@@ -17,7 +17,6 @@
 package org.apache.solr;
-import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.common.params.ModifiableSolrParams;
@@ -133,7 +132,8 @@ public class TestRandomFaceting extends SolrTestCaseJ4 {
         }
       }
     } finally {
-      FieldCache.DEFAULT.purgeAllCaches();   // avoid FC insanity
+      // nocommit: split test if needed
+      // FieldCache.DEFAULT.purgeAllCaches(); // hide FC insanity
     }
   }

View File

@@ -22,9 +22,9 @@ import java.util.List;
 import java.util.Locale;
 import java.util.Random;
-import org.apache.lucene.index.DocTermOrds;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.uninverting.DocTermOrds;
 import org.apache.lucene.util.BytesRef;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.params.FacetParams;

View File

@@ -19,7 +19,6 @@ package org.apache.solr.search.function;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.index.FieldInvertState;
-import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.search.similarities.DefaultSimilarity;
 import org.apache.lucene.search.similarities.TFIDFSimilarity;
 import org.apache.solr.SolrTestCaseJ4;
@@ -203,7 +202,8 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
               Arrays.asList("v1","\0:[* TO *]"), 88,12
     );
-    FieldCache.DEFAULT.purgeAllCaches();   // avoid FC insanity
+    // nocommit: split test if needed
+    // FieldCache.DEFAULT.purgeAllCaches(); // hide FC insanity
   }
   @Test
@@ -285,7 +285,8 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
       // System.out.println("Done test "+i);
     }
-    FieldCache.DEFAULT.purgeAllCaches();   // avoid FC insanity
+    // nocommit: split test if needed
+    // FieldCache.DEFAULT.purgeAllCaches(); // hide FC insanity
   }
   @Test
@@ -424,7 +425,8 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
     );
-    FieldCache.DEFAULT.purgeAllCaches();   // avoid FC insanity
+    // nocommit: split test if needed
+    // FieldCache.DEFAULT.purgeAllCaches(); // hide FC insanity
   }
   /**
@@ -642,7 +644,8 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
     singleTest(fieldAsFunc, "sqrt(\0)");
     assertTrue(orig != FileFloatSource.onlyForTesting);
-    FieldCache.DEFAULT.purgeAllCaches();   // avoid FC insanity
+    // nocommit: split test if needed
+    // FieldCache.DEFAULT.purgeAllCaches(); // hide FC insanity
   }
   /**
@@ -669,7 +672,8 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
               100,10, 25,5, 0,0, 1,1);
     singleTest(fieldAsFunc, "log(\0)", 1,0);
-    FieldCache.DEFAULT.purgeAllCaches();   // avoid FC insanity
+    // nocommit: split test if needed
+    // FieldCache.DEFAULT.purgeAllCaches(); // hide FC insanity
   }
   @Test

View File

@@ -35,7 +35,6 @@ import java.util.concurrent.atomic.AtomicInteger;
 import junit.framework.Assert;
 import org.apache.commons.io.FileUtils;
-import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.util.Constants;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
@@ -284,7 +283,8 @@ public abstract class BaseDistributedSearchTestCase extends SolrTestCaseJ4 {
   @Override
   public void tearDown() throws Exception {
     destroyServers();
-    FieldCache.DEFAULT.purgeAllCaches();   // avoid FC insanity
+    // nocommit: split test if needed
+    // FieldCache.DEFAULT.purgeAllCaches(); // hide FC insanity
     super.tearDown();
   }