mirror of https://github.com/apache/lucene.git
LUCENE-1257: More generified APIs and implementations. Thanks Kay Kay & Robert Muir!
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@826601 13f79535-47bb-0310-9956-ffa450edef68
parent 2b274fd0eb
commit 3e086c6028
@@ -52,7 +52,7 @@ public abstract class Analyzer implements Closeable {
     return tokenStream(fieldName, reader);
   }

-  private CloseableThreadLocal tokenStreams = new CloseableThreadLocal();
+  private CloseableThreadLocal<Object> tokenStreams = new CloseableThreadLocal<Object>();

   /** Used by Analyzers that implement reusableTokenStream
    * to retrieve previously saved TokenStreams for re-use
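Note on the hunk above: CloseableThreadLocal is Lucene's ThreadLocal wrapper, and this commit parameterizes it, which is what lets Analyzer declare its per-thread slot as CloseableThreadLocal<Object>. Below is a minimal sketch of the generified shape, assuming only what the hunks in this commit show; the real org.apache.lucene.util.CloseableThreadLocal adds weak-reference handling and purging that are omitted here.

import java.io.Closeable;

public class CloseableThreadLocal<T> implements Closeable {
  private ThreadLocal<T> t = new ThreadLocal<T>();

  // Subclasses may override to create the per-thread value lazily,
  // as SegmentReader.FieldsReaderLocal does further down in this commit.
  protected T initialValue() {
    return null;
  }

  public T get() {
    T v = t.get();
    if (v == null) {
      v = initialValue();
      if (v != null) {
        set(v);
      }
    }
    return v;
  }

  public void set(T object) {
    t.set(object);
  }

  public void close() {
    t = null; // drop the per-thread values for garbage collection
  }
}

With the type parameter in place, the (IndexInput), (FieldsReader), (TermVectorsReader) and (ThreadResources) casts at the get() call sites later in this commit all disappear.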
@@ -33,8 +33,7 @@ import java.util.List;
  */
 public abstract class BaseCharFilter extends CharFilter {

-  //private List<OffCorrectMap> pcmList;
-  private List pcmList;
+  private List<OffCorrectMap> pcmList;

   public BaseCharFilter(CharStream in) {
     super(in);
@@ -64,7 +63,7 @@ public abstract class BaseCharFilter extends CharFilter {

   protected void addOffCorrectMap(int off, int cumulativeDiff) {
     if (pcmList == null) {
-      pcmList = new ArrayList();
+      pcmList = new ArrayList<OffCorrectMap>();
     }
     pcmList.add(new OffCorrectMap(off, cumulativeDiff));
   }
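The payoff for typing pcmList shows up wherever the list is read back. The following is a hypothetical lookup helper in the style of BaseCharFilter's offset correction, not the class's actual correct() implementation; it assumes OffCorrectMap exposes off and cumulativeDiff fields, as the constructor call above suggests.

// Walk the typed list and apply the last cumulative diff at or before
// the given offset; no (OffCorrectMap) cast is needed inside the loop.
protected int correct(int currentOff) {
  if (pcmList == null || pcmList.isEmpty()) {
    return currentOff;
  }
  int diff = 0;
  for (OffCorrectMap entry : pcmList) {
    if (entry.off > currentOff) {
      break;
    }
    diff = entry.cumulativeDiff;
  }
  return currentOff + diff;
}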
@@ -34,8 +34,8 @@ import org.apache.lucene.util.AttributeSource;
  * stream to the first Token.
  */
 public final class CachingTokenFilter extends TokenFilter {
-  private List cache = null;
-  private Iterator iterator = null;
+  private List<AttributeSource.State> cache = null;
+  private Iterator<AttributeSource.State> iterator = null;
   private AttributeSource.State finalState;

   public CachingTokenFilter(TokenStream input) {
@@ -45,7 +45,7 @@ public final class CachingTokenFilter extends TokenFilter {
   public final boolean incrementToken() throws IOException {
     if (cache == null) {
       // fill cache lazily
-      cache = new LinkedList();
+      cache = new LinkedList<AttributeSource.State>();
       fillCache();
       iterator = cache.iterator();
     }
@@ -55,7 +55,7 @@ public final class CachingTokenFilter extends TokenFilter {
       return false;
     }
     // Since the TokenFilter can be reset, the tokens need to be preserved as immutable.
-    restoreState((AttributeSource.State) iterator.next());
+    restoreState(iterator.next());
     return true;
   }

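Context for these hunks: CachingTokenFilter records one AttributeSource.State snapshot per token on its first pass and replays the snapshots afterwards, which is why cache and iterator carry that element type. A hedged usage sketch; the analyzer, the "body" field name, and consumeTwice are placeholders, not part of this commit.

import java.io.IOException;
import java.io.StringReader;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.CachingTokenFilter;
import org.apache.lucene.analysis.TokenStream;

void consumeTwice(Analyzer analyzer, String text) throws IOException {
  TokenStream stream = analyzer.tokenStream("body", new StringReader(text));
  CachingTokenFilter cached = new CachingTokenFilter(stream);
  while (cached.incrementToken()) {
    // first pass reads the wrapped stream and fills the cache
  }
  cached.reset(); // rewind to the cached states
  while (cached.incrementToken()) {
    // second pass replays snapshots via restoreState(iterator.next())
  }
}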
@@ -44,7 +44,7 @@ import java.util.HashMap;
  */
 public class PerFieldAnalyzerWrapper extends Analyzer {
   private Analyzer defaultAnalyzer;
-  private Map analyzerMap = new HashMap();
+  private Map<String,Analyzer> analyzerMap = new HashMap<String,Analyzer>();


   /**
@@ -67,7 +67,7 @@ public class PerFieldAnalyzerWrapper extends Analyzer {
    * used for those fields
    */
   public PerFieldAnalyzerWrapper(Analyzer defaultAnalyzer,
-      Map /*<String, Analyzer>*/ fieldAnalyzers) {
+      Map<String,Analyzer> fieldAnalyzers) {
     this.defaultAnalyzer = defaultAnalyzer;
     if (fieldAnalyzers != null) {
       analyzerMap.putAll(fieldAnalyzers);
@@ -87,7 +87,7 @@ public class PerFieldAnalyzerWrapper extends Analyzer {
   }

   public TokenStream tokenStream(String fieldName, Reader reader) {
-    Analyzer analyzer = (Analyzer) analyzerMap.get(fieldName);
+    Analyzer analyzer = analyzerMap.get(fieldName);
     if (analyzer == null) {
       analyzer = defaultAnalyzer;
     }
@@ -102,7 +102,7 @@ public class PerFieldAnalyzerWrapper extends Analyzer {
       // tokenStream but not reusableTokenStream
       return tokenStream(fieldName, reader);
     }
-    Analyzer analyzer = (Analyzer) analyzerMap.get(fieldName);
+    Analyzer analyzer = analyzerMap.get(fieldName);
     if (analyzer == null)
       analyzer = defaultAnalyzer;

@@ -111,7 +111,7 @@ public class PerFieldAnalyzerWrapper extends Analyzer {

   /** Return the positionIncrementGap from the analyzer assigned to fieldName */
   public int getPositionIncrementGap(String fieldName) {
-    Analyzer analyzer = (Analyzer) analyzerMap.get(fieldName);
+    Analyzer analyzer = analyzerMap.get(fieldName);
     if (analyzer == null)
       analyzer = defaultAnalyzer;
     return analyzer.getPositionIncrementGap(fieldName);
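A short usage sketch of the tightened constructor; the analyzer choices and field name are illustrative, not taken from this commit:

import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.KeywordAnalyzer;
import org.apache.lucene.analysis.PerFieldAnalyzerWrapper;
import org.apache.lucene.analysis.SimpleAnalyzer;

Map<String,Analyzer> fieldAnalyzers = new HashMap<String,Analyzer>();
fieldAnalyzers.put("partnum", new KeywordAnalyzer());
// "partnum" uses KeywordAnalyzer; every other field falls back to SimpleAnalyzer.
Analyzer wrapper = new PerFieldAnalyzerWrapper(new SimpleAnalyzer(), fieldAnalyzers);

Before this change the parameter was a raw Map with the element types only in a comment, so a Map holding the wrong value type would compile and fail later at the cast sites; with Map<String,Analyzer> the compiler rejects it outright.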
@@ -73,7 +73,7 @@ sink2.consumeAllTokens();
  * <p>Note, the EntityDetect and URLDetect TokenStreams are for the example and do not currently exist in Lucene.
  */
 public final class TeeSinkTokenFilter extends TokenFilter {
-  private final List sinks = new LinkedList();
+  private final List<WeakReference<SinkTokenStream>> sinks = new LinkedList<WeakReference<SinkTokenStream>>();

   /**
    * Instantiates a new TeeSinkTokenFilter.
@@ -96,7 +96,7 @@ public final class TeeSinkTokenFilter extends TokenFilter {
   */
   public SinkTokenStream newSinkTokenStream(SinkFilter filter) {
     SinkTokenStream sink = new SinkTokenStream(this.cloneAttributes(), filter);
-    this.sinks.add(new WeakReference(sink));
+    this.sinks.add(new WeakReference<SinkTokenStream>(sink));
     return sink;
   }

@@ -111,10 +111,10 @@ public final class TeeSinkTokenFilter extends TokenFilter {
       throw new IllegalArgumentException("The supplied sink is not compatible to this tee");
     }
     // add eventually missing attribute impls to the existing sink
-    for (Iterator it = this.cloneAttributes().getAttributeImplsIterator(); it.hasNext(); ) {
-      sink.addAttributeImpl((AttributeImpl) it.next());
+    for (Iterator<AttributeImpl> it = this.cloneAttributes().getAttributeImplsIterator(); it.hasNext(); ) {
+      sink.addAttributeImpl(it.next());
     }
-    this.sinks.add(new WeakReference(sink));
+    this.sinks.add(new WeakReference<SinkTokenStream>(sink));
   }

   /**
@@ -131,8 +131,8 @@ public final class TeeSinkTokenFilter extends TokenFilter {
     if (input.incrementToken()) {
       // capture state lazily - maybe no SinkFilter accepts this state
       AttributeSource.State state = null;
-      for (Iterator it = sinks.iterator(); it.hasNext(); ) {
-        final SinkTokenStream sink = (SinkTokenStream) ((WeakReference) it.next()).get();
+      for (WeakReference<SinkTokenStream> ref : sinks) {
+        final SinkTokenStream sink = ref.get();
         if (sink != null) {
           if (sink.accept(this)) {
             if (state == null) {
@@ -151,8 +151,8 @@ public final class TeeSinkTokenFilter extends TokenFilter {
   public final void end() throws IOException {
     super.end();
     AttributeSource.State finalState = captureState();
-    for (Iterator it = sinks.iterator(); it.hasNext(); ) {
-      final SinkTokenStream sink = (SinkTokenStream) ((WeakReference) it.next()).get();
+    for (WeakReference<SinkTokenStream> ref : sinks) {
+      final SinkTokenStream sink = ref.get();
       if (sink != null) {
         sink.setFinalState(finalState);
       }
@@ -179,9 +179,9 @@ public final class TeeSinkTokenFilter extends TokenFilter {
   }

   public static final class SinkTokenStream extends TokenStream {
-    private final List cachedStates = new LinkedList();
+    private final List<AttributeSource.State> cachedStates = new LinkedList<AttributeSource.State>();
     private AttributeSource.State finalState;
-    private Iterator it = null;
+    private Iterator<AttributeSource.State> it = null;
     private SinkFilter filter;

     private SinkTokenStream(AttributeSource source, SinkFilter filter) {
@@ -214,7 +214,7 @@ public final class TeeSinkTokenFilter extends TokenFilter {
         return false;
       }

-      AttributeSource.State state = (State) it.next();
+      AttributeSource.State state = it.next();
       restoreState(state);
       return true;
     }
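The loop rewrites above all follow one pattern: TeeSinkTokenFilter holds its sinks weakly so an abandoned sink can be garbage-collected, and once sinks is a List<WeakReference<SinkTokenStream>>, the enhanced for loop replaces the raw Iterator and the (SinkTokenStream) ((WeakReference) it.next()).get() double cast. A self-contained rendering of the same idiom; this is a hypothetical example, with Runnable standing in for SinkTokenStream:

import java.lang.ref.WeakReference;
import java.util.LinkedList;
import java.util.List;

class WeakTaskList {
  private final List<WeakReference<Runnable>> tasks =
      new LinkedList<WeakReference<Runnable>>();

  void add(Runnable task) {
    tasks.add(new WeakReference<Runnable>(task));
  }

  void runLive() {
    for (WeakReference<Runnable> ref : tasks) {
      Runnable task = ref.get(); // null once the referent has been GC'd
      if (task != null) {
        task.run();
      }
    }
  }
}

Like the Lucene code, this checks each referent for null rather than pruning cleared entries from the list.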
@@ -55,7 +55,7 @@ final class FieldsReader implements Cloneable {
   // file. This will be 0 if we have our own private file.
   private int docStoreOffset;

-  private CloseableThreadLocal fieldsStreamTL = new CloseableThreadLocal();
+  private CloseableThreadLocal<IndexInput> fieldsStreamTL = new CloseableThreadLocal<IndexInput>();
   private boolean isOriginal = false;

   /** Returns a cloned FieldsReader that shares open
@@ -417,7 +417,7 @@ final class FieldsReader implements Cloneable {
   }

   private IndexInput getFieldStream() {
-    IndexInput localFieldsStream = (IndexInput) fieldsStreamTL.get();
+    IndexInput localFieldsStream = fieldsStreamTL.get();
     if (localFieldsStream == null) {
       localFieldsStream = (IndexInput) cloneableFieldsStream.clone();
       fieldsStreamTL.set(localFieldsStream);
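The getFieldStream() hunk is the standard lazy clone-per-thread idiom: each thread gets its own clone of the shared stream so file-pointer state is never shared across threads. The cast on get() disappears, while the cast on clone() stays, since clone() still returns Object. The same idiom reappears in TermInfosReader.getThreadResources() at the end of this commit. A self-contained sketch with a stand-in resource type; PerThreadBuffers and StringBuilder are illustrative, not Lucene code:

import org.apache.lucene.util.CloseableThreadLocal;

final class PerThreadBuffers {
  private final CloseableThreadLocal<StringBuilder> buffers =
      new CloseableThreadLocal<StringBuilder>();

  StringBuilder threadBuffer() {
    StringBuilder sb = buffers.get(); // typed: no (StringBuilder) cast
    if (sb == null) {
      sb = new StringBuilder();       // created lazily, once per thread
      buffers.set(sb);
    }
    return sb;
  }

  void close() {
    buffers.close(); // release all per-thread values
  }
}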
@@ -23,7 +23,7 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Iterator;
+
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -49,8 +49,8 @@ public class SegmentReader extends IndexReader implements Cloneable {
   private SegmentInfo si;
   private int readBufferSize;

-  CloseableThreadLocal fieldsReaderLocal = new FieldsReaderLocal();
-  CloseableThreadLocal termVectorsLocal = new CloseableThreadLocal();
+  CloseableThreadLocal<FieldsReader> fieldsReaderLocal = new FieldsReaderLocal();
+  CloseableThreadLocal<TermVectorsReader> termVectorsLocal = new CloseableThreadLocal<TermVectorsReader>();

   BitVector deletedDocs = null;
   Ref deletedDocsRef = null;
@@ -292,9 +292,9 @@ public class SegmentReader extends IndexReader implements Cloneable {
   /**
    * Sets the initial value
    */
-  private class FieldsReaderLocal extends CloseableThreadLocal {
-    protected Object initialValue() {
-      return core.getFieldsReaderOrig().clone();
+  private class FieldsReaderLocal extends CloseableThreadLocal<FieldsReader> {
+    protected FieldsReader initialValue() {
+      return (FieldsReader) core.getFieldsReaderOrig().clone();
     }
   }

@@ -824,7 +824,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
   }

   FieldsReader getFieldsReader() {
-    return (FieldsReader) fieldsReaderLocal.get();
+    return fieldsReaderLocal.get();
   }

   protected void doClose() throws IOException {
@@ -1149,7 +1149,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
    * @return TermVectorsReader
    */
   TermVectorsReader getTermVectorsReader() {
-    TermVectorsReader tvReader = (TermVectorsReader) termVectorsLocal.get();
+    TermVectorsReader tvReader = termVectorsLocal.get();
     if (tvReader == null) {
       TermVectorsReader orig = core.getTermVectorsReaderOrig();
       if (orig == null) {
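The FieldsReaderLocal hunk shows the other half of the generification: a subclass of CloseableThreadLocal<T> can now override initialValue() with a covariant return type, so the one remaining cast happens at creation time (clone() still returns Object) instead of at every get(), as getFieldsReader() above demonstrates. A minimal illustration of the same pattern with hypothetical names, not SegmentReader's actual code:

import org.apache.lucene.util.CloseableThreadLocal;

// Each thread lazily receives its own counter array; callers of get()
// receive int[] directly, with no cast.
class CounterLocal extends CloseableThreadLocal<int[]> {
  @Override
  protected int[] initialValue() {
    return new int[1];
  }
}

CounterLocal counters = new CounterLocal();
counters.get()[0]++; // typed access, no (int[]) cast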
@@ -33,7 +33,7 @@ final class TermInfosReader {
   private final String segment;
   private final FieldInfos fieldInfos;

-  private final CloseableThreadLocal threadResources = new CloseableThreadLocal();
+  private final CloseableThreadLocal<ThreadResources> threadResources = new CloseableThreadLocal<ThreadResources>();
   private final SegmentTermEnum origEnum;
   private final long size;

@@ -138,7 +138,7 @@ final class TermInfosReader {
   }

   private ThreadResources getThreadResources() {
-    ThreadResources resources = (ThreadResources)threadResources.get();
+    ThreadResources resources = threadResources.get();
     if (resources == null) {
       resources = new ThreadResources();
       resources.termEnum = terms();