mirror of https://github.com/apache/lucene.git
- Removed compiler warnings (jikes 1.17). Style stuff only.
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@149884 13f79535-47bb-0310-9956-ffa450edef68
parent d0a577e1bd
commit 98330b5030
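For readers who have not used jikes: version 1.17 of IBM's Java compiler emits style warnings when modifiers are not written in the order the Java Language Specification recommends (visibility first, then abstract/static/final), and apparently also when a throws clause redundantly names an unchecked exception such as SecurityException. The hunks below are almost entirely that kind of reordering. A minimal before/after sketch of the pattern, with illustrative class names rather than Lucene's:

    // OldStyle.java -- compiles, but jikes 1.17 flags the modifier order.
    abstract public class OldStyle {
      final static int LIMIT = 10;
      abstract public int size();
    }

    // NewStyle.java -- same semantics, modifiers in the JLS-recommended order.
    public abstract class NewStyle {
      static final int LIMIT = 10;
      public abstract int size();
    }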
@@ -66,7 +66,7 @@ import java.io.Reader;
  * WARNING: You must override one of the methods defined by this class in your
  * subclass or the Analyzer will enter an infinite loop.
  */
-abstract public class Analyzer {
+public abstract class Analyzer {
   /** Creates a TokenStream which tokenizes all the text in the provided
     Reader. Default implementation forwards to tokenStream(Reader) for
     compatibility with older version. Override to allow Analyzer to choose
@@ -63,8 +63,8 @@ public abstract class CharTokenizer extends Tokenizer {
   }

   private int offset = 0, bufferIndex=0, dataLen=0;
-  private final static int MAX_WORD_LEN = 255;
-  private final static int IO_BUFFER_SIZE = 1024;
+  private static final int MAX_WORD_LEN = 255;
+  private static final int IO_BUFFER_SIZE = 1024;
   private final char[] buffer = new char[MAX_WORD_LEN];
   private final char[] ioBuffer = new char[IO_BUFFER_SIZE];

@@ -80,7 +80,7 @@ public final class StopFilter extends TokenFilter {
   /** Builds a Hashtable from an array of stop words, appropriate for passing
       into the StopFilter constructor. This permits this table construction to
       be cached once when an Analyzer is constructed. */
-  public final static Hashtable makeStopTable(String[] stopWords) {
+  public static final Hashtable makeStopTable(String[] stopWords) {
     Hashtable stopTable = new Hashtable(stopWords.length);
     for (int i = 0; i < stopWords.length; i++)
       stopTable.put(stopWords[i], stopWords[i]);
@@ -61,7 +61,7 @@ import java.io.IOException;
   This is an abstract class.
   */

-abstract public class TokenFilter extends TokenStream {
+public abstract class TokenFilter extends TokenStream {
   /** The source of tokens for this filter. */
   protected TokenStream input;

@@ -68,9 +68,9 @@ import java.io.IOException;
   </ul>
   */

-abstract public class TokenStream {
+public abstract class TokenStream {
   /** Returns the next token in the stream, or null at EOS. */
-  abstract public Token next() throws IOException;
+  public abstract Token next() throws IOException;

   /** Releases resources associated with this stream. */
   public void close() throws IOException {}
@@ -62,7 +62,7 @@ import java.io.IOException;
   This is an abstract class.
   */

-abstract public class Tokenizer extends TokenStream {
+public abstract class Tokenizer extends TokenStream {
   /** The text source for this Tokenizer. */
   protected Reader input;

@@ -75,13 +75,13 @@ final class DocumentWriter {
   private Directory directory;
   private FieldInfos fieldInfos;
   private int maxFieldLength;

   DocumentWriter(Directory d, Analyzer a, int mfl) {
     directory = d;
     analyzer = a;
     maxFieldLength = mfl;
   }

   final void addDocument(String segment, Document doc)
       throws IOException {
     // write field names

@@ -97,7 +97,7 @@ final class DocumentWriter {
     } finally {
       fieldsWriter.close();
     }

     // invert doc into postingTable
     postingTable.clear();                         // clear postingTable
     fieldLengths = new int[fieldInfos.size()];    // init fieldLengths

@@ -128,7 +128,7 @@ final class DocumentWriter {

     // write norms of indexed fields
     writeNorms(doc, segment);

   }

   // Keys are Terms, values are Postings.

@@ -216,7 +216,7 @@ final class DocumentWriter {
     return array;
   }

-  static private final void quickSort(Posting[] postings, int lo, int hi) {
+  private static final void quickSort(Posting[] postings, int lo, int hi) {
     if(lo >= hi)
       return;

@@ -232,7 +232,7 @@ final class DocumentWriter {
       Posting tmp = postings[mid];
       postings[mid] = postings[hi];
       postings[hi] = tmp;

       if(postings[lo].term.compareTo(postings[mid].term) > 0) {
         Posting tmp2 = postings[lo];
         postings[lo] = postings[mid];

@@ -244,17 +244,17 @@ final class DocumentWriter {
     int right = hi - 1;

     if (left >= right)
-      return;
+      return;

     Term partition = postings[mid].term;

     for( ;; ) {
       while(postings[right].term.compareTo(partition) > 0)
         --right;

       while(left < right && postings[left].term.compareTo(partition) <= 0)
         ++left;

       if(left < right) {
         Posting tmp = postings[left];
         postings[left] = postings[right];

@@ -264,7 +264,7 @@ final class DocumentWriter {
         break;
       }
     }

     quickSort(postings, lo, left);
     quickSort(postings, left + 1, hi);
   }

@@ -286,7 +286,7 @@ final class DocumentWriter {
       // add an entry to the dictionary with pointers to prox and freq files
       ti.set(1, freq.getFilePointer(), prox.getFilePointer());
       tis.add(posting.term, ti);

       // add an entry to the freq file
       int f = posting.freq;
       if (f == 1)                                 // optimize freq=1

@@ -295,7 +295,7 @@ final class DocumentWriter {
         freq.writeVInt(0);                        // the document number
         freq.writeVInt(f);                        // frequency in doc
       }

       int lastPosition = 0;                       // write positions
       int[] positions = posting.positions;
       for (int j = 0; j < f; j++) {               // use delta-encoding

@@ -336,7 +336,7 @@ final class Posting {                            // info about a Term in a doc
   Term term;                                      // the Term
   int freq;                                       // its frequency in doc
   int[] positions;                                // positions it occurs at

   Posting(Term t, int position) {
     term = t;
     freq = 1;
@@ -75,7 +75,7 @@ import org.apache.lucene.document.Field;
   as documents are added to and deleted from an index. Clients should thus not
   rely on a given document having the same number between sessions. */

-abstract public class IndexReader {
+public abstract class IndexReader {
   protected IndexReader(Directory directory) {
     this.directory = directory;
   }

@@ -162,42 +162,42 @@ abstract public class IndexReader {
   }

   /** Returns the number of documents in this index. */
-  abstract public int numDocs();
+  public abstract int numDocs();

   /** Returns one greater than the largest possible document number.
     This may be used to, e.g., determine how big to allocate an array which
     will have an element for every document number in an index.
    */
-  abstract public int maxDoc();
+  public abstract int maxDoc();

   /** Returns the stored fields of the <code>n</code><sup>th</sup>
     <code>Document</code> in this index. */
-  abstract public Document document(int n) throws IOException;
+  public abstract Document document(int n) throws IOException;

   /** Returns true if document <i>n</i> has been deleted */
-  abstract public boolean isDeleted(int n);
+  public abstract boolean isDeleted(int n);

   /** Returns the byte-encoded normalization factor for the named field of
    * every document. This is used by the search code to score documents.
    *
    * @see Field#setBoost(float)
    */
-  abstract public byte[] norms(String field) throws IOException;
+  public abstract byte[] norms(String field) throws IOException;

   /** Returns an enumeration of all the terms in the index.
     The enumeration is ordered by Term.compareTo(). Each term
     is greater than all that precede it in the enumeration.
    */
-  abstract public TermEnum terms() throws IOException;
+  public abstract TermEnum terms() throws IOException;

   /** Returns an enumeration of all terms after a given term.
     The enumeration is ordered by Term.compareTo(). Each term
     is greater than all that precede it in the enumeration.
    */
-  abstract public TermEnum terms(Term t) throws IOException;
+  public abstract TermEnum terms(Term t) throws IOException;

   /** Returns the number of documents containing the term <code>t</code>. */
-  abstract public int docFreq(Term t) throws IOException;
+  public abstract int docFreq(Term t) throws IOException;

   /** Returns an enumeration of all the documents which contain
     <code>term</code>. For each document, the document number, the frequency of

@@ -215,7 +215,7 @@ abstract public class IndexReader {
   }

   /** Returns an unpositioned {@link TermDocs} enumerator. */
-  abstract public TermDocs termDocs() throws IOException;
+  public abstract TermDocs termDocs() throws IOException;

   /** Returns an enumeration of all the documents which contain
     <code>term</code>. For each document, in addition to the document number

@@ -239,7 +239,7 @@ abstract public class IndexReader {
   }

   /** Returns an unpositioned {@link TermPositions} enumerator. */
-  abstract public TermPositions termPositions() throws IOException;
+  public abstract TermPositions termPositions() throws IOException;

   /** Deletes the document numbered <code>docNum</code>. Once a document is
     deleted it will not appear in TermDocs or TermPostitions enumerations.

@@ -247,7 +247,7 @@ abstract public class IndexReader {
     method will result in an error. The presence of this document may still be
     reflected in the {@link #docFreq} statistic, though
     this will be corrected eventually as the index is further modified. */
-  public synchronized final void delete(int docNum) throws IOException {
+  public final synchronized void delete(int docNum) throws IOException {
     if (writeLock == null) {
       Lock writeLock = directory.makeLock("write.lock");
       if (!writeLock.obtain())                    // obtain write lock
@@ -65,6 +65,7 @@ import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.Lock;
 import org.apache.lucene.store.InputStream;
 import org.apache.lucene.store.OutputStream;
+import org.apache.lucene.search.Similarity;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.analysis.Analyzer;

@@ -93,6 +94,8 @@ public class IndexWriter {

   private Lock writeLock;

+  private Similarity similarity;
+
   /** Constructs an IndexWriter for the index in <code>path</code>. Text will
     be analyzed with <code>a</code>. If <code>create</code> is true, then a
     new, empty index will be created in <code>path</code>, replacing the index

@@ -404,4 +407,13 @@ public class IndexWriter {
     }
     directory.renameFile("deleteable.new", "deletable");
   }
+
+  /**
+   * Sets the <code>Similarity</code> implementation to use.
+   *
+   * @param sim an instance of a class that implements <code>Similarity</code>
+   */
+  public void setSimilarity(Similarity sim) {
+    similarity = sim;
+  }
 }
@@ -72,7 +72,7 @@ import org.apache.lucene.util.PriorityQueue;
 public class MultipleTermPositions
   implements TermPositions
 {
-  private final static class TermPositionsQueue
+  private static final class TermPositionsQueue
     extends PriorityQueue
   {
     TermPositionsQueue(List termPositions)

@@ -100,7 +100,7 @@ public class MultipleTermPositions
     }
   }

-  private final static class IntQueue
+  private static final class IntQueue
   {
     private int _arraySize = 16;

@@ -143,7 +143,7 @@ final class SegmentReader extends IndexReader {
       directory.close();
   }

-  final static boolean hasDeletions(SegmentInfo si) throws IOException {
+  static final boolean hasDeletions(SegmentInfo si) throws IOException {
     return si.dir.fileExists(si.name + ".del");
   }

@@ -84,7 +84,7 @@ final class SegmentsReader extends IndexReader
     starts[readers.length] = maxDoc;
   }

-  public synchronized final int numDocs() {
+  public final synchronized int numDocs() {
     if (numDocs == -1) {                          // check cache
       int n = 0;                                  // cache miss--recompute
       for (int i = 0; i < readers.length; i++)

@@ -108,7 +108,7 @@ final class SegmentsReader extends IndexReader
     return readers[i].isDeleted(n - starts[i]);   // dispatch to segment reader
   }

-  synchronized final void doDelete(int n) throws IOException {
+  final synchronized void doDelete(int n) throws IOException {
     numDocs = -1;                                 // invalidate cache
     int i = readerIndex(n);                       // find segment num
     readers[i].doDelete(n - starts[i]);           // dispatch to segment reader
@@ -63,14 +63,14 @@ import java.io.IOException;

 public abstract class TermEnum {
   /** Increments the enumeration to the next element. True if one exists.*/
-  abstract public boolean next() throws IOException;
+  public abstract boolean next() throws IOException;

   /** Returns the current Term in the enumeration.*/
-  abstract public Term term();
+  public abstract Term term();

   /** Returns the docFreq of the current Term in the enumeration.*/
-  abstract public int docFreq();
+  public abstract int docFreq();

   /** Closes the enumeration to further activity, freeing resources. */
-  abstract public void close() throws IOException;
+  public abstract void close() throws IOException;
 }
@@ -67,7 +67,7 @@ final class TermInfosWriter {
   private Term lastTerm = new Term("", "");
   private TermInfo lastTi = new TermInfo();
   private int size = 0;

   static final int INDEX_INTERVAL = 128;
   private long lastIndexPointer = 0;
   private boolean isIndex = false;

@@ -75,7 +75,7 @@ final class TermInfosWriter {
   private TermInfosWriter other = null;

   TermInfosWriter(Directory directory, String segment, FieldInfos fis)
-    throws IOException, SecurityException {
+    throws IOException {
     initialize(directory, segment, fis, false);
     other = new TermInfosWriter(directory, segment, fis, true);
     other.other = this;

@@ -98,7 +98,7 @@ final class TermInfosWriter {
     Term must be lexicographically greater than all previous Terms added.
     TermInfo pointers must be positive and greater than all previous.*/
   final void add(Term term, TermInfo ti)
-    throws IOException, SecurityException {
+    throws IOException {
     if (!isIndex && term.compareTo(lastTerm) <= 0)
       throw new IOException("term out of order");
     if (ti.freqPointer < lastTi.freqPointer)

@@ -127,7 +127,7 @@ final class TermInfosWriter {
     throws IOException {
     int start = stringDifference(lastTerm.text, term.text);
     int length = term.text.length() - start;

     output.writeVInt(start);                      // write shared prefix length
     output.writeVInt(length);                     // write delta length
     output.writeChars(term.text, start, length);  // write delta chars

@@ -148,11 +148,11 @@ final class TermInfosWriter {
   }

   /** Called to complete TermInfos creation. */
-  final void close() throws IOException, SecurityException {
+  final void close() throws IOException {
     output.seek(0);                               // write size at start
     output.writeInt(size);
     output.close();

     if (!isIndex)
       other.close();
   }
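The hunk above that writes the shared prefix length and the delta chars shows how TermInfosWriter front-codes the term dictionary: for each term only the length of the prefix shared with the previously written term plus the remaining suffix is stored. A self-contained sketch of the same idea, with illustrative names rather than Lucene's (the real code writes VInts and chars to an OutputStream instead of printing):

    public class FrontCodingSketch {
      // Length of the common prefix of two strings, analogous to stringDifference() above.
      static int sharedPrefix(String prev, String next) {
        int limit = Math.min(prev.length(), next.length());
        int i = 0;
        while (i < limit && prev.charAt(i) == next.charAt(i)) i++;
        return i;
      }

      public static void main(String[] args) {
        String[] sortedTerms = {"apple", "applet", "apply", "banana"};
        String prev = "";
        for (int i = 0; i < sortedTerms.length; i++) {
          String term = sortedTerms[i];
          int start = sharedPrefix(prev, term);
          // Prints: 0 + "apple", 5 + "t", 4 + "y", 0 + "banana"
          System.out.println(start + " + \"" + term.substring(start) + "\"");
          prev = term;
        }
      }
    }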
@@ -66,7 +66,7 @@ public final class FastCharStream implements CharStream {

   int bufferLength = 0;                           // end of valid chars
   int bufferPosition = 0;                         // next char to read

   int tokenStart = 0;                             // offset in buffer
   int bufferStart = 0;                            // position in file of buffer

@@ -88,7 +88,7 @@ public final class FastCharStream implements CharStream {

     if (tokenStart == 0) {                        // token won't fit in buffer
       if (buffer == null) {                       // first time: alloc buffer
-        buffer = new char[2048];
+        buffer = new char[2048];
       } else if (bufferLength == buffer.length) { // grow buffer
         char[] newBuffer = new char[buffer.length*2];
         System.arraycopy(buffer, 0, newBuffer, 0, bufferLength);
@@ -328,6 +328,15 @@ PARSER_END(QueryParser)
 <<_WHITESPACE>>
 }

+// OG: to support prefix queries:
+// http://nagoya.apache.org/bugzilla/show_bug.cgi?id=12137
+// Change from:
+// | <WILDTERM: <_TERM_START_CHAR>
+//              (<_TERM_CHAR> | ( [ "*", "?" ] ))* >
+// To:
+//
+// | <WILDTERM: (<_TERM_CHAR> | ( [ "*", "?" ] ))* >
+
 <DEFAULT> TOKEN : {
   <AND: ("AND" | "&&") >
 | <OR: ("OR" | "||") >
@@ -60,8 +60,8 @@ import org.apache.lucene.index.IndexReader;

 /** Abstract base class providing a mechanism to restrict searches to a subset
    of an index. */
-abstract public class Filter implements java.io.Serializable {
+public abstract class Filter implements java.io.Serializable {
   /** Returns a BitSet with true for documents which should be permitted in
     search results, and false for those that should not. */
-  abstract public BitSet bits(IndexReader reader) throws IOException;
+  public abstract BitSet bits(IndexReader reader) throws IOException;
 }
@@ -82,7 +82,7 @@ public abstract class FilteredTermEnum extends TermEnum {
         this.actualEnum = actualEnum;
         // Find the first term that matches
         Term term = actualEnum.term();
-        if (termCompare(term))
+        if (term != null && termCompare(term))
             currentTerm = term;
         else next();
     }
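This hunk is the one change in the commit that goes beyond style: TermEnum.term() can return null when the wrapped enumeration is empty, and the unguarded termCompare(term) call would then throw a NullPointerException. A tiny self-contained illustration of the guard (all names hypothetical, not Lucene's API):

    public class NullGuardSketch {
      // Stands in for TermEnum.term(), which may return null on an empty enumeration.
      static String currentTerm(String[] terms, int pos) {
        return pos < terms.length ? terms[pos] : null;
      }

      static boolean matches(String term) {
        return term.startsWith("a");               // would NPE if term were null
      }

      public static void main(String[] args) {
        String term = currentTerm(new String[0], 0);   // empty enumeration -> null
        // Unguarded: matches(term) throws NullPointerException.
        // Guarded, as in the patched constructor:
        if (term != null && matches(term)) {
          System.out.println("first match: " + term);
        } else {
          System.out.println("no term to compare");
        }
      }
    }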
@@ -59,7 +59,7 @@ import org.apache.lucene.index.Term;
 import java.io.IOException;

 /** Implements the fuzzy search query */
-final public class FuzzyQuery extends MultiTermQuery {
+public final class FuzzyQuery extends MultiTermQuery {
     private Term fuzzyTerm;

     public FuzzyQuery(Term term) {
@@ -63,7 +63,7 @@ import org.apache.lucene.index.TermEnum;

   <p>Term enumerations are always ordered by Term.compareTo(). Each term in
   the enumeration is greater than all that precede it. */
-final public class FuzzyTermEnum extends FilteredTermEnum {
+public final class FuzzyTermEnum extends FilteredTermEnum {
     double distance;
     boolean fieldMatch = false;
     boolean endEnum = false;

@@ -86,7 +86,7 @@ final public class FuzzyTermEnum extends FilteredTermEnum {
     The termCompare method in FuzzyTermEnum uses Levenshtein distance to
     calculate the distance between the given term and the comparing term.
     */
-    final protected boolean termCompare(Term term) {
+    protected final boolean termCompare(Term term) {
         if (field == term.field()) {
             String target = term.text();
             int targetlen = target.length();

@@ -98,11 +98,11 @@ final public class FuzzyTermEnum extends FilteredTermEnum {
         return false;
     }

-    final protected float difference() {
+    protected final float difference() {
        return (float)((distance - FUZZY_THRESHOLD) * SCALE_FACTOR);
    }

-    final public boolean endEnum() {
+    public final boolean endEnum() {
        return endEnum;
    }

@@ -116,7 +116,7 @@ final public class FuzzyTermEnum extends FilteredTermEnum {
    /**
     Finds and returns the smallest of three integers
     */
-    private final static int min(int a, int b, int c) {
+    private static final int min(int a, int b, int c) {
        int t = (a < b) ? a : b;
        return (t < c) ? t : c;
    }
@@ -109,7 +109,7 @@ public class MultiTermQuery extends Query {
         return getQuery().scorer(reader);
     }

-    final private BooleanQuery getQuery() throws IOException {
+    private final BooleanQuery getQuery() throws IOException {
         if (query == null) {
             BooleanQuery q = new BooleanQuery();
             try {
@@ -100,7 +100,7 @@ abstract class PhraseScorer extends Scorer {
     }
   }

-  abstract protected float phraseFreq() throws IOException;
+  protected abstract float phraseFreq() throws IOException;

   protected final void pqToList() {
     last = first = null;
@@ -76,7 +76,7 @@ import org.apache.lucene.index.IndexReader;
     <li>{@link org.apache.lucene.queryParser.QueryParser QueryParser}
     </ul>
 */
-abstract public class Query implements java.io.Serializable
+public abstract class Query implements java.io.Serializable
 {
   // query boost factor
   protected float boost = 1.0f;

@@ -122,5 +122,5 @@ abstract public class Query implements java.io.Serializable
    * (although, if the query was created by the parser, the printed
    * representation may not be exactly what was parsed).
    */
-  abstract public String toString(String field);
+  public abstract String toString(String field);
 }
@@ -64,6 +64,8 @@ import org.apache.lucene.index.IndexReader;
  */
 public abstract class Searcher implements Searchable {

+  protected Similarity similarity;
+
   /** Returns the documents matching <code>query</code>. */
   public final Hits search(Query query) throws IOException {
     return search(query, (Filter)null);

@@ -88,4 +90,13 @@ public abstract class Searcher implements Searchable {
     throws IOException {
     search(query, (Filter)null, results);
   }
+
+  /**
+   * Sets the <code>Similarity</code> implementation to use.
+   *
+   * @param sim an instance of a class that implements <code>Similarity</code>
+   */
+  public void setSimilarity(Similarity sim) {
+    similarity = sim;
+  }
 }
@@ -61,16 +61,28 @@ import org.apache.lucene.document.Field;
 /** Internal class used for scoring.
  * <p>Public only so that the indexing code can compute and store the
  * normalization byte for each document. */
-public final class Similarity {
-  private Similarity() {}                         // no public constructor
+public abstract class Similarity {

-  static final float[] NORM_TABLE = new float[256];
+  private static final float[] NORM_TABLE = new float[256];

   static {
     for (int i = 0; i < 256; i++)
       NORM_TABLE[i] = byteToFloat((byte)i);
   }

+  private static Similarity similarity;
+
+  private Similarity() {}                         // no public constructor
+
+  /**
+   * Sets the <code>Similarity</code> implementation to use.
+   *
+   * @param sim an instance of a class that implements <code>Similarity</code>
+   */
+  public static void setDefaultSimilarity(Similarity sim) {
+    similarity = sim;
+  }
+
   /** Computes the normalization value for a document given the total number of
    * terms contained in a field. These values are stored in an index and used
    * by the search code.
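The Similarity hunk is the largest non-mechanical piece: the class becomes abstract, NORM_TABLE goes private, and a static field plus setDefaultSimilarity() let callers swap the scoring policy, mirrored by the new instance-level setSimilarity() on IndexWriter and Searcher above. A minimal sketch of that configurable-default pattern in isolation (interface and class names are illustrative, not Lucene's API; the 1/sqrt(numTerms) default is just an example value):

    public class ScoringDefaults {
      // A pluggable scoring policy, standing in for Similarity.
      public interface Scorer {
        float lengthNorm(int numTerms);
      }

      private static Scorer defaultScorer = new Scorer() {  // initial built-in behaviour
        public float lengthNorm(int numTerms) {
          return (float) (1.0 / Math.sqrt(numTerms));
        }
      };

      /** Mirrors the new setDefaultSimilarity(): swaps the process-wide default. */
      public static void setDefaultScorer(Scorer s) {
        defaultScorer = s;
      }

      public static void main(String[] args) {
        System.out.println(defaultScorer.lengthNorm(16));   // 0.25 with the built-in policy
        setDefaultScorer(new Scorer() {
          public float lengthNorm(int numTerms) {
            return 1.0f;                                     // ignore field length entirely
          }
        });
        System.out.println(defaultScorer.lengthNorm(16));   // 1.0 after the override
      }
    }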
@@ -94,7 +94,7 @@ public class WildcardTermEnum extends FilteredTermEnum {
     setEnum(reader.terms(new Term(searchTerm.field(), pre)));
   }

-  final protected boolean termCompare(Term term) {
+  protected final boolean termCompare(Term term) {
     if (field == term.field()) {
       String searchText = term.text();
       if (searchText.startsWith(pre)) {

@@ -105,11 +105,11 @@ public class WildcardTermEnum extends FilteredTermEnum {
     return false;
   }

-  final public float difference() {
+  public final float difference() {
     return 1.0f;
   }

-  final public boolean endEnum() {
+  public final boolean endEnum() {
     return endEnum;
   }

@@ -61,7 +61,7 @@ import java.io.IOException;
  * deleted. Random access is permitted both when reading and writing.
  *
  * <p> Java's i/o APIs not used directly, but rather all i/o is
- * through this API. This permits things such as: <ul>
+ * through this API. This permits things such as: <ul>
  * <li> implementation of RAM-based indices;
  * <li> implementation indices stored in a database, via JDBC;
  * <li> implementation of an index as a single file;

@@ -69,52 +69,52 @@ import java.io.IOException;
  *
  * @author Doug Cutting
 */
-abstract public class Directory {
+public abstract class Directory {
   /** Returns an array of strings, one for each file in the directory. */
-  abstract public String[] list()
-       throws IOException, SecurityException;
+  public abstract String[] list()
+       throws IOException;

   /** Returns true iff a file with the given name exists. */
-  abstract public boolean fileExists(String name)
-       throws IOException, SecurityException;
+  public abstract boolean fileExists(String name)
+       throws IOException;

   /** Returns the time the named file was last modified. */
-  abstract public long fileModified(String name)
-       throws IOException, SecurityException;
+  public abstract long fileModified(String name)
+       throws IOException;

   /** Set the modified time of an existing file to now. */
-  abstract public void touchFile(String name)
-       throws IOException, SecurityException;
+  public abstract void touchFile(String name)
+       throws IOException;

   /** Removes an existing file in the directory. */
-  abstract public void deleteFile(String name)
-       throws IOException, SecurityException;
+  public abstract void deleteFile(String name)
+       throws IOException;

   /** Renames an existing file in the directory.
     If a file already exists with the new name, then it is replaced.
     This replacement should be atomic. */
-  abstract public void renameFile(String from, String to)
-       throws IOException, SecurityException;
+  public abstract void renameFile(String from, String to)
+       throws IOException;

   /** Returns the length of a file in the directory. */
-  abstract public long fileLength(String name)
-       throws IOException, SecurityException;
+  public abstract long fileLength(String name)
+       throws IOException;

   /** Creates a new, empty file in the directory with the given name.
     Returns a stream writing this file. */
-  abstract public OutputStream createFile(String name)
-       throws IOException, SecurityException;
+  public abstract OutputStream createFile(String name)
+       throws IOException;

   /** Returns a stream reading an existing file. */
-  abstract public InputStream openFile(String name)
-       throws IOException, SecurityException;
+  public abstract InputStream openFile(String name)
+       throws IOException;

   /** Construct a {@link Lock}.
    * @param name the name of the lock file
    */
-  abstract public Lock makeLock(String name);
+  public abstract Lock makeLock(String name);

   /** Closes the store. */
-  abstract public void close()
-       throws IOException, SecurityException;
+  public abstract void close()
+       throws IOException;
 }
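Alongside the modifier reordering, every throws clause in Directory (and in the store classes above and below) drops SecurityException. The declaration adds nothing because SecurityException is unchecked, extending RuntimeException, so it propagates whether or not it is declared; presumably jikes flagged the redundant clause. A compilable reminder of why that is (hypothetical example, not Lucene code):

    import java.io.IOException;

    public class UncheckedThrowsSketch {
      // SecurityException extends RuntimeException, so it propagates whether or not
      // it is declared; only IOException actually needs the throws clause.
      static void deleteFile(String name) throws IOException {
        if (name.startsWith("/protected/")) {
          throw new SecurityException("not allowed: " + name);  // compiles without being declared
        }
        if (name.length() == 0) {
          throw new IOException("empty file name");
        }
        System.out.println("deleted " + name);
      }

      public static void main(String[] args) throws IOException {
        deleteFile("segments.new");
        try {
          deleteFile("/protected/write.lock");
        } catch (SecurityException e) {
          System.out.println("caught unchecked: " + e.getMessage());
        }
      }
    }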
@@ -71,7 +71,7 @@ import org.apache.lucene.util.Constants;
  * @author Doug Cutting
  */

-final public class FSDirectory extends Directory {
+public final class FSDirectory extends Directory {
   /** This cache of directories ensures that there is a unique Directory
    * instance per path, so that synchronization on the Directory can be used to
    * synchronize access between readers and writers.

@@ -176,7 +176,7 @@ final public class FSDirectory extends Directory {
   }

   /** Set the modified time of an existing file to now. */
-  public void touchFile(String name) throws IOException, SecurityException {
+  public void touchFile(String name) throws IOException {
     File file = new File(directory, name);
     file.setLastModified(System.currentTimeMillis());
   }
@@ -61,8 +61,8 @@ import java.io.IOException;
  * @see Directory
  * @see OutputStream
  */
-abstract public class InputStream implements Cloneable {
-  final static int BUFFER_SIZE = OutputStream.BUFFER_SIZE;
+public abstract class InputStream implements Cloneable {
+  static final int BUFFER_SIZE = OutputStream.BUFFER_SIZE;

   private byte[] buffer;
   private char[] chars;

@@ -174,7 +174,7 @@ abstract public class InputStream implements Cloneable {
       else if ((b & 0xE0) != 0xE0) {
        buffer[i] = (char)(((b & 0x1F) << 6)
                | (readByte() & 0x3F));
-      } else
+      } else
        buffer[i] = (char)(((b & 0x0F) << 12)
                | ((readByte() & 0x3F) << 6)
                | (readByte() & 0x3F));

@@ -205,11 +205,11 @@ abstract public class InputStream implements Cloneable {
    * @param offset the offset in the array to start storing bytes
    * @param length the number of bytes to read
    */
-  abstract protected void readInternal(byte[] b, int offset, int length)
+  protected abstract void readInternal(byte[] b, int offset, int length)
     throws IOException;

   /** Closes the stream to futher operations. */
-  abstract public void close() throws IOException;
+  public abstract void close() throws IOException;

   /** Returns the current position in this file, where the next read will
    * occur.

@@ -237,7 +237,7 @@ abstract public class InputStream implements Cloneable {
    * next {@link #readInternal(byte[],int,int)} will occur.
    * @see #readInternal(byte[],int,int)
    */
-  abstract protected void seekInternal(long pos) throws IOException;
+  protected abstract void seekInternal(long pos) throws IOException;

   /** The number of bytes in the file. */
   public final long length() {
@@ -60,9 +60,9 @@ import java.io.IOException;
  * stream. Used for all Lucene index output operations.
  * @see Directory
  * @see InputStream
- */
-abstract public class OutputStream {
-  final static int BUFFER_SIZE = 1024;
+ */
+public abstract class OutputStream {
+  static final int BUFFER_SIZE = 1024;

   private final byte[] buffer = new byte[BUFFER_SIZE];
   private long bufferStart = 0;                   // position in file of buffer

@@ -176,7 +176,7 @@ abstract public class OutputStream {
    * @param b the bytes to write
    * @param len the number of bytes to write
    */
-  abstract protected void flushBuffer(byte[] b, int len) throws IOException;
+  protected abstract void flushBuffer(byte[] b, int len) throws IOException;

   /** Closes this stream to further operations. */
   public void close() throws IOException {

@@ -200,7 +200,7 @@ abstract public class OutputStream {
   }

   /** The number of bytes in the file. */
-  abstract public long length() throws IOException;
+  public abstract long length() throws IOException;


 }
@@ -64,13 +64,41 @@ import org.apache.lucene.store.InputStream;
 import org.apache.lucene.store.OutputStream;

 /** A memory-resident {@link Directory} implementation. */
-final public class RAMDirectory extends Directory {
+public final class RAMDirectory extends Directory {
   Hashtable files = new Hashtable();

   /** Constructs an empty {@link Directory}. */
   public RAMDirectory() {
   }

+  /**
+   * Creates a new <code>RAMDirectory</code> instance from a different
+   * <code>Directory</code> implementation. This can be used to load
+   * a disk-based index into memory.
+   * <P>
+   * This should be used only with indices that can fit into memory.
+   *
+   * @param d a <code>Directory</code> value
+   * @exception IOException if an error occurs
+   */
+  public RAMDirectory(Directory d) throws IOException {
+    final String[] ar = d.list();
+    for (int i = 0; i < ar.length; i++) {
+      // make place on ram disk
+      OutputStream os = createFile(ar[i]);
+      // read current file
+      InputStream is = d.openFile(ar[i]);
+      // and copy to ram disk
+      int len = (int) is.length();
+      byte[] buf = new byte[len];
+      is.readBytes(buf, 0, len);
+      os.writeBytes(buf, len);
+      // graceful cleanup
+      is.close();
+      os.close();
+    }
+  }
+
   /** Returns an array of strings, one for each file in the directory. */
   public final String[] list() {
     String[] result = new String[files.size()];

@@ -80,7 +108,7 @@ final public class RAMDirectory extends Directory {
       result[i++] = (String)names.nextElement();
     return result;
   }

   /** Returns true iff the named file exists in this directory. */
   public final boolean fileExists(String name) {
     RAMFile file = (RAMFile)files.get(name);

@@ -94,7 +122,7 @@ final public class RAMDirectory extends Directory {
   }

   /** Set the modified time of an existing file to now. */
-  public void touchFile(String name) throws IOException, SecurityException {
+  public void touchFile(String name) throws IOException {
     RAMFile file = (RAMFile)files.get(name);
     file.lastModified = System.currentTimeMillis();
   }
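The new RAMDirectory(Directory) constructor above copies every file of an existing Directory into the in-memory one, which is how a disk-based index can be loaded into RAM. The body is the plain read-everything-then-write idiom; here is the same idiom with standard java.io streams copying a directory of files into an in-memory table (a hypothetical standalone helper, not Lucene code):

    import java.io.ByteArrayOutputStream;
    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.util.Hashtable;

    public class LoadIntoMemorySketch {
      /** Reads every regular file under dir fully into memory, keyed by file name. */
      static Hashtable loadDirectory(File dir) throws IOException {
        Hashtable files = new Hashtable();             // name -> byte[] contents
        File[] entries = dir.listFiles();
        if (entries == null) {
          throw new IOException("not a directory: " + dir);
        }
        for (int i = 0; i < entries.length; i++) {
          if (!entries[i].isFile()) continue;
          FileInputStream in = new FileInputStream(entries[i]);
          ByteArrayOutputStream out = new ByteArrayOutputStream();
          try {
            byte[] buf = new byte[1024];
            int n;
            while ((n = in.read(buf)) != -1) {         // copy until EOF, like the RAMDirectory loop
              out.write(buf, 0, n);
            }
          } finally {
            in.close();                                // graceful cleanup, as in the new constructor
          }
          files.put(entries[i].getName(), out.toByteArray());
        }
        return files;
      }

      public static void main(String[] args) throws IOException {
        Hashtable files = loadDirectory(new File(args.length > 0 ? args[0] : "."));
        System.out.println(files.size() + " files loaded into memory");
      }
    }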
@@ -63,7 +63,7 @@ public abstract class PriorityQueue {

   /** Determines the ordering of objects in this priority queue. Subclasses
     must define this one method. */
-  abstract protected boolean lessThan(Object a, Object b);
+  protected abstract boolean lessThan(Object a, Object b);

   /** Subclass constructors must call this. */
   protected final void initialize(int maxSize) {