LUCENE-1257: More generified APIs and implementations. Thanks Kay Kay!

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@826290 13f79535-47bb-0310-9956-ffa450edef68
Uwe Schindler 2009-10-17 19:15:07 +00:00
parent 975ce67fb5
commit b22932be70
15 changed files with 78 additions and 93 deletions
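This commit continues the LUCENE-1257 generics migration: raw collection types gain explicit type parameters, and hand-rolled Iterator loops with unchecked casts become enhanced for loops. A minimal, self-contained sketch of the before/after pattern (the segment names are illustrative, not taken from the patch):

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class GenericsPattern {
  // Before: a raw List forces an unchecked cast at each use site,
  // and a type mistake only surfaces at runtime.
  static void rawStyle(List segments) {
    Iterator it = segments.iterator();
    while (it.hasNext()) {
      String name = (String) it.next();
      System.out.println(name);
    }
  }

  // After: the element type is checked at compile time and the
  // enhanced for loop replaces the explicit Iterator.
  static void genericStyle(List<String> segments) {
    for (String name : segments) {
      System.out.println(name);
    }
  }

  public static void main(String[] args) {
    List<String> segments = new ArrayList<String>();
    segments.add("_0");
    segments.add("_1");
    rawStyle(segments);
    genericStyle(segments);
  }
}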

CheckIndex.java

@@ -28,7 +28,7 @@ import java.io.PrintStream;
 import java.io.IOException;
 import java.io.File;
 import java.util.Collection;
-import java.util.Iterator;
 import java.util.List;
 import java.util.ArrayList;
 import java.util.Map;
@@ -113,7 +113,7 @@ public class CheckIndex {
 public boolean partial;
 /** Holds the userData of the last commit in the index */
-public Map userData;
+public Map<String, String> userData;
 /** Holds the status of each segment in the index.
  * See {@link #segmentInfos}.
@@ -172,10 +172,10 @@ public class CheckIndex {
 * @see AbstractField#setOmitTermFreqAndPositions */
 public boolean hasProx;
-/** Map<String, String> that includes certain
+/** Map that includes certain
 * debugging details that IndexWriter records into
 * each segment it creates */
-public Map diagnostics;
+public Map<String,String> diagnostics;
 /** Status for testing of field norms (null if field norms could not be tested). */
 public FieldNormStatus fieldNormStatus;
@@ -309,7 +309,7 @@ public class CheckIndex {
 * <p><b>WARNING</b>: make sure
 * you only call this when the index is not opened by any
 * writer. */
-public Status checkIndex(List onlySegments) throws IOException {
+public Status checkIndex(List<String> onlySegments) throws IOException {
 NumberFormat nf = NumberFormat.getInstance();
 SegmentInfos sis = new SegmentInfos();
 Status result = new Status();
@@ -397,10 +397,9 @@ public class CheckIndex {
 result.partial = true;
 if (infoStream != null)
 infoStream.print("\nChecking only these segments:");
-Iterator it = onlySegments.iterator();
-while (it.hasNext()) {
+for (String s : onlySegments) {
 if (infoStream != null)
-infoStream.print(" " + it.next());
+infoStream.print(" " + s);
 }
 result.segmentsChecked.addAll(onlySegments);
 msg(":");
@@ -439,7 +438,7 @@ public class CheckIndex {
 segInfoStat.numFiles = info.files().size();
 msg(" size (MB)=" + nf.format(info.sizeInBytes()/(1024.*1024.)));
 segInfoStat.sizeMB = info.sizeInBytes()/(1024.*1024.);
-Map diagnostics = info.getDiagnostics();
+Map<String,String> diagnostics = info.getDiagnostics();
 segInfoStat.diagnostics = diagnostics;
 if (diagnostics.size() > 0) {
 msg(" diagnostics = " + diagnostics);
@@ -497,7 +496,7 @@ public class CheckIndex {
 if (infoStream != null) {
 infoStream.print(" test: fields..............");
 }
-Collection fieldNames = reader.getFieldNames(IndexReader.FieldOption.ALL);
+Collection<String> fieldNames = reader.getFieldNames(IndexReader.FieldOption.ALL);
 msg("OK [" + fieldNames.size() + " fields]");
 segInfoStat.numFields = fieldNames.size();
@@ -559,7 +558,7 @@ public class CheckIndex {
 /**
 * Test field norms.
 */
-private Status.FieldNormStatus testFieldNorms(Collection fieldNames, SegmentReader reader) {
+private Status.FieldNormStatus testFieldNorms(Collection<String> fieldNames, SegmentReader reader) {
 final Status.FieldNormStatus status = new Status.FieldNormStatus();
 try {
@@ -567,10 +566,8 @@ public class CheckIndex {
 if (infoStream != null) {
 infoStream.print(" test: field norms.........");
 }
-Iterator it = fieldNames.iterator();
 final byte[] b = new byte[reader.maxDoc()];
-while (it.hasNext()) {
-final String fieldName = (String) it.next();
+for (final String fieldName : fieldNames) {
 reader.norms(fieldName, b, 0);
 ++status.totFields;
 }
@@ -807,7 +804,7 @@ public class CheckIndex {
 public static void main(String[] args) throws IOException, InterruptedException {
 boolean doFix = false;
-List onlySegments = new ArrayList();
+List<String> onlySegments = new ArrayList<String>();
 String indexPath = null;
 int i = 0;
 while(i < args.length) {
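CheckIndex.checkIndex now takes a List<String> of segment names, so callers no longer build raw lists. A sketch of calling the generified signature, assuming the 2.9/3.0-era API (the index path and segment name are hypothetical):

import java.io.File;
import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.index.CheckIndex;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class CheckIndexDemo {
  public static void main(String[] args) throws Exception {
    Directory dir = FSDirectory.open(new File("/tmp/myindex")); // hypothetical path

    // The list is typed, so adding a non-String no longer compiles.
    List<String> onlySegments = new ArrayList<String>();
    onlySegments.add("_0"); // hypothetical segment name

    CheckIndex checker = new CheckIndex(dir);
    CheckIndex.Status status = checker.checkIndex(onlySegments);
    System.out.println("clean? " + status.clean);
  }
}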

CompoundFileReader.java

@@ -47,7 +47,7 @@ class CompoundFileReader extends Directory {
 private String fileName;
 private IndexInput stream;
-private HashMap entries = new HashMap();
+private HashMap<String,FileEntry> entries = new HashMap<String,FileEntry>();
 public CompoundFileReader(Directory dir, String name) throws IOException {

CompoundFileWriter.java

@@ -22,7 +22,7 @@ import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.store.IndexInput;
 import java.util.LinkedList;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.io.IOException;
@@ -62,8 +62,8 @@ final class CompoundFileWriter {
 private Directory directory;
 private String fileName;
-private HashSet ids;
-private LinkedList entries;
+private HashSet<String> ids;
+private LinkedList<FileEntry> entries;
 private boolean merged = false;
 private SegmentMerger.CheckAbort checkAbort;
@@ -83,8 +83,8 @@ final class CompoundFileWriter {
 this.checkAbort = checkAbort;
 directory = dir;
 fileName = name;
-ids = new HashSet();
-entries = new LinkedList();
+ids = new HashSet<String>();
+entries = new LinkedList<FileEntry>();
 }
 /** Returns the directory of the compound file. */
@@ -152,10 +152,8 @@ final class CompoundFileWriter {
 // Write the directory with all offsets at 0.
 // Remember the positions of directory entries so that we can
 // adjust the offsets later
-Iterator it = entries.iterator();
 long totalSize = 0;
-while(it.hasNext()) {
-FileEntry fe = (FileEntry) it.next();
+for (FileEntry fe : entries) {
 fe.directoryOffset = os.getFilePointer();
 os.writeLong(0); // for now
 os.writeString(fe.file);
@@ -174,17 +172,13 @@ final class CompoundFileWriter {
 // Open the files and copy their data into the stream.
 // Remember the locations of each file's data section.
 byte buffer[] = new byte[16384];
-it = entries.iterator();
-while(it.hasNext()) {
-FileEntry fe = (FileEntry) it.next();
+for (FileEntry fe : entries) {
 fe.dataOffset = os.getFilePointer();
 copyFile(fe, os, buffer);
 }
 // Write the data offsets into the directory of the compound stream
-it = entries.iterator();
-while(it.hasNext()) {
-FileEntry fe = (FileEntry) it.next();
+for (FileEntry fe : entries) {
 os.seek(fe.directoryOffset);
 os.writeLong(fe.dataOffset);
 }
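CompoundFileWriter's three loops implement the write-placeholder-then-patch pattern: the directory is written with zero offsets, the data sections are appended, and the writer seeks back to overwrite each placeholder with the real offset. A standalone sketch of the same idea using plain java.io (file name, entry names, and sizes are made up):

import java.io.IOException;
import java.io.RandomAccessFile;

public class PlaceholderPatchDemo {
  // Illustrative stand-in for CompoundFileWriter's FileEntry.
  static class Entry {
    final String name;
    long directoryOffset; // position of the placeholder long
    long dataOffset;      // where this entry's data begins
    Entry(String name) { this.name = name; }
  }

  public static void main(String[] args) throws IOException {
    Entry[] entries = { new Entry("a.bin"), new Entry("b.bin") };
    RandomAccessFile out = new RandomAccessFile("demo.cfs", "rw");

    // Pass 1: write the directory with offsets of 0, remembering positions.
    for (Entry e : entries) {
      e.directoryOffset = out.getFilePointer();
      out.writeLong(0); // for now
      out.writeUTF(e.name);
    }

    // Pass 2: append the data sections, remembering where each starts.
    for (Entry e : entries) {
      e.dataOffset = out.getFilePointer();
      out.write(new byte[16]); // stand-in for copying a real file
    }

    // Pass 3: seek back and patch each placeholder with the real offset.
    for (Entry e : entries) {
      out.seek(e.directoryOffset);
      out.writeLong(e.dataOffset);
    }
    out.close();
  }
}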

ConcurrentMergeScheduler.java

@@ -36,7 +36,7 @@ public class ConcurrentMergeScheduler extends MergeScheduler {
 private int mergeThreadPriority = -1;
-protected List mergeThreads = new ArrayList();
+protected List<MergeThread> mergeThreads = new ArrayList<MergeThread>();
 // Max number of threads allowed to be merging at once
 private int maxThreadCount = 3;
@@ -363,7 +363,7 @@ public class ConcurrentMergeScheduler extends MergeScheduler {
 // Make sure all outstanding threads are done so we see
 // any exceptions they may produce:
 for(int i=0;i<count;i++)
-((ConcurrentMergeScheduler) allInstances.get(i)).sync();
+allInstances.get(i).sync();
 boolean v = anyExceptions;
 anyExceptions = false;
 return v;
@@ -382,7 +382,7 @@ public class ConcurrentMergeScheduler extends MergeScheduler {
 final int size = allInstances.size();
 int upto = 0;
 for(int i=0;i<size;i++) {
-final ConcurrentMergeScheduler other = (ConcurrentMergeScheduler) allInstances.get(i);
+final ConcurrentMergeScheduler other = allInstances.get(i);
 if (!(other.closed && 0 == other.mergeThreadCount()))
 // Keep this one for now: it still has threads or
 // may spawn new threads
@@ -406,8 +406,8 @@ public class ConcurrentMergeScheduler extends MergeScheduler {
 }
 /** Used for testing */
-private static List allInstances;
+private static List<ConcurrentMergeScheduler> allInstances;
 public static void setTestMode() {
-allInstances = new ArrayList();
+allInstances = new ArrayList<ConcurrentMergeScheduler>();
 }
 }

DirectoryReader.java

@@ -25,7 +25,7 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.Map;
 import java.util.Set;
@@ -47,7 +47,7 @@ class DirectoryReader extends IndexReader implements Cloneable {
 IndexWriter writer;
 private IndexDeletionPolicy deletionPolicy;
-private final HashSet synced = new HashSet();
+private final HashSet<String> synced = new HashSet<String>();
 private Lock writeLock;
 private SegmentInfos segmentInfos;
 private SegmentInfos segmentInfosStart;
@@ -59,7 +59,7 @@ class DirectoryReader extends IndexReader implements Cloneable {
 private SegmentReader[] subReaders;
 private int[] starts; // 1st docno for each segment
-private Map normsCache = new HashMap();
+private Map<String,byte[]> normsCache = new HashMap<String,byte[]>();
 private int maxDoc = 0;
 private int numDocs = -1;
 private boolean hasDeletions = false;
@@ -177,7 +177,7 @@ class DirectoryReader extends IndexReader implements Cloneable {
 /** This constructor is only used for {@link #reopen()} */
 DirectoryReader(Directory directory, SegmentInfos infos, SegmentReader[] oldReaders, int[] oldStarts,
-Map oldNormsCache, boolean readOnly, boolean doClone, int termInfosIndexDivisor) throws IOException {
+Map<String,byte[]> oldNormsCache, boolean readOnly, boolean doClone, int termInfosIndexDivisor) throws IOException {
 this.directory = directory;
 this.readOnly = readOnly;
 this.segmentInfos = infos;
@@ -190,7 +190,7 @@ class DirectoryReader extends IndexReader implements Cloneable {
 // we put the old SegmentReaders in a map, that allows us
 // to lookup a reader using its segment name
-Map segmentReaders = new HashMap();
+Map<String,Integer> segmentReaders = new HashMap<String,Integer>();
 if (oldReaders != null) {
 // create a Map SegmentName->SegmentReader
@@ -267,9 +267,7 @@ class DirectoryReader extends IndexReader implements Cloneable {
 // try to copy unchanged norms from the old normsCache to the new one
 if (oldNormsCache != null) {
-Iterator it = oldNormsCache.entrySet().iterator();
-while (it.hasNext()) {
-Map.Entry entry = (Map.Entry) it.next();
+for (Map.Entry<String,byte[]> entry: oldNormsCache.entrySet()) {
 String field = (String) entry.getKey();
 if (!hasNorms(field)) {
 continue;
@@ -681,7 +679,7 @@ class DirectoryReader extends IndexReader implements Cloneable {
 *
 * @throws IOException if there is a low-level IO error
 */
-protected void doCommit(Map commitUserData) throws IOException {
+protected void doCommit(Map<String,String> commitUserData) throws IOException {
 if (hasChanges) {
 segmentInfos.setUserData(commitUserData);
 // Default deleter (for backwards compatibility) is
@@ -700,9 +698,8 @@ class DirectoryReader extends IndexReader implements Cloneable {
 subReaders[i].commit();
 // Sync all files we just wrote
-Iterator it = segmentInfos.files(directory, false).iterator();
-while (it.hasNext()) {
-final String fileName = (String) it.next();
+final Collection<String> files = segmentInfos.files(directory, false);
+for (final String fileName : files) {
 if (!synced.contains(fileName)) {
 assert directory.fileExists(fileName);
 directory.sync(fileName);
@@ -766,7 +763,7 @@ class DirectoryReader extends IndexReader implements Cloneable {
 }
 }
-public Map getCommitUserData() {
+public Map<String,String> getCommitUserData() {
 ensureOpen();
 return segmentInfos.getUserData();
 }
@@ -796,17 +793,16 @@ class DirectoryReader extends IndexReader implements Cloneable {
 if (ioe != null) throw ioe;
 }
-public Collection getFieldNames (IndexReader.FieldOption fieldNames) {
+public Collection<String> getFieldNames (IndexReader.FieldOption fieldNames) {
 ensureOpen();
 return getFieldNames(fieldNames, this.subReaders);
 }
-static Collection getFieldNames (IndexReader.FieldOption fieldNames, IndexReader[] subReaders) {
+static Collection<String> getFieldNames (IndexReader.FieldOption fieldNames, IndexReader[] subReaders) {
 // maintain a unique set of field names
-Set fieldSet = new HashSet();
-for (int i = 0; i < subReaders.length; i++) {
-IndexReader reader = subReaders[i];
-Collection names = reader.getFieldNames(fieldNames);
+Set<String> fieldSet = new HashSet<String>();
+for (IndexReader reader : subReaders) {
+Collection<String> names = reader.getFieldNames(fieldNames);
 fieldSet.addAll(names);
 }
 return fieldSet;
@@ -838,10 +834,10 @@ class DirectoryReader extends IndexReader implements Cloneable {
 }
 /** @see org.apache.lucene.index.IndexReader#listCommits */
-public static Collection listCommits(Directory dir) throws IOException {
+public static Collection<IndexCommit> listCommits(Directory dir) throws IOException {
 final String[] files = dir.listAll();
-Collection commits = new ArrayList();
+Collection<IndexCommit> commits = new ArrayList<IndexCommit>();
 SegmentInfos latest = new SegmentInfos();
 latest.read(dir);
@@ -883,12 +879,12 @@ class DirectoryReader extends IndexReader implements Cloneable {
 private static final class ReaderCommit extends IndexCommit {
 private String segmentsFileName;
-Collection files;
+Collection<String> files;
 Directory dir;
 long generation;
 long version;
 final boolean isOptimized;
-final Map userData;
+final Map<String,String> userData;
 ReaderCommit(SegmentInfos infos, Directory dir) throws IOException {
 segmentsFileName = infos.getCurrentSegmentFileName();
@@ -908,7 +904,7 @@ class DirectoryReader extends IndexReader implements Cloneable {
 return segmentsFileName;
 }
-public Collection getFileNames() {
+public Collection<String> getFileNames() {
 return files;
 }
@@ -928,7 +924,7 @@ class DirectoryReader extends IndexReader implements Cloneable {
 return false;
 }
-public Map getUserData() {
+public Map<String,String> getUserData() {
 return userData;
 }
 }
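The normsCache change shows the other recurring cleanup: iterating a generified entrySet removes both the Iterator and the Map.Entry/key casts. A small sketch with the same Map<String,byte[]> shape (field names and array sizes invented):

import java.util.HashMap;
import java.util.Map;

public class EntrySetDemo {
  public static void main(String[] args) {
    Map<String, byte[]> normsCache = new HashMap<String, byte[]>();
    normsCache.put("title", new byte[8]);
    normsCache.put("body", new byte[8]);

    // Before: Map.Entry entry = (Map.Entry) it.next();
    //         String field = (String) entry.getKey();
    // After: the entry type is declared once and no casts remain.
    for (Map.Entry<String, byte[]> entry : normsCache.entrySet()) {
      String field = entry.getKey();
      byte[] norms = entry.getValue();
      System.out.println(field + ": " + norms.length + " bytes");
    }
  }
}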

DocConsumer.java

@@ -22,7 +22,7 @@ import java.util.Collection;
 abstract class DocConsumer {
 abstract DocConsumerPerThread addThread(DocumentsWriterThreadState perThread) throws IOException;
-abstract void flush(final Collection threads, final SegmentWriteState state) throws IOException;
+abstract void flush(final Collection<DocFieldProcessorPerThread> threads, final SegmentWriteState state) throws IOException;
 abstract void closeDocStore(final SegmentWriteState state) throws IOException;
 abstract void abort();
 abstract boolean freeRAM();

DocFieldProcessor.java

@@ -21,7 +21,7 @@ import java.io.IOException;
 import java.util.Collection;
 import java.util.Map;
 import java.util.HashMap;
-import java.util.Iterator;
 /**
 * This is a DocConsumer that gathers all fields under the
@@ -50,12 +50,10 @@ final class DocFieldProcessor extends DocConsumer {
 fieldsWriter.closeDocStore(state);
 }
-public void flush(Collection threads, SegmentWriteState state) throws IOException {
+public void flush(Collection<DocFieldProcessorPerThread> threads, SegmentWriteState state) throws IOException {
-Map childThreadsAndFields = new HashMap();
-Iterator it = threads.iterator();
-while(it.hasNext()) {
-DocFieldProcessorPerThread perThread = (DocFieldProcessorPerThread) it.next();
+Map<DocFieldConsumerPerThread, Collection<DocFieldConsumerPerField>> childThreadsAndFields = new HashMap<DocFieldConsumerPerThread, Collection<DocFieldConsumerPerField>>();
+for ( DocFieldProcessorPerThread perThread : threads) {
 childThreadsAndFields.put(perThread.consumer, perThread.fields());
 perThread.trimFields(state);
 }
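The childThreadsAndFields map is the most deeply nested signature in the patch: each per-thread consumer maps to a whole Collection of per-field consumers. A sketch of the Map<K, Collection<V>> shape with plain Strings standing in for the Lucene types:

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

public class NestedGenericsDemo {
  public static void main(String[] args) {
    // Key: a per-thread consumer; value: its per-field consumers.
    // String stands in for DocFieldConsumerPerThread/PerField here.
    Map<String, Collection<String>> childThreadsAndFields =
        new HashMap<String, Collection<String>>();

    Collection<String> fields = new ArrayList<String>();
    fields.add("title");
    fields.add("body");
    childThreadsAndFields.put("thread-0", fields);

    // Reading back needs no casts, even at two levels of nesting.
    for (Map.Entry<String, Collection<String>> e : childThreadsAndFields.entrySet()) {
      System.out.println(e.getKey() + " -> " + e.getValue().size() + " fields");
    }
  }
}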

DocFieldProcessorPerThread.java

@@ -76,8 +76,8 @@ final class DocFieldProcessorPerThread extends DocConsumerPerThread {
 consumer.abort();
 }
-public Collection fields() {
-Collection fields = new HashSet();
+public Collection<DocFieldConsumerPerField> fields() {
+Collection<DocFieldConsumerPerField> fields = new HashSet<DocFieldConsumerPerField>();
 for(int i=0;i<fieldHash.length;i++) {
 DocFieldProcessorPerField field = fieldHash[i];
 while(field != null) {
@@ -163,7 +163,7 @@ final class DocFieldProcessorPerThread extends DocConsumerPerThread {
 final int thisFieldGen = fieldGen++;
-final List docFields = doc.getFields();
+final List<Fieldable> docFields = doc.getFields();
 final int numDocFields = docFields.size();
 // Absorb any new fields first seen in this document.

FilterIndexReader.java

@@ -211,12 +211,12 @@ public class FilterIndexReader extends IndexReader {
 protected void doDelete(int n) throws CorruptIndexException, IOException { in.deleteDocument(n); }
-protected void doCommit(Map commitUserData) throws IOException { in.commit(commitUserData); }
+protected void doCommit(Map<String,String> commitUserData) throws IOException { in.commit(commitUserData); }
 protected void doClose() throws IOException { in.close(); }
-public Collection getFieldNames(IndexReader.FieldOption fieldNames) {
+public Collection<String> getFieldNames(IndexReader.FieldOption fieldNames) {
 ensureOpen();
 return in.getFieldNames(fieldNames);
 }

IndexReader.java

@@ -463,7 +463,7 @@ public abstract class IndexReader implements Cloneable {
 *
 * @see #getCommitUserData()
 */
-public static Map getCommitUserData(Directory directory) throws CorruptIndexException, IOException {
+public static Map<String,String> getCommitUserData(Directory directory) throws CorruptIndexException, IOException {
 return SegmentInfos.readCurrentUserData(directory);
 }
@@ -503,7 +503,7 @@ public abstract class IndexReader implements Cloneable {
 *
 * @see #getCommitUserData(Directory)
 */
-public Map getCommitUserData() {
+public Map<String,String> getCommitUserData() {
 throw new UnsupportedOperationException("This reader does not support this method.");
 }
@@ -944,7 +944,7 @@ public abstract class IndexReader implements Cloneable {
 * IndexReader#getCommitUserData}.
 * @throws IOException
 */
-public final synchronized void flush(Map commitUserData) throws IOException {
+public final synchronized void flush(Map<String, String> commitUserData) throws IOException {
 ensureOpen();
 commit(commitUserData);
 }
@@ -971,7 +971,7 @@ public abstract class IndexReader implements Cloneable {
 * (transactional semantics).
 * @throws IOException if there is a low-level IO error
 */
-protected final synchronized void commit(Map commitUserData) throws IOException {
+protected final synchronized void commit(Map<String, String> commitUserData) throws IOException {
 if (hasChanges) {
 doCommit(commitUserData);
 }
@@ -980,7 +980,7 @@ public abstract class IndexReader implements Cloneable {
 /** Implements commit. NOTE: subclasses should override
 * this. In 3.0 this will become an abstract method. */
-protected abstract void doCommit(Map commitUserData) throws IOException;
+protected abstract void doCommit(Map<String, String> commitUserData) throws IOException;
 /**
 * Closes files associated with this index.
@@ -1006,7 +1006,7 @@ public abstract class IndexReader implements Cloneable {
 * @return Collection of Strings indicating the names of the fields.
 * @see IndexReader.FieldOption
 */
-public abstract Collection getFieldNames(FieldOption fldOption);
+public abstract Collection<String> getFieldNames(FieldOption fldOption);
 /**
 * Expert: return the IndexCommit that this reader has
@@ -1111,7 +1111,7 @@ public abstract class IndexReader implements Cloneable {
 * java.io.IOException}. Note that if a commit is in
 * progress while this method is running, that commit
 * may or may not be returned array. */
-public static Collection listCommits(Directory dir) throws IOException {
+public static Collection<IndexCommit> listCommits(Directory dir) throws IOException {
 return DirectoryReader.listCommits(dir);
 }
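With getCommitUserData and doCommit generified, commit user data is a Map<String,String> end to end. A sketch of writing it and reading it back, assuming the 2.9/3.0-era IndexWriter API (the key and value are invented):

import java.util.HashMap;
import java.util.Map;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;

public class CommitUserDataDemo {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir,
        new StandardAnalyzer(Version.LUCENE_29),
        IndexWriter.MaxFieldLength.UNLIMITED);

    Map<String, String> userData = new HashMap<String, String>();
    userData.put("sourceVersion", "42"); // invented key/value
    writer.commit(userData); // records the map with this commit point
    writer.close();

    // Static helper generified in this patch: no cast on the result.
    Map<String, String> read = IndexReader.getCommitUserData(dir);
    System.out.println(read.get("sourceVersion"));
  }
}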

MultiReader.java

@@ -352,7 +352,7 @@ public class MultiReader extends IndexReader implements Cloneable {
 return new MultiTermPositions(this, subReaders, starts);
 }
-protected void doCommit(Map commitUserData) throws IOException {
+protected void doCommit(Map<String,String> commitUserData) throws IOException {
 for (int i = 0; i < subReaders.length; i++)
 subReaders[i].commit(commitUserData);
 }
@@ -367,7 +367,7 @@ public class MultiReader extends IndexReader implements Cloneable {
 }
 }
-public Collection getFieldNames (IndexReader.FieldOption fieldNames) {
+public Collection<String> getFieldNames (IndexReader.FieldOption fieldNames) {
 ensureOpen();
 return DirectoryReader.getFieldNames(fieldNames, this.subReaders);
 }

ParallelReader.java

@@ -104,7 +104,7 @@ public class ParallelReader extends IndexReader {
 throw new IllegalArgumentException
 ("All readers must have same numDocs: "+numDocs+"!="+reader.numDocs());
-Collection fields = reader.getFieldNames(IndexReader.FieldOption.ALL);
+Collection<String> fields = reader.getFieldNames(IndexReader.FieldOption.ALL);
 readerToFields.put(reader, fields);
 Iterator i = fields.iterator();
 while (i.hasNext()) { // update fieldToReader map
@@ -435,7 +435,7 @@ public class ParallelReader extends IndexReader {
 return (IndexReader[]) readers.toArray(new IndexReader[readers.size()]);
 }
-protected void doCommit(Map commitUserData) throws IOException {
+protected void doCommit(Map<String,String> commitUserData) throws IOException {
 for (int i = 0; i < readers.size(); i++)
 ((IndexReader)readers.get(i)).commit(commitUserData);
 }
@@ -450,12 +450,12 @@ public class ParallelReader extends IndexReader {
 }
 }
-public Collection getFieldNames (IndexReader.FieldOption fieldNames) {
+public Collection<String> getFieldNames (IndexReader.FieldOption fieldNames) {
 ensureOpen();
 Set fieldSet = new HashSet();
 for (int i = 0; i < readers.size(); i++) {
 IndexReader reader = ((IndexReader)readers.get(i));
-Collection names = reader.getFieldNames(fieldNames);
+Collection<String> names = reader.getFieldNames(fieldNames);
 fieldSet.addAll(names);
 }
 return fieldSet;

SegmentInfo.java

@@ -89,7 +89,7 @@ public final class SegmentInfo {
 private boolean hasProx; // True if this segment has any fields with omitTermFreqAndPositions==false
-private Map diagnostics;
+private Map<String,String> diagnostics;
 public String toString() {
 return "si: "+dir.toString()+" "+name+" docCount: "+docCount+" delCount: "+delCount+" delFileName: "+getDelFileName();
@@ -152,12 +152,12 @@ public final class SegmentInfo {
 }
 // must be Map<String, String>
-void setDiagnostics(Map diagnostics) {
+void setDiagnostics(Map<String, String> diagnostics) {
 this.diagnostics = diagnostics;
 }
 // returns Map<String, String>
-public Map getDiagnostics() {
+public Map<String, String> getDiagnostics() {
 return diagnostics;
 }

SegmentReader.java

@@ -792,7 +792,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
 return clone;
 }
-protected void doCommit(Map commitUserData) throws IOException {
+protected void doCommit(Map<String,String> commitUserData) throws IOException {
 if (hasChanges) {
 if (deletedDocsDirty) { // re-write deleted
 si.advanceDelGen();
@@ -971,10 +971,10 @@ public class SegmentReader extends IndexReader implements Cloneable {
 /**
 * @see IndexReader#getFieldNames(IndexReader.FieldOption fldOption)
 */
-public Collection getFieldNames(IndexReader.FieldOption fieldOption) {
+public Collection<String> getFieldNames(IndexReader.FieldOption fieldOption) {
 ensureOpen();
-Set fieldSet = new HashSet();
+Set<String> fieldSet = new HashSet<String>();
 for (int i = 0; i < core.fieldInfos.size(); i++) {
 FieldInfo fi = core.fieldInfos.fieldInfo(i);
 if (fieldOption == IndexReader.FieldOption.ALL) {

SegmentWriteState.java

@@ -30,7 +30,7 @@ class SegmentWriteState {
 int numDocs;
 int termIndexInterval;
 int numDocsInStore;
-Collection flushedFiles;
+Collection<String> flushedFiles;
 public SegmentWriteState(DocumentsWriter docWriter, Directory directory, String segmentName, String docStoreSegmentName, int numDocs,
 int numDocsInStore, int termIndexInterval) {
@@ -41,7 +41,7 @@ class SegmentWriteState {
 this.numDocs = numDocs;
 this.numDocsInStore = numDocsInStore;
 this.termIndexInterval = termIndexInterval;
-flushedFiles = new HashSet();
+flushedFiles = new HashSet<String>();
 }
 public String segmentFileName(String ext) {