From b22932be70380aaa85eb436c96b5908c0276618f Mon Sep 17 00:00:00 2001
From: Uwe Schindler
Date: Sat, 17 Oct 2009 19:15:07 +0000
Subject: [PATCH] LUCENE-1257: More generified APIs and implementations. Thanks Kay Kay!

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@826290 13f79535-47bb-0310-9956-ffa450edef68
---
 .../org/apache/lucene/index/CheckIndex.java   | 27 +++++------
 .../lucene/index/CompoundFileReader.java      |  2 +-
 .../lucene/index/CompoundFileWriter.java      | 22 ++++-----
 .../index/ConcurrentMergeScheduler.java       | 10 ++--
 .../apache/lucene/index/DirectoryReader.java  | 46 +++++++++----------
 .../org/apache/lucene/index/DocConsumer.java  |  2 +-
 .../lucene/index/DocFieldProcessor.java       | 10 ++--
 .../index/DocFieldProcessorPerThread.java     |  6 +--
 .../lucene/index/FilterIndexReader.java       |  4 +-
 .../org/apache/lucene/index/IndexReader.java  | 14 +++---
 .../org/apache/lucene/index/MultiReader.java  |  4 +-
 .../apache/lucene/index/ParallelReader.java   |  8 ++--
 .../org/apache/lucene/index/SegmentInfo.java  |  6 +--
 .../apache/lucene/index/SegmentReader.java    |  6 +--
 .../lucene/index/SegmentWriteState.java       |  4 +-
 15 files changed, 78 insertions(+), 93 deletions(-)
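Every hunk below follows the same mechanical pattern: raw collection and map types gain explicit type parameters, and hand-rolled Iterator loops become enhanced for loops, which removes the casts that raw types forced. A minimal before/after sketch of the idiom (illustrative only, not taken from the files below; the pre-Java-7 style without the diamond operator matches the Java 5 language level of trunk at the time):

    import java.util.Iterator;
    import java.util.Map;

    class GenerifyIdiom {
      // Before: raw types force casts and defer type errors to runtime.
      static String firstKeyRaw(Map userData) {
        Iterator it = userData.entrySet().iterator();
        while (it.hasNext()) {
          Map.Entry entry = (Map.Entry) it.next();
          return (String) entry.getKey();
        }
        return null;
      }

      // After: the compiler checks element types; no casts, no explicit Iterator.
      static String firstKeyTyped(Map<String, String> userData) {
        for (Map.Entry<String, String> entry : userData.entrySet()) {
          return entry.getKey();
        }
        return null;
      }
    }

A side effect visible in several files below: once the last explicit Iterator loop is gone, the java.util.Iterator import can be dropped as well.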
diff --git a/src/java/org/apache/lucene/index/CheckIndex.java b/src/java/org/apache/lucene/index/CheckIndex.java
index 015e58dab71..2afddea953c 100644
--- a/src/java/org/apache/lucene/index/CheckIndex.java
+++ b/src/java/org/apache/lucene/index/CheckIndex.java
@@ -28,7 +28,7 @@ import java.io.PrintStream;
 import java.io.IOException;
 import java.io.File;
 import java.util.Collection;
-import java.util.Iterator;
+
 import java.util.List;
 import java.util.ArrayList;
 import java.util.Map;
@@ -113,7 +113,7 @@ public class CheckIndex {
     public boolean partial;

     /** Holds the userData of the last commit in the index */
-    public Map userData;
+    public Map<String, String> userData;

     /** Holds the status of each segment in the index.
      *  See {@link #segmentInfos}.
@@ -172,10 +172,10 @@ public class CheckIndex {
      * @see AbstractField#setOmitTermFreqAndPositions */
     public boolean hasProx;

-    /** Map that includes certain
+    /** Map<String, String> that includes certain
      *  debugging details that IndexWriter records into
      *  each segment it creates */
-    public Map diagnostics;
+    public Map<String, String> diagnostics;

     /** Status for testing of field norms (null if field norms could not be tested). */
     public FieldNormStatus fieldNormStatus;
@@ -309,7 +309,7 @@ public class CheckIndex {
    *  <p>WARNING: make sure
    *  you only call this when the index is not opened by any
    *  writer. */
-  public Status checkIndex(List onlySegments) throws IOException {
+  public Status checkIndex(List<String> onlySegments) throws IOException {
     NumberFormat nf = NumberFormat.getInstance();
     SegmentInfos sis = new SegmentInfos();
     Status result = new Status();
@@ -397,10 +397,9 @@ public class CheckIndex {
       result.partial = true;
       if (infoStream != null)
         infoStream.print("\nChecking only these segments:");
-      Iterator it = onlySegments.iterator();
-      while (it.hasNext()) {
+      for (String s : onlySegments) {
         if (infoStream != null)
-          infoStream.print(" " + it.next());
+          infoStream.print(" " + s);
       }
       result.segmentsChecked.addAll(onlySegments);
       msg(":");
@@ -439,7 +438,7 @@ public class CheckIndex {
       segInfoStat.numFiles = info.files().size();
       msg("    size (MB)=" + nf.format(info.sizeInBytes()/(1024.*1024.)));
       segInfoStat.sizeMB = info.sizeInBytes()/(1024.*1024.);
-      Map diagnostics = info.getDiagnostics();
+      Map<String, String> diagnostics = info.getDiagnostics();
       segInfoStat.diagnostics = diagnostics;
       if (diagnostics.size() > 0) {
         msg("    diagnostics = " + diagnostics);
@@ -497,7 +496,7 @@ public class CheckIndex {
         if (infoStream != null) {
           infoStream.print("    test: fields..............");
         }
-        Collection fieldNames = reader.getFieldNames(IndexReader.FieldOption.ALL);
+        Collection<String> fieldNames = reader.getFieldNames(IndexReader.FieldOption.ALL);
         msg("OK [" + fieldNames.size() + " fields]");
         segInfoStat.numFields = fieldNames.size();

@@ -559,7 +558,7 @@ public class CheckIndex {
   /**
    * Test field norms.
    */
-  private Status.FieldNormStatus testFieldNorms(Collection fieldNames, SegmentReader reader) {
+  private Status.FieldNormStatus testFieldNorms(Collection<String> fieldNames, SegmentReader reader) {
     final Status.FieldNormStatus status = new Status.FieldNormStatus();

     try {
@@ -567,10 +566,8 @@ public class CheckIndex {
       if (infoStream != null) {
         infoStream.print("    test: field norms.........");
       }
-      Iterator it = fieldNames.iterator();
       final byte[] b = new byte[reader.maxDoc()];
-      while (it.hasNext()) {
-        final String fieldName = (String) it.next();
+      for (final String fieldName : fieldNames) {
         reader.norms(fieldName, b, 0);
         ++status.totFields;
       }
@@ -807,7 +804,7 @@ public class CheckIndex {
   public static void main(String[] args) throws IOException, InterruptedException {

     boolean doFix = false;
-    List onlySegments = new ArrayList();
+    List<String> onlySegments = new ArrayList<String>();
     String indexPath = null;
     int i = 0;
     while(i < args.length) {
diff --git a/src/java/org/apache/lucene/index/CompoundFileReader.java b/src/java/org/apache/lucene/index/CompoundFileReader.java
index 43fe0e6e507..570d7dce096 100644
--- a/src/java/org/apache/lucene/index/CompoundFileReader.java
+++ b/src/java/org/apache/lucene/index/CompoundFileReader.java
@@ -47,7 +47,7 @@ class CompoundFileReader extends Directory {
     private String fileName;

     private IndexInput stream;
-    private HashMap entries = new HashMap();
+    private HashMap<String, FileEntry> entries = new HashMap<String, FileEntry>();


     public CompoundFileReader(Directory dir, String name) throws IOException {
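The CompoundFileWriter change below replaces three manual Iterator loops over the same LinkedList<FileEntry> with enhanced for loops. The enhanced for compiles down to the same iterator() protocol, so behavior is unchanged; only the compile-time element type is new. A sketch of the equivalence, with a hypothetical FileEntry and process() standing in for the real ones:

    import java.util.LinkedList;

    class ForEachEquivalence {
      // Hypothetical stand-in for the package-private FileEntry in CompoundFileWriter.
      static class FileEntry { long directoryOffset; }

      static void walk(LinkedList<FileEntry> entries) {
        // An enhanced for loop -- for (FileEntry fe : entries) { ... } -- is
        // compiled down to exactly this iterator() protocol:
        for (java.util.Iterator<FileEntry> it = entries.iterator(); it.hasNext();) {
          FileEntry fe = it.next();   // typed next(), so no (FileEntry) cast
          process(fe);
        }
      }

      static void process(FileEntry fe) { /* placeholder */ }
    }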
diff --git a/src/java/org/apache/lucene/index/CompoundFileWriter.java b/src/java/org/apache/lucene/index/CompoundFileWriter.java
index 4ad92d8f80a..2c29eb00809 100644
--- a/src/java/org/apache/lucene/index/CompoundFileWriter.java
+++ b/src/java/org/apache/lucene/index/CompoundFileWriter.java
@@ -22,7 +22,7 @@ import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.store.IndexInput;
 import java.util.LinkedList;
 import java.util.HashSet;
-import java.util.Iterator;
+

 import java.io.IOException;
@@ -62,8 +62,8 @@ final class CompoundFileWriter {

     private Directory directory;
     private String fileName;
-    private HashSet ids;
-    private LinkedList entries;
+    private HashSet<String> ids;
+    private LinkedList<FileEntry> entries;

     private boolean merged = false;
     private SegmentMerger.CheckAbort checkAbort;
@@ -83,8 +83,8 @@ final class CompoundFileWriter {
         this.checkAbort = checkAbort;
         directory = dir;
         fileName = name;
-        ids = new HashSet();
-        entries = new LinkedList();
+        ids = new HashSet<String>();
+        entries = new LinkedList<FileEntry>();
     }

     /** Returns the directory of the compound file. */
@@ -152,10 +152,8 @@ final class CompoundFileWriter {
             // Write the directory with all offsets at 0.
             // Remember the positions of directory entries so that we can
             // adjust the offsets later
-            Iterator it = entries.iterator();
             long totalSize = 0;
-            while(it.hasNext()) {
-                FileEntry fe = (FileEntry) it.next();
+            for (FileEntry fe : entries) {
                 fe.directoryOffset = os.getFilePointer();
                 os.writeLong(0);    // for now
                 os.writeString(fe.file);
@@ -174,17 +172,13 @@ final class CompoundFileWriter {
             // Open the files and copy their data into the stream.
             // Remember the locations of each file's data section.
             byte buffer[] = new byte[16384];
-            it = entries.iterator();
-            while(it.hasNext()) {
-                FileEntry fe = (FileEntry) it.next();
+            for (FileEntry fe : entries) {
                 fe.dataOffset = os.getFilePointer();
                 copyFile(fe, os, buffer);
             }

             // Write the data offsets into the directory of the compound stream
-            it = entries.iterator();
-            while(it.hasNext()) {
-                FileEntry fe = (FileEntry) it.next();
+            for (FileEntry fe : entries) {
                 os.seek(fe.directoryOffset);
                 os.writeLong(fe.dataOffset);
             }
diff --git a/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java b/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java
index 5856d9a2636..2a428647e66 100644
--- a/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java
+++ b/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java
@@ -36,7 +36,7 @@ public class ConcurrentMergeScheduler extends MergeScheduler {

  private int mergeThreadPriority = -1;

-  protected List mergeThreads = new ArrayList();
+  protected List<MergeThread> mergeThreads = new ArrayList<MergeThread>();

  // Max number of threads allowed to be merging at once
  private int maxThreadCount = 3;
@@ -363,7 +363,7 @@ public class ConcurrentMergeScheduler extends MergeScheduler {
     // Make sure all outstanding threads are done so we see
     // any exceptions they may produce:
     for(int i=0;i<count;i++)
-      ((MergeThread) mergeThreads.get(i)).join();
+      mergeThreads.get(i).join();
@@ -428,9 +428,9 @@ public class ConcurrentMergeScheduler extends MergeScheduler {
     }
   }

-  static List allInstances;
+  static List<ConcurrentMergeScheduler> allInstances;

   public static void setTestMode() {
-    allInstances = new ArrayList();
+    allInstances = new ArrayList<ConcurrentMergeScheduler>();
   }
 }
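DirectoryReader's norms cache, next, maps a field name to that field's norm bytes, so the value type argument is an array type: Map<String,byte[]>. Array types are ordinary type arguments; a small sketch of the cache idiom under that assumption (the field name and sizing are made up for illustration):

    import java.util.HashMap;
    import java.util.Map;

    class NormsCacheIdiom {
      static byte[] norms(Map<String, byte[]> normsCache, String field, int maxDoc) {
        byte[] bytes = normsCache.get(field);   // no (byte[]) cast needed anymore
        if (bytes == null) {
          bytes = new byte[maxDoc];
          normsCache.put(field, bytes);
        }
        return bytes;
      }
    }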
diff --git a/src/java/org/apache/lucene/index/DirectoryReader.java b/src/java/org/apache/lucene/index/DirectoryReader.java
index 02247d4ac24..cdbc0f6ef0d 100644
--- a/src/java/org/apache/lucene/index/DirectoryReader.java
+++ b/src/java/org/apache/lucene/index/DirectoryReader.java
@@ -25,7 +25,7 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Iterator;
+
 import java.util.Map;
 import java.util.Set;

@@ -47,7 +47,7 @@ class DirectoryReader extends IndexReader implements Cloneable {
   IndexWriter writer;

   private IndexDeletionPolicy deletionPolicy;
-  private final HashSet synced = new HashSet();
+  private final HashSet<String> synced = new HashSet<String>();
   private Lock writeLock;
   private SegmentInfos segmentInfos;
   private SegmentInfos segmentInfosStart;
@@ -59,7 +59,7 @@ class DirectoryReader extends IndexReader implements Cloneable {

   private SegmentReader[] subReaders;
   private int[] starts;                           // 1st docno for each segment
-  private Map normsCache = new HashMap();
+  private Map<String,byte[]> normsCache = new HashMap<String,byte[]>();
   private int maxDoc = 0;
   private int numDocs = -1;
   private boolean hasDeletions = false;
@@ -177,7 +177,7 @@ class DirectoryReader extends IndexReader implements Cloneable {

   /** This constructor is only used for {@link #reopen()} */
   DirectoryReader(Directory directory, SegmentInfos infos, SegmentReader[] oldReaders, int[] oldStarts,
-                  Map oldNormsCache, boolean readOnly, boolean doClone, int termInfosIndexDivisor) throws IOException {
+                  Map<String,byte[]> oldNormsCache, boolean readOnly, boolean doClone, int termInfosIndexDivisor) throws IOException {
     this.directory = directory;
     this.readOnly = readOnly;
     this.segmentInfos = infos;
@@ -190,7 +190,7 @@ class DirectoryReader extends IndexReader implements Cloneable {

     // we put the old SegmentReaders in a map, that allows us
     // to lookup a reader using its segment name
-    Map segmentReaders = new HashMap();
+    Map<String,Integer> segmentReaders = new HashMap<String,Integer>();

     if (oldReaders != null) {
       // create a Map SegmentName->SegmentReader
@@ -267,9 +267,7 @@ class DirectoryReader extends IndexReader implements Cloneable {

     // try to copy unchanged norms from the old normsCache to the new one
     if (oldNormsCache != null) {
-      Iterator it = oldNormsCache.entrySet().iterator();
-      while (it.hasNext()) {
-        Map.Entry entry = (Map.Entry) it.next();
+      for (Map.Entry<String,byte[]> entry: oldNormsCache.entrySet()) {
         String field = (String) entry.getKey();
         if (!hasNorms(field)) {
           continue;
@@ -681,7 +679,7 @@ class DirectoryReader extends IndexReader implements Cloneable {
    *
    * @throws IOException if there is a low-level IO error
    */
-  protected void doCommit(Map commitUserData) throws IOException {
+  protected void doCommit(Map<String,String> commitUserData) throws IOException {
     if (hasChanges) {
       segmentInfos.setUserData(commitUserData);
       // Default deleter (for backwards compatibility) is
@@ -700,9 +698,8 @@ class DirectoryReader extends IndexReader implements Cloneable {
         subReaders[i].commit();

       // Sync all files we just wrote
-      Iterator it = segmentInfos.files(directory, false).iterator();
-      while (it.hasNext()) {
-        final String fileName = (String) it.next();
+      final Collection<String> files = segmentInfos.files(directory, false);
+      for (final String fileName : files) {
         if (!synced.contains(fileName)) {
           assert directory.fileExists(fileName);
           directory.sync(fileName);
@@ -766,7 +763,7 @@ class DirectoryReader extends IndexReader implements Cloneable {
     }
   }

-  public Map getCommitUserData() {
+  public Map<String,String> getCommitUserData() {
     ensureOpen();
     return segmentInfos.getUserData();
   }
@@ -796,17 +793,16 @@ class DirectoryReader extends IndexReader implements Cloneable {
     if (ioe != null) throw ioe;
   }

-  public Collection getFieldNames (IndexReader.FieldOption fieldNames) {
+  public Collection<String> getFieldNames (IndexReader.FieldOption fieldNames) {
     ensureOpen();
     return getFieldNames(fieldNames, this.subReaders);
   }

-  static Collection getFieldNames (IndexReader.FieldOption fieldNames, IndexReader[] subReaders) {
+  static Collection<String> getFieldNames (IndexReader.FieldOption fieldNames, IndexReader[] subReaders) {
     // maintain a unique set of field names
-    Set fieldSet = new HashSet();
-    for (int i = 0; i < subReaders.length; i++) {
-      IndexReader reader = subReaders[i];
-      Collection names = reader.getFieldNames(fieldNames);
+    Set<String> fieldSet = new HashSet<String>();
+    for (IndexReader reader : subReaders) {
+      Collection<String> names = reader.getFieldNames(fieldNames);
       fieldSet.addAll(names);
     }
     return fieldSet;
@@ -838,10 +834,10 @@ class DirectoryReader extends IndexReader implements Cloneable {
   }

   /** @see org.apache.lucene.index.IndexReader#listCommits */
-  public static Collection listCommits(Directory dir) throws IOException {
+  public static Collection<IndexCommit> listCommits(Directory dir) throws IOException {
     final String[] files = dir.listAll();

-    Collection commits = new ArrayList();
+    Collection<IndexCommit> commits = new ArrayList<IndexCommit>();

     SegmentInfos latest = new SegmentInfos();
     latest.read(dir);
@@ -883,12 +879,12 @@ class DirectoryReader extends IndexReader implements Cloneable {

   private static final class ReaderCommit extends IndexCommit {
     private String segmentsFileName;
-    Collection files;
+    Collection<String> files;
     Directory dir;
     long generation;
     long version;
     final boolean isOptimized;
-    final Map userData;
+    final Map<String,String> userData;

     ReaderCommit(SegmentInfos infos, Directory dir) throws IOException {
       segmentsFileName = infos.getCurrentSegmentFileName();
@@ -908,7 +904,7 @@ class DirectoryReader extends IndexReader implements Cloneable {
       return segmentsFileName;
     }

-    public Collection getFileNames() {
+    public Collection<String> getFileNames() {
       return files;
     }

@@ -928,7 +924,7 @@ class DirectoryReader extends IndexReader implements Cloneable {
       return false;
     }

-    public Map getUserData() {
+    public Map<String,String> getUserData() {
       return userData;
     }
   }
diff --git a/src/java/org/apache/lucene/index/DocConsumer.java b/src/java/org/apache/lucene/index/DocConsumer.java
index ee372a7aeb0..ae993ce91bb 100644
--- a/src/java/org/apache/lucene/index/DocConsumer.java
+++ b/src/java/org/apache/lucene/index/DocConsumer.java
@@ -22,7 +22,7 @@ import java.util.Collection;

 abstract class DocConsumer {
   abstract DocConsumerPerThread addThread(DocumentsWriterThreadState perThread) throws IOException;
-  abstract void flush(final Collection threads, final SegmentWriteState state) throws IOException;
+  abstract void flush(final Collection<DocFieldProcessorPerThread> threads, final SegmentWriteState state) throws IOException;
   abstract void closeDocStore(final SegmentWriteState state) throws IOException;
   abstract void abort();
   abstract boolean freeRAM();
diff --git a/src/java/org/apache/lucene/index/DocFieldProcessor.java b/src/java/org/apache/lucene/index/DocFieldProcessor.java
index ae880078fb4..ca6202e1973 100644
--- a/src/java/org/apache/lucene/index/DocFieldProcessor.java
+++ b/src/java/org/apache/lucene/index/DocFieldProcessor.java
@@ -21,7 +21,7 @@ import java.io.IOException;
 import java.util.Collection;
 import java.util.Map;
 import java.util.HashMap;
-import java.util.Iterator;
+

 /**
  * This is a DocConsumer that gathers all fields under the
@@ -50,12 +50,10 @@ final class DocFieldProcessor extends DocConsumer {
     fieldsWriter.closeDocStore(state);
   }

-  public void flush(Collection threads, SegmentWriteState state) throws IOException {
+  public void flush(Collection<DocFieldProcessorPerThread> threads, SegmentWriteState state) throws IOException {

-    Map childThreadsAndFields = new HashMap();
-    Iterator it = threads.iterator();
-    while(it.hasNext()) {
-      DocFieldProcessorPerThread perThread = (DocFieldProcessorPerThread) it.next();
+    Map<DocFieldConsumerPerThread, Collection<DocFieldConsumerPerField>> childThreadsAndFields = new HashMap<DocFieldConsumerPerThread, Collection<DocFieldConsumerPerField>>();
+    for ( DocFieldProcessorPerThread perThread : threads) {
       childThreadsAndFields.put(perThread.consumer, perThread.fields());
       perThread.trimFields(state);
     }
diff --git a/src/java/org/apache/lucene/index/DocFieldProcessorPerThread.java b/src/java/org/apache/lucene/index/DocFieldProcessorPerThread.java
index 301dab51e22..9a442eb165f 100644
--- a/src/java/org/apache/lucene/index/DocFieldProcessorPerThread.java
+++ b/src/java/org/apache/lucene/index/DocFieldProcessorPerThread.java
@@ -76,8 +76,8 @@ final class DocFieldProcessorPerThread extends DocConsumerPerThread {
     consumer.abort();
   }

-  public Collection fields() {
-    Collection fields = new HashSet();
+  public Collection<DocFieldConsumerPerField> fields() {
+    Collection<DocFieldConsumerPerField> fields = new HashSet<DocFieldConsumerPerField>();
     for(int i=0;i<fieldHash.length;i++) {
       DocFieldProcessorPerField field = fieldHash[i];
       while(field != null) {
@@ -245,4 +245,4 @@ final class DocFieldProcessorPerThread extends DocConsumerPerThread {
-    final List docFields = doc.getFields();
+    final List<Fieldable> docFields = doc.getFields();
     final int numDocFields = docFields.size();

     // Absorb any new fields first seen in this document.
diff --git a/src/java/org/apache/lucene/index/FilterIndexReader.java b/src/java/org/apache/lucene/index/FilterIndexReader.java
index 24f9704a372..b3a8201b004 100644
--- a/src/java/org/apache/lucene/index/FilterIndexReader.java
+++ b/src/java/org/apache/lucene/index/FilterIndexReader.java
@@ -211,12 +211,12 @@ public class FilterIndexReader extends IndexReader {
   protected void doDelete(int n) throws CorruptIndexException, IOException { in.deleteDocument(n); }

-  protected void doCommit(Map commitUserData) throws IOException { in.commit(commitUserData); }
+  protected void doCommit(Map<String, String> commitUserData) throws IOException { in.commit(commitUserData); }

   protected void doClose() throws IOException { in.close(); }

-  public Collection getFieldNames(IndexReader.FieldOption fieldNames) {
+  public Collection<String> getFieldNames(IndexReader.FieldOption fieldNames) {
     ensureOpen();
     return in.getFieldNames(fieldNames);
   }
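The IndexReader changes below generify the public API (getCommitUserData, flush, commit, doCommit, getFieldNames, listCommits). Because generics erase to the raw type, each method keeps its old bytecode descriptor, so code compiled against the raw signatures keeps linking; source that still assigns the results to raw types merely picks up unchecked warnings. A caller-side sketch (reader is assumed to be an open IndexReader; imports elided):

    Collection<String> fields = reader.getFieldNames(IndexReader.FieldOption.ALL);
    for (String field : fields) {
      System.out.println(field);
    }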
diff --git a/src/java/org/apache/lucene/index/IndexReader.java b/src/java/org/apache/lucene/index/IndexReader.java
index 9934c95d986..eeaebd2b6f4 100644
--- a/src/java/org/apache/lucene/index/IndexReader.java
+++ b/src/java/org/apache/lucene/index/IndexReader.java
@@ -463,7 +463,7 @@ public abstract class IndexReader implements Cloneable {
    *
    * @see #getCommitUserData()
    */
-  public static Map getCommitUserData(Directory directory) throws CorruptIndexException, IOException {
+  public static Map<String, String> getCommitUserData(Directory directory) throws CorruptIndexException, IOException {
     return SegmentInfos.readCurrentUserData(directory);
   }

@@ -503,7 +503,7 @@ public abstract class IndexReader implements Cloneable {
    *
    * @see #getCommitUserData(Directory)
    */
-  public Map getCommitUserData() {
+  public Map<String, String> getCommitUserData() {
     throw new UnsupportedOperationException("This reader does not support this method.");
   }

@@ -944,7 +944,7 @@ public abstract class IndexReader implements Cloneable {
    *  IndexReader#getCommitUserData}.
    * @throws IOException
    */
-  public final synchronized void flush(Map commitUserData) throws IOException {
+  public final synchronized void flush(Map<String, String> commitUserData) throws IOException {
     ensureOpen();
     commit(commitUserData);
   }
@@ -971,7 +971,7 @@ public abstract class IndexReader implements Cloneable {
    * (transactional semantics).
    * @throws IOException if there is a low-level IO error
    */
-  protected final synchronized void commit(Map commitUserData) throws IOException {
+  protected final synchronized void commit(Map<String, String> commitUserData) throws IOException {
     if (hasChanges) {
       doCommit(commitUserData);
     }
@@ -980,7 +980,7 @@ public abstract class IndexReader implements Cloneable {

   /** Implements commit.  NOTE: subclasses should override
    *  this.  In 3.0 this will become an abstract method. */
-  protected abstract void doCommit(Map commitUserData) throws IOException;
+  protected abstract void doCommit(Map<String, String> commitUserData) throws IOException;

   /**
    * Closes files associated with this index.
@@ -1006,7 +1006,7 @@ public abstract class IndexReader implements Cloneable {
    * @return Collection of Strings indicating the names of the fields.
    * @see IndexReader.FieldOption
    */
-  public abstract Collection getFieldNames(FieldOption fldOption);
+  public abstract Collection<String> getFieldNames(FieldOption fldOption);

   /**
    * Expert: return the IndexCommit that this reader has
@@ -1111,7 +1111,7 @@ public abstract class IndexReader implements Cloneable {
    *  java.io.IOException}.  Note that if a commit is in
    *  progress while this method is running, that commit
    *  may or may not be returned array.  */
-  public static Collection listCommits(Directory dir) throws IOException {
+  public static Collection<IndexCommit> listCommits(Directory dir) throws IOException {
     return DirectoryReader.listCommits(dir);
   }
diff --git a/src/java/org/apache/lucene/index/MultiReader.java b/src/java/org/apache/lucene/index/MultiReader.java
index 16ecd4a3286..796ab31bf67 100644
--- a/src/java/org/apache/lucene/index/MultiReader.java
+++ b/src/java/org/apache/lucene/index/MultiReader.java
@@ -352,7 +352,7 @@ public class MultiReader extends IndexReader implements Cloneable {
     return new MultiTermPositions(this, subReaders, starts);
   }

-  protected void doCommit(Map commitUserData) throws IOException {
+  protected void doCommit(Map<String,String> commitUserData) throws IOException {
     for (int i = 0; i < subReaders.length; i++)
       subReaders[i].commit(commitUserData);
   }
@@ -367,7 +367,7 @@ public class MultiReader extends IndexReader implements Cloneable {
     }
   }

-  public Collection getFieldNames (IndexReader.FieldOption fieldNames) {
+  public Collection<String> getFieldNames (IndexReader.FieldOption fieldNames) {
     ensureOpen();
     return DirectoryReader.getFieldNames(fieldNames, this.subReaders);
   }
diff --git a/src/java/org/apache/lucene/index/ParallelReader.java b/src/java/org/apache/lucene/index/ParallelReader.java
index d558daf93fc..4858b1c33bc 100644
--- a/src/java/org/apache/lucene/index/ParallelReader.java
+++ b/src/java/org/apache/lucene/index/ParallelReader.java
@@ -104,7 +104,7 @@ public class ParallelReader extends IndexReader {
       throw new IllegalArgumentException
         ("All readers must have same numDocs: "+numDocs+"!="+reader.numDocs());

-    Collection fields = reader.getFieldNames(IndexReader.FieldOption.ALL);
+    Collection<String> fields = reader.getFieldNames(IndexReader.FieldOption.ALL);
     readerToFields.put(reader, fields);
     Iterator i = fields.iterator();
     while (i.hasNext()) {                         // update fieldToReader map
@@ -435,7 +435,7 @@ public class ParallelReader extends IndexReader {
     return (IndexReader[]) readers.toArray(new IndexReader[readers.size()]);
   }

-  protected void doCommit(Map commitUserData) throws IOException {
+  protected void doCommit(Map<String,String> commitUserData) throws IOException {
     for (int i = 0; i < readers.size(); i++)
       ((IndexReader)readers.get(i)).commit(commitUserData);
   }
@@ -450,12 +450,12 @@ public class ParallelReader extends IndexReader {
     }
   }

-  public Collection getFieldNames (IndexReader.FieldOption fieldNames) {
+  public Collection<String> getFieldNames (IndexReader.FieldOption fieldNames) {
     ensureOpen();
     Set fieldSet = new HashSet();
     for (int i = 0; i < readers.size(); i++) {
       IndexReader reader = ((IndexReader)readers.get(i));
-      Collection names = reader.getFieldNames(fieldNames);
+      Collection<String> names = reader.getFieldNames(fieldNames);
       fieldSet.addAll(names);
     }
     return fieldSet;
diff --git a/src/java/org/apache/lucene/index/SegmentInfo.java b/src/java/org/apache/lucene/index/SegmentInfo.java
index 0ce0b2f4491..ef4ef30722b 100644
--- a/src/java/org/apache/lucene/index/SegmentInfo.java
+++ b/src/java/org/apache/lucene/index/SegmentInfo.java
@@ -89,7 +89,7 @@ public final class SegmentInfo {

   private boolean hasProx;                        // True if this segment has any fields with omitTermFreqAndPositions==false

-  private Map diagnostics;
+  private Map<String,String> diagnostics;

   public String toString() {
     return "si: "+dir.toString()+" "+name+" docCount: "+docCount+" delCount: "+delCount+" delFileName: "+getDelFileName();
@@ -152,12 +152,12 @@ public final class SegmentInfo {
   }

   // must be Map<String, String>
-  void setDiagnostics(Map diagnostics) {
+  void setDiagnostics(Map<String, String> diagnostics) {
     this.diagnostics = diagnostics;
   }

   // returns Map<String, String>
-  public Map getDiagnostics() {
+  public Map<String, String> getDiagnostics() {
     return diagnostics;
   }
diff --git a/src/java/org/apache/lucene/index/SegmentReader.java b/src/java/org/apache/lucene/index/SegmentReader.java
index e5fe12bb0de..ff8a23baa18 100644
--- a/src/java/org/apache/lucene/index/SegmentReader.java
+++ b/src/java/org/apache/lucene/index/SegmentReader.java
@@ -792,7 +792,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
     return clone;
   }

-  protected void doCommit(Map commitUserData) throws IOException {
+  protected void doCommit(Map<String, String> commitUserData) throws IOException {
     if (hasChanges) {
       if (deletedDocsDirty) {               // re-write deleted
         si.advanceDelGen();
@@ -971,10 +971,10 @@ public class SegmentReader extends IndexReader implements Cloneable {
   /**
    * @see IndexReader#getFieldNames(IndexReader.FieldOption fldOption)
    */
-  public Collection getFieldNames(IndexReader.FieldOption fieldOption) {
+  public Collection<String> getFieldNames(IndexReader.FieldOption fieldOption) {
     ensureOpen();

-    Set fieldSet = new HashSet();
+    Set<String> fieldSet = new HashSet<String>();
     for (int i = 0; i < core.fieldInfos.size(); i++) {
       FieldInfo fi = core.fieldInfos.fieldInfo(i);
       if (fieldOption == IndexReader.FieldOption.ALL) {
diff --git a/src/java/org/apache/lucene/index/SegmentWriteState.java b/src/java/org/apache/lucene/index/SegmentWriteState.java
index d8b59cccf0c..47552e6c139 100644
--- a/src/java/org/apache/lucene/index/SegmentWriteState.java
+++ b/src/java/org/apache/lucene/index/SegmentWriteState.java
@@ -30,7 +30,7 @@ class SegmentWriteState {
   int numDocs;
   int termIndexInterval;
   int numDocsInStore;
-  Collection flushedFiles;
+  Collection<String> flushedFiles;

   public SegmentWriteState(DocumentsWriter docWriter, Directory directory, String segmentName,
                            String docStoreSegmentName, int numDocs,
                            int numDocsInStore, int termIndexInterval) {
@@ -41,7 +41,7 @@ class SegmentWriteState {
     this.numDocs = numDocs;
     this.numDocsInStore = numDocsInStore;
     this.termIndexInterval = termIndexInterval;
-    flushedFiles = new HashSet();
+    flushedFiles = new HashSet<String>();
   }

   public String segmentFileName(String ext) {
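With SegmentWriteState's flushedFiles typed as Collection<String>, the commit-related plumbing is type-checked end to end. A closing usage sketch against the generified API (dir is assumed to be a Directory holding an index; imports and exception handling are left to the caller):

    Collection<IndexCommit> commits = IndexReader.listCommits(dir);
    for (IndexCommit commit : commits) {
      Map<String, String> userData = commit.getUserData();
      System.out.println(commit.getSegmentsFileName() + " userData=" + userData);
    }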