LUCENE-5036: Cleanup StoredFieldsProcessor & TermVectorsConsumer

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1490591 13f79535-47bb-0310-9956-ffa450edef68
commit 051e0fda3d (parent 1f2c01754f)
Author: Simon Willnauer
Date:   2013-06-07 11:31:03 +00:00

2 changed files with 24 additions and 32 deletions

org/apache/lucene/index/StoredFieldsProcessor.java

@@ -18,11 +18,13 @@ package org.apache.lucene.index;
  */
 import java.io.IOException;
+import java.util.Arrays;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.StoredFieldsWriter;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.util.ArrayUtil;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.RamUsageEstimator;
 /** This is a StoredFieldsConsumer that writes stored fields. */
@@ -32,8 +34,6 @@ final class StoredFieldsProcessor extends StoredFieldsConsumer {
   final DocumentsWriterPerThread docWriter;
   int lastDocID;
-  int freeCount;
   final DocumentsWriterPerThread.DocState docState;
   final Codec codec;
@@ -44,13 +44,13 @@ final class StoredFieldsProcessor extends StoredFieldsConsumer {
   }
   private int numStoredFields;
-  private StorableField[] storedFields;
-  private FieldInfo[] fieldInfos;
+  private StorableField[] storedFields = new StorableField[1];
+  private FieldInfo[] fieldInfos = new FieldInfo[1];
   public void reset() {
     numStoredFields = 0;
-    storedFields = new StorableField[1];
-    fieldInfos = new FieldInfo[1];
+    Arrays.fill(storedFields, null);
+    Arrays.fill(fieldInfos, null);
   }
   @Override
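The reset() rewrite above keeps the pre-sized storedFields and fieldInfos arrays and only nulls out their slots, instead of allocating fresh one-element arrays for every document. A minimal standalone sketch of that reuse-and-clear pattern; ReusableBuffer and its members are illustrative only, not Lucene API:

import java.util.Arrays;

// Illustrative holder that reuses its backing array across reset() calls
// instead of allocating a fresh array for every document.
final class ReusableBuffer {
  private Object[] items = new Object[1]; // pre-sized, grown on demand
  private int count;

  void add(Object item) {
    if (count == items.length) {
      items = Arrays.copyOf(items, 2 * items.length); // grow geometrically
    }
    items[count++] = item;
  }

  void reset() {
    // Keep the array (and any grown capacity), but null the used slots
    // so the referenced objects can be garbage collected.
    Arrays.fill(items, 0, count, null);
    count = 0;
  }
}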
@@ -61,7 +61,6 @@ final class StoredFieldsProcessor extends StoredFieldsConsumer {
   @Override
   public void flush(SegmentWriteState state) throws IOException {
     int numDocs = state.segmentInfo.getDocCount();
     if (numDocs > 0) {
       // It's possible that all documents seen in this segment
       // hit non-aborting exceptions, in which case we will
@@ -69,14 +68,17 @@ final class StoredFieldsProcessor extends StoredFieldsConsumer {
       initFieldsWriter(state.context);
       fill(numDocs);
     }
     if (fieldsWriter != null) {
-      try {
-        fieldsWriter.finish(state.fieldInfos, numDocs);
-      } finally {
-        fieldsWriter.close();
-        fieldsWriter = null;
-        lastDocID = 0;
+      boolean success = false;
+      try {
+        fieldsWriter.finish(state.fieldInfos, numDocs);
+        success = true;
+      } finally {
+        if (success) {
+          IOUtils.close(fieldsWriter);
+        } else {
+          IOUtils.closeWhileHandlingException(fieldsWriter);
+        }
       }
     }
   }
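The new flush() body separates the success and failure paths when closing the writer: if finish() throws, the close must not mask that exception with one of its own. A self-contained sketch of the idiom used in the hunk above, pulled out into a hypothetical helper (finishAndClose is not part of Lucene):

import java.io.IOException;

import org.apache.lucene.codecs.StoredFieldsWriter;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.util.IOUtils;

// Hypothetical helper showing the close idiom from the hunk above.
final class FlushSketch {
  static void finishAndClose(StoredFieldsWriter fieldsWriter, FieldInfos fieldInfos, int numDocs)
      throws IOException {
    boolean success = false;
    try {
      fieldsWriter.finish(fieldInfos, numDocs); // may throw; that exception must win
      success = true;
    } finally {
      if (success) {
        IOUtils.close(fieldsWriter);                       // propagate close failures
      } else {
        IOUtils.closeWhileHandlingException(fieldsWriter); // suppress close failures
      }
    }
  }
}

IOUtils.close() propagates any exception thrown while closing, while IOUtils.closeWhileHandlingException() suppresses it so the original failure from finish() is the one the caller sees.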
@@ -88,7 +90,6 @@ final class StoredFieldsProcessor extends StoredFieldsConsumer {
     }
   }
-  int allocCount;
   @Override
   void abort() {

org/apache/lucene/index/TermVectorsConsumer.java

@@ -18,6 +18,7 @@ package org.apache.lucene.index;
  */
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.Map;
 import org.apache.lucene.codecs.TermVectorsWriter;
@@ -32,9 +33,6 @@ final class TermVectorsConsumer extends TermsHashConsumer {
   TermVectorsWriter writer;
   final DocumentsWriterPerThread docWriter;
-  int freeCount;
-  int lastDocID;
   final DocumentsWriterPerThread.DocState docState;
   final BytesRef flushTerm = new BytesRef();
@@ -42,6 +40,9 @@ final class TermVectorsConsumer extends TermsHashConsumer {
   final ByteSliceReader vectorSliceReaderPos = new ByteSliceReader();
   final ByteSliceReader vectorSliceReaderOff = new ByteSliceReader();
   boolean hasVectors;
+  int numVectorFields;
+  int lastDocID;
+  private TermVectorsConsumerPerField[] perFields = new TermVectorsConsumerPerField[1];
   public TermVectorsConsumer(DocumentsWriterPerThread docWriter) {
     this.docWriter = docWriter;
@@ -52,6 +53,7 @@ final class TermVectorsConsumer extends TermsHashConsumer {
   void flush(Map<String, TermsHashConsumerPerField> fieldsToFlush, final SegmentWriteState state) throws IOException {
     if (writer != null) {
       int numDocs = state.segmentInfo.getDocCount();
+      assert numDocs > 0;
       // At least one doc in this run had term vectors enabled
       try {
         fill(numDocs);
@@ -60,7 +62,6 @@ final class TermVectorsConsumer extends TermsHashConsumer {
       } finally {
         IOUtils.close(writer);
         writer = null;
         lastDocID = 0;
         hasVectors = false;
       }
@@ -130,16 +131,11 @@ final class TermVectorsConsumer extends TermsHashConsumer {
     }
     lastDocID = 0;
     reset();
   }
-  int numVectorFields;
-  TermVectorsConsumerPerField[] perFields;
   void reset() {
-    perFields = null; // don't hang onto stuff from previous doc
+    Arrays.fill(perFields, null); // don't hang onto stuff from previous doc
     numVectorFields = 0;
   }
@@ -149,9 +145,7 @@ final class TermVectorsConsumer extends TermsHashConsumer {
   }
   void addFieldToFlush(TermVectorsConsumerPerField fieldToFlush) {
-    if (perFields == null) {
-      perFields = new TermVectorsConsumerPerField[1];
-    } else if (numVectorFields == perFields.length) {
+    if (numVectorFields == perFields.length) {
       int newSize = ArrayUtil.oversize(numVectorFields + 1, RamUsageEstimator.NUM_BYTES_OBJECT_REF);
       TermVectorsConsumerPerField[] newArray = new TermVectorsConsumerPerField[newSize];
       System.arraycopy(perFields, 0, newArray, 0, numVectorFields);
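With perFields now pre-allocated to length one, the null branch is dead code and only the grow-on-demand branch remains. ArrayUtil.oversize() returns a length with geometric headroom, computed from the given per-element byte width, so repeated adds stay amortized constant time. A small sketch of that growth pattern; GrowableRefArray is illustrative, not a Lucene class:

import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;

// Illustrative grow-on-demand array using ArrayUtil.oversize().
final class GrowableRefArray {
  private String[] items = new String[1]; // start small
  private int size;

  void add(String item) {
    if (size == items.length) {
      // oversize() picks a new length with geometric headroom, based on
      // the per-element size (here: one object reference).
      int newLength = ArrayUtil.oversize(size + 1, RamUsageEstimator.NUM_BYTES_OBJECT_REF);
      String[] newItems = new String[newLength];
      System.arraycopy(items, 0, newItems, 0, size);
      items = newItems;
    }
    items[size++] = item;
  }
}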
@@ -177,10 +171,7 @@ final class TermVectorsConsumer extends TermsHashConsumer {
   String lastVectorFieldName;
   final boolean vectorFieldsInOrder(FieldInfo fi) {
     try {
-      if (lastVectorFieldName != null)
-        return lastVectorFieldName.compareTo(fi.name) < 0;
-      else
-        return true;
+      return lastVectorFieldName != null ? lastVectorFieldName.compareTo(fi.name) < 0 : true;
     } finally {
       lastVectorFieldName = fi.name;
     }
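The collapsed return works because Java evaluates the return expression before the finally block runs: the comparison still sees the previous lastVectorFieldName even though finally immediately overwrites it. A tiny standalone illustration of that ordering; the class and field names are hypothetical:

// Illustrative only: shows that the return expression is evaluated
// before the finally block overwrites the field it reads.
final class TryFinallyOrder {
  private String last;

  boolean inOrder(String name) {
    try {
      // 'last' still holds the previous value when this is evaluated.
      return last != null ? last.compareTo(name) < 0 : true;
    } finally {
      // Runs after the return value has been computed.
      last = name;
    }
  }

  public static void main(String[] args) {
    TryFinallyOrder o = new TryFinallyOrder();
    System.out.println(o.inOrder("author")); // true (first call)
    System.out.println(o.inOrder("body"));   // true ("author" < "body")
    System.out.println(o.inOrder("body"));   // false (not strictly increasing)
  }
}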