mirror of https://github.com/apache/lucene.git
LUCENE-5036: Cleanup StoredFieldsProcessor & TermVectorsConsumer
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1490591 13f79535-47bb-0310-9956-ffa450edef68
commit 051e0fda3d (parent 1f2c01754f)
--- a/lucene/core/src/java/org/apache/lucene/index/StoredFieldsProcessor.java
+++ b/lucene/core/src/java/org/apache/lucene/index/StoredFieldsProcessor.java
@@ -18,11 +18,13 @@ package org.apache.lucene.index;
  */
 
 import java.io.IOException;
+import java.util.Arrays;
 
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.StoredFieldsWriter;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.util.ArrayUtil;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.RamUsageEstimator;
 
 /** This is a StoredFieldsConsumer that writes stored fields. */
@@ -32,8 +34,6 @@ final class StoredFieldsProcessor extends StoredFieldsConsumer {
   final DocumentsWriterPerThread docWriter;
   int lastDocID;
 
-  int freeCount;
-
   final DocumentsWriterPerThread.DocState docState;
   final Codec codec;
 
@@ -44,13 +44,13 @@ final class StoredFieldsProcessor extends StoredFieldsConsumer {
   }
 
   private int numStoredFields;
-  private StorableField[] storedFields;
-  private FieldInfo[] fieldInfos;
+  private StorableField[] storedFields = new StorableField[1];
+  private FieldInfo[] fieldInfos = new FieldInfo[1];
 
   public void reset() {
     numStoredFields = 0;
-    storedFields = new StorableField[1];
-    fieldInfos = new FieldInfo[1];
+    Arrays.fill(storedFields, null);
+    Arrays.fill(fieldInfos, null);
   }
 
   @Override
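Note on the reset() change above: the arrays are now pre-sized once and cleared with Arrays.fill() instead of being reallocated on every document, so capacity survives across docs while stale references are still released for GC. A minimal standalone sketch of the idiom (class and field names hypothetical, not the actual Lucene code):

    import java.util.Arrays;

    final class ReusableSlots {
      private Object[] slots = new Object[1]; // grown elsewhere as needed
      private int count;

      void add(Object o) {
        // ... grow slots if needed, then:
        slots[count++] = o;
      }

      void reset() {
        count = 0;
        Arrays.fill(slots, null); // release references without reallocating
      }
    }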
@@ -61,7 +61,6 @@ final class StoredFieldsProcessor extends StoredFieldsConsumer {
   @Override
   public void flush(SegmentWriteState state) throws IOException {
     int numDocs = state.segmentInfo.getDocCount();
-
     if (numDocs > 0) {
       // It's possible that all documents seen in this segment
       // hit non-aborting exceptions, in which case we will
@@ -69,14 +68,17 @@ final class StoredFieldsProcessor extends StoredFieldsConsumer {
       initFieldsWriter(state.context);
       fill(numDocs);
     }
 
     if (fieldsWriter != null) {
-      try {
-        fieldsWriter.finish(state.fieldInfos, numDocs);
-      } finally {
-        fieldsWriter.close();
-        fieldsWriter = null;
-        lastDocID = 0;
-      }
+      boolean success = false;
+      try {
+        fieldsWriter.finish(state.fieldInfos, numDocs);
+        success = true;
+      } finally {
+        if (success) {
+          IOUtils.close(fieldsWriter);
+        } else {
+          IOUtils.closeWhileHandlingException(fieldsWriter);
+        }
+      }
     }
   }
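Note on the flush() change above: the old code closed fieldsWriter unconditionally in the finally block, so an exception thrown by close() could mask the primary exception from finish(). The new success-flag idiom closes normally on success (reporting close() failures) and suppresses secondary exceptions on failure, letting the original exception propagate. A minimal sketch of the pattern (the helper is hypothetical; IOUtils.close and IOUtils.closeWhileHandlingException are real Lucene utilities):

    import java.io.Closeable;
    import java.io.IOException;
    import org.apache.lucene.util.IOUtils;

    final class CloseIdiom {
      static void runThenClose(Runnable work, Closeable resource) throws IOException {
        boolean success = false;
        try {
          work.run();      // may throw; stands in for fieldsWriter.finish(...)
          success = true;
        } finally {
          if (success) {
            IOUtils.close(resource);                       // report close() failures
          } else {
            IOUtils.closeWhileHandlingException(resource); // keep the primary exception
          }
        }
      }
    }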
@@ -88,7 +90,6 @@ final class StoredFieldsProcessor extends StoredFieldsConsumer {
     }
   }
 
-  int allocCount;
 
   @Override
   void abort() {
--- a/lucene/core/src/java/org/apache/lucene/index/TermVectorsConsumer.java
+++ b/lucene/core/src/java/org/apache/lucene/index/TermVectorsConsumer.java
@@ -18,6 +18,7 @@ package org.apache.lucene.index;
  */
 
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.Map;
 
 import org.apache.lucene.codecs.TermVectorsWriter;
@@ -32,9 +33,6 @@ final class TermVectorsConsumer extends TermsHashConsumer {
 
   TermVectorsWriter writer;
   final DocumentsWriterPerThread docWriter;
-  int freeCount;
-  int lastDocID;
-
   final DocumentsWriterPerThread.DocState docState;
   final BytesRef flushTerm = new BytesRef();
 
@@ -42,6 +40,9 @@ final class TermVectorsConsumer extends TermsHashConsumer {
   final ByteSliceReader vectorSliceReaderPos = new ByteSliceReader();
   final ByteSliceReader vectorSliceReaderOff = new ByteSliceReader();
   boolean hasVectors;
+  int numVectorFields;
+  int lastDocID;
+  private TermVectorsConsumerPerField[] perFields = new TermVectorsConsumerPerField[1];
 
   public TermVectorsConsumer(DocumentsWriterPerThread docWriter) {
     this.docWriter = docWriter;
@@ -52,6 +53,7 @@ final class TermVectorsConsumer extends TermsHashConsumer {
   void flush(Map<String, TermsHashConsumerPerField> fieldsToFlush, final SegmentWriteState state) throws IOException {
     if (writer != null) {
       int numDocs = state.segmentInfo.getDocCount();
+      assert numDocs > 0;
       // At least one doc in this run had term vectors enabled
       try {
         fill(numDocs);
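Note on the added assert: it documents the invariant that writer is only created once some document in the segment actually had term vectors, so numDocs must be positive here. Java asserts only execute when the JVM runs with -ea (as Lucene's test framework does), so the check costs nothing in production. A tiny standalone demo of that behavior (hypothetical class):

    // Run with `java -ea AssertDemo` to see the AssertionError;
    // without -ea the assert is skipped entirely at runtime.
    public class AssertDemo {
      public static void main(String[] args) {
        int numDocs = 0;
        assert numDocs > 0 : "flush with zero docs";
        System.out.println("assertions disabled (or numDocs > 0)");
      }
    }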
@@ -60,7 +62,6 @@ final class TermVectorsConsumer extends TermsHashConsumer {
       } finally {
         IOUtils.close(writer);
         writer = null;
-
         lastDocID = 0;
         hasVectors = false;
       }
@@ -130,16 +131,11 @@ final class TermVectorsConsumer extends TermsHashConsumer {
     }
 
     lastDocID = 0;
-
     reset();
   }
 
-  int numVectorFields;
-
-  TermVectorsConsumerPerField[] perFields;
-
   void reset() {
-    perFields = null; // don't hang onto stuff from previous doc
+    Arrays.fill(perFields, null); // don't hang onto stuff from previous doc
     numVectorFields = 0;
   }
 
@@ -149,9 +145,7 @@ final class TermVectorsConsumer extends TermsHashConsumer {
   }
 
   void addFieldToFlush(TermVectorsConsumerPerField fieldToFlush) {
-    if (perFields == null) {
-      perFields = new TermVectorsConsumerPerField[1];
-    } else if (numVectorFields == perFields.length) {
+    if (numVectorFields == perFields.length) {
       int newSize = ArrayUtil.oversize(numVectorFields + 1, RamUsageEstimator.NUM_BYTES_OBJECT_REF);
       TermVectorsConsumerPerField[] newArray = new TermVectorsConsumerPerField[newSize];
       System.arraycopy(perFields, 0, newArray, 0, numVectorFields);
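Note on the addFieldToFlush() change above: since perFields is now eagerly initialized to a length-1 array, the null branch disappears and only the growth path remains. A minimal sketch of the amortized-growth idiom it uses (class and field names hypothetical; ArrayUtil.oversize and RamUsageEstimator.NUM_BYTES_OBJECT_REF are real Lucene APIs):

    import org.apache.lucene.util.ArrayUtil;
    import org.apache.lucene.util.RamUsageEstimator;

    final class GrowingList {
      private Object[] items = new Object[1]; // eagerly initialized, never null
      private int size;

      void add(Object item) {
        if (size == items.length) {
          // oversize() picks a capacity comfortably above the minimum, so
          // repeated adds trigger only O(log n) reallocations overall.
          int newSize = ArrayUtil.oversize(size + 1, RamUsageEstimator.NUM_BYTES_OBJECT_REF);
          Object[] newArray = new Object[newSize];
          System.arraycopy(items, 0, newArray, 0, size);
          items = newArray;
        }
        items[size++] = item;
      }
    }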
@@ -177,10 +171,7 @@ final class TermVectorsConsumer extends TermsHashConsumer {
   String lastVectorFieldName;
   final boolean vectorFieldsInOrder(FieldInfo fi) {
     try {
-      if (lastVectorFieldName != null)
-        return lastVectorFieldName.compareTo(fi.name) < 0;
-      else
-        return true;
+      return lastVectorFieldName != null ? lastVectorFieldName.compareTo(fi.name) < 0 : true;
     } finally {
       lastVectorFieldName = fi.name;
     }
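Note on the vectorFieldsInOrder() simplification: both versions rely on try/finally semantics, where the return expression is evaluated before the finally block runs, so lastVectorFieldName can be updated without clobbering the pending comparison result. A small standalone demo of that ordering (hypothetical class):

    // The return value is computed, then finally runs, then the saved
    // value is returned. Run: javac Demo.java && java Demo
    public class Demo {
      private static String last;

      static boolean inOrder(String name) {
        try {
          return last == null || last.compareTo(name) < 0;
        } finally {
          last = name; // runs after the return expression is evaluated
        }
      }

      public static void main(String[] args) {
        System.out.println(inOrder("author"));   // true: first field seen
        System.out.println(inOrder("body"));     // true: "author" < "body"
        System.out.println(inOrder("abstract")); // false: "body" > "abstract"
      }
    }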