LUCENE-5027: remove DocConsumer.doAfterFlush and other dead code

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1488311 13f79535-47bb-0310-9956-ffa450edef68
Robert Muir 2013-05-31 16:59:33 +00:00
parent 7239a57a51
commit 850349987a
5 changed files with 4 additions and 21 deletions

lucene/core/src/java/org/apache/lucene/index/DocConsumer.java

@@ -24,5 +24,4 @@ abstract class DocConsumer {
   abstract void finishDocument() throws IOException;
   abstract void flush(final SegmentWriteState state) throws IOException;
   abstract void abort();
-  abstract void doAfterFlush();
 }

lucene/core/src/java/org/apache/lucene/index/DocFieldProcessor.java

@@ -144,15 +144,6 @@ final class DocFieldProcessor extends DocConsumer {
     return fields;
   }
 
-  /** In flush we reset the fieldHash to not maintain per-field state
-   *  across segments */
-  @Override
-  void doAfterFlush() {
-    fieldHash = new DocFieldProcessorPerField[2];
-    hashMask = 1;
-    totalFieldCount = 0;
-  }
-
   private void rehash() {
     final int newHashSize = (fieldHash.length*2);
     assert newHashSize > fieldHash.length;

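For context on the state the removed method was resetting: DocFieldProcessor keeps its per-field entries in a power-of-two chained hash table (fieldHash), indexes it with name.hashCode() & hashMask, and doubles the table in rehash() once it fills. Below is a minimal standalone sketch of that pattern; the FieldTable and Entry names and the load-factor threshold are illustrative assumptions, not the actual Lucene code.

    // Sketch of a power-of-two chained hash table like DocFieldProcessor's
    // fieldHash/hashMask pair; names and load factor are illustrative.
    final class FieldTable {
      static final class Entry {
        final String name;
        Entry next; // collision chain within a bucket
        Entry(String name) { this.name = name; }
      }

      private Entry[] buckets = new Entry[2]; // power of two, like fieldHash
      private int mask = 1;                   // buckets.length - 1, like hashMask
      private int count;

      Entry getOrAdd(String name) {
        final int slot = name.hashCode() & mask; // mask works because length is a power of two
        for (Entry e = buckets[slot]; e != null; e = e.next) {
          if (e.name.equals(name)) {
            return e;
          }
        }
        final Entry e = new Entry(name);
        e.next = buckets[slot];
        buckets[slot] = e;
        if (++count >= buckets.length / 2) {
          rehash(); // keep the load factor at or below 0.5
        }
        return e;
      }

      private void rehash() {
        final int newSize = buckets.length * 2; // doubling keeps the power-of-two invariant
        final int newMask = newSize - 1;
        final Entry[] newBuckets = new Entry[newSize];
        for (Entry bucket : buckets) {
          for (Entry e = bucket; e != null; ) {
            final Entry next = e.next;
            final int slot = e.name.hashCode() & newMask;
            e.next = newBuckets[slot];
            newBuckets[slot] = e;
            e = next;
          }
        }
        buckets = newBuckets;
        mask = newMask;
      }
    }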
lucene/core/src/java/org/apache/lucene/index/DocFieldProcessorPerField.java

@@ -17,12 +17,7 @@ package org.apache.lucene.index;
  * limitations under the License.
  */
 
-import java.util.HashMap;
-import java.util.Map;
 
 import org.apache.lucene.util.ArrayUtil;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.Counter;
-import org.apache.lucene.util.RamUsageEstimator;
 
 /**
@@ -33,19 +28,16 @@ final class DocFieldProcessorPerField {
 
   final DocFieldConsumerPerField consumer;
   final FieldInfo fieldInfo;
-  private final Counter bytesUsed;
 
   DocFieldProcessorPerField next;
   int lastGen = -1;
 
   int fieldCount;
   IndexableField[] fields = new IndexableField[1];
-  private final Map<FieldInfo,String> dvFields = new HashMap<FieldInfo,String>();
 
   public DocFieldProcessorPerField(final DocFieldProcessor docFieldProcessor, final FieldInfo fieldInfo) {
     this.consumer = docFieldProcessor.consumer.addField(fieldInfo);
     this.fieldInfo = fieldInfo;
-    this.bytesUsed = docFieldProcessor.bytesUsed;
   }
 
   public void addField(IndexableField field) {

lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java

@@ -428,7 +428,6 @@ class DocumentsWriterPerThread {
   /** Reset after a flush */
   private void doAfterFlush() {
     segmentInfo = null;
-    consumer.doAfterFlush();
     directory.getCreatedFiles().clear();
     fieldInfos = new FieldInfos.Builder(fieldInfos.globalFieldNumbers);
     parent.subtractFlushedNumDocs(numDocsInRAM);

lucene/core/src/java/org/apache/lucene/index/TermVectorsConsumer.java

@@ -139,8 +139,8 @@ final class TermVectorsConsumer extends TermsHashConsumer {
   TermVectorsConsumerPerField[] perFields;
 
   void reset() {
+    perFields = null; // don't hang onto stuff from previous doc
     numVectorFields = 0;
-    perFields = new TermVectorsConsumerPerField[1];
   }
 
   @Override
@@ -149,7 +149,9 @@ final class TermVectorsConsumer extends TermsHashConsumer {
   }
 
   void addFieldToFlush(TermVectorsConsumerPerField fieldToFlush) {
-    if (numVectorFields == perFields.length) {
+    if (perFields == null) {
+      perFields = new TermVectorsConsumerPerField[1];
+    } else if (numVectorFields == perFields.length) {
       int newSize = ArrayUtil.oversize(numVectorFields + 1, RamUsageEstimator.NUM_BYTES_OBJECT_REF);
       TermVectorsConsumerPerField[] newArray = new TermVectorsConsumerPerField[newSize];
       System.arraycopy(perFields, 0, newArray, 0, numVectorFields);
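Taken together, the two hunks above replace an eager allocation in reset() with lazy allocation in addFieldToFlush(): the per-document array is dropped after each document and only rebuilt when a field actually needs to be flushed, then grown with ArrayUtil.oversize once full. A standalone sketch of that pattern, using a String payload so it compiles on its own (the LazyList name is hypothetical; ArrayUtil.oversize and RamUsageEstimator.NUM_BYTES_OBJECT_REF are the real Lucene utilities used above):

    import org.apache.lucene.util.ArrayUtil;
    import org.apache.lucene.util.RamUsageEstimator;

    // Sketch of the lazy-init + oversized-growth pattern introduced above:
    // null the array on reset, allocate on first add, grow by oversizing.
    final class LazyList {
      private String[] items; // stays null between documents, like perFields
      private int count;

      void reset() {
        items = null; // don't hang onto stuff from the previous doc
        count = 0;
      }

      void add(String item) {
        if (items == null) {
          items = new String[1]; // first add after reset(): smallest possible array
        } else if (count == items.length) {
          // full: grow to an oversized length so repeated adds stay amortized O(1)
          final int newSize = ArrayUtil.oversize(count + 1, RamUsageEstimator.NUM_BYTES_OBJECT_REF);
          final String[] newArray = new String[newSize];
          System.arraycopy(items, 0, newArray, 0, count);
          items = newArray;
        }
        items[count++] = item;
      }
    }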