LUCENE-3707: Add a Lucene3x private SegmentInfosFormat implementation

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1233380 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Simon Willnauer 2012-01-19 14:06:10 +00:00
parent fc0b7cfb59
commit 47f474735f
12 changed files with 405 additions and 121 deletions

View File

@ -30,9 +30,7 @@ import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.SegmentInfosFormat;
import org.apache.lucene.codecs.StoredFieldsFormat;
import org.apache.lucene.codecs.TermVectorsFormat;
import org.apache.lucene.codecs.lucene40.Lucene40SegmentInfosFormat;
import org.apache.lucene.codecs.lucene40.Lucene40StoredFieldsFormat;
import org.apache.lucene.codecs.lucene40.Lucene40TermVectorsFormat;
import org.apache.lucene.index.PerDocWriteState;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.index.SegmentReadState;
@ -55,12 +53,8 @@ public class Lucene3xCodec extends Codec {
private final FieldInfosFormat fieldInfosFormat = new Lucene3xFieldInfosFormat();
// TODO: this should really be a different impl
// also if we want preflex to *really* be read-only it should throw exception for the writer?
// this way IR.commit fails on delete/undelete/setNorm/etc ?
private final SegmentInfosFormat infosFormat = new Lucene40SegmentInfosFormat();
private final SegmentInfosFormat infosFormat = new Lucene3xSegmentInfosFormat();
// TODO: this should really be a different impl
private final NormsFormat normsFormat = new Lucene3xNormsFormat();
// 3.x doesn't support docvalues

View File

@ -27,8 +27,12 @@ import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.Directory;
/**
* Lucene3x ReadOnly FieldInfosFormat implementation
* @deprecated (4.0) This is only used to read indexes created
* before 4.0.
* @lucene.experimental
*/
@Deprecated
public class Lucene3xFieldInfosFormat extends FieldInfosFormat {
private final FieldInfosReader reader = new Lucene3xFieldInfosReader();
@ -39,7 +43,7 @@ public class Lucene3xFieldInfosFormat extends FieldInfosFormat {
@Override
public FieldInfosWriter getFieldInfosWriter() throws IOException {
throw new IllegalArgumentException("this codec can only be used for reading");
throw new UnsupportedOperationException("this codec can only be used for reading");
}
@Override

View File

@ -29,10 +29,12 @@ import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.store.Directory;
/**
* Read-Only Lucene 3.x Norms Format
*
* Lucene3x ReadOnly NormsFormat implementation
* @deprecated (4.0) This is only used to read indexes created
* before 4.0.
* @lucene.experimental
*/
@Deprecated
public class Lucene3xNormsFormat extends NormsFormat {
@ -49,7 +51,7 @@ public class Lucene3xNormsFormat extends NormsFormat {
@Override
public PerDocConsumer docsConsumer(PerDocWriteState state) throws IOException {
throw new IllegalArgumentException("this codec can only be used for reading");
throw new UnsupportedOperationException("this codec can only be used for reading");
}
@Override

View File

@ -57,7 +57,7 @@ public class Lucene3xPostingsFormat extends PostingsFormat {
@Override
public FieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
throw new IllegalArgumentException("this codec can only be used for reading");
throw new UnsupportedOperationException("this codec can only be used for reading");
}
@Override

View File

@ -0,0 +1,43 @@
package org.apache.lucene.codecs.lucene3x;

import org.apache.lucene.codecs.SegmentInfosFormat;
import org.apache.lucene.codecs.SegmentInfosReader;
import org.apache.lucene.codecs.SegmentInfosWriter;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Read-only Lucene 3.x {@link SegmentInfosFormat}: supplies a reader for
 * pre-4.0 segments files and rejects any attempt to obtain a writer.
 *
 * @deprecated (4.0) This is only used to read indexes created
 * before 4.0.
 * @lucene.experimental
 */
@Deprecated
public class Lucene3xSegmentInfosFormat extends SegmentInfosFormat {

  // single shared reader instance; the reader itself is stateless
  private final SegmentInfosReader infosReader = new Lucene3xSegmentInfosReader();

  @Override
  public SegmentInfosReader getSegmentInfosReader() {
    return infosReader;
  }

  /** Always throws: this codec never writes the 3.x segments format. */
  @Override
  public SegmentInfosWriter getSegmentInfosWriter() {
    throw new UnsupportedOperationException("this codec can only be used for reading");
  }
}

View File

@ -0,0 +1,170 @@
package org.apache.lucene.codecs.lucene3x;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.SegmentInfosReader;
// TODO we need a 3x version of stored fields reader
import org.apache.lucene.codecs.lucene40.Lucene40StoredFieldsReader;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexFormatTooOldException;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.CompoundFileDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;

/**
 * Lucene 3x implementation of {@link SegmentInfosReader}: decodes the
 * pre-4.0 on-disk segments_N layout (version, counter, per-segment records,
 * user data) into a {@link SegmentInfos}.
 * @lucene.experimental
 */
public class Lucene3xSegmentInfosReader extends SegmentInfosReader {

  @Override
  public void read(Directory directory, String segmentsFileName, ChecksumIndexInput input, SegmentInfos infos, IOContext context) throws IOException {
    infos.version = input.readLong(); // read version
    infos.counter = input.readInt(); // read counter
    final int format = infos.getFormat();
    for (int i = input.readInt(); i > 0; i--) { // read segmentInfos
      SegmentInfo si = readSegmentInfo(directory, format, input);
      if (si.getVersion() == null) {
        // Could be a 3.0 - try to open the doc stores - if it fails, it's a
        // 2.x segment, and an IndexFormatTooOldException will be thrown,
        // which is what we want.
        Directory dir = directory;
        if (si.getDocStoreOffset() != -1) {
          if (si.getDocStoreIsCompoundFile()) {
            dir = new CompoundFileDirectory(dir, IndexFileNames.segmentFileName(
                si.getDocStoreSegment(), "",
                IndexFileNames.COMPOUND_FILE_STORE_EXTENSION), context, false);
          }
        } else if (si.getUseCompoundFile()) {
          dir = new CompoundFileDirectory(dir, IndexFileNames.segmentFileName(
              si.name, "", IndexFileNames.COMPOUND_FILE_EXTENSION), context, false);
        }

        try {
          Lucene40StoredFieldsReader.checkCodeVersion(dir, si.getDocStoreSegment());
        } finally {
          // If we opened the directory, close it
          if (dir != directory) dir.close();
        }

        // Above call succeeded, so it's a 3.0 segment. Upgrade it so the next
        // time the segment is read, its version won't be null and we won't
        // need to open FieldsReader every time for each such segment.
        si.setVersion("3.0");
      } else if (si.getVersion().equals("2.x")) {
        // If it's a 3x index touched by 3.1+ code, then segments record their
        // version, whether they are 2.x ones or not. We detect that and throw
        // appropriate exception.
        throw new IndexFormatTooOldException("segment " + si.name + " in resource " + input, si.getVersion());
      }
      infos.add(si);
    }
    infos.userData = input.readStringStringMap();
  }

  // if we make a preflex impl we can remove a lot of this hair...
  /**
   * Reads one per-segment record from {@code input} in the exact pre-4.0
   * field order: version (3.1+ only), name, docCount, delGen, doc-store
   * fields, norms byte + norm gens, CFS flag, delCount, hasProx,
   * diagnostics, and (for formats older than FORMAT_HAS_VECTORS) a manual
   * probe of the directory for a term-vectors index file.
   */
  public SegmentInfo readSegmentInfo(Directory dir, int format, ChecksumIndexInput input) throws IOException {
    final String version;
    if (format <= SegmentInfos.FORMAT_3_1) {
      // 3.1+ segments record the Lucene version that created them
      version = input.readString();
    } else {
      version = null;
    }
    final String name = input.readString();
    final int docCount = input.readInt();
    final long delGen = input.readLong();
    final int docStoreOffset = input.readInt();
    final String docStoreSegment;
    final boolean docStoreIsCompoundFile;
    if (docStoreOffset != -1) {
      // shared doc store: stored fields/vectors live in another segment's files
      docStoreSegment = input.readString();
      docStoreIsCompoundFile = input.readByte() == SegmentInfo.YES;
    } else {
      docStoreSegment = name;
      docStoreIsCompoundFile = false;
    }
    // pre-4.0 indexes write a byte if there is a single norms file
    byte b = input.readByte();
    assert 1 == b : "expected 1 but was: "+ b + " format: " + format;
    final int numNormGen = input.readInt();
    final Map<Integer,Long> normGen;
    if (numNormGen == SegmentInfo.NO) {
      normGen = null;
    } else {
      normGen = new HashMap<Integer, Long>();
      // pre-4.0 norm gens are positional: entry j belongs to field number j
      for(int j=0;j<numNormGen;j++) {
        normGen.put(j, input.readLong());
      }
    }
    final boolean isCompoundFile = input.readByte() == SegmentInfo.YES;
    final int delCount = input.readInt();
    assert delCount <= docCount;
    final int hasProx = input.readByte();
    final Codec codec = Codec.forName("Lucene3x");
    final Map<String,String> diagnostics = input.readStringStringMap();
    final int hasVectors;
    if (format <= SegmentInfos.FORMAT_HAS_VECTORS) {
      hasVectors = input.readByte();
    } else {
      // Older formats don't record hasVectors; probe the (possibly compound)
      // store directory for the vectors index file instead.
      final String storesSegment;
      final String ext;
      final boolean storeIsCompoundFile;
      if (docStoreOffset != -1) {
        storesSegment = docStoreSegment;
        storeIsCompoundFile = docStoreIsCompoundFile;
        ext = IndexFileNames.COMPOUND_FILE_STORE_EXTENSION;
      } else {
        storesSegment = name;
        storeIsCompoundFile = isCompoundFile;
        ext = IndexFileNames.COMPOUND_FILE_EXTENSION;
      }
      final Directory dirToTest;
      if (storeIsCompoundFile) {
        dirToTest = new CompoundFileDirectory(dir, IndexFileNames.segmentFileName(storesSegment, "", ext), IOContext.READONCE, false);
      } else {
        dirToTest = dir;
      }
      try {
        hasVectors = dirToTest.fileExists(IndexFileNames.segmentFileName(storesSegment, "", Lucene3xTermVectorsReader.VECTORS_INDEX_EXTENSION)) ? SegmentInfo.YES : SegmentInfo.NO;
      } finally {
        // BUGFIX: close iff we opened a CompoundFileDirectory above, i.e. when
        // storeIsCompoundFile (not isCompoundFile). The old check leaked the
        // CFS reader when the doc store was compound but the segment wasn't,
        // and wrongly closed the caller's directory in the opposite case.
        if (storeIsCompoundFile) {
          dirToTest.close();
        }
      }
    }
    return new SegmentInfo(dir, version, name, docCount, delGen, docStoreOffset,
        docStoreSegment, docStoreIsCompoundFile, normGen, isCompoundFile,
        delCount, hasProx, codec, diagnostics, hasVectors);
  }
}

View File

@ -28,6 +28,13 @@ import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
/**
* Lucene3x ReadOnly TermVectorsFormat implementation
* @deprecated (4.0) This is only used to read indexes created
* before 4.0.
* @lucene.experimental
*/
@Deprecated
public class Lucene3xTermVectorsFormat extends TermVectorsFormat {
@Override
@ -37,8 +44,7 @@ public class Lucene3xTermVectorsFormat extends TermVectorsFormat {
@Override
public TermVectorsWriter vectorsWriter(Directory directory, String segment, IOContext context) throws IOException {
// TODO all these IAEs in preflex should be UOEs?
throw new IllegalArgumentException("this codec can only be used for reading");
throw new UnsupportedOperationException("this codec can only be used for reading");
}
@Override

View File

@ -23,13 +23,10 @@ import java.util.Map;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.SegmentInfosReader;
import org.apache.lucene.codecs.lucene40.Lucene40TermVectorsReader;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexFormatTooOldException;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.CompoundFileDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
@ -39,88 +36,37 @@ import org.apache.lucene.store.IOContext;
*/
public class Lucene40SegmentInfosReader extends SegmentInfosReader {
// TODO: shove all backwards code to preflex!
// this is a little tricky, because of IR.commit(), two options:
// 1. PreFlex writes 4.x SIS format, but reads both 3.x and 4.x
// (and maybe RW always only writes the 3.x one? for that to work well,
// we have to move .fnx file to codec too, not too bad but more work).
// or we just have crappier RW testing like today.
// 2. PreFlex writes 3.x SIS format, and only reads 3.x
// (in this case we have to move .fnx file to codec as well)
@Override
public void read(Directory directory, String segmentsFileName, ChecksumIndexInput input, SegmentInfos infos, IOContext context) throws IOException {
infos.version = input.readLong(); // read version
infos.counter = input.readInt(); // read counter
final int format = infos.getFormat();
assert format <= SegmentInfos.FORMAT_4_0;
for (int i = input.readInt(); i > 0; i--) { // read segmentInfos
SegmentInfo si = readSegmentInfo(directory, format, input);
if (si.getVersion() == null) {
// Could be a 3.0 - try to open the doc stores - if it fails, it's a
// 2.x segment, and an IndexFormatTooOldException will be thrown,
// which is what we want.
Directory dir = directory;
if (si.getDocStoreOffset() != -1) {
if (si.getDocStoreIsCompoundFile()) {
dir = new CompoundFileDirectory(dir, IndexFileNames.segmentFileName(
si.getDocStoreSegment(), "",
IndexFileNames.COMPOUND_FILE_STORE_EXTENSION), context, false);
}
} else if (si.getUseCompoundFile()) {
dir = new CompoundFileDirectory(dir, IndexFileNames.segmentFileName(
si.name, "", IndexFileNames.COMPOUND_FILE_EXTENSION), context, false);
}
try {
Lucene40StoredFieldsReader.checkCodeVersion(dir, si.getDocStoreSegment());
} finally {
// If we opened the directory, close it
if (dir != directory) dir.close();
}
// Above call succeeded, so it's a 3.0 segment. Upgrade it so the next
// time the segment is read, its version won't be null and we won't
// need to open FieldsReader every time for each such segment.
si.setVersion("3.0");
} else if (si.getVersion().equals("2.x")) {
// If it's a 3x index touched by 3.1+ code, then segments record their
// version, whether they are 2.x ones or not. We detect that and throw
// appropriate exception.
throw new IndexFormatTooOldException("segment " + si.name + " in resource " + input, si.getVersion());
}
assert si.getVersion() != null;
infos.add(si);
}
infos.userData = input.readStringStringMap();
}
// if we make a preflex impl we can remove a lot of this hair...
public SegmentInfo readSegmentInfo(Directory dir, int format, ChecksumIndexInput input) throws IOException {
final String version;
if (format <= SegmentInfos.FORMAT_3_1) {
version = input.readString();
} else {
version = null;
}
final String version = input.readString();
final String name = input.readString();
final int docCount = input.readInt();
final long delGen = input.readLong();
// this is still written in 4.0 if we open a 3.x and upgrade the SI
final int docStoreOffset = input.readInt();
final String docStoreSegment;
final boolean docStoreIsCompoundFile;
if (docStoreOffset != -1) {
if (docStoreOffset != -1) {
docStoreSegment = input.readString();
docStoreIsCompoundFile = input.readByte() == SegmentInfo.YES;
} else {
docStoreSegment = name;
docStoreIsCompoundFile = false;
}
if (format > SegmentInfos.FORMAT_4_0) {
// pre-4.0 indexes write a byte if there is a single norms file
byte b = input.readByte();
assert 1 == b;
}
final int numNormGen = input.readInt();
final Map<Integer,Long> normGen;
if (numNormGen == SegmentInfo.NO) {
@ -128,61 +74,17 @@ public class Lucene40SegmentInfosReader extends SegmentInfosReader {
} else {
normGen = new HashMap<Integer, Long>();
for(int j=0;j<numNormGen;j++) {
int fieldNumber = j;
if (format <= SegmentInfos.FORMAT_4_0) {
fieldNumber = input.readInt();
}
normGen.put(fieldNumber, input.readLong());
normGen.put(input.readInt(), input.readLong());
}
}
final boolean isCompoundFile = input.readByte() == SegmentInfo.YES;
final int delCount = input.readInt();
assert delCount <= docCount;
final int hasProx = input.readByte();
final Codec codec;
// note: if the codec is not available: Codec.forName will throw an exception.
if (format <= SegmentInfos.FORMAT_4_0) {
codec = Codec.forName(input.readString());
} else {
codec = Codec.forName("Lucene3x");
}
final Codec codec = Codec.forName(input.readString());
final Map<String,String> diagnostics = input.readStringStringMap();
final int hasVectors;
if (format <= SegmentInfos.FORMAT_HAS_VECTORS) {
hasVectors = input.readByte();
} else {
final String storesSegment;
final String ext;
final boolean storeIsCompoundFile;
if (docStoreOffset != -1) {
storesSegment = docStoreSegment;
storeIsCompoundFile = docStoreIsCompoundFile;
ext = IndexFileNames.COMPOUND_FILE_STORE_EXTENSION;
} else {
storesSegment = name;
storeIsCompoundFile = isCompoundFile;
ext = IndexFileNames.COMPOUND_FILE_EXTENSION;
}
final Directory dirToTest;
if (storeIsCompoundFile) {
dirToTest = new CompoundFileDirectory(dir, IndexFileNames.segmentFileName(storesSegment, "", ext), IOContext.READONCE, false);
} else {
dirToTest = dir;
}
try {
// TODO: remove this manual file check or push to preflex codec
hasVectors = dirToTest.fileExists(IndexFileNames.segmentFileName(storesSegment, "", Lucene40TermVectorsReader.VECTORS_INDEX_EXTENSION)) ? SegmentInfo.YES : SegmentInfo.NO;
} finally {
if (isCompoundFile) {
dirToTest.close();
}
}
}
final int hasVectors = input.readByte();
return new SegmentInfo(dir, version, name, docCount, delGen, docStoreOffset,
docStoreSegment, docStoreIsCompoundFile, normGen, isCompoundFile,

View File

@ -43,6 +43,10 @@ public class Lucene40SegmentInfosWriter extends SegmentInfosWriter {
IndexOutput out = createOutput(dir, segmentFileName, new IOContext(new FlushInfo(infos.size(), infos.totalDocCount())));
boolean success = false;
try {
/*
* TODO its not ideal that we write the format and the codecID inside the
* codec private classes but we read it in SegmentInfos.
*/
out.writeInt(SegmentInfos.FORMAT_CURRENT); // write FORMAT
out.writeString(codecID); // write codecID
out.writeLong(infos.version);
@ -69,7 +73,7 @@ public class Lucene40SegmentInfosWriter extends SegmentInfosWriter {
output.writeString(si.name);
output.writeInt(si.docCount);
output.writeLong(si.getDelGen());
// we still need to write this in 4.0 since we can open a 3.x with shared docStores
output.writeInt(si.getDocStoreOffset());
if (si.getDocStoreOffset() != -1) {
output.writeString(si.getDocStoreSegment());

View File

@ -20,6 +20,7 @@ package org.apache.lucene.codecs.preflexrw;
import org.apache.lucene.codecs.FieldInfosFormat;
import org.apache.lucene.codecs.NormsFormat;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.SegmentInfosFormat;
import org.apache.lucene.codecs.TermVectorsFormat;
import org.apache.lucene.codecs.lucene3x.Lucene3xCodec;
import org.apache.lucene.util.LuceneTestCase;
@ -33,6 +34,7 @@ public class PreFlexRWCodec extends Lucene3xCodec {
private final NormsFormat norms = new PreFlexRWNormsFormat();
private final FieldInfosFormat fieldInfos = new PreFlexRWFieldInfosFormat();
private final TermVectorsFormat termVectors = new PreFlexRWTermVectorsFormat();
private final SegmentInfosFormat segmentInfos = new PreFlexRWSegmentInfosFormat();
@Override
public PostingsFormat postingsFormat() {
@ -52,6 +54,15 @@ public class PreFlexRWCodec extends Lucene3xCodec {
}
}
@Override
public SegmentInfosFormat segmentInfosFormat() {
if (LuceneTestCase.PREFLEX_IMPERSONATION_IS_ACTIVE) {
return segmentInfos ;
} else {
return super.segmentInfosFormat();
}
}
@Override
public FieldInfosFormat fieldInfosFormat() {
if (LuceneTestCase.PREFLEX_IMPERSONATION_IS_ACTIVE) {

View File

@ -0,0 +1,33 @@
package org.apache.lucene.codecs.preflexrw;

import org.apache.lucene.codecs.SegmentInfosWriter;
import org.apache.lucene.codecs.lucene3x.Lucene3xSegmentInfosFormat;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Test-only read-write variant of {@link Lucene3xSegmentInfosFormat}: keeps
 * the 3.x reader but overrides the writer so 3.x-format segments files can
 * be produced for testing.
 * @lucene.experimental
 */
public class PreFlexRWSegmentInfosFormat extends Lucene3xSegmentInfosFormat {

  // stateless writer, safe to share across calls
  private final SegmentInfosWriter infosWriter = new PreFlexRWSegmentInfosWriter();

  @Override
  public SegmentInfosWriter getSegmentInfosWriter() {
    return infosWriter;
  }
}

View File

@ -0,0 +1,115 @@
package org.apache.lucene.codecs.preflexrw;
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import java.io.IOException;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.lucene.codecs.SegmentInfosWriter;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.ChecksumIndexOutput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FlushInfo;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.IOUtils;
/**
 * PreFlex implementation of {@link SegmentInfosWriter}: writes segments
 * files in the legacy 3.x layout (FORMAT_3_1, no codec name) so that the
 * read-only 3.x codec can be exercised by tests. The field order in
 * writeInfo must mirror the 3.x reader exactly.
 * @lucene.experimental
 */
public class PreFlexRWSegmentInfosWriter extends SegmentInfosWriter {
// Writes the segments_N header and one record per segment, returning the
// still-open (checksumming) output so prepareCommit/finishCommit can
// finalize it. On failure the output is closed without committing.
@Override
public IndexOutput writeInfos(Directory dir, String segmentFileName, String codecID, SegmentInfos infos, IOContext context)
throws IOException {
IndexOutput out = createOutput(dir, segmentFileName, new IOContext(new FlushInfo(infos.size(), infos.totalDocCount())));
boolean success = false;
try {
out.writeInt(SegmentInfos.FORMAT_3_1); // write FORMAT
// we don't write a codec - this is 3.x
out.writeLong(infos.version);
out.writeInt(infos.counter); // write counter
out.writeInt(infos.size()); // write infos
for (SegmentInfo si : infos) {
writeInfo(out, si);
}
out.writeStringStringMap(infos.getUserData());
success = true;
return out;
} finally {
if (!success) {
// don't leak a half-written file handle; swallow secondary close errors
IOUtils.closeWhileHandlingException(out);
}
}
}
/** Save a single segment's info. */
private void writeInfo(IndexOutput output, SegmentInfo si) throws IOException {
assert si.getDelCount() <= si.docCount: "delCount=" + si.getDelCount() + " docCount=" + si.docCount + " segment=" + si.name;
// Write the Lucene version that created this segment, since 3.1
output.writeString(si.getVersion());
output.writeString(si.name);
output.writeInt(si.docCount);
output.writeLong(si.getDelGen());
output.writeInt(si.getDocStoreOffset());
if (si.getDocStoreOffset() != -1) {
// shared doc store: record which segment owns the store and its CFS flag
output.writeString(si.getDocStoreSegment());
output.writeByte((byte) (si.getDocStoreIsCompoundFile() ? 1:0));
}
// pre-4.0 indexes write a byte if there is a single norms file
output.writeByte((byte) 1);
Map<Integer,Long> normGen = si.getNormGen();
if (normGen == null) {
output.writeInt(SegmentInfo.NO);
} else {
output.writeInt(normGen.size());
// NOTE(review): entries are written in map-iteration order with no field
// number, but the 3.x reader assigns gens positionally (field j = j-th
// long). This presumably relies on normGen keys being dense 0..n-1 and a
// stable iteration order — TODO confirm against the reader/callers.
for (Entry<Integer,Long> entry : normGen.entrySet()) {
output.writeLong(entry.getValue());
}
}
output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO));
output.writeInt(si.getDelCount());
output.writeByte((byte) (si.getHasProxInternal()));
output.writeStringStringMap(si.getDiagnostics());
output.writeByte((byte) (si.getHasVectorsInternal()));
}
// Wraps the raw output in a ChecksumIndexOutput so the commit protocol
// below can write/verify the trailing checksum.
protected IndexOutput createOutput(Directory dir, String segmentFileName, IOContext context)
throws IOException {
IndexOutput plainOut = dir.createOutput(segmentFileName, context);
ChecksumIndexOutput out = new ChecksumIndexOutput(plainOut);
return out;
}
// Two-phase commit: prepareCommit stages the checksum, finishCommit writes
// it and closes the file. Both expect the output created by writeInfos.
@Override
public void prepareCommit(IndexOutput segmentOutput) throws IOException {
((ChecksumIndexOutput)segmentOutput).prepareCommit();
}
@Override
public void finishCommit(IndexOutput out) throws IOException {
((ChecksumIndexOutput)out).finishCommit();
out.close();
}
}