mirror of https://github.com/apache/lucene.git

LUCENE-5216: Fix SegmentInfo.attributes when updates are involved

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1527391 13f79535-47bb-0310-9956-ffa450edef68

parent c534c0b3b3
commit f6ae77fc1e
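The core API change in this commit: SegmentInfo no longer carries a per-segment attributes map. The constructor drops its trailing Map<String,String> attributes argument, and getAttribute/putAttribute/attributes() are removed from SegmentInfo (see the SegmentInfo.java hunks below). The following is a minimal, illustrative sketch of the calling-side update; the directory, segment name and doc count are placeholder values, not taken from this commit.

import java.util.Collections;
import java.util.HashSet;
import java.util.Map;

import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Constants;

public class SegmentInfoConstructorChange {
  public static void main(String[] args) {
    Directory dir = new RAMDirectory();              // placeholder directory for the sketch
    Codec codec = Codec.getDefault();
    Map<String,String> diagnostics = Collections.emptyMap();

    // Before this commit (no longer compiles): attributes were the last constructor argument.
    // SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, "_0", 1000,
    //                                  false, codec, diagnostics, Collections.<String,String>emptyMap());

    // After this commit: the attributes parameter is gone.
    SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, "_0", 1000,
                                     false, codec, diagnostics);
    si.setFiles(new HashSet<String>());
    System.out.println(si);
  }
}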
@@ -17,8 +17,16 @@ package org.apache.lucene.codecs.simpletext;
  * limitations under the License.
  */
 
+import static org.apache.lucene.codecs.simpletext.SimpleTextSegmentInfoWriter.SI_DIAG_KEY;
+import static org.apache.lucene.codecs.simpletext.SimpleTextSegmentInfoWriter.SI_DIAG_VALUE;
+import static org.apache.lucene.codecs.simpletext.SimpleTextSegmentInfoWriter.SI_DOCCOUNT;
+import static org.apache.lucene.codecs.simpletext.SimpleTextSegmentInfoWriter.SI_FILE;
+import static org.apache.lucene.codecs.simpletext.SimpleTextSegmentInfoWriter.SI_NUM_DIAG;
+import static org.apache.lucene.codecs.simpletext.SimpleTextSegmentInfoWriter.SI_NUM_FILES;
+import static org.apache.lucene.codecs.simpletext.SimpleTextSegmentInfoWriter.SI_USECOMPOUND;
+import static org.apache.lucene.codecs.simpletext.SimpleTextSegmentInfoWriter.SI_VERSION;
+
 import java.io.IOException;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
@@ -34,8 +42,6 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.StringHelper;
 
-import static org.apache.lucene.codecs.simpletext.SimpleTextSegmentInfoWriter.*;
-
 /**
  * reads plaintext segments files
  * <p>
@@ -79,22 +85,6 @@ public class SimpleTextSegmentInfoReader extends SegmentInfoReader {
       diagnostics.put(key, value);
     }
 
-    SimpleTextUtil.readLine(input, scratch);
-    assert StringHelper.startsWith(scratch, SI_NUM_ATTS);
-    int numAtts = Integer.parseInt(readString(SI_NUM_ATTS.length, scratch));
-    Map<String,String> attributes = new HashMap<String,String>();
-
-    for (int i = 0; i < numAtts; i++) {
-      SimpleTextUtil.readLine(input, scratch);
-      assert StringHelper.startsWith(scratch, SI_ATT_KEY);
-      String key = readString(SI_ATT_KEY.length, scratch);
-
-      SimpleTextUtil.readLine(input, scratch);
-      assert StringHelper.startsWith(scratch, SI_ATT_VALUE);
-      String value = readString(SI_ATT_VALUE.length, scratch);
-      attributes.put(key, value);
-    }
-
     SimpleTextUtil.readLine(input, scratch);
     assert StringHelper.startsWith(scratch, SI_NUM_FILES);
     int numFiles = Integer.parseInt(readString(SI_NUM_FILES.length, scratch));
@@ -108,7 +98,7 @@ public class SimpleTextSegmentInfoReader extends SegmentInfoReader {
     }
 
     SegmentInfo info = new SegmentInfo(directory, version, segmentName, docCount,
-                                       isCompoundFile, null, diagnostics, Collections.unmodifiableMap(attributes));
+                                       isCompoundFile, null, diagnostics);
     info.setFiles(files);
     success = true;
     return info;
@@ -45,9 +45,6 @@ public class SimpleTextSegmentInfoWriter extends SegmentInfoWriter {
   final static BytesRef SI_NUM_DIAG = new BytesRef(" diagnostics ");
   final static BytesRef SI_DIAG_KEY = new BytesRef(" key ");
   final static BytesRef SI_DIAG_VALUE = new BytesRef(" value ");
-  final static BytesRef SI_NUM_ATTS = new BytesRef(" attributes ");
-  final static BytesRef SI_ATT_KEY = new BytesRef(" key ");
-  final static BytesRef SI_ATT_VALUE = new BytesRef(" value ");
   final static BytesRef SI_NUM_FILES = new BytesRef(" files ");
   final static BytesRef SI_FILE = new BytesRef(" file ");
 
@@ -93,24 +90,6 @@ public class SimpleTextSegmentInfoWriter extends SegmentInfoWriter {
       }
     }
 
-    Map<String,String> atts = si.attributes();
-    int numAtts = atts == null ? 0 : atts.size();
-    SimpleTextUtil.write(output, SI_NUM_ATTS);
-    SimpleTextUtil.write(output, Integer.toString(numAtts), scratch);
-    SimpleTextUtil.writeNewline(output);
-
-    if (numAtts > 0) {
-      for (Map.Entry<String,String> entry : atts.entrySet()) {
-        SimpleTextUtil.write(output, SI_ATT_KEY);
-        SimpleTextUtil.write(output, entry.getKey(), scratch);
-        SimpleTextUtil.writeNewline(output);
-
-        SimpleTextUtil.write(output, SI_ATT_VALUE);
-        SimpleTextUtil.write(output, entry.getValue(), scratch);
-        SimpleTextUtil.writeNewline(output);
-      }
-    }
-
     Set<String> files = si.files();
     int numFiles = files == null ? 0 : files.size();
     SimpleTextUtil.write(output, SI_NUM_FILES);
@@ -82,7 +82,7 @@ public class Lucene40Codec extends Codec {
   }
 
   @Override
-  public final SegmentInfoFormat segmentInfoFormat() {
+  public SegmentInfoFormat segmentInfoFormat() {
     return infosFormat;
   }
 
@@ -67,10 +67,11 @@ import org.apache.lucene.store.DataOutput; // javadocs
  *
  * @see SegmentInfos
  * @lucene.experimental
+ * @deprecated Only for reading old 4.0-4.5 segments
  */
+@Deprecated
 public class Lucene40SegmentInfoFormat extends SegmentInfoFormat {
   private final SegmentInfoReader reader = new Lucene40SegmentInfoReader();
-  private final SegmentInfoWriter writer = new Lucene40SegmentInfoWriter();
 
   /** Sole constructor. */
   public Lucene40SegmentInfoFormat() {
@@ -83,7 +84,7 @@ public class Lucene40SegmentInfoFormat extends SegmentInfoFormat {
 
   @Override
   public SegmentInfoWriter getSegmentInfoWriter() {
-    return writer;
+    throw new UnsupportedOperationException("this codec can only be used for reading");
   }
 
   /** File extension used to store {@link SegmentInfo}. */
@@ -18,7 +18,6 @@ package org.apache.lucene.codecs.lucene40;
  */
 
 import java.io.IOException;
-import java.util.Collections;
 import java.util.Map;
 import java.util.Set;
 
@@ -37,7 +36,9 @@ import org.apache.lucene.util.IOUtils;
  *
  * @see Lucene40SegmentInfoFormat
  * @lucene.experimental
+ * @deprecated Only for reading old 4.0-4.5 segments
  */
+@Deprecated
 public class Lucene40SegmentInfoReader extends SegmentInfoReader {
 
   /** Sole constructor. */
@@ -60,15 +61,14 @@ public class Lucene40SegmentInfoReader extends SegmentInfoReader {
       }
       final boolean isCompoundFile = input.readByte() == SegmentInfo.YES;
       final Map<String,String> diagnostics = input.readStringStringMap();
-      final Map<String,String> attributes = input.readStringStringMap();
+      input.readStringStringMap(); // read deprecated attributes
       final Set<String> files = input.readStringSet();
 
       if (input.getFilePointer() != input.length()) {
         throw new CorruptIndexException("did not read all bytes from file \"" + fileName + "\": read " + input.getFilePointer() + " vs size " + input.length() + " (resource: " + input + ")");
       }
 
-      final SegmentInfo si = new SegmentInfo(dir, version, segment, docCount, isCompoundFile,
-                                             null, diagnostics, Collections.unmodifiableMap(attributes));
+      final SegmentInfo si = new SegmentInfo(dir, version, segment, docCount, isCompoundFile, null, diagnostics);
       si.setFiles(files);
 
       success = true;
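Worth noting for back-compat: the on-disk Lucene 4.0 .si format still contains an attributes map, so the reader above keeps consuming that slot and simply discards it, while the test-only Lucene40SegmentInfoWriter (further down in this diff) now writes an empty map into the same position. Below is a minimal, self-contained sketch of that framing using Lucene's DataInput/DataOutput; the buffer size and class name are illustrative, not part of this commit.

import java.io.IOException;
import java.util.Collections;
import java.util.Map;

import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.ByteArrayDataOutput;

public class DeprecatedAttributesSlot {
  public static void main(String[] args) throws IOException {
    byte[] buffer = new byte[64];

    // Writer side (test-only 4.0 impersonation): the slot stays, but holds an empty map.
    ByteArrayDataOutput out = new ByteArrayDataOutput(buffer);
    out.writeStringStringMap(Collections.<String,String>emptyMap());

    // Reader side: the slot must still be consumed so later fields stay aligned,
    // but the value is simply discarded.
    ByteArrayDataInput in = new ByteArrayDataInput(buffer);
    Map<String,String> ignored = in.readStringStringMap(); // read deprecated attributes
    System.out.println("discarded attributes: " + ignored);
  }
}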
@@ -101,7 +101,7 @@ public class Lucene41Codec extends Codec {
   }
 
   @Override
-  public final SegmentInfoFormat segmentInfoFormat() {
+  public SegmentInfoFormat segmentInfoFormat() {
     return infosFormat;
   }
 
@@ -100,7 +100,7 @@ public class Lucene42Codec extends Codec {
   }
 
   @Override
-  public final SegmentInfoFormat segmentInfoFormat() {
+  public SegmentInfoFormat segmentInfoFormat() {
     return infosFormat;
   }
 
@@ -99,7 +99,7 @@ public class Lucene45Codec extends Codec {
   }
 
   @Override
-  public final SegmentInfoFormat segmentInfoFormat() {
+  public SegmentInfoFormat segmentInfoFormat() {
     return infosFormat;
   }
 
@@ -28,7 +28,6 @@ import org.apache.lucene.codecs.SegmentInfoFormat;
 import org.apache.lucene.codecs.StoredFieldsFormat;
 import org.apache.lucene.codecs.TermVectorsFormat;
 import org.apache.lucene.codecs.lucene40.Lucene40LiveDocsFormat;
-import org.apache.lucene.codecs.lucene40.Lucene40SegmentInfoFormat;
 import org.apache.lucene.codecs.lucene41.Lucene41StoredFieldsFormat;
 import org.apache.lucene.codecs.lucene42.Lucene42NormsFormat;
 import org.apache.lucene.codecs.lucene42.Lucene42TermVectorsFormat;
@@ -52,7 +51,7 @@ public class Lucene46Codec extends Codec {
   private final StoredFieldsFormat fieldsFormat = new Lucene41StoredFieldsFormat();
   private final TermVectorsFormat vectorsFormat = new Lucene42TermVectorsFormat();
   private final FieldInfosFormat fieldInfosFormat = new Lucene46FieldInfosFormat();
-  private final SegmentInfoFormat infosFormat = new Lucene40SegmentInfoFormat();
+  private final SegmentInfoFormat segmentInfosFormat = new Lucene46SegmentInfoFormat();
   private final LiveDocsFormat liveDocsFormat = new Lucene40LiveDocsFormat();
 
   private final PostingsFormat postingsFormat = new PerFieldPostingsFormat() {
@@ -96,7 +95,7 @@ public class Lucene46Codec extends Codec {
 
   @Override
   public final SegmentInfoFormat segmentInfoFormat() {
-    return infosFormat;
+    return segmentInfosFormat;
   }
 
   @Override
@ -0,0 +1,93 @@
|
|||
package org.apache.lucene.codecs.lucene46;
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import org.apache.lucene.codecs.CodecUtil;
|
||||
import org.apache.lucene.codecs.SegmentInfoFormat;
|
||||
import org.apache.lucene.codecs.SegmentInfoReader;
|
||||
import org.apache.lucene.codecs.SegmentInfoWriter;
|
||||
import org.apache.lucene.index.IndexWriter; // javadocs
|
||||
import org.apache.lucene.index.SegmentInfo; // javadocs
|
||||
import org.apache.lucene.index.SegmentInfos; // javadocs
|
||||
import org.apache.lucene.store.DataOutput; // javadocs
|
||||
|
||||
/**
|
||||
* Lucene 4.6 Segment info format.
|
||||
* <p>
|
||||
* Files:
|
||||
* <ul>
|
||||
* <li><tt>.si</tt>: Header, SegVersion, SegSize, IsCompoundFile, Diagnostics, Files
|
||||
* </ul>
|
||||
* </p>
|
||||
* Data types:
|
||||
* <p>
|
||||
* <ul>
|
||||
* <li>Header --> {@link CodecUtil#writeHeader CodecHeader}</li>
|
||||
* <li>SegSize --> {@link DataOutput#writeInt Int32}</li>
|
||||
* <li>SegVersion --> {@link DataOutput#writeString String}</li>
|
||||
* <li>Files --> {@link DataOutput#writeStringSet Set<String>}</li>
|
||||
* <li>Diagnostics --> {@link DataOutput#writeStringStringMap Map<String,String>}</li>
|
||||
* <li>IsCompoundFile --> {@link DataOutput#writeByte Int8}</li>
|
||||
* </ul>
|
||||
* </p>
|
||||
* Field Descriptions:
|
||||
* <p>
|
||||
* <ul>
|
||||
* <li>SegVersion is the code version that created the segment.</li>
|
||||
* <li>SegSize is the number of documents contained in the segment index.</li>
|
||||
* <li>IsCompoundFile records whether the segment is written as a compound file or
|
||||
* not. If this is -1, the segment is not a compound file. If it is 1, the segment
|
||||
* is a compound file.</li>
|
||||
* <li>Checksum contains the CRC32 checksum of all bytes in the segments_N file up
|
||||
* until the checksum. This is used to verify integrity of the file on opening the
|
||||
* index.</li>
|
||||
* <li>The Diagnostics Map is privately written by {@link IndexWriter}, as a debugging aid,
|
||||
* for each segment it creates. It includes metadata like the current Lucene
|
||||
* version, OS, Java version, why the segment was created (merge, flush,
|
||||
* addIndexes), etc.</li>
|
||||
* <li>Files is a list of files referred to by this segment.</li>
|
||||
* </ul>
|
||||
* </p>
|
||||
*
|
||||
* @see SegmentInfos
|
||||
* @lucene.experimental
|
||||
*/
|
||||
public class Lucene46SegmentInfoFormat extends SegmentInfoFormat {
|
||||
private final SegmentInfoReader reader = new Lucene46SegmentInfoReader();
|
||||
private final SegmentInfoWriter writer = new Lucene46SegmentInfoWriter();
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene46SegmentInfoFormat() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public SegmentInfoReader getSegmentInfoReader() {
|
||||
return reader;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SegmentInfoWriter getSegmentInfoWriter() {
|
||||
return writer;
|
||||
}
|
||||
|
||||
/** File extension used to store {@link SegmentInfo}. */
|
||||
public final static String SI_EXTENSION = "si";
|
||||
static final String CODEC_NAME = "Lucene46SegmentInfo";
|
||||
static final int VERSION_START = 0;
|
||||
static final int VERSION_CURRENT = VERSION_START;
|
||||
}
|
|
@ -0,0 +1,83 @@
|
|||
package org.apache.lucene.codecs.lucene46;
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.apache.lucene.codecs.CodecUtil;
|
||||
import org.apache.lucene.codecs.SegmentInfoReader;
|
||||
import org.apache.lucene.index.CorruptIndexException;
|
||||
import org.apache.lucene.index.IndexFileNames;
|
||||
import org.apache.lucene.index.SegmentInfo;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.store.IOContext;
|
||||
import org.apache.lucene.store.IndexInput;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
|
||||
/**
|
||||
* Lucene 4.6 implementation of {@link SegmentInfoReader}.
|
||||
*
|
||||
* @see Lucene46SegmentInfoFormat
|
||||
* @lucene.experimental
|
||||
*/
|
||||
public class Lucene46SegmentInfoReader extends SegmentInfoReader {
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene46SegmentInfoReader() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public SegmentInfo read(Directory dir, String segment, IOContext context) throws IOException {
|
||||
final String fileName = IndexFileNames.segmentFileName(segment, "", Lucene46SegmentInfoFormat.SI_EXTENSION);
|
||||
final IndexInput input = dir.openInput(fileName, context);
|
||||
boolean success = false;
|
||||
try {
|
||||
CodecUtil.checkHeader(input, Lucene46SegmentInfoFormat.CODEC_NAME,
|
||||
Lucene46SegmentInfoFormat.VERSION_START,
|
||||
Lucene46SegmentInfoFormat.VERSION_CURRENT);
|
||||
final String version = input.readString();
|
||||
final int docCount = input.readInt();
|
||||
if (docCount < 0) {
|
||||
throw new CorruptIndexException("invalid docCount: " + docCount + " (resource=" + input + ")");
|
||||
}
|
||||
final boolean isCompoundFile = input.readByte() == SegmentInfo.YES;
|
||||
final Map<String,String> diagnostics = input.readStringStringMap();
|
||||
final Set<String> files = input.readStringSet();
|
||||
|
||||
if (input.getFilePointer() != input.length()) {
|
||||
throw new CorruptIndexException("did not read all bytes from file \"" + fileName + "\": read " + input.getFilePointer() + " vs size " + input.length() + " (resource: " + input + ")");
|
||||
}
|
||||
|
||||
final SegmentInfo si = new SegmentInfo(dir, version, segment, docCount, isCompoundFile, null, diagnostics);
|
||||
si.setFiles(files);
|
||||
|
||||
success = true;
|
||||
|
||||
return si;
|
||||
|
||||
} finally {
|
||||
if (!success) {
|
||||
IOUtils.closeWhileHandlingException(input);
|
||||
} else {
|
||||
input.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,73 @@
|
|||
package org.apache.lucene.codecs.lucene46;
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.lucene.codecs.CodecUtil;
|
||||
import org.apache.lucene.codecs.SegmentInfoWriter;
|
||||
import org.apache.lucene.index.FieldInfos;
|
||||
import org.apache.lucene.index.IndexFileNames;
|
||||
import org.apache.lucene.index.SegmentInfo;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.store.IOContext;
|
||||
import org.apache.lucene.store.IndexOutput;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
|
||||
/**
|
||||
* Lucene 4.0 implementation of {@link SegmentInfoWriter}.
|
||||
*
|
||||
* @see Lucene46SegmentInfoFormat
|
||||
* @lucene.experimental
|
||||
*/
|
||||
public class Lucene46SegmentInfoWriter extends SegmentInfoWriter {
|
||||
|
||||
/** Sole constructor. */
|
||||
public Lucene46SegmentInfoWriter() {
|
||||
}
|
||||
|
||||
/** Save a single segment's info. */
|
||||
@Override
|
||||
public void write(Directory dir, SegmentInfo si, FieldInfos fis, IOContext ioContext) throws IOException {
|
||||
final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene46SegmentInfoFormat.SI_EXTENSION);
|
||||
si.addFile(fileName);
|
||||
|
||||
final IndexOutput output = dir.createOutput(fileName, ioContext);
|
||||
|
||||
boolean success = false;
|
||||
try {
|
||||
CodecUtil.writeHeader(output, Lucene46SegmentInfoFormat.CODEC_NAME, Lucene46SegmentInfoFormat.VERSION_CURRENT);
|
||||
// Write the Lucene version that created this segment, since 3.1
|
||||
output.writeString(si.getVersion());
|
||||
output.writeInt(si.getDocCount());
|
||||
|
||||
output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO));
|
||||
output.writeStringStringMap(si.getDiagnostics());
|
||||
output.writeStringSet(si.files());
|
||||
|
||||
success = true;
|
||||
} finally {
|
||||
if (!success) {
|
||||
IOUtils.closeWhileHandlingException(output);
|
||||
si.dir.deleteFile(fileName);
|
||||
} else {
|
||||
output.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
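The three new Lucene46 segment-info classes above expose the format through SegmentInfoFormat's reader/writer accessors. Here is a hedged round-trip sketch of writing a .si file with the new format and reading it back; the segment name, doc count and throwaway RAMDirectory are illustrative values, and null is passed for the codec and FieldInfos because the code shown above does not consult them.

import java.util.Collections;
import java.util.HashSet;

import org.apache.lucene.codecs.SegmentInfoFormat;
import org.apache.lucene.codecs.lucene46.Lucene46SegmentInfoFormat;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Constants;

public class Lucene46SegmentInfoRoundTrip {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory();                     // throwaway directory for the sketch
    SegmentInfoFormat format = new Lucene46SegmentInfoFormat();

    // The codec reference is not used by this format, so null is fine here.
    SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, "_0", 42,
                                     false, null, Collections.<String,String>emptyMap());
    si.setFiles(new HashSet<String>());                     // the writer adds _0.si to this set

    // Writes _0.si: codec header, version string, doc count, compound flag, diagnostics, files.
    format.getSegmentInfoWriter().write(dir, si, null, IOContext.DEFAULT);

    // Reads it back by segment name; the reader re-checks the header and the byte count.
    SegmentInfo read = format.getSegmentInfoReader().read(dir, "_0", IOContext.DEFAULT);
    System.out.println("docCount=" + read.getDocCount() + " version=" + read.getVersion());
  }
}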
@@ -138,7 +138,7 @@ its numbering.</p>
 <p>Each segment index maintains the following:</p>
 <ul>
 <li>
-{@link org.apache.lucene.codecs.lucene40.Lucene40SegmentInfoFormat Segment info}.
+{@link org.apache.lucene.codecs.lucene46.Lucene46SegmentInfoFormat Segment info}.
    This contains metadata about a segment, such as the number of documents,
    what files it uses,
 </li>
@@ -522,11 +522,6 @@ public class CheckIndex {
         msg(infoStream, " diagnostics = " + diagnostics);
       }
 
-      Map<String,String> atts = info.info.attributes();
-      if (atts != null && !atts.isEmpty()) {
-        msg(infoStream, " attributes = " + atts);
-      }
-
       if (!info.hasDeletions()) {
         msg(infoStream, " no deletions");
         segInfoStat.hasDeletions = false;
@@ -210,8 +210,7 @@ class DocumentsWriterPerThread {
     pendingDeletes.clear();
     deleteSlice = deleteQueue.newSlice();
 
-    segmentInfo = new SegmentInfo(directoryOrig, Constants.LUCENE_MAIN_VERSION, segmentName, -1,
-                                  false, codec, null, null);
+    segmentInfo = new SegmentInfo(directoryOrig, Constants.LUCENE_MAIN_VERSION, segmentName, -1, false, codec, null);
     assert numDocsInRAM == 0;
     if (INFO_VERBOSE && infoStream.isEnabled("DWPT")) {
       infoStream.message("DWPT", Thread.currentThread().getName() + " init seg=" + segmentName + " delQueue=" + deleteQueue);
@@ -2513,7 +2513,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit{
     TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(directory);
 
     SegmentInfo info = new SegmentInfo(directory, Constants.LUCENE_MAIN_VERSION, mergedName, -1,
-                                       false, codec, null, null);
+                                       false, codec, null);
 
     SegmentMerger merger = new SegmentMerger(mergeReaders, info, infoStream, trackingDir,
                                              MergeState.CheckAbort.NONE, globalFieldNumberMap, context);
@@ -2608,20 +2608,11 @@ public class IndexWriter implements Closeable, TwoPhaseCommit{
       // so we don't pass a null value to the si writer
       FieldInfos fis = SegmentReader.readFieldInfos(info);
 
-      final Map<String,String> attributes;
-      // copy the attributes map, we might modify it below.
-      // also we need to ensure its read-write, since we will invoke the SIwriter (which might want to set something).
-      if (info.info.attributes() == null) {
-        attributes = new HashMap<String,String>();
-      } else {
-        attributes = new HashMap<String,String>(info.info.attributes());
-      }
-
       //System.out.println("copy seg=" + info.info.name + " version=" + info.info.getVersion());
       // Same SI as before but we change directory and name
       SegmentInfo newInfo = new SegmentInfo(directory, info.info.getVersion(), segName, info.info.getDocCount(),
-                                            info.info.getUseCompoundFile(),
-                                            info.info.getCodec(), info.info.getDiagnostics(), attributes);
+                                            info.info.getUseCompoundFile(), info.info.getCodec(),
+                                            info.info.getDiagnostics());
       SegmentInfoPerCommit newInfoPerCommit = new SegmentInfoPerCommit(newInfo, info.getDelCount(), info.getDelGen(), info.getFieldInfosGen());
 
       Set<String> segFiles = new HashSet<String>();
@@ -3652,7 +3643,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit{
     // ConcurrentMergePolicy we keep deterministic segment
     // names.
     final String mergeSegmentName = newSegmentName();
-    SegmentInfo si = new SegmentInfo(directory, Constants.LUCENE_MAIN_VERSION, mergeSegmentName, -1, false, codec, null, null);
+    SegmentInfo si = new SegmentInfo(directory, Constants.LUCENE_MAIN_VERSION, mergeSegmentName, -1, false, codec, null);
     Map<String,String> details = new HashMap<String,String>();
     details.put("mergeMaxNumSegments", "" + merge.maxNumSegments);
     details.put("mergeFactor", Integer.toString(merge.segments.size()));
@@ -20,7 +20,6 @@ package org.apache.lucene.index;
 
 import java.util.Collection;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
 import java.util.regex.Matcher;
@@ -60,8 +59,6 @@ public final class SegmentInfo {
 
   private Map<String,String> diagnostics;
 
-  private Map<String,String> attributes;
-
   // Tracks the Lucene version this segment was created with, since 3.1. Null
   // indicates an older than 3.0 index, and it's used to detect a too old index.
   // The format expected is "x.y" - "2.x" for pre-3.0 indexes (or null), and
@@ -85,7 +82,7 @@ public final class SegmentInfo {
    * the codecs package.</p>
    */
   public SegmentInfo(Directory dir, String version, String name, int docCount,
-                     boolean isCompoundFile, Codec codec, Map<String,String> diagnostics, Map<String,String> attributes) {
+                     boolean isCompoundFile, Codec codec, Map<String,String> diagnostics) {
     assert !(dir instanceof TrackingDirectoryWrapper);
     this.dir = dir;
     this.version = version;
@@ -94,7 +91,6 @@ public final class SegmentInfo {
     this.isCompoundFile = isCompoundFile;
     this.codec = codec;
     this.diagnostics = diagnostics;
-    this.attributes = attributes;
   }
 
   /**
@@ -259,40 +255,4 @@ public final class SegmentInfo {
     }
   }
 
-  /**
-   * Get a codec attribute value, or null if it does not exist
-   */
-  public String getAttribute(String key) {
-    if (attributes == null) {
-      return null;
-    } else {
-      return attributes.get(key);
-    }
-  }
-
-  /**
-   * Puts a codec attribute value.
-   * <p>
-   * This is a key-value mapping for the field that the codec can use
-   * to store additional metadata, and will be available to the codec
-   * when reading the segment via {@link #getAttribute(String)}
-   * <p>
-   * If a value already exists for the field, it will be replaced with
-   * the new value.
-   */
-  public String putAttribute(String key, String value) {
-    if (attributes == null) {
-      attributes = new HashMap<String,String>();
-    }
-    return attributes.put(key, value);
-  }
-
-  /**
-   * Returns the internal codec attributes map.
-   *
-   * @return internal codec attributes map. May be null if no mappings exist.
-   */
-  public Map<String,String> attributes() {
-    return attributes;
-  }
 }
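The accessors removed above were the only per-segment key/value store that SegmentInfo exposed to codecs; the diagnostics map, by contrast, stays. A small hedged sketch distinguishing the two (the diagnostics keys shown are illustrative; IndexWriter normally fills this map in itself):

import java.util.HashMap;
import java.util.Map;

import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Constants;

public class DiagnosticsStillAvailable {
  public static void main(String[] args) {
    Map<String,String> diagnostics = new HashMap<String,String>();
    diagnostics.put("source", "flush");          // illustrative entry
    diagnostics.put("lucene.version", Constants.LUCENE_MAIN_VERSION);

    Directory dir = new RAMDirectory();
    SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, "_0", 1,
                                     false, null, diagnostics);

    // Diagnostics survive this commit; the attribute accessors removed above do not.
    System.out.println("diagnostics = " + si.getDiagnostics());
  }
}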
@ -254,7 +254,7 @@ public class TestCodecs extends LuceneTestCase {
|
|||
final Directory dir = newDirectory();
|
||||
this.write(fieldInfos, dir, fields);
|
||||
Codec codec = Codec.getDefault();
|
||||
final SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, false, codec, null, null);
|
||||
final SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, false, codec, null);
|
||||
|
||||
final FieldsProducer reader = codec.postingsFormat().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, newIOContext(random())));
|
||||
|
||||
|
@ -311,8 +311,7 @@ public class TestCodecs extends LuceneTestCase {
|
|||
|
||||
this.write(fieldInfos, dir, fields);
|
||||
Codec codec = Codec.getDefault();
|
||||
final SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000,
|
||||
false, codec, null, null);
|
||||
final SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, false, codec, null);
|
||||
|
||||
if (VERBOSE) {
|
||||
System.out.println("TEST: now read postings");
|
||||
|
@ -850,7 +849,7 @@ public class TestCodecs extends LuceneTestCase {
|
|||
private void write(final FieldInfos fieldInfos, final Directory dir, final FieldData[] fields) throws Throwable {
|
||||
|
||||
final Codec codec = Codec.getDefault();
|
||||
final SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, false, codec, null, null);
|
||||
final SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, false, codec, null);
|
||||
final SegmentWriteState state = new SegmentWriteState(InfoStream.getDefault(), dir, si, fieldInfos, null, newIOContext(random()));
|
||||
|
||||
Arrays.sort(fields);
|
||||
|
|
|
@ -217,7 +217,7 @@ public class TestDoc extends LuceneTestCase {
|
|||
|
||||
final Codec codec = Codec.getDefault();
|
||||
TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(si1.info.dir);
|
||||
final SegmentInfo si = new SegmentInfo(si1.info.dir, Constants.LUCENE_MAIN_VERSION, merged, -1, false, codec, null, null);
|
||||
final SegmentInfo si = new SegmentInfo(si1.info.dir, Constants.LUCENE_MAIN_VERSION, merged, -1, false, codec, null);
|
||||
|
||||
SegmentMerger merger = new SegmentMerger(Arrays.<AtomicReader>asList(r1, r2),
|
||||
si, InfoStream.getDefault(), trackingDir,
|
||||
|
@ -228,7 +228,7 @@ public class TestDoc extends LuceneTestCase {
|
|||
r2.close();
|
||||
final SegmentInfo info = new SegmentInfo(si1.info.dir, Constants.LUCENE_MAIN_VERSION, merged,
|
||||
si1.info.getDocCount() + si2.info.getDocCount(),
|
||||
false, codec, null, null);
|
||||
false, codec, null);
|
||||
info.setFiles(new HashSet<String>(trackingDir.getCreatedFiles()));
|
||||
|
||||
if (useCompoundFile) {
|
||||
|
|
|
@ -78,7 +78,7 @@ public class TestSegmentMerger extends LuceneTestCase {
|
|||
|
||||
public void testMerge() throws IOException {
|
||||
final Codec codec = Codec.getDefault();
|
||||
final SegmentInfo si = new SegmentInfo(mergedDir, Constants.LUCENE_MAIN_VERSION, mergedSegment, -1, false, codec, null, null);
|
||||
final SegmentInfo si = new SegmentInfo(mergedDir, Constants.LUCENE_MAIN_VERSION, mergedSegment, -1, false, codec, null);
|
||||
|
||||
SegmentMerger merger = new SegmentMerger(Arrays.<AtomicReader>asList(reader1, reader2),
|
||||
si, InfoStream.getDefault(), mergedDir,
|
||||
|
@ -89,7 +89,7 @@ public class TestSegmentMerger extends LuceneTestCase {
|
|||
//Should be able to open a new SegmentReader against the new directory
|
||||
SegmentReader mergedReader = new SegmentReader(new SegmentInfoPerCommit(
|
||||
new SegmentInfo(mergedDir, Constants.LUCENE_MAIN_VERSION, mergedSegment, docsMerged,
|
||||
false, codec, null, null),
|
||||
false, codec, null),
|
||||
0, -1L, -1L),
|
||||
newIOContext(random()));
|
||||
assertTrue(mergedReader != null);
|
||||
|
|
|
@ -139,8 +139,7 @@ public class IndexSplitter {
|
|||
SegmentInfo info = infoPerCommit.info;
|
||||
// Same info just changing the dir:
|
||||
SegmentInfo newInfo = new SegmentInfo(destFSDir, info.getVersion(), info.name, info.getDocCount(),
|
||||
info.getUseCompoundFile(),
|
||||
info.getCodec(), info.getDiagnostics(), info.attributes());
|
||||
info.getUseCompoundFile(), info.getCodec(), info.getDiagnostics());
|
||||
destInfos.add(new SegmentInfoPerCommit(newInfo, infoPerCommit.getDelCount(), infoPerCommit.getDelGen(), infoPerCommit.getFieldInfosGen()));
|
||||
// now copy files over
|
||||
Collection<String> files = infoPerCommit.files();
|
||||
|
|
|
@ -6,6 +6,7 @@ import org.apache.lucene.codecs.DocValuesFormat;
|
|||
import org.apache.lucene.codecs.FieldInfosFormat;
|
||||
import org.apache.lucene.codecs.FieldInfosWriter;
|
||||
import org.apache.lucene.codecs.NormsFormat;
|
||||
import org.apache.lucene.codecs.SegmentInfoFormat;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
|
||||
/*
|
||||
|
@ -40,6 +41,17 @@ public final class Lucene40RWCodec extends Lucene40Codec {
|
|||
}
|
||||
};
|
||||
|
||||
private final SegmentInfoFormat infosFormat = new Lucene40SegmentInfoFormat() {
|
||||
@Override
|
||||
public org.apache.lucene.codecs.SegmentInfoWriter getSegmentInfoWriter() {
|
||||
if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
|
||||
return super.getSegmentInfoWriter();
|
||||
} else {
|
||||
return new Lucene40SegmentInfoWriter();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
private final DocValuesFormat docValues = new Lucene40RWDocValuesFormat();
|
||||
private final NormsFormat norms = new Lucene40RWNormsFormat();
|
||||
|
||||
|
@ -57,4 +69,10 @@ public final class Lucene40RWCodec extends Lucene40Codec {
|
|||
public NormsFormat normsFormat() {
|
||||
return norms;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SegmentInfoFormat segmentInfoFormat() {
|
||||
return infosFormat;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -18,6 +18,7 @@ package org.apache.lucene.codecs.lucene40;
|
|||
*/
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
|
||||
import org.apache.lucene.codecs.CodecUtil;
|
||||
import org.apache.lucene.codecs.SegmentInfoWriter;
|
||||
|
@ -35,6 +36,7 @@ import org.apache.lucene.util.IOUtils;
|
|||
* @see Lucene40SegmentInfoFormat
|
||||
* @lucene.experimental
|
||||
*/
|
||||
@Deprecated
|
||||
public class Lucene40SegmentInfoWriter extends SegmentInfoWriter {
|
||||
|
||||
/** Sole constructor. */
|
||||
|
@ -58,7 +60,7 @@ public class Lucene40SegmentInfoWriter extends SegmentInfoWriter {
|
|||
|
||||
output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO));
|
||||
output.writeStringStringMap(si.getDiagnostics());
|
||||
output.writeStringStringMap(si.attributes());
|
||||
output.writeStringStringMap(Collections.<String,String>emptyMap());
|
||||
output.writeStringSet(si.files());
|
||||
|
||||
success = true;
|
|
@ -6,11 +6,14 @@ import org.apache.lucene.codecs.DocValuesFormat;
|
|||
import org.apache.lucene.codecs.FieldInfosFormat;
|
||||
import org.apache.lucene.codecs.FieldInfosWriter;
|
||||
import org.apache.lucene.codecs.NormsFormat;
|
||||
import org.apache.lucene.codecs.SegmentInfoFormat;
|
||||
import org.apache.lucene.codecs.StoredFieldsFormat;
|
||||
import org.apache.lucene.codecs.lucene40.Lucene40FieldInfosFormat;
|
||||
import org.apache.lucene.codecs.lucene40.Lucene40FieldInfosWriter;
|
||||
import org.apache.lucene.codecs.lucene40.Lucene40RWDocValuesFormat;
|
||||
import org.apache.lucene.codecs.lucene40.Lucene40RWNormsFormat;
|
||||
import org.apache.lucene.codecs.lucene40.Lucene40SegmentInfoFormat;
|
||||
import org.apache.lucene.codecs.lucene40.Lucene40SegmentInfoWriter;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
|
||||
/*
|
||||
|
@ -50,6 +53,17 @@ public class Lucene41RWCodec extends Lucene41Codec {
|
|||
private final DocValuesFormat docValues = new Lucene40RWDocValuesFormat();
|
||||
private final NormsFormat norms = new Lucene40RWNormsFormat();
|
||||
|
||||
private final SegmentInfoFormat segmentInfosFormat = new Lucene40SegmentInfoFormat() {
|
||||
@Override
|
||||
public org.apache.lucene.codecs.SegmentInfoWriter getSegmentInfoWriter() {
|
||||
if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
|
||||
return super.getSegmentInfoWriter();
|
||||
} else {
|
||||
return new Lucene40SegmentInfoWriter();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@Override
|
||||
public FieldInfosFormat fieldInfosFormat() {
|
||||
return fieldInfos;
|
||||
|
@ -69,4 +83,10 @@ public class Lucene41RWCodec extends Lucene41Codec {
|
|||
public NormsFormat normsFormat() {
|
||||
return norms;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SegmentInfoFormat segmentInfoFormat() {
|
||||
return segmentInfosFormat;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -23,6 +23,9 @@ import org.apache.lucene.codecs.DocValuesFormat;
|
|||
import org.apache.lucene.codecs.FieldInfosFormat;
|
||||
import org.apache.lucene.codecs.FieldInfosWriter;
|
||||
import org.apache.lucene.codecs.NormsFormat;
|
||||
import org.apache.lucene.codecs.SegmentInfoFormat;
|
||||
import org.apache.lucene.codecs.lucene40.Lucene40SegmentInfoFormat;
|
||||
import org.apache.lucene.codecs.lucene40.Lucene40SegmentInfoWriter;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
|
||||
/**
|
||||
|
@ -30,9 +33,32 @@ import org.apache.lucene.util.LuceneTestCase;
|
|||
*/
|
||||
@SuppressWarnings("deprecation")
|
||||
public class Lucene42RWCodec extends Lucene42Codec {
|
||||
|
||||
private static final DocValuesFormat dv = new Lucene42RWDocValuesFormat();
|
||||
private static final NormsFormat norms = new Lucene42NormsFormat();
|
||||
|
||||
private final FieldInfosFormat fieldInfosFormat = new Lucene42FieldInfosFormat() {
|
||||
@Override
|
||||
public FieldInfosWriter getFieldInfosWriter() throws IOException {
|
||||
if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
|
||||
return super.getFieldInfosWriter();
|
||||
} else {
|
||||
return new Lucene42FieldInfosWriter();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
private final SegmentInfoFormat segmentInfosFormat = new Lucene40SegmentInfoFormat() {
|
||||
@Override
|
||||
public org.apache.lucene.codecs.SegmentInfoWriter getSegmentInfoWriter() {
|
||||
if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
|
||||
return super.getSegmentInfoWriter();
|
||||
} else {
|
||||
return new Lucene40SegmentInfoWriter();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@Override
|
||||
public DocValuesFormat getDocValuesFormatForField(String field) {
|
||||
return dv;
|
||||
|
@ -45,16 +71,12 @@ public class Lucene42RWCodec extends Lucene42Codec {
|
|||
|
||||
@Override
|
||||
public FieldInfosFormat fieldInfosFormat() {
|
||||
return new Lucene42FieldInfosFormat() {
|
||||
@Override
|
||||
public FieldInfosWriter getFieldInfosWriter() throws IOException {
|
||||
if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
|
||||
return super.getFieldInfosWriter();
|
||||
} else {
|
||||
return new Lucene42FieldInfosWriter();
|
||||
}
|
||||
}
|
||||
};
|
||||
return fieldInfosFormat;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SegmentInfoFormat segmentInfoFormat() {
|
||||
return segmentInfosFormat;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -21,6 +21,9 @@ import java.io.IOException;
|
|||
|
||||
import org.apache.lucene.codecs.FieldInfosFormat;
|
||||
import org.apache.lucene.codecs.FieldInfosWriter;
|
||||
import org.apache.lucene.codecs.SegmentInfoFormat;
|
||||
import org.apache.lucene.codecs.lucene40.Lucene40SegmentInfoFormat;
|
||||
import org.apache.lucene.codecs.lucene40.Lucene40SegmentInfoWriter;
|
||||
import org.apache.lucene.codecs.lucene42.Lucene42FieldInfosFormat;
|
||||
import org.apache.lucene.codecs.lucene42.Lucene42FieldInfosWriter;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
|
@ -31,18 +34,36 @@ import org.apache.lucene.util.LuceneTestCase;
|
|||
@SuppressWarnings("deprecation")
|
||||
public class Lucene45RWCodec extends Lucene45Codec {
|
||||
|
||||
private final FieldInfosFormat fieldInfosFormat = new Lucene42FieldInfosFormat() {
|
||||
@Override
|
||||
public FieldInfosWriter getFieldInfosWriter() throws IOException {
|
||||
if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
|
||||
return super.getFieldInfosWriter();
|
||||
} else {
|
||||
return new Lucene42FieldInfosWriter();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
private final SegmentInfoFormat segmentInfosFormat = new Lucene40SegmentInfoFormat() {
|
||||
@Override
|
||||
public org.apache.lucene.codecs.SegmentInfoWriter getSegmentInfoWriter() {
|
||||
if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
|
||||
return super.getSegmentInfoWriter();
|
||||
} else {
|
||||
return new Lucene40SegmentInfoWriter();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@Override
|
||||
public FieldInfosFormat fieldInfosFormat() {
|
||||
return new Lucene42FieldInfosFormat() {
|
||||
@Override
|
||||
public FieldInfosWriter getFieldInfosWriter() throws IOException {
|
||||
if (!LuceneTestCase.OLD_FORMAT_IMPERSONATION_IS_ACTIVE) {
|
||||
return super.getFieldInfosWriter();
|
||||
} else {
|
||||
return new Lucene42FieldInfosWriter();
|
||||
}
|
||||
}
|
||||
};
|
||||
return fieldInfosFormat;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public SegmentInfoFormat segmentInfoFormat() {
|
||||
return segmentInfosFormat;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -651,7 +651,7 @@ public abstract class BasePostingsFormatTestCase extends LuceneTestCase {
|
|||
// randomly index at lower IndexOption
|
||||
private FieldsProducer buildIndex(Directory dir, IndexOptions maxAllowed, boolean allowPayloads, boolean alwaysTestMax) throws IOException {
|
||||
Codec codec = getCodec();
|
||||
SegmentInfo segmentInfo = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, "_0", maxDoc, false, codec, null, null);
|
||||
SegmentInfo segmentInfo = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, "_0", maxDoc, false, codec, null);
|
||||
|
||||
int maxIndexOption = Arrays.asList(IndexOptions.values()).indexOf(maxAllowed);
|
||||
if (VERBOSE) {
|
||||
|
|