LUCENE-4055: add some extra safety to FI/SI attributes

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene4055@1342215 13f79535-47bb-0310-9956-ffa450edef68
Robert Muir 2012-05-24 12:08:06 +00:00
parent eb615fad86
commit 7703ce22bd
7 changed files with 21 additions and 8 deletions
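The "extra safety" in the summary is defensive immutability: every attributes map handed to a FieldInfo or SegmentInfo constructor is now either wrapped in Collections.unmodifiableMap or, where there are no attributes at all, replaced by an immutable empty map, so later mutation attempts fail fast instead of silently altering shared state. A minimal standalone sketch of the pattern (the class name and map contents below are illustrative, not part of the patch):

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    public class UnmodifiableAttributesSketch {
      public static void main(String[] args) {
        Map<String,String> attributes = new HashMap<String,String>();
        attributes.put("format", "demo");

        // Publishing only the unmodifiable view means any later write attempt
        // throws instead of quietly changing the published attributes.
        Map<String,String> published = Collections.unmodifiableMap(attributes);
        try {
          published.put("format", "changed");
        } catch (UnsupportedOperationException expected) {
          System.out.println("attributes are read-only once published");
        }
      }
    }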

Lucene3xFieldInfosReader.java

@@ -16,6 +16,7 @@ package org.apache.lucene.codecs.lucene3x;
  * limitations under the License.
  */
 import java.io.IOException;
+import java.util.Collections;
 import java.util.Set;
 import org.apache.lucene.codecs.FieldInfosReader;
@@ -100,7 +101,7 @@ class Lucene3xFieldInfosReader extends FieldInfosReader {
         storePayloads = false;
       }
       infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector,
-          omitNorms, storePayloads, indexOptions, null, isIndexed && !omitNorms? Type.FIXED_INTS_8 : null, null);
+          omitNorms, storePayloads, indexOptions, null, isIndexed && !omitNorms? Type.FIXED_INTS_8 : null, Collections.<String,String>emptyMap());
     }
     if (input.getFilePointer() != input.length()) {
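The 3.x file format stores no per-field attributes, so rather than passing null the reader now passes Collections.<String,String>emptyMap(): an immutable shared singleton that callers can query or iterate without a null check. A small sketch of why the empty map is the safer sentinel, using a hypothetical consumer method (describe() is not from the patch):

    import java.util.Collections;
    import java.util.Map;

    class EmptyAttributesSketch {
      // Hypothetical stand-in for code that inspects FieldInfo attributes.
      static String describe(Map<String,String> attributes) {
        // No null check needed: an empty map is always safe to query.
        return attributes.isEmpty() ? "no attributes" : attributes.toString();
      }

      public static void main(String[] args) {
        // The explicit <String,String> witness mirrors the patch; older javac
        // versions cannot infer the type parameters when the call result is
        // passed straight to another method.
        System.out.println(describe(Collections.<String,String>emptyMap()));
      }
    }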

Lucene3xSegmentInfoReader.java

@@ -18,6 +18,7 @@ package org.apache.lucene.codecs.lucene3x;
  */
 import java.io.IOException;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
@@ -266,7 +267,7 @@ public class Lucene3xSegmentInfoReader extends SegmentInfoReader {
     // nocommit: convert normgen into attributes?
     SegmentInfo info = new SegmentInfo(dir, version, segmentName, docCount, normGen, isCompoundFile,
-        null, diagnostics, attributes);
+        null, diagnostics, Collections.unmodifiableMap(attributes));
     info.setFiles(files);
     SegmentInfoPerCommit infoPerCommit = new SegmentInfoPerCommit(info, delCount, delGen);

Lucene40FieldInfosReader.java

@@ -1,6 +1,7 @@
 package org.apache.lucene.codecs.lucene40;
 import java.io.IOException;
+import java.util.Collections;
 import java.util.Map;
 import java.util.Set;
@@ -94,7 +95,7 @@ public class Lucene40FieldInfosReader extends FieldInfosReader {
       final DocValues.Type normsType = getDocValuesType((byte) ((val >>> 4) & 0x0F));
       final Map<String,String> attributes = input.readStringStringMap();
       infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector,
-          omitNorms, storePayloads, indexOptions, docValuesType, normsType, attributes);
+          omitNorms, storePayloads, indexOptions, docValuesType, normsType, Collections.unmodifiableMap(attributes));
     }
     if (input.getFilePointer() != input.length()) {

Lucene40SegmentInfoReader.java

@@ -18,6 +18,7 @@ package org.apache.lucene.codecs.lucene40;
  */
 import java.io.IOException;
+import java.util.Collections;
 import java.util.Map;
 import java.util.Set;
@@ -52,7 +53,7 @@ public class Lucene40SegmentInfoReader extends SegmentInfoReader {
     final Set<String> files = input.readStringSet();
     final SegmentInfo si = new SegmentInfo(dir, version, segment, docCount, normGen, isCompoundFile,
-        null, diagnostics, attributes);
+        null, diagnostics, Collections.unmodifiableMap(attributes));
     si.setFiles(files);
     success = true;

SimpleTextFieldInfosReader.java

@@ -18,6 +18,7 @@ package org.apache.lucene.codecs.simpletext;
  */
 import java.io.IOException;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
@@ -114,7 +115,7 @@ public class SimpleTextFieldInfosReader extends FieldInfosReader {
       }
       infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector,
-          omitNorms, storePayloads, indexOptions, docValuesType, normsType, atts);
+          omitNorms, storePayloads, indexOptions, docValuesType, normsType, Collections.unmodifiableMap(atts));
     }
     if (input.getFilePointer() != input.length()) {

SimpleTextSegmentInfoReader.java

@@ -18,6 +18,7 @@ package org.apache.lucene.codecs.simpletext;
  */
 import java.io.IOException;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
@@ -107,7 +108,7 @@ public class SimpleTextSegmentInfoReader extends SegmentInfoReader {
     }
     SegmentInfo info = new SegmentInfo(directory, version, segmentName, docCount,
-        null, isCompoundFile, null, diagnostics, attributes);
+        null, isCompoundFile, null, diagnostics, Collections.unmodifiableMap(attributes));
     info.setFiles(files);
     success = true;
     return info;
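The Lucene40 and SimpleText readers all apply the same recipe: the attribute map deserialized from the index (for example the result of readStringStringMap() in the Lucene40 field-infos reader) is wrapped before it is published through FieldInfo or SegmentInfo. Note that Collections.unmodifiableMap returns a view, not a copy, so the safety comes from the reader dropping its own mutable reference right after construction; a short sketch of that distinction (the key names are made up):

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    class UnmodifiableViewSketch {
      public static void main(String[] args) {
        Map<String,String> raw = new HashMap<String,String>();
        raw.put("codec.flavor", "demo");

        Map<String,String> view = Collections.unmodifiableMap(raw);

        // The wrapper is a live view: writes through the original reference
        // still show up...
        raw.put("another.key", "value");
        System.out.println(view.size()); // prints 2

        // ...which is why the readers let their own mutable reference go out
        // of scope immediately, leaving the read-only view as the only handle.
      }
    }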

IndexWriter.java

@@ -2375,6 +2375,13 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
                                                          "",
                                                          Lucene3xSegmentInfoFormat.SI_EXTENSION);
+      // copy the attributes map, we modify it for the preflex case
+      final Map<String,String> attributes;
+      if (info.info.attributes() == null) {
+        attributes = new HashMap<String,String>();
+      } else {
+        attributes = new HashMap<String,String>(info.info.attributes());
+      }
       if (hasSharedDocStore) {
         // only violate the codec this way if it's preflex &
         // shares doc stores
@@ -2389,7 +2396,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
           codecDocStoreFiles.add(IndexFileNames.segmentFileName(dsName, "", "tvd"));
         }
         // change docStoreSegment to newDsName
-        info.info.putAttribute(Lucene3xSegmentInfoFormat.DS_NAME_KEY, newDsName);
+        attributes.put(Lucene3xSegmentInfoFormat.DS_NAME_KEY, newDsName);
       }
       //System.out.println("copy seg=" + info.info.name + " version=" + info.info.getVersion());
@@ -2397,7 +2404,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
       // Same SI as before but we change directory, name and docStoreSegment:
      SegmentInfo newInfo = new SegmentInfo(directory, info.info.getVersion(), segName, info.info.getDocCount(),
                                            info.info.getNormGen(), info.info.getUseCompoundFile(),
-                                           info.info.getCodec(), info.info.getDiagnostics(), info.info.attributes());
+                                           info.info.getCodec(), info.info.getDiagnostics(), Collections.unmodifiableMap(attributes));
      SegmentInfoPerCommit newInfoPerCommit = new SegmentInfoPerCommit(newInfo, info.getDelCount(), info.getDelGen());
      Set<String> segFiles = new HashSet<String>();
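On the writer side the same idea appears as copy-then-freeze: instead of mutating the source segment's live map via putAttribute(), the segment-copying path makes a private HashMap copy (guarding against a null attributes map), rewrites the doc-store name only in that copy, and hands an unmodifiable view of it to the new SegmentInfo, so the segment being copied from is never touched. A condensed sketch of that pattern with a hypothetical helper name (withOverride is not from the patch):

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    class CopyThenFreezeSketch {
      // Hypothetical helper: build a modified, read-only attribute map without
      // touching the source map.
      static Map<String,String> withOverride(Map<String,String> source,
                                             String key, String value) {
        Map<String,String> copy = (source == null)
            ? new HashMap<String,String>()
            : new HashMap<String,String>(source);
        copy.put(key, value);                      // edit only the private copy
        return Collections.unmodifiableMap(copy);  // publish a frozen view
      }

      public static void main(String[] args) {
        Map<String,String> original = Collections.singletonMap("dsName", "_0");
        Map<String,String> updated = withOverride(original, "dsName", "_5");
        System.out.println(original); // {dsName=_0} -- source untouched
        System.out.println(updated);  // {dsName=_5}
      }
    }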