mirror of https://github.com/apache/lucene.git
LUCENE-4055: move 3.x codec specific stuff out of IW into Lucene3xCodec
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene4055@1342371 13f79535-47bb-0310-9956-ffa450edef68
parent 6278ae5dd2
commit 85f30fcc97
@@ -18,6 +18,8 @@ package org.apache.lucene.codecs.lucene3x;
  */
 
 import java.io.IOException;
+import java.util.HashSet;
+import java.util.Set;
 
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.DocValuesFormat;
@@ -31,7 +33,9 @@ import org.apache.lucene.codecs.SegmentInfoFormat;
 import org.apache.lucene.codecs.StoredFieldsFormat;
 import org.apache.lucene.codecs.TermVectorsFormat;
 import org.apache.lucene.codecs.lucene40.Lucene40LiveDocsFormat;
+import org.apache.lucene.index.IndexFileNames;
 import org.apache.lucene.index.PerDocWriteState;
+import org.apache.lucene.index.SegmentInfo;
 import org.apache.lucene.index.SegmentInfoPerCommit;
 import org.apache.lucene.index.SegmentReadState;
 import org.apache.lucene.store.Directory;
@@ -123,4 +127,25 @@ public class Lucene3xCodec extends Codec {
   public LiveDocsFormat liveDocsFormat() {
     return liveDocsFormat;
   }
+
+  /** Returns file names for shared doc stores, if any, else
+   * null. */
+  public static Set<String> getDocStoreFiles(SegmentInfo info) {
+    if (Lucene3xSegmentInfoFormat.getDocStoreOffset(info) != -1) {
+      final String dsName = Lucene3xSegmentInfoFormat.getDocStoreSegment(info);
+      Set<String> files = new HashSet<String>();
+      if (Lucene3xSegmentInfoFormat.getDocStoreIsCompoundFile(info)) {
+        files.add(IndexFileNames.segmentFileName(dsName, "", COMPOUND_FILE_STORE_EXTENSION));
+      } else {
+        files.add(IndexFileNames.segmentFileName(dsName, "", Lucene3xStoredFieldsReader.FIELDS_INDEX_EXTENSION));
+        files.add(IndexFileNames.segmentFileName(dsName, "", Lucene3xStoredFieldsReader.FIELDS_EXTENSION));
+        files.add(IndexFileNames.segmentFileName(dsName, "", Lucene3xTermVectorsReader.VECTORS_INDEX_EXTENSION));
+        files.add(IndexFileNames.segmentFileName(dsName, "", Lucene3xTermVectorsReader.VECTORS_FIELDS_EXTENSION));
+        files.add(IndexFileNames.segmentFileName(dsName, "", Lucene3xTermVectorsReader.VECTORS_DOCUMENTS_EXTENSION));
+      }
+      return files;
+    } else {
+      return null;
+    }
+  }
 }
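Aside (not part of the commit): the new helper gives callers a single way to ask whether a 3.x segment shares doc stores and, if so, which file names belong to that shared store. A minimal sketch of such a caller, with an invented class name, assuming a SegmentInfo read from a pre-4.0 index:

import java.util.Set;

import org.apache.lucene.codecs.lucene3x.Lucene3xCodec;
import org.apache.lucene.index.SegmentInfo;

// Invented example class, not part of Lucene.
class DocStoreFilesExample {
  /** Returns true if fileName is one of info's shared doc store files. */
  static boolean isSharedDocStoreFile(SegmentInfo info, String fileName) {
    // getDocStoreFiles returns null when the segment has no shared doc store
    // (i.e. its doc store offset is -1).
    Set<String> docStoreFiles = Lucene3xCodec.getDocStoreFiles(info);
    return docStoreFiles != null && docStoreFiles.contains(fileName);
  }
}

IndexWriter's segment-copy code uses exactly this null-or-contains check in the hunks below.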
@@ -33,6 +33,7 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.codecs.Codec;
+import org.apache.lucene.codecs.lucene3x.Lucene3xCodec;
 import org.apache.lucene.codecs.lucene3x.Lucene3xSegmentInfoFormat;
 import org.apache.lucene.index.DocumentsWriterPerThread.FlushedSegment;
 import org.apache.lucene.index.FieldInfos.FieldNumbers;
@@ -2369,33 +2370,23 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
       newDsName = segName;
     }
 
-    Set<String> codecDocStoreFiles = new HashSet<String>();
-    final boolean hasSharedDocStore = Lucene3xSegmentInfoFormat.getDocStoreOffset(info.info) != -1;
+    Set<String> docStoreFiles3xOnly = Lucene3xCodec.getDocStoreFiles(info.info);
 
-    // copy the attributes map, we modify it for the preflex case
     final Map<String,String> attributes;
-    if (info.info.attributes() == null) {
-      attributes = new HashMap<String,String>();
-    } else {
-      attributes = new HashMap<String,String>(info.info.attributes());
-    }
-    if (hasSharedDocStore) {
+    if (docStoreFiles3xOnly != null) {
       // only violate the codec this way if it's preflex &
       // shares doc stores
-      // nocommit what to do....
-      // cant we determine a file is a 3.x shared doc store file if hasSharedDocStore=true
-      // and the segment prefix != info.info.name instead of this stuff?
-      if (Lucene3xSegmentInfoFormat.getDocStoreIsCompoundFile(info.info)) {
-        codecDocStoreFiles.add(IndexFileNames.segmentFileName(dsName, "", "cfx"));
+      // change docStoreSegment to newDsName
+      // copy the attributes map, we modify it below:
+      if (info.info.attributes() == null) {
+        attributes = new HashMap<String,String>();
       } else {
-        codecDocStoreFiles.add(IndexFileNames.segmentFileName(dsName, "", "fdt"));
-        codecDocStoreFiles.add(IndexFileNames.segmentFileName(dsName, "", "fdx"));
-        codecDocStoreFiles.add(IndexFileNames.segmentFileName(dsName, "", "tvx"));
-        codecDocStoreFiles.add(IndexFileNames.segmentFileName(dsName, "", "tvf"));
-        codecDocStoreFiles.add(IndexFileNames.segmentFileName(dsName, "", "tvd"));
+        attributes = new HashMap<String,String>(info.info.attributes());
       }
       // change docStoreSegment to newDsName
       attributes.put(Lucene3xSegmentInfoFormat.DS_NAME_KEY, newDsName);
+    } else {
+      attributes = info.info.attributes();
     }
 
     //System.out.println("copy seg=" + info.info.name + " version=" + info.info.getVersion());
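Aside (not part of the commit): with this change the attributes map is copied and modified only when the segment actually shares doc stores; otherwise the original map is reused untouched. A rough sketch of that rewrite as a standalone helper, with invented class and method names:

import java.util.HashMap;
import java.util.Map;

import org.apache.lucene.codecs.lucene3x.Lucene3xSegmentInfoFormat;

// Invented example class, not part of Lucene.
class AttributesRewriteExample {
  /** Copies the (possibly null) attributes map and points the 3.x doc store name at newDsName. */
  static Map<String,String> rewriteDocStoreName(Map<String,String> original, String newDsName) {
    Map<String,String> attributes =
        original == null ? new HashMap<String,String>() : new HashMap<String,String>(original);
    // DS_NAME_KEY records which shared doc store segment this segment reads from.
    attributes.put(Lucene3xSegmentInfoFormat.DS_NAME_KEY, newDsName);
    return attributes;
  }
}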
@@ -2412,7 +2403,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
     // before writing SegmentInfo:
     for (String file: info.files()) {
       final String newFileName;
-      if (codecDocStoreFiles.contains(file)) {
+      if (docStoreFiles3xOnly != null && docStoreFiles3xOnly.contains(file)) {
         newFileName = newDsName + IndexFileNames.stripSegmentName(file);
       } else {
         newFileName = segName + IndexFileNames.stripSegmentName(file);
@@ -2438,7 +2429,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
     for (String file: info.files()) {
 
       final String newFileName;
-      if (codecDocStoreFiles.contains(file)) {
+      if (docStoreFiles3xOnly != null && docStoreFiles3xOnly.contains(file)) {
         newFileName = newDsName + IndexFileNames.stripSegmentName(file);
         if (dsFilesCopied.contains(newFileName)) {
           continue;
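Aside (not part of the commit): in both copy loops the new file name is built by stripping the old segment prefix and re-prefixing with either the renamed doc store segment (for shared doc store files) or the copied segment's own name. A tiny sketch with made-up segment names, assuming IndexFileNames.stripSegmentName behaves as in 4.x (drops the leading segment name, keeping the extension):

import org.apache.lucene.index.IndexFileNames;

// Invented example class and names, not part of Lucene.
class CopyRenameExample {
  public static void main(String[] args) {
    // A shared doc store file from segment "_0", copied under the new doc store name "_7":
    String copiedDsFile = "_7" + IndexFileNames.stripSegmentName("_0.fdt");   // "_7.fdt"
    // Any other file, copied under the new segment name "_9":
    String copiedSegFile = "_9" + IndexFileNames.stripSegmentName("_3.fdx");  // "_9.fdx"
    System.out.println(copiedDsFile + " " + copiedSegFile);
  }
}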