LUCENE-2282: allow public but internal access to oal.index.IndexFileNames API

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@916755 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2010-02-26 16:52:39 +00:00
parent 04558dada3
commit 21044e1c8f
18 changed files with 195 additions and 149 deletions

View File

@ -68,6 +68,11 @@ API Changes
operations before flush starts. Also exposed doAfterFlush as protected instead
of package-private. (Shai Erera via Mike McCandless)
* LUCENE-2282: IndexFileNames is exposed as a public class allowing for easier
use by external code. In addition it offers a matchesExtension method which
callers can use to query whether a certain file matches a certain extension.
(Shai Erera via Mike McCandless)
Bug fixes
* LUCENE-2119: Don't throw NegativeArraySizeException if you pass

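For illustration, a minimal sketch of how external code might call the matchesExtension convenience method mentioned in the CHANGES entry above; the file name "_0.cfs" and the class name are made up for this example and are not part of the commit:

import org.apache.lucene.index.IndexFileNames;

public class MatchesExtensionDemo {
  public static void main(String[] args) {
    String fileName = "_0.cfs"; // hypothetical file name
    // matchesExtension expects a pure extension, without a leading '.'
    if (IndexFileNames.matchesExtension(fileName, IndexFileNames.COMPOUND_FILE_EXTENSION)) {
      System.out.println(fileName + " is a compound file");
    }
  }
}
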
View File

@ -613,7 +613,8 @@ final class DocumentsWriter {
/** Build compound file for the segment we just flushed */
void createCompoundFile(String segment) throws IOException {
CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION);
CompoundFileWriter cfsWriter = new CompoundFileWriter(directory,
IndexFileNames.segmentFileName(segment, IndexFileNames.COMPOUND_FILE_EXTENSION));
for (final String flushedFile : flushState.flushedFiles)
cfsWriter.addFile(flushedFile);

View File

@ -105,8 +105,8 @@ final class FieldsReader implements Cloneable {
try {
fieldInfos = fn;
cloneableFieldsStream = d.openInput(segment + "." + IndexFileNames.FIELDS_EXTENSION, readBufferSize);
cloneableIndexStream = d.openInput(segment + "." + IndexFileNames.FIELDS_INDEX_EXTENSION, readBufferSize);
cloneableFieldsStream = d.openInput(IndexFileNames.segmentFileName(segment, IndexFileNames.FIELDS_EXTENSION), readBufferSize);
cloneableIndexStream = d.openInput(IndexFileNames.segmentFileName(segment, IndexFileNames.FIELDS_INDEX_EXTENSION), readBufferSize);
// First version of fdx did not include a format
// header, but, the first int will always be 0 in that

View File

@ -61,7 +61,7 @@ final class FieldsWriter
fieldInfos = fn;
boolean success = false;
final String fieldsName = segment + "." + IndexFileNames.FIELDS_EXTENSION;
final String fieldsName = IndexFileNames.segmentFileName(segment, IndexFileNames.FIELDS_EXTENSION);
try {
fieldsStream = d.createOutput(fieldsName);
fieldsStream.writeInt(FORMAT_CURRENT);
@ -82,7 +82,7 @@ final class FieldsWriter
}
success = false;
final String indexName = segment + "." + IndexFileNames.FIELDS_INDEX_EXTENSION;
final String indexName = IndexFileNames.segmentFileName(segment, IndexFileNames.FIELDS_INDEX_EXTENSION);
try {
indexStream = d.createOutput(indexName);
indexStream.writeInt(FORMAT_CURRENT);

View File

@ -23,6 +23,8 @@ import java.util.HashSet;
/**
* Filename filter that accepts only filenames and extensions created by Lucene.
*
* @lucene.internal
*/
public class IndexFileNameFilter implements FilenameFilter {
@ -33,12 +35,12 @@ public class IndexFileNameFilter implements FilenameFilter {
// Prevent instantiation.
private IndexFileNameFilter() {
extensions = new HashSet<String>();
for (int i = 0; i < IndexFileNames.INDEX_EXTENSIONS.length; i++) {
extensions.add(IndexFileNames.INDEX_EXTENSIONS[i]);
for (String ext : IndexFileNames.INDEX_EXTENSIONS) {
extensions.add(ext);
}
extensionsInCFS = new HashSet<String>();
for (int i = 0; i < IndexFileNames.INDEX_EXTENSIONS_IN_COMPOUND_FILE.length; i++) {
extensionsInCFS.add(IndexFileNames.INDEX_EXTENSIONS_IN_COMPOUND_FILE[i]);
for (String ext : IndexFileNames.INDEX_EXTENSIONS_IN_COMPOUND_FILE) {
extensionsInCFS.add(ext);
}
}

View File

@ -18,70 +18,78 @@ package org.apache.lucene.index;
*/
/**
* Useful constants representing filenames and extensions used by lucene
This class contains useful constants representing filenames and extensions
used by Lucene, as well as convenience methods for querying whether a file
name matches an extension ({@link #matchesExtension(String, String)
matchesExtension}) and for generating file names from a segment name,
generation and extension (
{@link #fileNameFromGeneration(String, String, long) fileNameFromGeneration},
{@link #segmentFileName(String, String) segmentFileName}).
*
* @lucene.internal
*/
final class IndexFileNames {
public final class IndexFileNames {
/** Name of the index segment file */
static final String SEGMENTS = "segments";
public static final String SEGMENTS = "segments";
/** Name of the generation reference file name */
static final String SEGMENTS_GEN = "segments.gen";
public static final String SEGMENTS_GEN = "segments.gen";
/** Name of the index deletable file (only used in
* pre-lockless indices) */
static final String DELETABLE = "deletable";
public static final String DELETABLE = "deletable";
/** Extension of norms file */
static final String NORMS_EXTENSION = "nrm";
public static final String NORMS_EXTENSION = "nrm";
/** Extension of freq postings file */
static final String FREQ_EXTENSION = "frq";
public static final String FREQ_EXTENSION = "frq";
/** Extension of prox postings file */
static final String PROX_EXTENSION = "prx";
public static final String PROX_EXTENSION = "prx";
/** Extension of terms file */
static final String TERMS_EXTENSION = "tis";
public static final String TERMS_EXTENSION = "tis";
/** Extension of terms index file */
static final String TERMS_INDEX_EXTENSION = "tii";
public static final String TERMS_INDEX_EXTENSION = "tii";
/** Extension of stored fields index file */
static final String FIELDS_INDEX_EXTENSION = "fdx";
public static final String FIELDS_INDEX_EXTENSION = "fdx";
/** Extension of stored fields file */
static final String FIELDS_EXTENSION = "fdt";
public static final String FIELDS_EXTENSION = "fdt";
/** Extension of vectors fields file */
static final String VECTORS_FIELDS_EXTENSION = "tvf";
public static final String VECTORS_FIELDS_EXTENSION = "tvf";
/** Extension of vectors documents file */
static final String VECTORS_DOCUMENTS_EXTENSION = "tvd";
public static final String VECTORS_DOCUMENTS_EXTENSION = "tvd";
/** Extension of vectors index file */
static final String VECTORS_INDEX_EXTENSION = "tvx";
public static final String VECTORS_INDEX_EXTENSION = "tvx";
/** Extension of compound file */
static final String COMPOUND_FILE_EXTENSION = "cfs";
public static final String COMPOUND_FILE_EXTENSION = "cfs";
/** Extension of compound file for doc store files*/
static final String COMPOUND_FILE_STORE_EXTENSION = "cfx";
public static final String COMPOUND_FILE_STORE_EXTENSION = "cfx";
/** Extension of deletes */
static final String DELETES_EXTENSION = "del";
public static final String DELETES_EXTENSION = "del";
/** Extension of field infos */
static final String FIELD_INFOS_EXTENSION = "fnm";
public static final String FIELD_INFOS_EXTENSION = "fnm";
/** Extension of plain norms */
static final String PLAIN_NORMS_EXTENSION = "f";
public static final String PLAIN_NORMS_EXTENSION = "f";
/** Extension of separate norms */
static final String SEPARATE_NORMS_EXTENSION = "s";
public static final String SEPARATE_NORMS_EXTENSION = "s";
/** Extension of gen file */
static final String GEN_EXTENSION = "gen";
public static final String GEN_EXTENSION = "gen";
/**
* This array contains all filename extensions used by
@ -91,7 +99,7 @@ final class IndexFileNames {
* Lucene's <code>segments_N</code> files do not have any
* filename extension.
*/
static final String INDEX_EXTENSIONS[] = new String[] {
public static final String INDEX_EXTENSIONS[] = new String[] {
COMPOUND_FILE_EXTENSION,
FIELD_INFOS_EXTENSION,
FIELDS_INDEX_EXTENSION,
@ -111,7 +119,7 @@ final class IndexFileNames {
/** File extensions that are added to a compound file
* (same as above, minus "del", "gen", "cfs"). */
static final String[] INDEX_EXTENSIONS_IN_COMPOUND_FILE = new String[] {
public static final String[] INDEX_EXTENSIONS_IN_COMPOUND_FILE = new String[] {
FIELD_INFOS_EXTENSION,
FIELDS_INDEX_EXTENSION,
FIELDS_EXTENSION,
@ -125,7 +133,7 @@ final class IndexFileNames {
NORMS_EXTENSION
};
static final String[] STORE_INDEX_EXTENSIONS = new String[] {
public static final String[] STORE_INDEX_EXTENSIONS = new String[] {
VECTORS_INDEX_EXTENSION,
VECTORS_FIELDS_EXTENSION,
VECTORS_DOCUMENTS_EXTENSION,
@ -133,7 +141,7 @@ final class IndexFileNames {
FIELDS_EXTENSION
};
static final String[] NON_STORE_INDEX_EXTENSIONS = new String[] {
public static final String[] NON_STORE_INDEX_EXTENSIONS = new String[] {
FIELD_INFOS_EXTENSION,
FREQ_EXTENSION,
PROX_EXTENSION,
@ -143,7 +151,7 @@ final class IndexFileNames {
};
/** File extensions of old-style index files */
static final String COMPOUND_EXTENSIONS[] = new String[] {
public static final String COMPOUND_EXTENSIONS[] = new String[] {
FIELD_INFOS_EXTENSION,
FREQ_EXTENSION,
PROX_EXTENSION,
@ -154,47 +162,81 @@ final class IndexFileNames {
};
/** File extensions for term vector support */
static final String VECTOR_EXTENSIONS[] = new String[] {
public static final String VECTOR_EXTENSIONS[] = new String[] {
VECTORS_INDEX_EXTENSION,
VECTORS_DOCUMENTS_EXTENSION,
VECTORS_FIELDS_EXTENSION
};
/**
* Computes the full file name from base, extension and
* generation. If the generation is -1, the file name is
* null. If it's 0, the file name is <base><extension>.
* If it's > 0, the file name is <base>_<generation><extension>.
*
* @param base -- main part of the file name
* @param extension -- extension of the filename (including .)
* @param gen -- generation
* Computes the full file name from base, extension and generation. If the
* generation is -1, the file name is null. If it's 0, the file name is
* &lt;base&gt;.&lt;ext&gt;. If it's > 0, the file name is
* &lt;base&gt;_&lt;gen&gt;.&lt;ext&gt;.<br>
* <b>NOTE:</b> .&lt;ext&gt; is added to the name only if <code>ext</code> is
* not an empty string.
*
* @param base main part of the file name
* @param ext extension of the filename
* @param gen generation
*/
static final String fileNameFromGeneration(String base, String extension, long gen) {
public static final String fileNameFromGeneration(String base, String ext, long gen) {
if (gen == SegmentInfo.NO) {
return null;
} else if (gen == SegmentInfo.WITHOUT_GEN) {
return base + extension;
return segmentFileName(base, ext);
} else {
return base + "_" + Long.toString(gen, Character.MAX_RADIX) + extension;
// The '6' in the length is: 1 for '.', 1 for '_', and 4 as an estimate
// of the gen length as a string (hopefully an upper limit, so the
// StringBuilder won't need to expand in the middle).
StringBuilder res = new StringBuilder(base.length() + 6 + ext.length())
.append(base).append('_').append(Long.toString(gen, Character.MAX_RADIX));
if (ext.length() > 0) {
res.append('.').append(ext);
}
return res.toString();
}
}
/**
* Returns true if the provided filename is one of the doc
* store files (ends with an extension in
* STORE_INDEX_EXTENSIONS).
* Returns true if the provided filename is one of the doc store files (ends
* with an extension in {@link #STORE_INDEX_EXTENSIONS}).
*/
static final boolean isDocStoreFile(String fileName) {
public static final boolean isDocStoreFile(String fileName) {
if (fileName.endsWith(COMPOUND_FILE_STORE_EXTENSION))
return true;
for(int i=0;i<STORE_INDEX_EXTENSIONS.length;i++)
if (fileName.endsWith(STORE_INDEX_EXTENSIONS[i]))
for (String ext : STORE_INDEX_EXTENSIONS) {
if (fileName.endsWith(ext))
return true;
}
return false;
}
static String segmentFileName(String segmentName, String ext) {
return segmentName + "." + ext;
/**
* Returns the file name that matches the given segment name and extension.
* This method takes care to return the full file name in the form
* &lt;segmentName&gt;.&lt;ext&gt;, therefore you don't need to prefix the
* extension with a '.'.<br>
* <b>NOTE:</b> .&lt;ext&gt; is added to the result file name only if
* <code>ext</code> is not empty.
*/
public static final String segmentFileName(String segmentName, String ext) {
if (ext.length() > 0) {
return new StringBuilder(segmentName.length() + 1 + ext.length()).append(
segmentName).append('.').append(ext).toString();
} else {
return segmentName;
}
}
/**
* Returns true if the given filename ends with the given extension. One
* should provide a <i>pure</i> extension, without a leading '.'.
*/
public static final boolean matchesExtension(String filename, String ext) {
// It doesn't make a difference whether we allocate a StringBuilder ourselves
// or not, since there's only one '+' operator.
return filename.endsWith("." + ext);
}
}
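
To make the naming rules in the javadocs above concrete, a small sketch of what the new public helpers return, assuming an arbitrary segment name "_7" and generation 11 (base 36 encodes 11 as "b"); the example is illustrative only and not part of the commit:

import org.apache.lucene.index.IndexFileNames;

public class FileNameDemo {
  public static void main(String[] args) {
    // segmentFileName joins name and extension with '.'; an empty extension returns the name unchanged.
    System.out.println(IndexFileNames.segmentFileName("_7", IndexFileNames.FIELDS_EXTENSION)); // _7.fdt
    System.out.println(IndexFileNames.segmentFileName("_7", ""));                              // _7

    // fileNameFromGeneration appends '_' plus the generation in base 36 (Character.MAX_RADIX),
    // then '.' plus the extension; a generation of 0 yields just <base>.<ext>, and -1 yields null.
    System.out.println(IndexFileNames.fileNameFromGeneration("_7", IndexFileNames.DELETES_EXTENSION, 11)); // _7_b.del
    System.out.println(IndexFileNames.fileNameFromGeneration("_7", IndexFileNames.DELETES_EXTENSION, 0));  // _7.del

    // matchesExtension takes a pure extension, without a leading '.'.
    System.out.println(IndexFileNames.matchesExtension("_7.fdt", IndexFileNames.FIELDS_EXTENSION)); // true
  }
}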

View File

@ -1738,13 +1738,13 @@ public class IndexWriter implements Closeable {
// Now build compound doc store file
if (infoStream != null) {
message("create compound file " + docStoreSegment + "." + IndexFileNames.COMPOUND_FILE_STORE_EXTENSION);
message("create compound file " + IndexFileNames.segmentFileName(docStoreSegment, IndexFileNames.COMPOUND_FILE_STORE_EXTENSION));
}
success = false;
final int numSegments = segmentInfos.size();
final String compoundFileName = docStoreSegment + "." + IndexFileNames.COMPOUND_FILE_STORE_EXTENSION;
final String compoundFileName = IndexFileNames.segmentFileName(docStoreSegment, IndexFileNames.COMPOUND_FILE_STORE_EXTENSION);
try {
CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, compoundFileName);
@ -3653,7 +3653,7 @@ public class IndexWriter implements Closeable {
if (!success) {
if (infoStream != null)
message("hit exception creating compound file for newly flushed segment " + segment);
deleter.deleteFile(segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION);
deleter.deleteFile(IndexFileNames.segmentFileName(segment, IndexFileNames.COMPOUND_FILE_EXTENSION));
}
}
@ -4371,7 +4371,7 @@ public class IndexWriter implements Closeable {
if (merge.useCompoundFile) {
success = false;
final String compoundFileName = mergedName + "." + IndexFileNames.COMPOUND_FILE_EXTENSION;
final String compoundFileName = IndexFileNames.segmentFileName(mergedName, IndexFileNames.COMPOUND_FILE_EXTENSION);
try {
merger.createCompoundFile(compoundFileName);

View File

@ -88,7 +88,7 @@ final class NormsWriter extends InvertedDocEndConsumer {
}
}
final String normsFileName = state.segmentName + "." + IndexFileNames.NORMS_EXTENSION;
final String normsFileName = IndexFileNames.segmentFileName(state.segmentName, IndexFileNames.NORMS_EXTENSION);
state.flushedFiles.add(normsFileName);
IndexOutput normsOut = state.directory.createOutput(normsFileName);

View File

@ -333,7 +333,7 @@ public final class SegmentInfo {
return null;
} else {
// If delGen is CHECK_DIR, it's the pre-lockless-commit file format
return IndexFileNames.fileNameFromGeneration(name, "." + IndexFileNames.DELETES_EXTENSION, delGen);
return IndexFileNames.fileNameFromGeneration(name, IndexFileNames.DELETES_EXTENSION, delGen);
}
}
@ -428,8 +428,6 @@ public final class SegmentInfo {
* @param number field index
*/
public String getNormFileName(int number) throws IOException {
String prefix;
long gen;
if (normGen == null) {
gen = CHECK_DIR;
@ -439,19 +437,16 @@ public final class SegmentInfo {
if (hasSeparateNorms(number)) {
// case 1: separate norm
prefix = ".s";
return IndexFileNames.fileNameFromGeneration(name, prefix + number, gen);
return IndexFileNames.fileNameFromGeneration(name, "s" + number, gen);
}
if (hasSingleNormFile) {
// case 2: lockless (or nrm file exists) - single file for all norms
prefix = "." + IndexFileNames.NORMS_EXTENSION;
return IndexFileNames.fileNameFromGeneration(name, prefix, WITHOUT_GEN);
return IndexFileNames.fileNameFromGeneration(name, IndexFileNames.NORMS_EXTENSION, WITHOUT_GEN);
}
// case 3: norm file for each field
prefix = ".f";
return IndexFileNames.fileNameFromGeneration(name, prefix + number, WITHOUT_GEN);
return IndexFileNames.fileNameFromGeneration(name, "f" + number, WITHOUT_GEN);
}
/**
@ -479,7 +474,7 @@ public final class SegmentInfo {
} else if (isCompoundFile == YES) {
return true;
} else {
return dir.fileExists(name + "." + IndexFileNames.COMPOUND_FILE_EXTENSION);
return dir.fileExists(IndexFileNames.segmentFileName(name, IndexFileNames.COMPOUND_FILE_EXTENSION));
}
}
@ -590,11 +585,10 @@ public final class SegmentInfo {
boolean useCompoundFile = getUseCompoundFile();
if (useCompoundFile) {
files.add(name + "." + IndexFileNames.COMPOUND_FILE_EXTENSION);
files.add(IndexFileNames.segmentFileName(name, IndexFileNames.COMPOUND_FILE_EXTENSION));
} else {
final String[] exts = IndexFileNames.NON_STORE_INDEX_EXTENSIONS;
for(int i=0;i<exts.length;i++)
addIfExists(files, name + "." + exts[i]);
for (String ext : IndexFileNames.NON_STORE_INDEX_EXTENSIONS)
addIfExists(files, IndexFileNames.segmentFileName(name, ext));
}
if (docStoreOffset != -1) {
@ -602,21 +596,17 @@ public final class SegmentInfo {
// vectors) with other segments
assert docStoreSegment != null;
if (docStoreIsCompoundFile) {
files.add(docStoreSegment + "." + IndexFileNames.COMPOUND_FILE_STORE_EXTENSION);
files.add(IndexFileNames.segmentFileName(docStoreSegment, IndexFileNames.COMPOUND_FILE_STORE_EXTENSION));
} else {
final String[] exts = IndexFileNames.STORE_INDEX_EXTENSIONS;
for(int i=0;i<exts.length;i++)
addIfExists(files, docStoreSegment + "." + exts[i]);
for (String ext : IndexFileNames.STORE_INDEX_EXTENSIONS)
addIfExists(files, IndexFileNames.segmentFileName(docStoreSegment, ext));
}
} else if (!useCompoundFile) {
// We are not sharing, and, these files were not
// included in the compound file
final String[] exts = IndexFileNames.STORE_INDEX_EXTENSIONS;
for(int i=0;i<exts.length;i++)
addIfExists(files, name + "." + exts[i]);
for (String ext : IndexFileNames.STORE_INDEX_EXTENSIONS)
addIfExists(files, IndexFileNames.segmentFileName(name, ext));
}
String delFileName = IndexFileNames.fileNameFromGeneration(name, "." + IndexFileNames.DELETES_EXTENSION, delGen);
String delFileName = IndexFileNames.fileNameFromGeneration(name, IndexFileNames.DELETES_EXTENSION, delGen);
if (delFileName != null && (delGen >= YES || dir.fileExists(delFileName))) {
files.add(delFileName);
}
@ -627,12 +617,12 @@ public final class SegmentInfo {
long gen = normGen[i];
if (gen >= YES) {
// Definitely a separate norm file, with generation:
files.add(IndexFileNames.fileNameFromGeneration(name, "." + IndexFileNames.SEPARATE_NORMS_EXTENSION + i, gen));
files.add(IndexFileNames.fileNameFromGeneration(name, IndexFileNames.SEPARATE_NORMS_EXTENSION + i, gen));
} else if (NO == gen) {
// No separate norms but maybe plain norms
// in the non compound file case:
if (!hasSingleNormFile && !useCompoundFile) {
String fileName = name + "." + IndexFileNames.PLAIN_NORMS_EXTENSION + i;
String fileName = IndexFileNames.segmentFileName(name, IndexFileNames.PLAIN_NORMS_EXTENSION + i);
if (dir.fileExists(fileName)) {
files.add(fileName);
}
@ -641,9 +631,9 @@ public final class SegmentInfo {
// Pre-2.1: we have to check file existence
String fileName = null;
if (useCompoundFile) {
fileName = name + "." + IndexFileNames.SEPARATE_NORMS_EXTENSION + i;
fileName = IndexFileNames.segmentFileName(name, IndexFileNames.SEPARATE_NORMS_EXTENSION + i);
} else if (!hasSingleNormFile) {
fileName = name + "." + IndexFileNames.PLAIN_NORMS_EXTENSION + i;
fileName = IndexFileNames.segmentFileName(name, IndexFileNames.PLAIN_NORMS_EXTENSION + i);
}
if (fileName != null && dir.fileExists(fileName)) {
files.add(fileName);
@ -655,9 +645,9 @@ public final class SegmentInfo {
// matching _X.sN/_X.fN files for our segment:
String prefix;
if (useCompoundFile)
prefix = name + "." + IndexFileNames.SEPARATE_NORMS_EXTENSION;
prefix = IndexFileNames.segmentFileName(name, IndexFileNames.SEPARATE_NORMS_EXTENSION);
else
prefix = name + "." + IndexFileNames.PLAIN_NORMS_EXTENSION;
prefix = IndexFileNames.segmentFileName(name, IndexFileNames.PLAIN_NORMS_EXTENSION);
int prefixLength = prefix.length();
String[] allFiles = dir.listAll();
final IndexFileNameFilter filter = IndexFileNameFilter.getFilter();
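
As a reading aid for the getNormFileName cases earlier in this file (separate norms, single shared norms file, per-field plain norms), a hypothetical illustration of the resulting names for a segment "_4" and field number 2; the values are made up and the snippet is not part of the commit:

import org.apache.lucene.index.IndexFileNames;

public class NormFileNameDemo {
  public static void main(String[] args) {
    // case 1: separate norms for field 2 at generation 3 -> "_4_3.s2"
    System.out.println(IndexFileNames.fileNameFromGeneration("_4", "s" + 2, 3));
    // case 2: single shared norms file (generation 0 means no generation suffix) -> "_4.nrm"
    System.out.println(IndexFileNames.fileNameFromGeneration("_4", IndexFileNames.NORMS_EXTENSION, 0));
    // case 3: per-field plain norms (pre-2.1 style) -> "_4.f2"
    System.out.println(IndexFileNames.fileNameFromGeneration("_4", "f" + 2, 0));
  }
}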

View File

@ -180,30 +180,29 @@ final class SegmentMerger {
new ArrayList<String>(IndexFileNames.COMPOUND_EXTENSIONS.length + 1);
// Basic files
for (int i = 0; i < IndexFileNames.COMPOUND_EXTENSIONS.length; i++) {
String ext = IndexFileNames.COMPOUND_EXTENSIONS[i];
for (String ext : IndexFileNames.COMPOUND_EXTENSIONS) {
if (ext.equals(IndexFileNames.PROX_EXTENSION) && !hasProx())
continue;
if (mergeDocStores || (!ext.equals(IndexFileNames.FIELDS_EXTENSION) &&
!ext.equals(IndexFileNames.FIELDS_INDEX_EXTENSION)))
files.add(segment + "." + ext);
files.add(IndexFileNames.segmentFileName(segment, ext));
}
// Fieldable norm files
for (int i = 0; i < fieldInfos.size(); i++) {
int numFIs = fieldInfos.size();
for (int i = 0; i < numFIs; i++) {
FieldInfo fi = fieldInfos.fieldInfo(i);
if (fi.isIndexed && !fi.omitNorms) {
files.add(segment + "." + IndexFileNames.NORMS_EXTENSION);
files.add(IndexFileNames.segmentFileName(segment, IndexFileNames.NORMS_EXTENSION));
break;
}
}
// Vector files
if (fieldInfos.hasVectors() && mergeDocStores) {
for (int i = 0; i < IndexFileNames.VECTOR_EXTENSIONS.length; i++) {
files.add(segment + "." + IndexFileNames.VECTOR_EXTENSIONS[i]);
for (String ext : IndexFileNames.VECTOR_EXTENSIONS) {
files.add(IndexFileNames.segmentFileName(segment, ext));
}
}
@ -341,7 +340,7 @@ final class SegmentMerger {
fieldsWriter.close();
}
final String fileName = segment + "." + IndexFileNames.FIELDS_INDEX_EXTENSION;
final String fileName = IndexFileNames.segmentFileName(segment, IndexFileNames.FIELDS_INDEX_EXTENSION);
final long fdxFileLength = directory.fileLength(fileName);
if (4+((long) docCount)*8 != fdxFileLength)
@ -469,7 +468,7 @@ final class SegmentMerger {
termVectorsWriter.close();
}
final String fileName = segment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION;
final String fileName = IndexFileNames.segmentFileName(segment, IndexFileNames.VECTORS_INDEX_EXTENSION);
final long tvxSize = directory.fileLength(fileName);
if (4+((long) mergedDocs)*16 != tvxSize)
@ -712,7 +711,7 @@ final class SegmentMerger {
FieldInfo fi = fieldInfos.fieldInfo(i);
if (fi.isIndexed && !fi.omitNorms) {
if (output == null) {
output = directory.createOutput(segment + "." + IndexFileNames.NORMS_EXTENSION);
output = directory.createOutput(IndexFileNames.segmentFileName(segment, IndexFileNames.NORMS_EXTENSION));
output.writeBytes(NORMS_HEADER,NORMS_HEADER.length);
}
for ( IndexReader reader : readers) {

View File

@ -110,12 +110,12 @@ public class SegmentReader extends IndexReader implements Cloneable {
try {
Directory dir0 = dir;
if (si.getUseCompoundFile()) {
cfsReader = new CompoundFileReader(dir, segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize);
cfsReader = new CompoundFileReader(dir, IndexFileNames.segmentFileName(segment, IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize);
dir0 = cfsReader;
}
cfsDir = dir0;
fieldInfos = new FieldInfos(cfsDir, segment + "." + IndexFileNames.FIELD_INFOS_EXTENSION);
fieldInfos = new FieldInfos(cfsDir, IndexFileNames.segmentFileName(segment, IndexFileNames.FIELD_INFOS_EXTENSION));
this.termsIndexDivisor = termsIndexDivisor;
TermInfosReader reader = new TermInfosReader(cfsDir, segment, fieldInfos, readBufferSize, termsIndexDivisor);
@ -128,10 +128,10 @@ public class SegmentReader extends IndexReader implements Cloneable {
// make sure that all index files have been read or are kept open
// so that if an index update removes them we'll still have them
freqStream = cfsDir.openInput(segment + "." + IndexFileNames.FREQ_EXTENSION, readBufferSize);
freqStream = cfsDir.openInput(IndexFileNames.segmentFileName(segment, IndexFileNames.FREQ_EXTENSION), readBufferSize);
if (fieldInfos.hasProx()) {
proxStream = cfsDir.openInput(segment + "." + IndexFileNames.PROX_EXTENSION, readBufferSize);
proxStream = cfsDir.openInput(IndexFileNames.segmentFileName(segment, IndexFileNames.PROX_EXTENSION), readBufferSize);
} else {
proxStream = null;
}
@ -191,7 +191,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
// terms reader with index, the segment has switched
// to CFS
if (cfsReader == null) {
cfsReader = new CompoundFileReader(dir, segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize);
cfsReader = new CompoundFileReader(dir, IndexFileNames.segmentFileName(segment, IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize);
}
dir0 = cfsReader;
} else {
@ -262,7 +262,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
if (si.getDocStoreIsCompoundFile()) {
assert storeCFSReader == null;
storeCFSReader = new CompoundFileReader(dir,
si.getDocStoreSegment() + "." + IndexFileNames.COMPOUND_FILE_STORE_EXTENSION,
IndexFileNames.segmentFileName(si.getDocStoreSegment(), IndexFileNames.COMPOUND_FILE_STORE_EXTENSION),
readBufferSize);
storeDir = storeCFSReader;
assert storeDir != null;
@ -275,7 +275,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
// was not used, but then we are asked to open doc
// stores after the segment has switched to CFS
if (cfsReader == null) {
cfsReader = new CompoundFileReader(dir, segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize);
cfsReader = new CompoundFileReader(dir, IndexFileNames.segmentFileName(segment, IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize);
}
storeDir = cfsReader;
assert storeDir != null;
@ -1051,7 +1051,7 @@ public class SegmentReader extends IndexReader implements Cloneable {
}
// singleNormFile means multiple norms share this file
boolean singleNormFile = fileName.endsWith("." + IndexFileNames.NORMS_EXTENSION);
boolean singleNormFile = IndexFileNames.matchesExtension(fileName, IndexFileNames.NORMS_EXTENSION);
IndexInput normInput = null;
long normSeek;

View File

@ -67,8 +67,8 @@ final class StoredFieldsWriter {
fieldsWriter = new FieldsWriter(docWriter.directory,
docStoreSegment,
fieldInfos);
docWriter.addOpenFile(docStoreSegment + "." + IndexFileNames.FIELDS_EXTENSION);
docWriter.addOpenFile(docStoreSegment + "." + IndexFileNames.FIELDS_INDEX_EXTENSION);
docWriter.addOpenFile(IndexFileNames.segmentFileName(docStoreSegment, IndexFileNames.FIELDS_EXTENSION));
docWriter.addOpenFile(IndexFileNames.segmentFileName(docStoreSegment, IndexFileNames.FIELDS_INDEX_EXTENSION));
lastDocID = 0;
}
}
@ -86,16 +86,16 @@ final class StoredFieldsWriter {
fieldsWriter = null;
lastDocID = 0;
assert state.docStoreSegmentName != null;
state.flushedFiles.add(state.docStoreSegmentName + "." + IndexFileNames.FIELDS_EXTENSION);
state.flushedFiles.add(state.docStoreSegmentName + "." + IndexFileNames.FIELDS_INDEX_EXTENSION);
String fieldsName = IndexFileNames.segmentFileName(state.docStoreSegmentName, IndexFileNames.FIELDS_EXTENSION);
String fieldsIdxName = IndexFileNames.segmentFileName(state.docStoreSegmentName, IndexFileNames.FIELDS_INDEX_EXTENSION);
state.flushedFiles.add(fieldsName);
state.flushedFiles.add(fieldsIdxName);
state.docWriter.removeOpenFile(state.docStoreSegmentName + "." + IndexFileNames.FIELDS_EXTENSION);
state.docWriter.removeOpenFile(state.docStoreSegmentName + "." + IndexFileNames.FIELDS_INDEX_EXTENSION);
state.docWriter.removeOpenFile(fieldsName);
state.docWriter.removeOpenFile(fieldsIdxName);
final String fileName = state.docStoreSegmentName + "." + IndexFileNames.FIELDS_INDEX_EXTENSION;
if (4+((long) state.numDocsInStore)*8 != state.directory.fileLength(fileName))
throw new RuntimeException("after flush: fdx size mismatch: " + state.numDocsInStore + " docs vs " + state.directory.fileLength(fileName) + " length in bytes of " + fileName + " file exists?=" + state.directory.fileExists(fileName));
if (4+((long) state.numDocsInStore)*8 != state.directory.fileLength(fieldsIdxName))
throw new RuntimeException("after flush: fdx size mismatch: " + state.numDocsInStore + " docs vs " + state.directory.fileLength(fieldsIdxName) + " length in bytes of " + fieldsIdxName + " file exists?=" + state.directory.fileExists(fieldsIdxName));
}
}

View File

@ -76,7 +76,7 @@ final class TermInfosReader {
segment = seg;
fieldInfos = fis;
origEnum = new SegmentTermEnum(directory.openInput(segment + "." + IndexFileNames.TERMS_EXTENSION,
origEnum = new SegmentTermEnum(directory.openInput(IndexFileNames.segmentFileName(segment, IndexFileNames.TERMS_EXTENSION),
readBufferSize), fieldInfos, false);
size = origEnum.size;
@ -84,7 +84,7 @@ final class TermInfosReader {
if (indexDivisor != -1) {
// Load terms index
totalIndexInterval = origEnum.indexInterval * indexDivisor;
final SegmentTermEnum indexEnum = new SegmentTermEnum(directory.openInput(segment + "." + IndexFileNames.TERMS_INDEX_EXTENSION,
final SegmentTermEnum indexEnum = new SegmentTermEnum(directory.openInput(IndexFileNames.segmentFileName(segment, IndexFileNames.TERMS_INDEX_EXTENSION),
readBufferSize), fieldInfos, true);
try {

View File

@ -75,12 +75,13 @@ class TermVectorsReader implements Cloneable {
boolean success = false;
try {
if (d.fileExists(segment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION)) {
tvx = d.openInput(segment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION, readBufferSize);
String idxName = IndexFileNames.segmentFileName(segment, IndexFileNames.VECTORS_INDEX_EXTENSION);
if (d.fileExists(idxName)) {
tvx = d.openInput(idxName, readBufferSize);
format = checkValidFormat(tvx);
tvd = d.openInput(segment + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION, readBufferSize);
tvd = d.openInput(IndexFileNames.segmentFileName(segment, IndexFileNames.VECTORS_DOCUMENTS_EXTENSION), readBufferSize);
final int tvdFormat = checkValidFormat(tvd);
tvf = d.openInput(segment + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION, readBufferSize);
tvf = d.openInput(IndexFileNames.segmentFileName(segment, IndexFileNames.VECTORS_FIELDS_EXTENSION), readBufferSize);
final int tvfFormat = checkValidFormat(tvf);
assert format == tvdFormat;

View File

@ -92,17 +92,19 @@ final class TermVectorsTermsWriter extends TermsHashConsumer {
tvd.close();
tvx = null;
assert state.docStoreSegmentName != null;
final String fileName = state.docStoreSegmentName + "." + IndexFileNames.VECTORS_INDEX_EXTENSION;
if (4+((long) state.numDocsInStore)*16 != state.directory.fileLength(fileName))
throw new RuntimeException("after flush: tvx size mismatch: " + state.numDocsInStore + " docs vs " + state.directory.fileLength(fileName) + " length in bytes of " + fileName + " file exists?=" + state.directory.fileExists(fileName));
String idxName = IndexFileNames.segmentFileName(state.docStoreSegmentName, IndexFileNames.VECTORS_INDEX_EXTENSION);
if (4+((long) state.numDocsInStore)*16 != state.directory.fileLength(idxName))
throw new RuntimeException("after flush: tvx size mismatch: " + state.numDocsInStore + " docs vs " + state.directory.fileLength(idxName) + " length in bytes of " + idxName + " file exists?=" + state.directory.fileExists(idxName));
state.flushedFiles.add(state.docStoreSegmentName + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
state.flushedFiles.add(state.docStoreSegmentName + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION);
state.flushedFiles.add(state.docStoreSegmentName + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
String fldName = IndexFileNames.segmentFileName(state.docStoreSegmentName, IndexFileNames.VECTORS_FIELDS_EXTENSION);
String docName = IndexFileNames.segmentFileName(state.docStoreSegmentName, IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
state.flushedFiles.add(idxName);
state.flushedFiles.add(fldName);
state.flushedFiles.add(docName);
docWriter.removeOpenFile(state.docStoreSegmentName + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
docWriter.removeOpenFile(state.docStoreSegmentName + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION);
docWriter.removeOpenFile(state.docStoreSegmentName + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
docWriter.removeOpenFile(idxName);
docWriter.removeOpenFile(fldName);
docWriter.removeOpenFile(docName);
lastDocID = 0;
}
@ -155,17 +157,20 @@ final class TermVectorsTermsWriter extends TermsHashConsumer {
// vector output files, we must abort this segment
// because those files will be in an unknown
// state:
tvx = docWriter.directory.createOutput(docStoreSegment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
tvd = docWriter.directory.createOutput(docStoreSegment + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
tvf = docWriter.directory.createOutput(docStoreSegment + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION);
String idxName = IndexFileNames.segmentFileName(docStoreSegment, IndexFileNames.VECTORS_INDEX_EXTENSION);
String docName = IndexFileNames.segmentFileName(docStoreSegment, IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
String fldName = IndexFileNames.segmentFileName(docStoreSegment, IndexFileNames.VECTORS_FIELDS_EXTENSION);
tvx = docWriter.directory.createOutput(idxName);
tvd = docWriter.directory.createOutput(docName);
tvf = docWriter.directory.createOutput(fldName);
tvx.writeInt(TermVectorsReader.FORMAT_CURRENT);
tvd.writeInt(TermVectorsReader.FORMAT_CURRENT);
tvf.writeInt(TermVectorsReader.FORMAT_CURRENT);
docWriter.addOpenFile(docStoreSegment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
docWriter.addOpenFile(docStoreSegment + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION);
docWriter.addOpenFile(docStoreSegment + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
docWriter.addOpenFile(idxName);
docWriter.addOpenFile(fldName);
docWriter.addOpenFile(docName);
lastDocID = 0;
}

View File

@ -35,11 +35,11 @@ final class TermVectorsWriter {
FieldInfos fieldInfos)
throws IOException {
// Open files for TermVector storage
tvx = directory.createOutput(segment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
tvx = directory.createOutput(IndexFileNames.segmentFileName(segment, IndexFileNames.VECTORS_INDEX_EXTENSION));
tvx.writeInt(TermVectorsReader.FORMAT_CURRENT);
tvd = directory.createOutput(segment + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
tvd = directory.createOutput(IndexFileNames.segmentFileName(segment, IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
tvd.writeInt(TermVectorsReader.FORMAT_CURRENT);
tvf = directory.createOutput(segment + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION);
tvf = directory.createOutput(IndexFileNames.segmentFileName(segment, IndexFileNames.VECTORS_FIELDS_EXTENSION));
tvf.writeInt(TermVectorsReader.FORMAT_CURRENT);
this.fieldInfos = fieldInfos;

View File

@ -116,7 +116,7 @@ public class TestTermVectorsReader extends LuceneTestCase {
seg = writer.newestSegment().name;
writer.close();
fieldInfos = new FieldInfos(dir, seg + "." + IndexFileNames.FIELD_INFOS_EXTENSION);
fieldInfos = new FieldInfos(dir, IndexFileNames.segmentFileName(seg, IndexFileNames.FIELD_INFOS_EXTENSION));
}
private class MyTokenStream extends TokenStream {
@ -160,8 +160,8 @@ public class TestTermVectorsReader extends LuceneTestCase {
public void test() {
//Check to see the files were created properly in setup
assertTrue(dir.fileExists(seg + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
assertTrue(dir.fileExists(seg + "." + IndexFileNames.VECTORS_INDEX_EXTENSION));
assertTrue(dir.fileExists(IndexFileNames.segmentFileName(seg, IndexFileNames.VECTORS_DOCUMENTS_EXTENSION)));
assertTrue(dir.fileExists(IndexFileNames.segmentFileName(seg, IndexFileNames.VECTORS_INDEX_EXTENSION)));
}
public void testReader() throws IOException {

View File

@ -22,6 +22,7 @@ import java.util.HashSet;
import java.util.Set;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.TestIndexWriterReader;
@ -34,8 +35,8 @@ public class TestFileSwitchDirectory extends LuceneTestCase {
*/
public void testBasic() throws IOException {
Set<String> fileExtensions = new HashSet<String>();
fileExtensions.add("fdt");
fileExtensions.add("fdx");
fileExtensions.add(IndexFileNames.FIELDS_EXTENSION);
fileExtensions.add(IndexFileNames.FIELDS_INDEX_EXTENSION);
Directory primaryDir = new MockRAMDirectory();
RAMDirectory secondaryDir = new MockRAMDirectory();