mirror of https://github.com/apache/lucene.git
LUCENE-3612: remove _X.fnx
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1209047 13f79535-47bb-0310-9956-ffa450edef68
parent ad64b7727c
commit 814d35e437
@@ -68,9 +68,6 @@ public final class IndexFileNames {
   /** Extension of separate norms */
   public static final String SEPARATE_NORMS_EXTENSION = "s";

-  /** Extension of global field numbers */
-  public static final String GLOBAL_FIELD_NUM_MAP_EXTENSION = "fnx";
-
   /**
    * This array contains all filename extensions used by
    * Lucene's index files, with one exception, namely the
@@ -85,7 +82,6 @@ public final class IndexFileNames {
     GEN_EXTENSION,
     NORMS_EXTENSION,
     COMPOUND_FILE_STORE_EXTENSION,
-    GLOBAL_FIELD_NUM_MAP_EXTENSION,
   };

   public static final String[] NON_STORE_INDEX_EXTENSIONS = new String[] {
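For context: these extension constants are turned into concrete file names by IndexFileNames.segmentFileName. A minimal sketch of how the now-removed constant was used (assuming the 4.0-era trunk API; the demo class is hypothetical, not part of this patch):

    import org.apache.lucene.index.IndexFileNames;

    // Hypothetical demo, mirroring the removed SegmentInfos.getGlobalFieldNumberName.
    public class FnxNameDemo {
      public static void main(String[] args) {
        long version = 3;
        // segment name "_3", empty suffix, extension "fnx" -> "_3.fnx"
        System.out.println(IndexFileNames.segmentFileName("_" + version, "", "fnx"));
      }
    }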
@@ -882,7 +882,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
     rollbackSegments = segmentInfos.createBackupSegmentInfos(true);

     // start with previous field numbers, but new FieldInfos
-    globalFieldNumberMap = segmentInfos.getOrLoadGlobalFieldNumberMap(directory);
+    globalFieldNumberMap = segmentInfos.getOrLoadGlobalFieldNumberMap();
     docWriter = new DocumentsWriter(codec, config, directory, this, globalFieldNumberMap, bufferedDeletesStream);

     // Default deleter (for backwards compatibility) is
@@ -73,11 +73,6 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentInfo> {
    */
   public long version = System.currentTimeMillis();

-  private long globalFieldMapVersion = 0; // version of the GFNM for the next commit
-  private long lastGlobalFieldMapVersion = 0; // version of the GFNM file we last successfully read or wrote
-  private long pendingMapVersion = -1; // version of the GFNM itself that we have last successfully written
-                                       // or -1 if we it was not written. This is set during prepareCommit
-
   private long generation = 0; // generation of the "segments_N" for the next commit
   private long lastGeneration = 0; // generation of the "segments_N" file we last successfully read
                                    // or wrote; this is normally the same as generation except if
@@ -184,15 +179,6 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentInfo> {
                                  lastGeneration);
   }

-  private String getGlobalFieldNumberName(long version) {
-    /*
-     * This creates a file name ${version}.fnx without a leading underscore
-     * since this file might belong to more than one segment (global map) and
-     * could otherwise easily be confused with a per-segment file.
-     */
-    return IndexFileNames.segmentFileName("_"+ version, "", IndexFileNames.GLOBAL_FIELD_NUM_MAP_EXTENSION);
-  }
-
   /**
    * Parse the generation off the segments file name and
    * return it.
@@ -297,8 +283,6 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentInfo> {
         return null;
       }
     }.run();
-    // either we are on 4.0 or we don't have a lastGlobalFieldMapVersion i.e. its still set to 0
-    assert DefaultSegmentInfosWriter.FORMAT_4_0 <= format || (DefaultSegmentInfosWriter.FORMAT_4_0 > format && lastGlobalFieldMapVersion == 0);
   }

   // Only non-null after prepareCommit has been called and
@@ -308,14 +292,6 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentInfo> {
   private void write(Directory directory, Codec codec) throws IOException {

     String segmentFileName = getNextSegmentFileName();
-    final String globalFieldMapFile;
-    if (globalFieldNumberMap != null && globalFieldNumberMap.isDirty()) {
-      globalFieldMapFile = getGlobalFieldNumberName(++globalFieldMapVersion);
-      pendingMapVersion = writeGlobalFieldMap(globalFieldNumberMap, directory, globalFieldMapFile);
-    } else {
-      globalFieldMapFile = null;
-    }
-

     // Always advance the generation on write:
     if (generation == -1) {
@@ -347,16 +323,6 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentInfo> {
         } catch (Throwable t) {
           // Suppress so we keep throwing the original exception
         }
-        if (globalFieldMapFile != null) { // delete if written here
-          try {
-            // Try not to leave global field map in
-            // the index:
-            directory.deleteFile(globalFieldMapFile);
-          } catch (Throwable t) {
-            // Suppress so we keep throwing the original exception
-          }
-        }
-        pendingMapVersion = -1;
       }
     }
   }
@@ -767,8 +733,6 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentInfo> {
   void updateGeneration(SegmentInfos other) {
     lastGeneration = other.lastGeneration;
     generation = other.generation;
-    lastGlobalFieldMapVersion = other.lastGlobalFieldMapVersion;
-    globalFieldMapVersion = other.globalFieldMapVersion;
   }

   final void rollbackCommit(Directory dir) throws IOException {
@@ -792,16 +756,6 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentInfo> {
         // in our caller
       }
       pendingSegnOutput = null;
-      if (pendingMapVersion != -1) {
-        try {
-          final String fieldMapName = getGlobalFieldNumberName(globalFieldMapVersion--);
-          dir.deleteFile(fieldMapName);
-        } catch (Throwable t) {
-          // Suppress so we keep throwing the original exception
-          // in our caller
-        }
-        pendingMapVersion = -1;
-      }
     }
   }

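Both removed blocks above use the same cleanup idiom: deletion failures during rollback are deliberately swallowed so that the exception which triggered the rollback keeps propagating. A standalone sketch of the idiom (helper class and method names are invented for illustration, not part of this patch):

    import org.apache.lucene.store.Directory;

    // Hypothetical helper illustrating the rollback idiom.
    public class RollbackUtil {
      /** Best-effort delete that never throws, so a pending exception in the caller survives. */
      public static void deleteQuietly(Directory dir, String fileName) {
        try {
          dir.deleteFile(fileName);
        } catch (Throwable t) {
          // Suppress so we keep throwing the original exception in our caller.
        }
      }
    }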
@@ -820,44 +774,6 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentInfo> {
       throw new IllegalStateException("prepareCommit was already called");
     write(dir, codec);
   }
-
-  private final long writeGlobalFieldMap(FieldNumberBiMap map, Directory dir, String name) throws IOException {
-    final IndexOutput output = dir.createOutput(name, IOContext.READONCE);
-    boolean success = false;
-    long version;
-    try {
-      version = map.write(output);
-      success = true;
-    } finally {
-      try {
-        output.close();
-      } catch (Throwable t) {
-        // throw orig excp
-      }
-      if (!success) {
-        try {
-          dir.deleteFile(name);
-        } catch (Throwable t) {
-          // throw orig excp
-        }
-      } else {
-        // we must sync here explicitly since during a commit
-        // IW will not sync the global field map.
-        dir.sync(Collections.singleton(name));
-      }
-    }
-    return version;
-  }
-
-  private void readGlobalFieldMap(FieldNumberBiMap map, Directory dir) throws IOException {
-    final String name = getGlobalFieldNumberName(lastGlobalFieldMapVersion);
-    final IndexInput input = dir.openInput(name, IOContext.READONCE);
-    try {
-      map.read(input);
-    } finally {
-      input.close();
-    }
-  }

   /** Returns all file names referenced by SegmentInfo
    * instances matching the provided Directory (ie files
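The deleted writeGlobalFieldMap shows Lucene's usual durability recipe: write, close, then delete the file on failure or explicitly sync it on success, so a partial file never survives a crash. A condensed sketch of the same shape (hypothetical helper, assuming the 4.0-era Directory API; not part of this patch):

    import java.io.IOException;
    import java.util.Collections;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.IOContext;
    import org.apache.lucene.store.IndexOutput;

    // Hypothetical helper mirroring the removed writeGlobalFieldMap.
    public class SafeWrite {
      public static void writeDurably(Directory dir, String name, byte[] payload) throws IOException {
        final IndexOutput output = dir.createOutput(name, IOContext.DEFAULT);
        boolean success = false;
        try {
          output.writeBytes(payload, payload.length);
          success = true;
        } finally {
          try {
            output.close();
          } catch (Throwable t) {
            // Suppress; keep throwing the original exception, if any.
          }
          if (!success) {
            try {
              dir.deleteFile(name); // never leave a partial file in the index
            } catch (Throwable t) {
              // Suppress; keep throwing the original exception.
            }
          } else {
            // Force the bytes to stable storage; no one else will sync this file.
            dir.sync(Collections.singleton(name));
          }
        }
      }
    }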
@@ -876,9 +792,6 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentInfo> {
         files.add(segmentFileName);
       }
     }
-    if (lastGlobalFieldMapVersion > 0) {
-      files.add(getGlobalFieldNumberName(lastGlobalFieldMapVersion));
-    }
     final int size = size();
     for(int i=0;i<size;i++) {
       final SegmentInfo info = info(i);
@@ -929,17 +842,6 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentInfo> {
     }

     lastGeneration = generation;
-    if (pendingMapVersion != -1) {
-      /*
-       * TODO is it possible that the commit does not succeed here? if another
-       * commit happens at the same time and we lost the race between the
-       * prepareCommit and finishCommit the latest version is already
-       * incremented.
-       */
-      globalFieldNumberMap.commitLastVersion(pendingMapVersion);
-      pendingMapVersion = -1;
-      lastGlobalFieldMapVersion = globalFieldMapVersion;
-    }

     try {
       IndexOutput genOutput = dir.createOutput(IndexFileNames.SEGMENTS_GEN, IOContext.READONCE);
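The block removed here was the second half of a two-phase commit: prepareCommit wrote the pending .fnx file, and finishCommit published its version (the TODO documents a suspected race between concurrent commits). With the file gone there is nothing left to publish. For reference, a minimal sketch of the two-phase protocol from the caller's side (hypothetical helper, assuming the 4.0-era IndexWriter API):

    import java.io.IOException;
    import org.apache.lucene.index.IndexWriter;

    // Hypothetical helper, not part of the patch.
    public class TwoPhaseCommitDemo {
      public static void commitSafely(IndexWriter writer) throws IOException {
        boolean success = false;
        try {
          writer.prepareCommit(); // phase 1: write and sync all pending files
          writer.commit();        // phase 2: atomically publish the new segments_N
          success = true;
        } finally {
          if (!success) {
            writer.rollback();    // drop everything since the last successful commit
          }
        }
      }
    }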
@@ -1003,7 +905,6 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentInfo> {
   void replace(SegmentInfos other) {
     rollbackSegmentInfos(other.asList());
     lastGeneration = other.lastGeneration;
-    lastGlobalFieldMapVersion = other.lastGlobalFieldMapVersion;
     format = other.format;
   }

@@ -1027,47 +928,24 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentInfo> {
    * Loads or returns the already loaded the global field number map for this {@link SegmentInfos}.
    * If this {@link SegmentInfos} has no global field number map the returned instance is empty
    */
-  FieldNumberBiMap getOrLoadGlobalFieldNumberMap(Directory dir) throws IOException {
+  FieldNumberBiMap getOrLoadGlobalFieldNumberMap() throws IOException {
     if (globalFieldNumberMap != null) {
       return globalFieldNumberMap;
     }
     final FieldNumberBiMap map = new FieldNumberBiMap();

-    if (lastGlobalFieldMapVersion > 0) {
-      // if we don't have a global map or this is a SI from a earlier version we just return the empty map;
-      readGlobalFieldMap(map, dir);
-    }
     if (size() > 0) {
-      if (format > DefaultSegmentInfosWriter.FORMAT_4_0) {
-        assert lastGlobalFieldMapVersion == 0;
-        // build the map up if we open a pre 4.0 index
-        for (SegmentInfo info : this) {
-          final FieldInfos segFieldInfos = info.getFieldInfos();
-          for (FieldInfo fi : segFieldInfos) {
-            map.addOrGet(fi.name, fi.number);
-          }
-        }
-      }
+      // build the map up
+      for (SegmentInfo info : this) {
+        final FieldInfos segFieldInfos = info.getFieldInfos();
+        for (FieldInfo fi : segFieldInfos) {
+          map.addOrGet(fi.name, fi.number);
+        }
+      }
     }
     return globalFieldNumberMap = map;
   }

-  /**
-   * Called by {@link SegmentInfosReader} when reading the global field map version
-   */
-  public void setGlobalFieldMapVersion(long version) {
-    lastGlobalFieldMapVersion = globalFieldMapVersion = version;
-  }
-
-  public long getGlobalFieldMapVersion() {
-    return globalFieldMapVersion;
-  }
-
-  // for testing
-  long getLastGlobalFieldMapVersion() {
-    return lastGlobalFieldMapVersion;
-  }
-
   /** applies all changes caused by committing a merge to this SegmentInfos */
   void applyMergeChanges(MergePolicy.OneMerge merge, boolean dropSegment) {
     final Set<SegmentInfo> mergedAway = new HashSet<SegmentInfo>(merge.segments);
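This rewrite is the heart of the commit: instead of deserializing the name/number mapping from an _X.fnx file, the map is now always rebuilt by folding every segment's FieldInfos through FieldNumberBiMap.addOrGet. A simplified stand-in showing the invariant addOrGet maintains (this is not Lucene's FieldNumberBiMap; the collision strategy below is a naive upward probe, invented for illustration):

    import java.util.HashMap;
    import java.util.Map;

    // Simplified stand-in for FieldInfos.FieldNumberBiMap: keeps name<->number
    // consistent while preferring a field's previously assigned number.
    public class SimpleFieldNumberBiMap {
      private final Map<Integer, String> numberToName = new HashMap<Integer, String>();
      private final Map<String, Integer> nameToNumber = new HashMap<String, Integer>();

      /** Returns the global number for fieldName, trying preferredNumber first. */
      public synchronized int addOrGet(String fieldName, int preferredNumber) {
        Integer existing = nameToNumber.get(fieldName);
        if (existing != null) {
          return existing; // already registered: keep the established global number
        }
        int number = preferredNumber;
        while (numberToName.containsKey(number)) {
          number++; // preferred number belongs to another field: probe upwards
        }
        numberToName.put(number, fieldName);
        nameToNumber.put(fieldName, number);
        return number;
      }
    }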
@@ -49,9 +49,6 @@ public class DefaultSegmentInfosReader extends SegmentInfosReader {
     infos.version = input.readLong(); // read version
     infos.counter = input.readInt(); // read counter
     final int format = infos.getFormat();
-    if (format <= DefaultSegmentInfosWriter.FORMAT_4_0) {
-      infos.setGlobalFieldMapVersion(input.readLong());
-    }
     for (int i = input.readInt(); i > 0; i--) { // read segmentInfos
       SegmentInfo si = readSegmentInfo(directory, format, input);
       if (si.getVersion() == null) {
@@ -68,7 +68,6 @@ public class DefaultSegmentInfosWriter extends SegmentInfosWriter {
     out.writeString(codecID); // write codecID
     out.writeLong(infos.version);
     out.writeInt(infos.counter); // write counter
-    out.writeLong(infos.getGlobalFieldMapVersion());
     out.writeInt(infos.size()); // write infos
     for (SegmentInfo si : infos) {
       writeInfo(out, si);
@@ -56,10 +56,6 @@ public class SimpleTextSegmentInfosReader extends SegmentInfosReader {
     assert StringHelper.startsWith(scratch, COUNTER);
     infos.counter = Integer.parseInt(readString(COUNTER.length, scratch));

-    SimpleTextUtil.readLine(input, scratch);
-    assert StringHelper.startsWith(scratch, FNX_VERSION);
-    infos.setGlobalFieldMapVersion(Long.parseLong(readString(FNX_VERSION.length, scratch)));
-
     SimpleTextUtil.readLine(input, scratch);
     assert StringHelper.startsWith(scratch, NUM_USERDATA);
     int numUserData = Integer.parseInt(readString(NUM_USERDATA.length, scratch));
@@ -43,7 +43,6 @@ public class SimpleTextSegmentInfosWriter extends SegmentInfosWriter {

   final static BytesRef VERSION = new BytesRef("version ");
   final static BytesRef COUNTER = new BytesRef("counter ");
-  final static BytesRef FNX_VERSION = new BytesRef("global field map version ");
   final static BytesRef NUM_USERDATA = new BytesRef("user data entries ");
   final static BytesRef USERDATA_KEY = new BytesRef(" key ");
   final static BytesRef USERDATA_VALUE = new BytesRef(" value ");
@@ -87,11 +86,6 @@ public class SimpleTextSegmentInfosWriter extends SegmentInfosWriter {
     SimpleTextUtil.write(out, COUNTER);
     SimpleTextUtil.write(out, Integer.toString(infos.counter), scratch);
     SimpleTextUtil.writeNewline(out);

-    // global field map version
-    SimpleTextUtil.write(out, FNX_VERSION);
-    SimpleTextUtil.write(out, Long.toString(infos.getGlobalFieldMapVersion()), scratch);
-    SimpleTextUtil.writeNewline(out);
-
     // user data
     int numUserDataEntries = infos.getUserData() == null ? 0 : infos.getUserData().size();
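SimpleText keeps the segments file human-readable, one key/value line per write/writeNewline pair above, so this change simply drops one header line. Roughly, with the key strings taken from the BytesRef constants and the values invented for illustration:

    before the patch:
      version 1322787726325
      counter 3
      global field map version 2
      user data entries 0

    after the patch:
      version 1322787726325
      counter 3
      user data entries 0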
@@ -1082,7 +1082,7 @@ public class TestAddIndexes extends LuceneTestCase {
     w3.close();
     // we should now see segments_X,
     // segments.gen,_Y.cfs,_Y.cfe, _Z.fnx
-    assertEquals("Only one compound segment should exist, but got: " + Arrays.toString(dir.listAll()), 5, dir.listAll().length);
+    assertEquals("Only one compound segment should exist, but got: " + Arrays.toString(dir.listAll()), 4, dir.listAll().length);
     dir.close();
   }

@@ -583,8 +583,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
                                        "_0_1.del",
                                        "_0_1.s" + contentFieldIndex,
                                        "segments_2",
-                                       "segments.gen",
-                                       "_1.fnx"};
+                                       "segments.gen"};

     String[] actual = dir.listAll();
     Arrays.sort(expected);
@@ -179,7 +179,6 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
       FieldInfos fis1 = sis.info(0).getFieldInfos();
       assertEquals("f1", fis1.fieldInfo(0).name);
       assertEquals("f2", fis1.fieldInfo(1).name);
-      assertTrue(dir.fileExists("_1.fnx"));
     }


@@ -203,8 +202,6 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
       assertEquals("f1", fis2.fieldInfo(0).name);
       assertNull(fis2.fieldInfo(1));
       assertEquals("f3", fis2.fieldInfo(2).name);
-      assertFalse(dir.fileExists("_1.fnx"));
-      assertTrue(dir.fileExists("_2.fnx"));
     }

     {
@@ -232,9 +229,6 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
       assertEquals("f1", fis3.fieldInfo(0).name);
       assertEquals("f2", fis3.fieldInfo(1).name);
       assertEquals("f3", fis3.fieldInfo(2).name);
-      assertFalse(dir.fileExists("_1.fnx"));
-      assertTrue(dir.fileExists("_2.fnx"));
-      assertFalse(dir.fileExists("_3.fnx"));
     }

     {
@@ -262,9 +256,6 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
       assertEquals("f1", fis1.fieldInfo(0).name);
       assertEquals("f2", fis1.fieldInfo(1).name);
       assertEquals("f3", fis1.fieldInfo(2).name);
-      assertFalse(dir.fileExists("_1.fnx"));
-      assertTrue(dir.fileExists("_2.fnx"));
-      assertFalse(dir.fileExists("_3.fnx"));
       dir.close();
     }
   }
@@ -1,529 +0,0 @@
-package org.apache.lucene.index;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import java.io.File;
-import java.io.IOException;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.SortedMap;
-import java.util.TreeMap;
-
-import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.document.BinaryField;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.FieldType;
-import org.apache.lucene.document.TextField;
-import org.apache.lucene.index.FieldInfos.FieldNumberBiMap;
-import org.apache.lucene.index.codecs.DefaultSegmentInfosWriter;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.LockObtainFailedException;
-import org.apache.lucene.util.FailOnNonBulkMergesInfoStream;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util._TestUtil;
-
-public class TestGlobalFieldNumbers extends LuceneTestCase {
-
-  public void testGlobalFieldNumberFiles() throws IOException {
-    int num = atLeast(3);
-    for (int i = 0; i < num; i++) {
-      Directory dir = newDirectory();
-      {
-        IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
-            new MockAnalyzer(random));
-        IndexWriter writer = new IndexWriter(dir, config);
-        Document d = new Document();
-        d.add(new Field("f1", "d1 first field", TextField.TYPE_STORED));
-        d.add(new Field("f2", "d1 second field", TextField.TYPE_STORED));
-        writer.addDocument(d);
-        for (String string : writer.getIndexFileNames()) {
-          assertFalse(string.endsWith(".fnx"));
-        }
-        writer.commit();
-        Collection<String> files = writer.getIndexFileNames();
-        files.remove("_1.fnx");
-        for (String string : files) {
-          assertFalse(string.endsWith(".fnx"));
-        }
-
-        assertFNXFiles(dir, "_1.fnx");
-        d = new Document();
-        d.add(new Field("f1", "d2 first field", TextField.TYPE_STORED));
-        d.add(new BinaryField("f3", new byte[] { 1, 2, 3 }));
-        writer.addDocument(d);
-        writer.commit();
-        files = writer.getIndexFileNames();
-        files.remove("_2.fnx");
-        for (String string : files) {
-          assertFalse(string.endsWith(".fnx"));
-        }
-        writer.close();
-        assertFNXFiles(dir, "_2.fnx");
-      }
-
-      {
-        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer(random)));
-        Document d = new Document();
-        d.add(new Field("f1", "d3 first field", TextField.TYPE_STORED));
-        d.add(new Field("f2", "d3 second field", TextField.TYPE_STORED));
-        d.add(new BinaryField("f3", new byte[] { 1, 2, 3, 4, 5 }));
-        writer.addDocument(d);
-        writer.close();
-        Collection<String> files = writer.getIndexFileNames();
-        files.remove("_2.fnx");
-        for (String string : files) {
-          assertFalse(string.endsWith(".fnx"));
-        }
-
-        assertFNXFiles(dir, "_2.fnx");
-      }
-
-      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer(random)).setInfoStream(new FailOnNonBulkMergesInfoStream()));
-      writer.forceMerge(1);
-      writer.close();
-      assertFNXFiles(dir, "_2.fnx");
-
-      dir.close();
-    }
-  }
-
-  public void testIndexReaderCommit() throws IOException {
-    int num = atLeast(3);
-    for (int i = 0; i < num; i++) {
-      Directory dir = newDirectory();
-      {
-        IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
-            new MockAnalyzer(random));
-        IndexWriter writer = new IndexWriter(dir, config);
-        Document d = new Document();
-        d.add(new Field("f1", "d1 first field", TextField.TYPE_STORED));
-        d.add(new Field("f2", "d1 second field", TextField.TYPE_STORED));
-        writer.addDocument(d);
-        writer.commit();
-        assertFNXFiles(dir, "_1.fnx");
-        d = new Document();
-        d.add(new Field("f1", "d2 first field", TextField.TYPE_STORED));
-        d.add(new BinaryField("f3", new byte[] { 1, 2, 3 }));
-        writer.addDocument(d);
-        writer.commit();
-        writer.close();
-        assertFNXFiles(dir, "_2.fnx");
-      }
-      IndexReader reader = IndexReader.open(dir, false);
-      reader.deleteDocument(0);
-      reader.commit();
-      reader.close();
-      // make sure this reader can not modify the field map
-      assertFNXFiles(dir, "_2.fnx");
-
-      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer(random)).setInfoStream(new FailOnNonBulkMergesInfoStream()));
-      writer.forceMerge(1);
-      writer.close();
-      assertFNXFiles(dir, "_2.fnx");
-
-      dir.close();
-    }
-  }
-
-  public void testGlobalFieldNumberFilesAcrossCommits() throws IOException {
-    int num = atLeast(3);
-    for (int i = 0; i < num; i++) {
-      Directory dir = newDirectory();
-      {
-        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(
-            new KeepAllDeletionPolicy()));
-        Document d = new Document();
-        d.add(new Field("f1", "d1 first field", TextField.TYPE_STORED));
-        d.add(new Field("f2", "d1 second field", TextField.TYPE_STORED));
-        writer.addDocument(d);
-        writer.commit();
-        assertFNXFiles(dir, "_1.fnx");
-        d = new Document();
-        d.add(new Field("f1", "d2 first field", TextField.TYPE_STORED));
-        d.add(new BinaryField("f3", new byte[] { 1, 2, 3 }));
-        writer.addDocument(d);
-        writer.commit();
-        writer.commit();
-        writer.commit();
-        assertFNXFiles(dir, "_1.fnx", "_2.fnx");
-        writer.close();
-        assertFNXFiles(dir, "_1.fnx", "_2.fnx");
-      }
-
-      {
-        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer(random)));
-        Document d = new Document();
-        d.add(new Field("f1", "d3 first field", TextField.TYPE_STORED));
-        d.add(new Field("f2", "d3 second field", TextField.TYPE_STORED));
-        d.add(new BinaryField("f3", new byte[] { 1, 2, 3, 4, 5 }));
-        writer.addDocument(d);
-        writer.close();
-        assertFNXFiles(dir, "_2.fnx");
-      }
-      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer(random)).setInfoStream(new FailOnNonBulkMergesInfoStream()));
-      writer.forceMerge(1);
-      writer.close();
-      assertFNXFiles(dir, "_2.fnx");
-      dir.close();
-    }
-  }
-
-  public void testGlobalFieldNumberOnOldCommit() throws IOException {
-    int num = atLeast(3);
-    for (int i = 0; i < num; i++) {
-      Directory dir = newDirectory();
-      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(
-          new KeepAllDeletionPolicy()));
-      Document d = new Document();
-      d.add(new Field("f1", "d1 first field", TextField.TYPE_STORED));
-      d.add(new Field("f2", "d1 second field", TextField.TYPE_STORED));
-      writer.addDocument(d);
-      writer.commit();
-      assertFNXFiles(dir, "_1.fnx");
-      d = new Document();
-      d.add(new Field("f1", "d2 first field", TextField.TYPE_STORED));
-      d.add(new BinaryField("f3", new byte[] { 1, 2, 3 }));
-      writer.addDocument(d);
-      assertFNXFiles(dir, "_1.fnx");
-      writer.close();
-      assertFNXFiles(dir, "_1.fnx", "_2.fnx");
-      // open first commit
-      List<IndexCommit> listCommits = IndexReader.listCommits(dir);
-      assertEquals(2, listCommits.size());
-      writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
-          new MockAnalyzer(random)).setIndexDeletionPolicy(
-          new KeepAllDeletionPolicy()).setIndexCommit(listCommits.get(0)));
-
-      d = new Document();
-      d.add(new Field("f1", "d2 first field", TextField.TYPE_STORED));
-      d.add(new BinaryField("f3", new byte[] { 1, 2, 3 }));
-      writer.addDocument(d);
-      writer.commit();
-      // now we have 3 files since f3 is not present in the first commit
-      assertFNXFiles(dir, "_1.fnx", "_2.fnx", "_3.fnx");
-      writer.close();
-      assertFNXFiles(dir, "_1.fnx", "_2.fnx", "_3.fnx");
-
-      writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
-          new MockAnalyzer(random)));
-      writer.commit();
-      listCommits = IndexReader.listCommits(dir);
-      assertEquals(1, listCommits.size());
-      assertFNXFiles(dir, "_3.fnx");
-      writer.close();
-      assertFNXFiles(dir, "_3.fnx");
-      dir.close();
-    }
-  }
-
-  private final Directory buildRandomIndex(String[] fieldNames, int numDocs,
-      IndexWriterConfig conf) throws CorruptIndexException,
-      LockObtainFailedException, IOException {
-    Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, conf);
-    for (int i = 0; i < numDocs; i++) {
-      Document doc = new Document();
-      final int numFields = 1 + random.nextInt(fieldNames.length);
-      for (int j = 0; j < numFields; j++) {
-        FieldType customType = new FieldType();
-        customType.setIndexed(true);
-        customType.setTokenized(random.nextBoolean());
-        customType.setOmitNorms(random.nextBoolean());
-        doc.add(newField(fieldNames[random.nextInt(fieldNames.length)],
-            _TestUtil.randomRealisticUnicodeString(random),
-            customType));
-
-      }
-      writer.addDocument(doc);
-      if (random.nextInt(20) == 0) {
-        writer.commit();
-      }
-    }
-    writer.close();
-    return dir;
-  }
-
-  public void testForceMerge() throws IOException {
-    for (int i = 0; i < 2*RANDOM_MULTIPLIER; i++) {
-      Set<String> fieldNames = new HashSet<String>();
-      final int numFields = 2 + (TEST_NIGHTLY ? random.nextInt(200) : random.nextInt(20));
-      for (int j = 0; j < numFields; j++) {
-        fieldNames.add("field_" + j);
-      }
-      Directory base = buildRandomIndex(fieldNames.toArray(new String[0]),
-          20 + random.nextInt(100),
-          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
-      IndexWriter writer = new IndexWriter(base, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer(random)));
-      FieldNumberBiMap globalFieldMap = writer.segmentInfos
-          .getOrLoadGlobalFieldNumberMap(base);
-      Set<Entry<String, Integer>> entries = globalFieldMap.entries();
-      writer.forceMerge(1);
-      writer.commit();
-      writer.close();
-      Set<Entry<String, Integer>> afterOptmize = globalFieldMap.entries();
-      assertEquals(entries, afterOptmize);
-      base.close();
-    }
-  }
-
-  public void testAddIndexesStableFieldNumbers() throws IOException {
-    for (int i = 0; i < 2*RANDOM_MULTIPLIER; i++) {
-      Set<String> fieldNames = new HashSet<String>();
-      final int numFields = 2 + (TEST_NIGHTLY ? random.nextInt(50) : random.nextInt(10));
-      for (int j = 0; j < numFields; j++) {
-        fieldNames.add("field_" + j);
-      }
-
-      Directory base = newDirectory();
-      IndexWriter writer = new IndexWriter(base, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer(random)));
-      Document doc = new Document();
-      for (String string : fieldNames) {
-        FieldType customType = new FieldType();
-        customType.setIndexed(true);
-        customType.setTokenized(random.nextBoolean());
-        customType.setOmitNorms(random.nextBoolean());
-        doc.add(newField(string,
-            _TestUtil.randomRealisticUnicodeString(random),
-            customType));
-
-      }
-      writer.addDocument(doc);
-      writer.commit();
-      FieldNumberBiMap globalFieldMap = writer.segmentInfos
-          .getOrLoadGlobalFieldNumberMap(base);
-      final Set<Entry<String, Integer>> entries = globalFieldMap.entries();
-      assertEquals(entries.size(), fieldNames.size());
-      for (Entry<String, Integer> entry : entries) {
-        // all fields are in this fieldMap
-        assertTrue(fieldNames.contains(entry.getKey()));
-      }
-      writer.close();
-
-      int numIndexes = 1 + random.nextInt(10);
-      for (int j = 0; j < numIndexes; j++) {
-        Directory toAdd = buildRandomIndex(fieldNames.toArray(new String[0]),
-            1 + random.nextInt(50),
-            newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
-        IndexWriter w = new IndexWriter(base, newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer(random)));
-        if (random.nextBoolean()) {
-          IndexReader open = IndexReader.open(toAdd);
-          w.addIndexes(open);
-          open.close();
-        } else {
-          w.addIndexes(toAdd);
-        }
-
-        w.close();
-        FieldNumberBiMap map = w.segmentInfos
-            .getOrLoadGlobalFieldNumberMap(toAdd);
-        assertEquals(entries, map.entries());
-        toAdd.close();
-      }
-      IndexWriter w = new IndexWriter(base, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
-          new LogByteSizeMergePolicy()));
-      w.forceMerge(1);
-      w.close();
-      SegmentInfos sis = new SegmentInfos();
-      sis.read(base);
-      SegmentInfo segmentInfo = sis.info(sis.size() - 1);// last segment must
-                                                         // have all fields with
-                                                         // consistent numbers
-      FieldInfos fieldInfos = segmentInfo.getFieldInfos();
-      assertEquals(fieldInfos.size(), entries.size());
-      for (Entry<String, Integer> entry : entries) {
-        assertEquals(entry.getValue(),
-            Integer.valueOf(fieldInfos.fieldNumber(entry.getKey())));
-        assertEquals(entry.getKey(), fieldInfos.fieldName(entry.getValue()));
-      }
-      base.close();
-    }
-  }
-
-  final String[] oldNames = { "30.cfs", "30.nocfs", "31.cfs", "31.nocfs", };
-
-  public void testAddOldIndex() throws IOException {
-    int i = random.nextInt(oldNames.length);
-    File oldIndxeDir = _TestUtil.getTempDir(oldNames[i]);
-    Directory dir = null;
-    try {
-      _TestUtil
-          .unzip(getDataFile("index." + oldNames[i] + ".zip"), oldIndxeDir);
-      dir = newFSDirectory(oldIndxeDir);
-      SegmentInfos infos = new SegmentInfos();
-      infos.read(dir);
-      SortedMap<Integer, String> sortedMap = new TreeMap<Integer, String>();
-
-      FieldNumberBiMap biMap = new FieldNumberBiMap();
-      int maxFieldNum = Integer.MIN_VALUE;
-      for (SegmentInfo segmentInfo : infos) {
-        for (FieldInfo fieldInfo : segmentInfo.getFieldInfos()) {
-          int globNumber = biMap.addOrGet(fieldInfo.name, fieldInfo.number);
-          maxFieldNum = Math.max(maxFieldNum, globNumber);
-          sortedMap.put(globNumber, fieldInfo.name);
-        }
-      }
-      Directory base = newDirectory();
-      IndexWriter writer = new IndexWriter(base, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
-          NoMergePolicy.NO_COMPOUND_FILES));
-
-      SortedMap<Integer, String> copySortedMap = new TreeMap<Integer, String>(
-          sortedMap);
-      while (!sortedMap.isEmpty()) { // add every field at least once
-        Document doc = new Document();
-        int nextField = random.nextInt(maxFieldNum + 1);
-        sortedMap.remove(nextField);
-
-        String name = copySortedMap.get(nextField);
-        assertNotNull(name);
-
-        FieldType customType = new FieldType();
-        customType.setIndexed(true);
-        customType.setTokenized(random.nextBoolean());
-        customType.setOmitNorms(random.nextBoolean());
-        doc.add(newField(name, _TestUtil.randomRealisticUnicodeString(random),
-            customType));
-        writer.addDocument(doc);
-        if (random.nextInt(10) == 0) {
-          writer.commit();
-        }
-      }
-      Set<Entry<String, Integer>> expectedEntries = writer.segmentInfos
-          .getOrLoadGlobalFieldNumberMap(base).entries();
-      writer.addIndexes(dir); // add the old index
-      writer.close();
-
-      writer = new IndexWriter(base, newIndexWriterConfig(TEST_VERSION_CURRENT,
-          new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
-      writer.commit(); // make sure the old index is the latest segment
-      writer.close();
-
-      // we don't merge here since we use NoMergePolicy
-      SegmentInfos sis = new SegmentInfos();
-      sis.read(base);
-      // check that the latest global field numbers are consistent and carried
-      // over from the 4.0 index
-      FieldNumberBiMap actualGlobalMap = sis
-          .getOrLoadGlobalFieldNumberMap(base);
-      assertEquals(expectedEntries, actualGlobalMap.entries());
-      base.close();
-    } finally {
-      if (dir != null)
-        dir.close();
-      _TestUtil.rmDir(oldIndxeDir);
-    }
-  }
-
-  public void testFilesOnOldIndex() throws IOException {
-    int i = random.nextInt(oldNames.length);
-    File oldIndxeDir = _TestUtil.getTempDir(oldNames[i]);
-    Directory dir = null;
-
-    MergePolicy policy = random.nextBoolean() ? NoMergePolicy.COMPOUND_FILES : NoMergePolicy.NO_COMPOUND_FILES;
-    try {
-      _TestUtil
-          .unzip(getDataFile("index." + oldNames[i] + ".zip"), oldIndxeDir);
-      dir = newFSDirectory(oldIndxeDir);
-      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(policy));
-      SegmentInfos segmentInfos = writer.segmentInfos;
-      assertTrue(DefaultSegmentInfosWriter.FORMAT_4_0 < segmentInfos.getFormat());
-      assertEquals(0, segmentInfos.getGlobalFieldMapVersion());
-      for (String string : writer.getIndexFileNames()) {
-        assertFalse(string.endsWith(".fnx"));
-      }
-      writer.commit();
-
-      assertTrue(DefaultSegmentInfosWriter.FORMAT_4_0 < segmentInfos.getFormat());
-      assertEquals(0, segmentInfos.getGlobalFieldMapVersion());
-      Collection<String> files = writer.getIndexFileNames();
-      for (String string : files) {
-        assertFalse(string.endsWith(".fnx"));
-      }
-
-      Document d = new Document();
-      d.add(new Field("f1", "d1 first field", TextField.TYPE_STORED));
-      writer.addDocument(d);
-      writer.prepareCommit();
-      // the fnx file should still be under control of the SIS
-      assertTrue(DefaultSegmentInfosWriter.FORMAT_4_0 < segmentInfos.getFormat());
-      assertEquals(0, segmentInfos.getLastGlobalFieldMapVersion());
-      assertEquals(1, segmentInfos.getGlobalFieldMapVersion());
-      files = writer.getIndexFileNames();
-      for (String string : files) {
-        assertFalse(string.endsWith(".fnx"));
-      }
-
-      writer.commit();
-
-      // now we should see the fnx file even if this is a 3.x segment
-      assertTrue(DefaultSegmentInfosWriter.FORMAT_4_0 < segmentInfos.getFormat());
-      assertEquals(1, segmentInfos.getGlobalFieldMapVersion());
-      assertEquals(1, segmentInfos.getLastGlobalFieldMapVersion());
-      files = writer.getIndexFileNames();
-      assertTrue(files.remove("_1.fnx"));
-      for (String string : files) {
-        assertFalse(string.endsWith(".fnx"));
-      }
-      writer.close();
-    } finally {
-      if (dir != null)
-        dir.close();
-      _TestUtil.rmDir(oldIndxeDir);
-    }
-  }
-
-  class KeepAllDeletionPolicy implements IndexDeletionPolicy {
-    public void onInit(List<? extends IndexCommit> commits) throws IOException {
-    }
-
-    public void onCommit(List<? extends IndexCommit> commits)
-        throws IOException {
-    }
-  }
-
-  public static void assertFNXFiles(Directory dir, String... expectedFnxFiles)
-      throws IOException {
-    String[] listAll = dir.listAll();
-    Set<String> fnxFiles = new HashSet<String>();
-    for (String string : listAll) {
-      if (string.endsWith(".fnx")) {
-        fnxFiles.add(string);
-      }
-    }
-    assertEquals("" + fnxFiles, expectedFnxFiles.length, fnxFiles.size());
-    for (String string : expectedFnxFiles) {
-      assertTrue(" missing fnx file: " + string, fnxFiles.contains(string));
-    }
-  }
-
-}
@@ -916,10 +916,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
       } catch (RuntimeException re) {
        // Expected
      }
-      assertTrue(dir.fileExists("_1.fnx"));
      assertTrue(failure.failOnCommit && failure.failOnDeleteFile);
      w.rollback();
-      assertFalse(dir.fileExists("_1.fnx"));
      assertEquals(0, dir.listAll().length);
      dir.close();
    }