parent f71beacbdb
commit 50b9a8d6f2
@@ -20,6 +20,7 @@
 package org.elasticsearch.common.lucene;
 
+import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import org.apache.lucene.analysis.core.KeywordAnalyzer;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.codecs.CodecUtil;
@@ -120,6 +121,7 @@ public class Lucene {
      */
     public static Iterable<String> files(SegmentInfos infos) throws IOException {
         final List<Collection<String>> list = new ArrayList<>();
+        list.add(Collections.singleton(infos.getSegmentsFileName()));
         for (SegmentCommitInfo info : infos) {
             list.add(info.files());
         }
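The hunk above ends before the method's return statement, but the Iterables import added in this commit suggests the method finishes by flattening the collected per-segment file sets into one Iterable. For orientation, here is a hedged caller-side sketch; Lucene.files and Lucene.readSegmentInfos appear in this diff, while the class and method names around them are hypothetical, not part of the commit. The remaining hunks below are from the accompanying LuceneTest class.

import java.io.IOException;

import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.Directory;
import org.elasticsearch.common.lucene.Lucene;

// Hypothetical usage sketch: list every file referenced by the most
// recent commit point in a directory. With the line added above,
// Lucene.files(...) also includes the segments_N generation file.
public class CommitFileLister {
    public static void printCommitFiles(Directory dir) throws IOException {
        SegmentInfos infos = Lucene.readSegmentInfos(dir); // ES helper, also used in the test below
        for (String file : Lucene.files(infos)) {
            System.out.println(file); // e.g. segments_1, _0.si, _0.cfs, _0.cfe
        }
    }
}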
@@ -17,6 +17,7 @@
  * under the License.
  */
 package org.elasticsearch.common.lucene;
+import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.TextField;
@@ -29,6 +30,9 @@ import org.elasticsearch.test.ElasticsearchLuceneTestCase;
 import org.junit.Test;
 
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.Set;
 
 /**
  *
  */
@@ -100,4 +104,53 @@ public class LuceneTest extends ElasticsearchLuceneTestCase {
         dir.close();
 
     }
+
+    public void testFiles() throws IOException {
+        MockDirectoryWrapper dir = newMockDirectory();
+        dir.setEnableVirusScanner(false);
+        IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
+        iwc.setMergePolicy(NoMergePolicy.INSTANCE);
+        iwc.setMaxBufferedDocs(2);
+        iwc.setUseCompoundFile(true);
+        IndexWriter writer = new IndexWriter(dir, iwc);
+        Document doc = new Document();
+        doc.add(new TextField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
+        writer.addDocument(doc);
+        writer.commit();
+        Set<String> files = new HashSet<>();
+        for (String f : Lucene.files(Lucene.readSegmentInfos(dir))) {
+            files.add(f);
+        }
+
+        assertTrue(files.toString(), files.contains("segments_1"));
+        assertTrue(files.toString(), files.contains("_0.cfs"));
+        assertTrue(files.toString(), files.contains("_0.cfe"));
+        assertTrue(files.toString(), files.contains("_0.si"));
+
+        doc = new Document();
+        doc.add(new TextField("id", "2", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
+        writer.addDocument(doc);
+
+        doc = new Document();
+        doc.add(new TextField("id", "3", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
+        writer.addDocument(doc);
+        writer.commit();
+
+        files.clear();
+        for (String f : Lucene.files(Lucene.readSegmentInfos(dir))) {
+            files.add(f);
+        }
+        assertFalse(files.toString(), files.contains("segments_1"));
+        assertTrue(files.toString(), files.contains("segments_2"));
+        assertTrue(files.toString(), files.contains("_0.cfs"));
+        assertTrue(files.toString(), files.contains("_0.cfe"));
+        assertTrue(files.toString(), files.contains("_0.si"));
+
+        assertTrue(files.toString(), files.contains("_1.cfs"));
+        assertTrue(files.toString(), files.contains("_1.cfe"));
+        assertTrue(files.toString(), files.contains("_1.si"));
+        writer.close();
+        dir.close();
+
+    }
 }
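A note on the asserted file names (standard Lucene on-disk layout, not specific to this change): with compound files enabled, each segment _N is packed into _N.cfs (compound data) and _N.cfe (compound-file entry table), while _N.si carries the segment's metadata. Every commit writes a new segments_K generation file and removes the previous one, which is why the test expects segments_1 to be gone and segments_2 to be present after the second commit.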