[TEST] Use NoMergePolicy in EngineTest#testSegments

These tests rely on there being no merges, but since the MergePolicy is now
randomized this can break the tests.
This commit is contained in:
Simon Willnauer 2015-06-13 21:36:08 +02:00
parent 58ccb39dee
commit 0ac09fde95
2 changed files with 112 additions and 101 deletions

View File

@ -262,6 +262,9 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test @Test
public void testSegments() throws Exception { public void testSegments() throws Exception {
IndexSettingsService indexSettingsService = new IndexSettingsService(shardId.index(), Settings.builder().put(defaultSettings).put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
try (Store store = createStore();
Engine engine = createEngine(indexSettingsService, store, createTempDir(), new MergeSchedulerConfig(defaultSettings), NoMergePolicy.INSTANCE)) {
List<Segment> segments = engine.segments(false); List<Segment> segments = engine.segments(false);
assertThat(segments.isEmpty(), equalTo(true)); assertThat(segments.isEmpty(), equalTo(true));
assertThat(engine.segmentsStats().getCount(), equalTo(0l)); assertThat(engine.segmentsStats().getCount(), equalTo(0l));
@ -378,6 +381,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
assertThat(segments.get(2).getDeletedDocs(), equalTo(0)); assertThat(segments.get(2).getDeletedDocs(), equalTo(0));
assertThat(segments.get(2).isCompound(), equalTo(true)); assertThat(segments.get(2).isCompound(), equalTo(true));
} }
}
public void testVerboseSegments() throws Exception { public void testVerboseSegments() throws Exception {
List<Segment> segments = engine.segments(true); List<Segment> segments = engine.segments(true);

View File

@ -208,18 +208,22 @@ public class ShadowEngineTests extends ElasticsearchTestCase {
} }
protected ShadowEngine createShadowEngine(IndexSettingsService indexSettingsService, Store store) { protected ShadowEngine createShadowEngine(IndexSettingsService indexSettingsService, Store store) {
return new ShadowEngine(config(indexSettingsService, store, null, new MergeSchedulerConfig(indexSettingsService.indexSettings()))); return new ShadowEngine(config(indexSettingsService, store, null, new MergeSchedulerConfig(indexSettingsService.indexSettings()), null));
} }
protected InternalEngine createInternalEngine(IndexSettingsService indexSettingsService, Store store, Path translogPath) { protected InternalEngine createInternalEngine(IndexSettingsService indexSettingsService, Store store, Path translogPath) {
return new InternalEngine(config(indexSettingsService, store, translogPath, new MergeSchedulerConfig(indexSettingsService.indexSettings())), true); return createInternalEngine(indexSettingsService, store, translogPath, newMergePolicy());
} }
public EngineConfig config(IndexSettingsService indexSettingsService, Store store, Path translogPath, MergeSchedulerConfig mergeSchedulerConfig) { protected InternalEngine createInternalEngine(IndexSettingsService indexSettingsService, Store store, Path translogPath, MergePolicy mergePolicy) {
return new InternalEngine(config(indexSettingsService, store, translogPath, new MergeSchedulerConfig(indexSettingsService.indexSettings()), mergePolicy), true);
}
public EngineConfig config(IndexSettingsService indexSettingsService, Store store, Path translogPath, MergeSchedulerConfig mergeSchedulerConfig, MergePolicy mergePolicy) {
IndexWriterConfig iwc = newIndexWriterConfig(); IndexWriterConfig iwc = newIndexWriterConfig();
TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, indexSettingsService.getSettings(), Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, threadPool); TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, indexSettingsService.getSettings(), Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, threadPool);
EngineConfig config = new EngineConfig(shardId, threadPool, new ShardIndexingService(shardId, EMPTY_SETTINGS, new ShardSlowLogIndexingService(shardId, EMPTY_SETTINGS, indexSettingsService)), indexSettingsService EngineConfig config = new EngineConfig(shardId, threadPool, new ShardIndexingService(shardId, EMPTY_SETTINGS, new ShardSlowLogIndexingService(shardId, EMPTY_SETTINGS, indexSettingsService)), indexSettingsService
, null, store, createSnapshotDeletionPolicy(),newMergePolicy(), mergeSchedulerConfig, , null, store, createSnapshotDeletionPolicy(), mergePolicy, mergeSchedulerConfig,
iwc.getAnalyzer(), iwc.getSimilarity() , new CodecService(shardId.index()), new Engine.FailedEngineListener() { iwc.getAnalyzer(), iwc.getSimilarity() , new CodecService(shardId.index()), new Engine.FailedEngineListener() {
@Override @Override
public void onFailedEngine(ShardId shardId, String reason, @Nullable Throwable t) { public void onFailedEngine(ShardId shardId, String reason, @Nullable Throwable t) {
@ -264,6 +268,9 @@ public class ShadowEngineTests extends ElasticsearchTestCase {
@Test @Test
public void testSegments() throws Exception { public void testSegments() throws Exception {
IndexSettingsService indexSettingsService = new IndexSettingsService(shardId.index(), Settings.builder().put(defaultSettings).put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build());
primaryEngine.close(); // recreate without merging
primaryEngine = createInternalEngine(indexSettingsService, store, createTempDir(), NoMergePolicy.INSTANCE);
List<Segment> segments = primaryEngine.segments(false); List<Segment> segments = primaryEngine.segments(false);
assertThat(segments.isEmpty(), equalTo(true)); assertThat(segments.isEmpty(), equalTo(true));
assertThat(primaryEngine.segmentsStats().getCount(), equalTo(0l)); assertThat(primaryEngine.segmentsStats().getCount(), equalTo(0l));