Merge pull request #207 from metamx/dynamicConfigs

Make some master configuration properties dynamically configurable
cheddar 2013-08-08 16:42:08 -07:00
commit 3e8406a2e6
18 changed files with 334 additions and 115 deletions

View File

@ -102,6 +102,7 @@ public class InfoResource
private final DatabaseRuleManager databaseRuleManager;
private final IndexingServiceClient indexingServiceClient;
@Inject
public InfoResource(
DruidMaster master,
@ -129,6 +130,7 @@ public class InfoResource
.build();
}
@GET
@Path("/cluster")
@Produces("application/json")

View File

@ -283,7 +283,8 @@ public class MasterMain
databaseRuleManager,
master,
jsonMapper,
indexingServiceClient
indexingServiceClient,
configManager
)
);

View File

@ -24,13 +24,10 @@ import com.metamx.druid.master.LoadPeonCallback;
import javax.inject.Inject;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Response;
import java.util.List;
import java.util.Map;
/**
*/
@ -110,11 +107,5 @@ public class MasterResource
return resp;
}
@GET
@Path("/loadstatus")
@Produces("application/json")
public Map<String, Double> getLoadStatus()
{
return master.getLoadStatus();
}
}

View File

@ -0,0 +1,69 @@
/*
* Druid - a distributed column store.
* Copyright (C) 2012 Metamarkets Group Inc.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package com.metamx.druid.http;
import com.metamx.druid.config.JacksonConfigManager;
import com.metamx.druid.master.MasterSegmentSettings;
import javax.inject.Inject;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Response;
/**
*/
@Path("/master/config")
public class MasterSegmentSettingsResource
{
private final JacksonConfigManager manager;
@Inject
public MasterSegmentSettingsResource(
JacksonConfigManager manager
)
{
this.manager=manager;
}
@GET
@Produces("application/json")
public Response getDynamicConfigs()
{
Response.ResponseBuilder builder = Response.status(Response.Status.OK)
.entity(
manager.watch(MasterSegmentSettings.CONFIG_KEY,MasterSegmentSettings.class).get()
);
return builder.build();
}
@POST
@Consumes("application/json")
public Response setDynamicConfigs(
final MasterSegmentSettings masterSegmentSettings
)
{
if (!manager.set(MasterSegmentSettings.CONFIG_KEY, masterSegmentSettings)) {
return Response.status(Response.Status.BAD_REQUEST).build();
}
return Response.status(Response.Status.OK).build();
}
}
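
The resource above is the operator-facing piece: GET /master/config returns the current dynamic settings and POST replaces them. Below is a minimal sketch of exercising it from Java with java.net.HttpURLConnection; the host, port, and the posted values are placeholders, while the /master/config path and the JSON field names come from this resource and MasterSegmentSettings.

import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class MasterConfigClientSketch
{
  public static void main(String[] args) throws Exception
  {
    // Placeholder host and port; only the /master/config path comes from the resource above.
    URL url = new URL("http://localhost:8080/master/config");

    // GET the current dynamic settings as JSON.
    HttpURLConnection get = (HttpURLConnection) url.openConnection();
    try (InputStream in = get.getInputStream()) {
      System.out.println(new String(in.readAllBytes(), StandardCharsets.UTF_8));
    }

    // POST replacement settings; the field names match MasterSegmentSettings, the values are examples.
    String body = "{\"millisToWaitBeforeDeleting\":900000,"
                  + "\"mergeBytesLimit\":100000000,"
                  + "\"mergeSegmentsLimit\":10,"
                  + "\"maxSegmentsToMove\":5}";
    HttpURLConnection post = (HttpURLConnection) url.openConnection();
    post.setRequestMethod("POST");
    post.setRequestProperty("Content-Type", "application/json");
    post.setDoOutput(true);
    try (OutputStream out = post.getOutputStream()) {
      out.write(body.getBytes(StandardCharsets.UTF_8));
    }
    // 200 when the ConfigManager accepts the update, 400 when set() fails (see the resource above).
    System.out.println(post.getResponseCode());
  }
}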

View File

@ -25,6 +25,7 @@ import com.google.inject.Provides;
import com.google.inject.util.Providers;
import com.metamx.druid.client.InventoryView;
import com.metamx.druid.client.indexing.IndexingServiceClient;
import com.metamx.druid.config.JacksonConfigManager;
import com.metamx.druid.db.DatabaseRuleManager;
import com.metamx.druid.db.DatabaseSegmentManager;
import com.metamx.druid.master.DruidMaster;
@ -43,6 +44,8 @@ public class MasterServletModule extends JerseyServletModule
private final DruidMaster master;
private final ObjectMapper jsonMapper;
private final IndexingServiceClient indexingServiceClient;
private final JacksonConfigManager configManager;
public MasterServletModule(
InventoryView serverInventoryView,
@ -50,7 +53,8 @@ public class MasterServletModule extends JerseyServletModule
DatabaseRuleManager databaseRuleManager,
DruidMaster master,
ObjectMapper jsonMapper,
IndexingServiceClient indexingServiceClient
IndexingServiceClient indexingServiceClient,
JacksonConfigManager configManager
)
{
this.serverInventoryView = serverInventoryView;
@ -59,17 +63,20 @@ public class MasterServletModule extends JerseyServletModule
this.master = master;
this.jsonMapper = jsonMapper;
this.indexingServiceClient = indexingServiceClient;
this.configManager = configManager;
}
@Override
protected void configureServlets()
{
bind(MasterSegmentSettingsResource.class);
bind(InfoResource.class);
bind(MasterResource.class);
bind(InventoryView.class).toInstance(serverInventoryView);
bind(DatabaseSegmentManager.class).toInstance(segmentInventoryManager);
bind(DatabaseRuleManager.class).toInstance(databaseRuleManager);
bind(DruidMaster.class).toInstance(master);
bind(JacksonConfigManager.class).toInstance(configManager);
if (indexingServiceClient == null) {
bind(IndexingServiceClient.class).toProvider(Providers.<IndexingServiceClient>of(null));
}
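
The binding added here is what connects the pieces: MasterServletModule now receives the JacksonConfigManager from MasterMain and binds it to that instance, so Guice can satisfy the @Inject constructor of MasterSegmentSettingsResource. A minimal standalone sketch of the same toInstance pattern follows, with hypothetical Foo/FooResource classes standing in for the Druid types.

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import javax.inject.Inject;

public class ToInstanceBindingSketch
{
  // Hypothetical stand-ins for JacksonConfigManager and MasterSegmentSettingsResource.
  static class Foo {}

  static class FooResource
  {
    final Foo foo;

    @Inject
    FooResource(Foo foo)
    {
      this.foo = foo;
    }
  }

  public static void main(String[] args)
  {
    final Foo instance = new Foo();

    Injector injector = Guice.createInjector(
        new AbstractModule()
        {
          @Override
          protected void configure()
          {
            // Same shape as bind(JacksonConfigManager.class).toInstance(configManager) above.
            bind(Foo.class).toInstance(instance);
          }
        }
    );

    // Guice constructs the resource and hands it the bound instance.
    FooResource resource = injector.getInstance(FooResource.class);
  }
}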

View File

@ -95,6 +95,7 @@ public class DruidMaster
private final Map<String, LoadQueuePeon> loadManagementPeons;
private final AtomicReference<LeaderLatch> leaderLatch;
private volatile AtomicReference<MasterSegmentSettings> segmentSettingsAtomicReference;
public DruidMaster(
DruidMasterConfig config,
@ -156,6 +157,7 @@ public class DruidMaster
this.exec = scheduledExecutorFactory.create(1, "Master-Exec--%d");
this.leaderLatch = new AtomicReference<LeaderLatch>(null);
this.segmentSettingsAtomicReference= new AtomicReference<MasterSegmentSettings>(null);
this.loadManagementPeons = loadQueuePeonMap;
}
@ -465,7 +467,7 @@ public class DruidMaster
serverInventoryView.start();
final List<Pair<? extends MasterRunnable, Duration>> masterRunnables = Lists.newArrayList();
segmentSettingsAtomicReference = configManager.watch(MasterSegmentSettings.CONFIG_KEY, MasterSegmentSettings.class,new MasterSegmentSettings.Builder().build());
masterRunnables.add(Pair.of(new MasterComputeManagerRunnable(), config.getMasterPeriod()));
if (indexingServiceClient != null) {
@ -650,18 +652,15 @@ public class DruidMaster
}
// Do master stuff.
DruidMasterRuntimeParams params =
DruidMasterRuntimeParams.newBuilder()
.withStartTime(startTime)
.withDatasources(databaseSegmentManager.getInventory())
.withMillisToWaitBeforeDeleting(config.getMillisToWaitBeforeDeleting())
.withMasterSegmentSettings(segmentSettingsAtomicReference.get())
.withEmitter(emitter)
.withMergeBytesLimit(config.getMergeBytesLimit())
.withMergeSegmentsLimit(config.getMergeSegmentsLimit())
.withMaxSegmentsToMove(config.getMaxSegmentsToMove())
.build();
for (DruidMasterHelper helper : helpers) {
params = helper.run(params);
}
@ -756,6 +755,9 @@ public class DruidMaster
.withLoadManagementPeons(loadManagementPeons)
.withSegmentReplicantLookup(segmentReplicantLookup)
.withBalancerReferenceTimestamp(DateTime.now())
.withMasterSegmentSettings(
segmentSettingsAtomicReference.get()
)
.build();
}
},
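
The substantive change in DruidMaster is that merge and move limits no longer come from the static DruidMasterConfig. Instead, configManager.watch(MasterSegmentSettings.CONFIG_KEY, ...) hands back an AtomicReference that the config manager keeps refreshed, and each master run reads the latest value with get() when building its runtime params. The sketch below illustrates that watch-and-poll pattern in isolation; the WatchSketch class, its polling interval, and the Supplier-based store lookup are illustrative stand-ins, not the real JacksonConfigManager.

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;

// Illustrative stand-in for the watch() idea; not the real JacksonConfigManager.
public class WatchSketch<T>
{
  private final ScheduledExecutorService exec = Executors.newSingleThreadScheduledExecutor();

  public AtomicReference<T> watch(Supplier<T> loadFromStore, T defaultValue)
  {
    final AtomicReference<T> ref = new AtomicReference<T>(defaultValue);
    // Periodically re-read the backing store and publish the newest value.
    exec.scheduleWithFixedDelay(
        () -> {
          T latest = loadFromStore.get();
          if (latest != null) {
            ref.set(latest);
          }
        },
        0, 60, TimeUnit.SECONDS
    );
    // Callers hold onto the reference and call get() whenever they need the current value,
    // as DruidMaster does with segmentSettingsAtomicReference.get() above.
    return ref;
  }
}

With this in place, a POST to /master/config should take effect on a later master cycle, once the config manager picks up the new value, without restarting the node.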

View File

@ -79,7 +79,7 @@ public class DruidMasterBalancer implements DruidMasterHelper
final MasterStats stats = new MasterStats();
final DateTime referenceTimestamp = params.getBalancerReferenceTimestamp();
final BalancerCostAnalyzer analyzer = params.getBalancerCostAnalyzer(referenceTimestamp);
final int maxSegmentsToMove = params.getMaxSegmentsToMove();
final int maxSegmentsToMove = params.getMasterSegmentSettings().getMaxSegmentsToMove();
for (Map.Entry<String, MinMaxPriorityQueue<ServerHolder>> entry :
params.getDruidCluster().getCluster().entrySet()) {

View File

@ -46,11 +46,8 @@ public class DruidMasterRuntimeParams
private final Map<String, LoadQueuePeon> loadManagementPeons;
private final ReplicationThrottler replicationManager;
private final ServiceEmitter emitter;
private final long millisToWaitBeforeDeleting;
private final MasterSegmentSettings masterSegmentSettings;
private final MasterStats stats;
private final long mergeBytesLimit;
private final int mergeSegmentsLimit;
private final int maxSegmentsToMove;
private final DateTime balancerReferenceTimestamp;
public DruidMasterRuntimeParams(
@ -63,11 +60,8 @@ public class DruidMasterRuntimeParams
Map<String, LoadQueuePeon> loadManagementPeons,
ReplicationThrottler replicationManager,
ServiceEmitter emitter,
long millisToWaitBeforeDeleting,
MasterSegmentSettings masterSegmentSettings,
MasterStats stats,
long mergeBytesLimit,
int mergeSegmentsLimit,
int maxSegmentsToMove,
DateTime balancerReferenceTimestamp
)
{
@ -80,11 +74,8 @@ public class DruidMasterRuntimeParams
this.loadManagementPeons = loadManagementPeons;
this.replicationManager = replicationManager;
this.emitter = emitter;
this.millisToWaitBeforeDeleting = millisToWaitBeforeDeleting;
this.masterSegmentSettings = masterSegmentSettings;
this.stats = stats;
this.mergeBytesLimit = mergeBytesLimit;
this.mergeSegmentsLimit = mergeSegmentsLimit;
this.maxSegmentsToMove = maxSegmentsToMove;
this.balancerReferenceTimestamp = balancerReferenceTimestamp;
}
@ -133,9 +124,9 @@ public class DruidMasterRuntimeParams
return emitter;
}
public long getMillisToWaitBeforeDeleting()
public MasterSegmentSettings getMasterSegmentSettings()
{
return millisToWaitBeforeDeleting;
return masterSegmentSettings;
}
public MasterStats getMasterStats()
@ -143,21 +134,6 @@ public class DruidMasterRuntimeParams
return stats;
}
public long getMergeBytesLimit()
{
return mergeBytesLimit;
}
public int getMergeSegmentsLimit()
{
return mergeSegmentsLimit;
}
public int getMaxSegmentsToMove()
{
return maxSegmentsToMove;
}
public DateTime getBalancerReferenceTimestamp()
{
return balancerReferenceTimestamp;
@ -170,7 +146,7 @@ public class DruidMasterRuntimeParams
public boolean hasDeletionWaitTimeElapsed()
{
return (System.currentTimeMillis() - getStartTime() > getMillisToWaitBeforeDeleting());
return (System.currentTimeMillis() - getStartTime() > masterSegmentSettings.getMillisToWaitBeforeDeleting());
}
public static Builder newBuilder()
@ -190,11 +166,8 @@ public class DruidMasterRuntimeParams
loadManagementPeons,
replicationManager,
emitter,
millisToWaitBeforeDeleting,
masterSegmentSettings,
stats,
mergeBytesLimit,
mergeSegmentsLimit,
maxSegmentsToMove,
balancerReferenceTimestamp
);
}
@ -210,11 +183,8 @@ public class DruidMasterRuntimeParams
private final Map<String, LoadQueuePeon> loadManagementPeons;
private ReplicationThrottler replicationManager;
private ServiceEmitter emitter;
private long millisToWaitBeforeDeleting;
private MasterSegmentSettings masterSegmentSettings;
private MasterStats stats;
private long mergeBytesLimit;
private int mergeSegmentsLimit;
private int maxSegmentsToMove;
private DateTime balancerReferenceTimestamp;
Builder()
@ -228,11 +198,8 @@ public class DruidMasterRuntimeParams
this.loadManagementPeons = Maps.newHashMap();
this.replicationManager = null;
this.emitter = null;
this.millisToWaitBeforeDeleting = 0;
this.stats = new MasterStats();
this.mergeBytesLimit = 0;
this.mergeSegmentsLimit = 0;
this.maxSegmentsToMove = 0;
this.masterSegmentSettings = new MasterSegmentSettings.Builder().build();
this.balancerReferenceTimestamp = null;
}
@ -246,11 +213,8 @@ public class DruidMasterRuntimeParams
Map<String, LoadQueuePeon> loadManagementPeons,
ReplicationThrottler replicationManager,
ServiceEmitter emitter,
long millisToWaitBeforeDeleting,
MasterSegmentSettings masterSegmentSettings,
MasterStats stats,
long mergeBytesLimit,
int mergeSegmentsLimit,
int maxSegmentsToMove,
DateTime balancerReferenceTimestamp
)
{
@ -263,11 +227,8 @@ public class DruidMasterRuntimeParams
this.loadManagementPeons = loadManagementPeons;
this.replicationManager = replicationManager;
this.emitter = emitter;
this.millisToWaitBeforeDeleting = millisToWaitBeforeDeleting;
this.masterSegmentSettings = masterSegmentSettings;
this.stats = stats;
this.mergeBytesLimit = mergeBytesLimit;
this.mergeSegmentsLimit = mergeSegmentsLimit;
this.maxSegmentsToMove = maxSegmentsToMove;
this.balancerReferenceTimestamp = balancerReferenceTimestamp;
}
@ -283,11 +244,8 @@ public class DruidMasterRuntimeParams
loadManagementPeons,
replicationManager,
emitter,
millisToWaitBeforeDeleting,
masterSegmentSettings,
stats,
mergeBytesLimit,
mergeSegmentsLimit,
maxSegmentsToMove,
balancerReferenceTimestamp
);
}
@ -346,33 +304,15 @@ public class DruidMasterRuntimeParams
return this;
}
public Builder withMillisToWaitBeforeDeleting(long millisToWaitBeforeDeleting)
{
this.millisToWaitBeforeDeleting = millisToWaitBeforeDeleting;
return this;
}
public Builder withMasterStats(MasterStats stats)
{
this.stats.accumulate(stats);
return this;
}
public Builder withMergeBytesLimit(long mergeBytesLimit)
public Builder withMasterSegmentSettings(MasterSegmentSettings configs)
{
this.mergeBytesLimit = mergeBytesLimit;
return this;
}
public Builder withMergeSegmentsLimit(int mergeSegmentsLimit)
{
this.mergeSegmentsLimit = mergeSegmentsLimit;
return this;
}
public Builder withMaxSegmentsToMove(int maxSegmentsToMove)
{
this.maxSegmentsToMove = maxSegmentsToMove;
this.masterSegmentSettings = configs;
return this;
}
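
With these builder changes, callers that used to set millisToWaitBeforeDeleting, mergeBytesLimit, mergeSegmentsLimit, and maxSegmentsToMove individually now pass a single MasterSegmentSettings. A small before/after sketch, assuming the com.metamx.druid.master classes from this diff are on the classpath:

import com.metamx.druid.master.DruidMasterRuntimeParams;
import com.metamx.druid.master.MasterSegmentSettings;

public class BuilderMigrationSketch
{
  public static void main(String[] args)
  {
    // Before this change (methods removed above):
    //   DruidMasterRuntimeParams.newBuilder()
    //       .withMaxSegmentsToMove(5)
    //       .withMergeBytesLimit(100000000L)
    //       .build();

    // After: the limits travel together inside one MasterSegmentSettings.
    DruidMasterRuntimeParams params = DruidMasterRuntimeParams
        .newBuilder()
        .withMasterSegmentSettings(
            new MasterSegmentSettings.Builder()
                .withMaxSegmentsToMove(5)
                .withMergeBytesLimit(100000000L)
                .build()
        )
        .build();
  }
}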

View File

@ -21,7 +21,6 @@ package com.metamx.druid.master;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.HashMultiset;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
@ -100,9 +99,9 @@ public class DruidMasterSegmentMerger implements DruidMasterHelper
for (int i = 0; i < timelineObjects.size(); i++) {
if (!segmentsToMerge.add(timelineObjects.get(i))
|| segmentsToMerge.getByteCount() > params.getMergeBytesLimit()
|| segmentsToMerge.getSegmentCount() >= params.getMergeSegmentsLimit()) {
i -= segmentsToMerge.backtrack(params.getMergeBytesLimit());
|| segmentsToMerge.getByteCount() > params.getMasterSegmentSettings().getMergeBytesLimit()
|| segmentsToMerge.getSegmentCount() >= params.getMasterSegmentSettings().getMergeSegmentsLimit()) {
i -= segmentsToMerge.backtrack(params.getMasterSegmentSettings().getMergeBytesLimit());
if (segmentsToMerge.getSegmentCount() > 1) {
stats.addToGlobalStat("mergedCount", mergeSegments(segmentsToMerge, entry.getKey()));
@ -118,7 +117,7 @@ public class DruidMasterSegmentMerger implements DruidMasterHelper
}
// Finish any timelineObjects to merge that may have not hit threshold
segmentsToMerge.backtrack(params.getMergeBytesLimit());
segmentsToMerge.backtrack(params.getMasterSegmentSettings().getMergeBytesLimit());
if (segmentsToMerge.getSegmentCount() > 1) {
stats.addToGlobalStat("mergedCount", mergeSegments(segmentsToMerge, entry.getKey()));
}

View File

@ -0,0 +1,129 @@
/*
* Druid - a distributed column store.
* Copyright (C) 2012 Metamarkets Group Inc.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package com.metamx.druid.master;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
public class MasterSegmentSettings
{
public static final String CONFIG_KEY = "master.dynamicConfigs";
private long millisToWaitBeforeDeleting=15 * 60 * 1000L;
private long mergeBytesLimit= 100000000L;
private int mergeSegmentsLimit = Integer.MAX_VALUE;
private int maxSegmentsToMove = 5;
@JsonCreator
public MasterSegmentSettings(
@JsonProperty("millisToWaitBeforeDeleting") Long millisToWaitBeforeDeleting,
@JsonProperty("mergeBytesLimit") Long mergeBytesLimit,
@JsonProperty("mergeSegmentsLimit") Integer mergeSegmentsLimit,
@JsonProperty("maxSegmentsToMove") Integer maxSegmentsToMove
)
{
this.maxSegmentsToMove=maxSegmentsToMove;
this.millisToWaitBeforeDeleting=millisToWaitBeforeDeleting;
this.mergeSegmentsLimit=mergeSegmentsLimit;
this.mergeBytesLimit=mergeBytesLimit;
}
public static String getConfigKey()
{
return CONFIG_KEY;
}
@JsonProperty
public long getMillisToWaitBeforeDeleting()
{
return millisToWaitBeforeDeleting;
}
@JsonProperty
public long getMergeBytesLimit()
{
return mergeBytesLimit;
}
@JsonProperty
public int getMergeSegmentsLimit()
{
return mergeSegmentsLimit;
}
@JsonProperty
public int getMaxSegmentsToMove()
{
return maxSegmentsToMove;
}
public static class Builder
{
public static final String CONFIG_KEY = "master.dynamicConfigs";
private long millisToWaitBeforeDeleting;
private long mergeBytesLimit;
private int mergeSegmentsLimit;
private int maxSegmentsToMove;
public Builder()
{
this.millisToWaitBeforeDeleting=15 * 60 * 1000L;
this.mergeBytesLimit= 100000000L;
this.mergeSegmentsLimit= Integer.MAX_VALUE;
this.maxSegmentsToMove = 5;
}
public Builder(long millisToWaitBeforeDeleting, long mergeBytesLimit, int mergeSegmentsLimit, int maxSegmentsToMove)
{
this.millisToWaitBeforeDeleting = millisToWaitBeforeDeleting;
this.mergeBytesLimit = mergeBytesLimit;
this.mergeSegmentsLimit = mergeSegmentsLimit;
this.maxSegmentsToMove = maxSegmentsToMove;
}
public Builder withMillisToWaitBeforeDeleting(long millisToWaitBeforeDeleting)
{
this.millisToWaitBeforeDeleting=millisToWaitBeforeDeleting;
return this;
}
public Builder withMergeBytesLimit(long mergeBytesLimit)
{
this.mergeBytesLimit=mergeBytesLimit;
return this;
}
public Builder withMergeSegmentsLimit(int mergeSegmentsLimit)
{
this.mergeSegmentsLimit=mergeSegmentsLimit;
return this;
}
public Builder withMaxSegmentsToMove(int maxSegmentsToMove)
{
this.maxSegmentsToMove=maxSegmentsToMove;
return this;
}
public MasterSegmentSettings build()
{
return new MasterSegmentSettings(millisToWaitBeforeDeleting,mergeBytesLimit,mergeSegmentsLimit,maxSegmentsToMove);
}
}
}
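
For orientation, the defaults above work out to 900000 ms (15 minutes) before deleting, a 100000000-byte merge limit, Integer.MAX_VALUE segments per merge, and 5 segments per balancer move, and the @JsonProperty getters give the JSON shape that the resource and the web form exchange. A small sketch, assuming this class and Jackson's ObjectMapper are available:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.metamx.druid.master.MasterSegmentSettings;

public class SettingsJsonSketch
{
  public static void main(String[] args) throws Exception
  {
    MasterSegmentSettings defaults = new MasterSegmentSettings.Builder().build();

    // Serializes the four @JsonProperty getters, e.g.
    // {"millisToWaitBeforeDeleting":900000,"mergeBytesLimit":100000000,
    //  "mergeSegmentsLimit":2147483647,"maxSegmentsToMove":5}
    System.out.println(new ObjectMapper().writeValueAsString(defaults));
  }
}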

View File

@ -37,6 +37,9 @@
<div>
<a href="rules.html">Configure Compute Node Rules</a>
</div>
<div>
<a href="masterSegmentSettings.html">Configure Dynamic Master Parameters</a>
</div>
<div>
<a href="enable.html">Enable/Disable Datasources</a>
</div>

View File

@ -0,0 +1,25 @@
$(function () {
$.get('../master/config', function (data) {
document.getElementById("millis").value=data["millisToWaitBeforeDeleting"];
document.getElementById("mergeBytes").value = data["mergeBytesLimit"];
document.getElementById("mergeSegments").value = data["mergeSegmentsLimit"];
document.getElementById("maxSegments").value = data["maxSegmentsToMove"];
});
$("#submit").click( function ()
{
values = {};
list = $('form').serializeArray();
for (var i=0;i< list.length;i++)
{
values[list[i]["name"]]=list[i]["value"];
}
$.ajax({
url:'../master/config',
type:"POST",
data: JSON.stringify(values),
contentType:"application/json; charset=utf-8",
dataType:"json"
});
});
});

View File

@ -78,4 +78,4 @@ function initDataTable(el) {
this.value = asInitVals[$("thead input").index(this)];
}
});
}
}

View File

@ -0,0 +1,23 @@
<!DOCTYPE html>
<html>
<head>
<title>Configure Dynamic Master Parameters</title>
</head>
<body>
<form action="process_config.html" autocomplete="on" id ="configs">
millisToWaitBeforeDeleting:<input type="text" name="millisToWaitBeforeDeleting" id="millis">
<br>
mergeBytesLimit: <input type="text" name="mergeBytesLimit" id="mergeBytes">
<br>
mergeSegmentsLimit: <input type="text" name = "mergeSegmentsLimit" id="mergeSegments">
<br>
maxSegmentsToMove: <input type= "text" name ="maxSegmentsToMove" id ="maxSegments">
<br>
<button type="button" id="submit"> Submit </button>
</form>
</body>
<script type="text/javascript" src="js/jquery-1.8.3.js"></script>
<script type="text/javascript" src="js/masterSegmentSettings.js"></script>
</html>

View File

@ -167,7 +167,6 @@ public class DruidMasterBalancerTest
LoadQueuePeonTester fromPeon = new LoadQueuePeonTester();
LoadQueuePeonTester toPeon = new LoadQueuePeonTester();
DruidMasterRuntimeParams params =
DruidMasterRuntimeParams.newBuilder()
.withDruidCluster(
@ -184,9 +183,19 @@ public class DruidMasterBalancerTest
)
)
)
.withLoadManagementPeons(ImmutableMap.<String, LoadQueuePeon>of("from", fromPeon, "to", toPeon))
.withLoadManagementPeons(
ImmutableMap.<String, LoadQueuePeon>of(
"from",
fromPeon,
"to",
toPeon
)
)
.withAvailableSegments(segments.values())
.withMaxSegmentsToMove(MAX_SEGMENTS_TO_MOVE)
.withMasterSegmentSettings(
new MasterSegmentSettings.Builder().withMaxSegmentsToMove(MAX_SEGMENTS_TO_MOVE)
.build()
)
.withBalancerReferenceTimestamp(new DateTime("2013-01-01"))
.build();
@ -262,9 +271,24 @@ public class DruidMasterBalancerTest
)
)
)
.withLoadManagementPeons(ImmutableMap.<String, LoadQueuePeon>of("1", peon1, "2", peon2, "3", peon3, "4", peon4))
.withLoadManagementPeons(
ImmutableMap.<String, LoadQueuePeon>of(
"1",
peon1,
"2",
peon2,
"3",
peon3,
"4",
peon4
)
)
.withAvailableSegments(segments.values())
.withMaxSegmentsToMove(MAX_SEGMENTS_TO_MOVE)
.withMasterSegmentSettings(
new MasterSegmentSettings.Builder().withMaxSegmentsToMove(
MAX_SEGMENTS_TO_MOVE
).build()
)
.withBalancerReferenceTimestamp(new DateTime("2013-01-01"))
.build();

View File

@ -177,8 +177,7 @@ public class DruidMasterRuleRunnerTest
.withAvailableSegments(availableSegments)
.withDatabaseRuleManager(databaseRuleManager)
.withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster()))
.withMaxSegmentsToMove(5)
.withBalancerReferenceTimestamp(new DateTime("2013-01-01"))
.withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMaxSegmentsToMove(5).build())
.build();
DruidMasterRuntimeParams afterParams = ruleRunner.run(params);
@ -523,7 +522,7 @@ public class DruidMasterRuleRunnerTest
DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder()
.withDruidCluster(druidCluster)
.withMillisToWaitBeforeDeleting(0L)
.withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMillisToWaitBeforeDeleting(0L).build())
.withAvailableSegments(availableSegments)
.withDatabaseRuleManager(databaseRuleManager)
.withSegmentReplicantLookup(segmentReplicantLookup)
@ -597,7 +596,7 @@ public class DruidMasterRuleRunnerTest
DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder()
.withDruidCluster(druidCluster)
.withMillisToWaitBeforeDeleting(0L)
.withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMillisToWaitBeforeDeleting(0L).build())
.withAvailableSegments(availableSegments)
.withDatabaseRuleManager(databaseRuleManager)
.withSegmentReplicantLookup(segmentReplicantLookup)
@ -678,7 +677,7 @@ public class DruidMasterRuleRunnerTest
DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder()
.withDruidCluster(druidCluster)
.withMillisToWaitBeforeDeleting(0L)
.withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMillisToWaitBeforeDeleting(0L).build())
.withAvailableSegments(availableSegments)
.withDatabaseRuleManager(databaseRuleManager)
.withSegmentReplicantLookup(segmentReplicantLookup)
@ -755,7 +754,7 @@ public class DruidMasterRuleRunnerTest
DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder()
.withDruidCluster(druidCluster)
.withMillisToWaitBeforeDeleting(0L)
.withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMillisToWaitBeforeDeleting(0L).build())
.withAvailableSegments(availableSegments)
.withDatabaseRuleManager(databaseRuleManager)
.withSegmentReplicantLookup(segmentReplicantLookup)
@ -844,7 +843,7 @@ public class DruidMasterRuleRunnerTest
DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder()
.withDruidCluster(druidCluster)
.withMillisToWaitBeforeDeleting(0L)
.withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMillisToWaitBeforeDeleting(0L).build())
.withAvailableSegments(availableSegments)
.withDatabaseRuleManager(databaseRuleManager)
.withSegmentReplicantLookup(segmentReplicantLookup)
@ -1030,7 +1029,7 @@ public class DruidMasterRuleRunnerTest
DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder()
.withDruidCluster(druidCluster)
.withMillisToWaitBeforeDeleting(0L)
.withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMillisToWaitBeforeDeleting(0L).build())
.withAvailableSegments(longerAvailableSegments)
.withDatabaseRuleManager(databaseRuleManager)
.withSegmentReplicantLookup(segmentReplicantLookup)

View File

@ -459,10 +459,15 @@ public class DruidMasterSegmentMergerTest
final DruidMasterSegmentMerger merger = new DruidMasterSegmentMerger(indexingServiceClient, whitelistRef);
final DruidMasterRuntimeParams params = DruidMasterRuntimeParams.newBuilder()
.withAvailableSegments(ImmutableSet.copyOf(segments))
.withMergeBytesLimit(mergeBytesLimit)
.withMergeSegmentsLimit(mergeSegmentsLimit)
.withMasterSegmentSettings(
new MasterSegmentSettings.Builder().withMergeBytesLimit(
mergeBytesLimit
).withMergeSegmentsLimit(
mergeSegmentsLimit
)
.build()
)
.build();
merger.run(params);
return retVal;
}