Merge pull request #251 from metamx/fix-config

Make dynamic master resource configuration work again
cheddar 2013-09-30 10:54:11 -07:00
commit 16558404d6
25 changed files with 401 additions and 317 deletions

View File

@ -35,7 +35,6 @@ import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.metamx.common.Granularity; import com.metamx.common.Granularity;
import com.metamx.common.ISE; import com.metamx.common.ISE;
import com.metamx.common.MapUtils;
import com.metamx.common.guava.FunctionalIterable; import com.metamx.common.guava.FunctionalIterable;
import com.metamx.common.logger.Logger; import com.metamx.common.logger.Logger;
import io.druid.common.utils.JodaUtils; import io.druid.common.utils.JodaUtils;
@ -52,8 +51,6 @@ import io.druid.indexer.rollup.DataRollupSpec;
import io.druid.indexer.updater.DbUpdaterJobSpec; import io.druid.indexer.updater.DbUpdaterJobSpec;
import io.druid.jackson.DefaultObjectMapper; import io.druid.jackson.DefaultObjectMapper;
import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactory;
import io.druid.segment.serde.Registererer;
import io.druid.segment.serde.Registererers;
import io.druid.timeline.DataSegment; import io.druid.timeline.DataSegment;
import io.druid.timeline.partition.ShardSpec; import io.druid.timeline.partition.ShardSpec;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
@ -65,11 +62,9 @@ import org.joda.time.DateTime;
import org.joda.time.Interval; import org.joda.time.Interval;
import org.joda.time.format.ISODateTimeFormat; import org.joda.time.format.ISODateTimeFormat;
import javax.annotation.Nullable;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.nio.charset.Charset; import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;

View File

@ -1,37 +0,0 @@
/*
* Druid - a distributed column store.
* Copyright (C) 2012, 2013 Metamarkets Group Inc.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package io.druid.segment.serde;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
* This is a "factory" interface for registering handlers in the system. It exists because I'm unaware of
* another way to register the complex serdes in the MR jobs that run on Hadoop. As such, instances of this interface
* must be instantiatable via a no argument default constructor (the MR jobs on Hadoop use reflection to instantiate
* instances).
*
* The name is not a typo, I felt that it needed an extra "er" to make the pronunciation that much more difficult.
*/
public interface Registererer
{
public void register();
public void registerSubType(ObjectMapper jsonMapper);
}

View File

@ -1,42 +0,0 @@
/*
* Druid - a distributed column store.
* Copyright (C) 2012, 2013 Metamarkets Group Inc.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package io.druid.segment.serde;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
*/
public class Registererers
{
public static void registerHandlers(Iterable<Registererer> registererers, Iterable<ObjectMapper> mappers)
{
for (Registererer registererer : registererers) {
if (!doneRegister) {
registererer.register();
}
for (ObjectMapper mapper : mappers) {
registererer.registerSubType(mapper);
}
}
doneRegister = true;
}
private static boolean doneRegister = false;
}

View File

@ -20,7 +20,7 @@
package io.druid.server.http; package io.druid.server.http;
import io.druid.common.config.JacksonConfigManager; import io.druid.common.config.JacksonConfigManager;
import io.druid.server.master.MasterSegmentSettings; import io.druid.server.master.MasterDynamicConfig;
import javax.inject.Inject; import javax.inject.Inject;
import javax.ws.rs.Consumes; import javax.ws.rs.Consumes;
@ -33,38 +33,38 @@ import javax.ws.rs.core.Response;
/** /**
*/ */
@Path("/master/config") @Path("/master/config")
public class MasterSegmentSettingsResource public class MasterDynamicConfigsResource
{ {
private final JacksonConfigManager manager; private final JacksonConfigManager manager;
@Inject @Inject
public MasterSegmentSettingsResource( public MasterDynamicConfigsResource(
JacksonConfigManager manager JacksonConfigManager manager
) )
{ {
this.manager=manager; this.manager = manager;
} }
@GET @GET
@Produces("application/json") @Produces("application/json")
public Response getDynamicConfigs() public Response getDynamicConfigs()
{ {
Response.ResponseBuilder builder = Response.status(Response.Status.OK) return Response.ok(
.entity( manager.watch(
manager.watch(MasterSegmentSettings.CONFIG_KEY,MasterSegmentSettings.class).get() MasterDynamicConfig.CONFIG_KEY,
); MasterDynamicConfig.class
return builder.build(); ).get()
).build();
} }
@POST @POST
@Consumes("application/json") @Consumes("application/json")
public Response setDynamicConfigs( public Response setDynamicConfigs(final MasterDynamicConfig dynamicConfig)
final MasterSegmentSettings masterSegmentSettings
)
{ {
if (!manager.set(MasterSegmentSettings.CONFIG_KEY, masterSegmentSettings)) { if (!manager.set(MasterDynamicConfig.CONFIG_KEY, dynamicConfig)) {
return Response.status(Response.Status.BAD_REQUEST).build(); return Response.status(Response.Status.BAD_REQUEST).build();
} }
return Response.status(Response.Status.OK).build(); return Response.ok().build();
} }
} }
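Note on the change above: the resource formerly named MasterSegmentSettingsResource is now MasterDynamicConfigsResource, and both handlers go through JacksonConfigManager. A minimal sketch of that round trip, assuming watch(key, class) returns an AtomicReference as it does in DruidMaster further down; this is an illustration, not code from the PR:

import io.druid.common.config.JacksonConfigManager;
import io.druid.server.master.MasterDynamicConfig;

import java.util.concurrent.atomic.AtomicReference;

public class DynamicConfigRoundTripSketch
{
  // GET /master/config: watch the key and hand back the latest value.
  public static MasterDynamicConfig readCurrent(JacksonConfigManager manager)
  {
    AtomicReference<MasterDynamicConfig> ref =
        manager.watch(MasterDynamicConfig.CONFIG_KEY, MasterDynamicConfig.class);
    return ref.get();
  }

  // POST /master/config: set() returning false is what the resource maps to HTTP 400.
  public static boolean writeNew(JacksonConfigManager manager, MasterDynamicConfig config)
  {
    return manager.set(MasterDynamicConfig.CONFIG_KEY, config);
  }
}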

View File

@ -22,6 +22,7 @@ package io.druid.server.http;
import com.google.inject.Inject; import com.google.inject.Inject;
import io.druid.server.master.DruidMaster; import io.druid.server.master.DruidMaster;
import io.druid.server.master.LoadPeonCallback; import io.druid.server.master.LoadPeonCallback;
import io.druid.server.master.MasterDynamicConfig;
import javax.ws.rs.Consumes; import javax.ws.rs.Consumes;
import javax.ws.rs.GET; import javax.ws.rs.GET;
@ -30,7 +31,6 @@ import javax.ws.rs.Path;
import javax.ws.rs.Produces; import javax.ws.rs.Produces;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import java.util.List; import java.util.List;
import java.util.Map;
/** /**
*/ */
@ -113,8 +113,8 @@ public class MasterResource
@GET @GET
@Path("/loadstatus") @Path("/loadstatus")
@Produces("application/json") @Produces("application/json")
public Map<String, Double> getLoadStatus() public Response getLoadStatus()
{ {
return master.getLoadStatus(); return Response.ok(master.getLoadStatus()).build();
} }
} }
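The load status handler now returns a JAX-RS Response rather than a raw Map, which keeps explicit control of the status code while Jersey still serializes the entity to JSON. A small sketch of the equivalent pattern (illustrative only):

import javax.ws.rs.core.Response;

import java.util.Map;

public class LoadStatusResponseSketch
{
  // Response.ok(entity) yields HTTP 200 with the map serialized by the configured mapper.
  public static Response wrap(Map<String, Double> loadStatus)
  {
    return Response.ok(loadStatus).build();
  }
}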

View File

@ -84,6 +84,7 @@ public class DruidMaster
private volatile boolean started = false; private volatile boolean started = false;
private volatile boolean master = false; private volatile boolean master = false;
private volatile AtomicReference<MasterDynamicConfig> dynamicConfigs;
private final DruidMasterConfig config; private final DruidMasterConfig config;
private final ZkPathsConfig zkPaths; private final ZkPathsConfig zkPaths;
@ -98,7 +99,6 @@ public class DruidMaster
private final LoadQueueTaskMaster taskMaster; private final LoadQueueTaskMaster taskMaster;
private final Map<String, LoadQueuePeon> loadManagementPeons; private final Map<String, LoadQueuePeon> loadManagementPeons;
private final AtomicReference<LeaderLatch> leaderLatch; private final AtomicReference<LeaderLatch> leaderLatch;
private volatile AtomicReference<MasterSegmentSettings> segmentSettingsAtomicReference;
@Inject @Inject
public DruidMaster( public DruidMaster(
@ -161,7 +161,7 @@ public class DruidMaster
this.exec = scheduledExecutorFactory.create(1, "Master-Exec--%d"); this.exec = scheduledExecutorFactory.create(1, "Master-Exec--%d");
this.leaderLatch = new AtomicReference<>(null); this.leaderLatch = new AtomicReference<>(null);
this.segmentSettingsAtomicReference = new AtomicReference<>(null); this.dynamicConfigs = new AtomicReference<>(null);
this.loadManagementPeons = loadQueuePeonMap; this.loadManagementPeons = loadQueuePeonMap;
} }
@ -215,6 +215,11 @@ public class DruidMaster
return loadStatus; return loadStatus;
} }
public MasterDynamicConfig getDynamicConfigs()
{
return dynamicConfigs.get();
}
public void removeSegment(DataSegment segment) public void removeSegment(DataSegment segment)
{ {
log.info("Removing Segment[%s]", segment); log.info("Removing Segment[%s]", segment);
@ -471,10 +476,10 @@ public class DruidMaster
serverInventoryView.start(); serverInventoryView.start();
final List<Pair<? extends MasterRunnable, Duration>> masterRunnables = Lists.newArrayList(); final List<Pair<? extends MasterRunnable, Duration>> masterRunnables = Lists.newArrayList();
segmentSettingsAtomicReference = configManager.watch( dynamicConfigs = configManager.watch(
MasterSegmentSettings.CONFIG_KEY, MasterDynamicConfig.CONFIG_KEY,
MasterSegmentSettings.class, MasterDynamicConfig.class,
new MasterSegmentSettings.Builder().build() new MasterDynamicConfig.Builder().build()
); );
masterRunnables.add(Pair.of(new MasterComputeManagerRunnable(), config.getMasterPeriod())); masterRunnables.add(Pair.of(new MasterComputeManagerRunnable(), config.getMasterPeriod()));
if (indexingServiceClient != null) { if (indexingServiceClient != null) {
@ -663,7 +668,7 @@ public class DruidMaster
DruidMasterRuntimeParams.newBuilder() DruidMasterRuntimeParams.newBuilder()
.withStartTime(startTime) .withStartTime(startTime)
.withDatasources(databaseSegmentManager.getInventory()) .withDatasources(databaseSegmentManager.getInventory())
.withMasterSegmentSettings(segmentSettingsAtomicReference.get()) .withDynamicConfigs(dynamicConfigs.get())
.withEmitter(emitter) .withEmitter(emitter)
.build(); .build();
@ -750,13 +755,11 @@ public class DruidMaster
.withLoadManagementPeons(loadManagementPeons) .withLoadManagementPeons(loadManagementPeons)
.withSegmentReplicantLookup(segmentReplicantLookup) .withSegmentReplicantLookup(segmentReplicantLookup)
.withBalancerReferenceTimestamp(DateTime.now()) .withBalancerReferenceTimestamp(DateTime.now())
.withMasterSegmentSettings(segmentSettingsAtomicReference.get()) .withDynamicConfigs(dynamicConfigs.get())
.build(); .build();
} }
}, },
new DruidMasterRuleRunner( new DruidMasterRuleRunner(DruidMaster.this),
DruidMaster.this, config.getReplicantLifetime(), config.getReplicantThrottleLimit()
),
new DruidMasterCleanup(DruidMaster.this), new DruidMasterCleanup(DruidMaster.this),
new DruidMasterBalancer(DruidMaster.this), new DruidMasterBalancer(DruidMaster.this),
new DruidMasterLogger() new DruidMasterLogger()
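With the rename, DruidMaster holds an AtomicReference&lt;MasterDynamicConfig&gt; kept fresh by configManager.watch(...), and exposes the current snapshot through getDynamicConfigs(). A sketch of how a consumer is expected to use it, re-reading the reference each cycle so that a POST to /master/config takes effect on the next run without a restart (the helper class itself is hypothetical):

import io.druid.server.master.DruidMaster;
import io.druid.server.master.MasterDynamicConfig;

public class DynamicConfigConsumerSketch
{
  private final DruidMaster master;

  public DynamicConfigConsumerSketch(DruidMaster master)
  {
    this.master = master;
  }

  public void runOnce()
  {
    // Read the live snapshot on every invocation instead of caching it at construction time.
    final MasterDynamicConfig config = master.getDynamicConfigs();
    final int maxSegmentsToMove = config.getMaxSegmentsToMove();
    final long mergeBytesLimit = config.getMergeBytesLimit();
    System.out.printf("maxSegmentsToMove=%d, mergeBytesLimit=%d%n", maxSegmentsToMove, mergeBytesLimit);
  }
}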

View File

@ -79,7 +79,7 @@ public class DruidMasterBalancer implements DruidMasterHelper
final MasterStats stats = new MasterStats(); final MasterStats stats = new MasterStats();
final DateTime referenceTimestamp = params.getBalancerReferenceTimestamp(); final DateTime referenceTimestamp = params.getBalancerReferenceTimestamp();
final BalancerStrategy strategy = params.getBalancerStrategyFactory().createBalancerStrategy(referenceTimestamp); final BalancerStrategy strategy = params.getBalancerStrategyFactory().createBalancerStrategy(referenceTimestamp);
final int maxSegmentsToMove = params.getMasterSegmentSettings().getMaxSegmentsToMove(); final int maxSegmentsToMove = params.getMasterDynamicConfig().getMaxSegmentsToMove();
for (Map.Entry<String, MinMaxPriorityQueue<ServerHolder>> entry : for (Map.Entry<String, MinMaxPriorityQueue<ServerHolder>> entry :
params.getDruidCluster().getCluster().entrySet()) { params.getDruidCluster().getCluster().entrySet()) {
@ -124,7 +124,7 @@ public class DruidMasterBalancer implements DruidMasterHelper
} }
} }
stats.addToTieredStat("movedCount", tier, currentlyMovingSegments.get(tier).size()); stats.addToTieredStat("movedCount", tier, currentlyMovingSegments.get(tier).size());
if (params.getMasterSegmentSettings().isEmitBalancingStats()) { if (params.getMasterDynamicConfig().emitBalancingStats()) {
strategy.emitStats(tier, stats, serverHolderList); strategy.emitStats(tier, stats, serverHolderList);
} }

View File

@ -54,14 +54,6 @@ public abstract class DruidMasterConfig
return false; return false;
} }
@Config("druid.master.replicant.lifetime")
@Default("15")
public abstract int getReplicantLifetime();
@Config("druid.master.replicant.throttleLimit")
@Default("10")
public abstract int getReplicantThrottleLimit();
@Config("druid.master.load.timeout") @Config("druid.master.load.timeout")
public Duration getLoadTimeoutDelay() public Duration getLoadTimeoutDelay()
{ {

View File

@ -37,10 +37,21 @@ public class DruidMasterRuleRunner implements DruidMasterHelper
private final DruidMaster master; private final DruidMaster master;
public DruidMasterRuleRunner(DruidMaster master, int replicantLifeTime, int replicantThrottleLimit) public DruidMasterRuleRunner(DruidMaster master)
{ {
this(
new ReplicationThrottler(
master.getDynamicConfigs().getReplicationThrottleLimit(),
master.getDynamicConfigs().getReplicantLifetime()
),
master
);
}
public DruidMasterRuleRunner(ReplicationThrottler replicatorThrottler, DruidMaster master)
{
this.replicatorThrottler = replicatorThrottler;
this.master = master; this.master = master;
this.replicatorThrottler = new ReplicationThrottler(replicantThrottleLimit, replicantLifeTime);
} }
@Override @Override
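The rule runner no longer takes its replication limits from DruidMasterConfig (those static @Config properties are removed above); the single-argument constructor derives a ReplicationThrottler from the master's dynamic config, and the two-argument constructor remains for tests. A sketch of both wiring paths, with package names assumed from the surrounding imports:

import io.druid.server.master.DruidMaster;
import io.druid.server.master.DruidMasterRuleRunner;
import io.druid.server.master.ReplicationThrottler;

public class RuleRunnerWiringSketch
{
  // Production path: throttle limit and replicant lifetime come from master.getDynamicConfigs().
  public static DruidMasterRuleRunner forProduction(DruidMaster master)
  {
    return new DruidMasterRuleRunner(master);
  }

  // Test path, mirroring DruidMasterRuleRunnerTest later in this diff.
  public static DruidMasterRuleRunner forTest(DruidMaster master)
  {
    return new DruidMasterRuleRunner(new ReplicationThrottler(24, 1), master);
  }
}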

View File

@ -46,7 +46,7 @@ public class DruidMasterRuntimeParams
private final Map<String, LoadQueuePeon> loadManagementPeons; private final Map<String, LoadQueuePeon> loadManagementPeons;
private final ReplicationThrottler replicationManager; private final ReplicationThrottler replicationManager;
private final ServiceEmitter emitter; private final ServiceEmitter emitter;
private final MasterSegmentSettings masterSegmentSettings; private final MasterDynamicConfig masterDynamicConfig;
private final MasterStats stats; private final MasterStats stats;
private final DateTime balancerReferenceTimestamp; private final DateTime balancerReferenceTimestamp;
private final BalancerStrategyFactory strategyFactory; private final BalancerStrategyFactory strategyFactory;
@ -61,7 +61,7 @@ public class DruidMasterRuntimeParams
Map<String, LoadQueuePeon> loadManagementPeons, Map<String, LoadQueuePeon> loadManagementPeons,
ReplicationThrottler replicationManager, ReplicationThrottler replicationManager,
ServiceEmitter emitter, ServiceEmitter emitter,
MasterSegmentSettings masterSegmentSettings, MasterDynamicConfig masterDynamicConfig,
MasterStats stats, MasterStats stats,
DateTime balancerReferenceTimestamp, DateTime balancerReferenceTimestamp,
BalancerStrategyFactory strategyFactory BalancerStrategyFactory strategyFactory
@ -76,7 +76,7 @@ public class DruidMasterRuntimeParams
this.loadManagementPeons = loadManagementPeons; this.loadManagementPeons = loadManagementPeons;
this.replicationManager = replicationManager; this.replicationManager = replicationManager;
this.emitter = emitter; this.emitter = emitter;
this.masterSegmentSettings = masterSegmentSettings; this.masterDynamicConfig = masterDynamicConfig;
this.stats = stats; this.stats = stats;
this.balancerReferenceTimestamp = balancerReferenceTimestamp; this.balancerReferenceTimestamp = balancerReferenceTimestamp;
this.strategyFactory = strategyFactory; this.strategyFactory = strategyFactory;
@ -127,9 +127,9 @@ public class DruidMasterRuntimeParams
return emitter; return emitter;
} }
public MasterSegmentSettings getMasterSegmentSettings() public MasterDynamicConfig getMasterDynamicConfig()
{ {
return masterSegmentSettings; return masterDynamicConfig;
} }
public MasterStats getMasterStats() public MasterStats getMasterStats()
@ -149,7 +149,7 @@ public class DruidMasterRuntimeParams
public boolean hasDeletionWaitTimeElapsed() public boolean hasDeletionWaitTimeElapsed()
{ {
return (System.currentTimeMillis() - getStartTime() > masterSegmentSettings.getMillisToWaitBeforeDeleting()); return (System.currentTimeMillis() - getStartTime() > masterDynamicConfig.getMillisToWaitBeforeDeleting());
} }
public static Builder newBuilder() public static Builder newBuilder()
@ -169,7 +169,7 @@ public class DruidMasterRuntimeParams
loadManagementPeons, loadManagementPeons,
replicationManager, replicationManager,
emitter, emitter,
masterSegmentSettings, masterDynamicConfig,
stats, stats,
balancerReferenceTimestamp, balancerReferenceTimestamp,
strategyFactory strategyFactory
@ -187,7 +187,7 @@ public class DruidMasterRuntimeParams
private final Map<String, LoadQueuePeon> loadManagementPeons; private final Map<String, LoadQueuePeon> loadManagementPeons;
private ReplicationThrottler replicationManager; private ReplicationThrottler replicationManager;
private ServiceEmitter emitter; private ServiceEmitter emitter;
private MasterSegmentSettings masterSegmentSettings; private MasterDynamicConfig masterDynamicConfig;
private MasterStats stats; private MasterStats stats;
private DateTime balancerReferenceTimestamp; private DateTime balancerReferenceTimestamp;
private BalancerStrategyFactory strategyFactory; private BalancerStrategyFactory strategyFactory;
@ -204,7 +204,7 @@ public class DruidMasterRuntimeParams
this.replicationManager = null; this.replicationManager = null;
this.emitter = null; this.emitter = null;
this.stats = new MasterStats(); this.stats = new MasterStats();
this.masterSegmentSettings = new MasterSegmentSettings.Builder().build(); this.masterDynamicConfig = new MasterDynamicConfig.Builder().build();
this.balancerReferenceTimestamp = null; this.balancerReferenceTimestamp = null;
this.strategyFactory = new CostBalancerStrategyFactory(); this.strategyFactory = new CostBalancerStrategyFactory();
} }
@ -219,7 +219,7 @@ public class DruidMasterRuntimeParams
Map<String, LoadQueuePeon> loadManagementPeons, Map<String, LoadQueuePeon> loadManagementPeons,
ReplicationThrottler replicationManager, ReplicationThrottler replicationManager,
ServiceEmitter emitter, ServiceEmitter emitter,
MasterSegmentSettings masterSegmentSettings, MasterDynamicConfig masterDynamicConfig,
MasterStats stats, MasterStats stats,
DateTime balancerReferenceTimestamp, DateTime balancerReferenceTimestamp,
BalancerStrategyFactory strategyFactory BalancerStrategyFactory strategyFactory
@ -234,7 +234,7 @@ public class DruidMasterRuntimeParams
this.loadManagementPeons = loadManagementPeons; this.loadManagementPeons = loadManagementPeons;
this.replicationManager = replicationManager; this.replicationManager = replicationManager;
this.emitter = emitter; this.emitter = emitter;
this.masterSegmentSettings = masterSegmentSettings; this.masterDynamicConfig = masterDynamicConfig;
this.stats = stats; this.stats = stats;
this.balancerReferenceTimestamp = balancerReferenceTimestamp; this.balancerReferenceTimestamp = balancerReferenceTimestamp;
this.strategyFactory=strategyFactory; this.strategyFactory=strategyFactory;
@ -252,7 +252,7 @@ public class DruidMasterRuntimeParams
loadManagementPeons, loadManagementPeons,
replicationManager, replicationManager,
emitter, emitter,
masterSegmentSettings, masterDynamicConfig,
stats, stats,
balancerReferenceTimestamp, balancerReferenceTimestamp,
strategyFactory strategyFactory
@ -319,9 +319,9 @@ public class DruidMasterRuntimeParams
return this; return this;
} }
public Builder withMasterSegmentSettings(MasterSegmentSettings configs) public Builder withDynamicConfigs(MasterDynamicConfig configs)
{ {
this.masterSegmentSettings = configs; this.masterDynamicConfig = configs;
return this; return this;
} }

View File

@ -99,9 +99,9 @@ public class DruidMasterSegmentMerger implements DruidMasterHelper
for (int i = 0; i < timelineObjects.size(); i++) { for (int i = 0; i < timelineObjects.size(); i++) {
if (!segmentsToMerge.add(timelineObjects.get(i)) if (!segmentsToMerge.add(timelineObjects.get(i))
|| segmentsToMerge.getByteCount() > params.getMasterSegmentSettings().getMergeBytesLimit() || segmentsToMerge.getByteCount() > params.getMasterDynamicConfig().getMergeBytesLimit()
|| segmentsToMerge.getSegmentCount() >= params.getMasterSegmentSettings().getMergeSegmentsLimit()) { || segmentsToMerge.getSegmentCount() >= params.getMasterDynamicConfig().getMergeSegmentsLimit()) {
i -= segmentsToMerge.backtrack(params.getMasterSegmentSettings().getMergeBytesLimit()); i -= segmentsToMerge.backtrack(params.getMasterDynamicConfig().getMergeBytesLimit());
if (segmentsToMerge.getSegmentCount() > 1) { if (segmentsToMerge.getSegmentCount() > 1) {
stats.addToGlobalStat("mergedCount", mergeSegments(segmentsToMerge, entry.getKey())); stats.addToGlobalStat("mergedCount", mergeSegments(segmentsToMerge, entry.getKey()));
@ -117,7 +117,7 @@ public class DruidMasterSegmentMerger implements DruidMasterHelper
} }
// Finish any timelineObjects to merge that may have not hit threshold // Finish any timelineObjects to merge that may have not hit threshold
segmentsToMerge.backtrack(params.getMasterSegmentSettings().getMergeBytesLimit()); segmentsToMerge.backtrack(params.getMasterDynamicConfig().getMergeBytesLimit());
if (segmentsToMerge.getSegmentCount() > 1) { if (segmentsToMerge.getSegmentCount() > 1) {
stats.addToGlobalStat("mergedCount", mergeSegments(segmentsToMerge, entry.getKey())); stats.addToGlobalStat("mergedCount", mergeSegments(segmentsToMerge, entry.getKey()));
} }

View File

@ -21,36 +21,38 @@ package io.druid.server.master;
import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
public class MasterSegmentSettings public class MasterDynamicConfig
{ {
public static final String CONFIG_KEY = "master.dynamicConfigs"; public static final String CONFIG_KEY = "master.config";
private long millisToWaitBeforeDeleting = 15 * 60 * 1000L;
private long mergeBytesLimit = 100000000L; private final long millisToWaitBeforeDeleting;
private int mergeSegmentsLimit = Integer.MAX_VALUE; private final long mergeBytesLimit;
private int maxSegmentsToMove = 5; private final int mergeSegmentsLimit;
private boolean emitBalancingStats = false; private final int maxSegmentsToMove;
private final int replicantLifetime;
private final int replicationThrottleLimit;
private final boolean emitBalancingStats;
@JsonCreator @JsonCreator
public MasterSegmentSettings( public MasterDynamicConfig(
@JsonProperty("millisToWaitBeforeDeleting") Long millisToWaitBeforeDeleting, @JsonProperty("millisToWaitBeforeDeleting") long millisToWaitBeforeDeleting,
@JsonProperty("mergeBytesLimit") Long mergeBytesLimit, @JsonProperty("mergeBytesLimit") long mergeBytesLimit,
@JsonProperty("mergeSegmentsLimit") Integer mergeSegmentsLimit, @JsonProperty("mergeSegmentsLimit") int mergeSegmentsLimit,
@JsonProperty("maxSegmentsToMove") Integer maxSegmentsToMove, @JsonProperty("maxSegmentsToMove") int maxSegmentsToMove,
@JsonProperty("emitBalancingStats") Boolean emitBalancingStats @JsonProperty("replicantLifetime") int replicantLifetime,
@JsonProperty("replicationThrottleLimit") int replicationThrottleLimit,
@JsonProperty("emitBalancingStats") boolean emitBalancingStats
) )
{ {
this.maxSegmentsToMove = maxSegmentsToMove; this.maxSegmentsToMove = maxSegmentsToMove;
this.millisToWaitBeforeDeleting = millisToWaitBeforeDeleting; this.millisToWaitBeforeDeleting = millisToWaitBeforeDeleting;
this.mergeSegmentsLimit = mergeSegmentsLimit; this.mergeSegmentsLimit = mergeSegmentsLimit;
this.mergeBytesLimit = mergeBytesLimit; this.mergeBytesLimit = mergeBytesLimit;
this.replicantLifetime = replicantLifetime;
this.replicationThrottleLimit = replicationThrottleLimit;
this.emitBalancingStats = emitBalancingStats; this.emitBalancingStats = emitBalancingStats;
} }
public static String getConfigKey()
{
return CONFIG_KEY;
}
@JsonProperty @JsonProperty
public long getMillisToWaitBeforeDeleting() public long getMillisToWaitBeforeDeleting()
{ {
@ -64,7 +66,7 @@ public class MasterSegmentSettings
} }
@JsonProperty @JsonProperty
public boolean isEmitBalancingStats() public boolean emitBalancingStats()
{ {
return emitBalancingStats; return emitBalancingStats;
} }
@ -81,25 +83,40 @@ public class MasterSegmentSettings
return maxSegmentsToMove; return maxSegmentsToMove;
} }
@JsonProperty
public int getReplicantLifetime()
{
return replicantLifetime;
}
@JsonProperty
public int getReplicationThrottleLimit()
{
return replicationThrottleLimit;
}
public static class Builder public static class Builder
{ {
public static final String CONFIG_KEY = "master.dynamicConfigs";
private long millisToWaitBeforeDeleting; private long millisToWaitBeforeDeleting;
private long mergeBytesLimit; private long mergeBytesLimit;
private int mergeSegmentsLimit; private int mergeSegmentsLimit;
private int maxSegmentsToMove; private int maxSegmentsToMove;
private int replicantLifetime;
private int replicationThrottleLimit;
private boolean emitBalancingStats; private boolean emitBalancingStats;
public Builder() public Builder()
{ {
this(15 * 60 * 1000L, 100000000L, Integer.MAX_VALUE, 5, false); this(15 * 60 * 1000L, 100000000L, Integer.MAX_VALUE, 5, 15, 10, false);
} }
public Builder( private Builder(
long millisToWaitBeforeDeleting, long millisToWaitBeforeDeleting,
long mergeBytesLimit, long mergeBytesLimit,
int mergeSegmentsLimit, int mergeSegmentsLimit,
int maxSegmentsToMove, int maxSegmentsToMove,
int replicantLifetime,
int replicationThrottleLimit,
boolean emitBalancingStats boolean emitBalancingStats
) )
{ {
@ -107,6 +124,8 @@ public class MasterSegmentSettings
this.mergeBytesLimit = mergeBytesLimit; this.mergeBytesLimit = mergeBytesLimit;
this.mergeSegmentsLimit = mergeSegmentsLimit; this.mergeSegmentsLimit = mergeSegmentsLimit;
this.maxSegmentsToMove = maxSegmentsToMove; this.maxSegmentsToMove = maxSegmentsToMove;
this.replicantLifetime = replicantLifetime;
this.replicationThrottleLimit = replicationThrottleLimit;
this.emitBalancingStats = emitBalancingStats; this.emitBalancingStats = emitBalancingStats;
} }
@ -134,13 +153,27 @@ public class MasterSegmentSettings
return this; return this;
} }
public MasterSegmentSettings build() public Builder withReplicantLifetime(int replicantLifetime)
{ {
return new MasterSegmentSettings( this.replicantLifetime = replicantLifetime;
return this;
}
public Builder withReplicationThrottleLimit(int replicationThrottleLimit)
{
this.replicationThrottleLimit = replicationThrottleLimit;
return this;
}
public MasterDynamicConfig build()
{
return new MasterDynamicConfig(
millisToWaitBeforeDeleting, millisToWaitBeforeDeleting,
mergeBytesLimit, mergeBytesLimit,
mergeSegmentsLimit, mergeSegmentsLimit,
maxSegmentsToMove, maxSegmentsToMove,
replicantLifetime,
replicationThrottleLimit,
emitBalancingStats emitBalancingStats
); );
} }
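MasterDynamicConfig (the renamed MasterSegmentSettings) is now immutable and carries the two replication knobs that used to live in DruidMasterConfig. A sketch of building one with the new Builder and dumping the JSON a client could POST to /master/config; the property names follow the @JsonProperty-annotated getters above, and the printed layout is an assumption, not output captured from the PR:

import com.fasterxml.jackson.databind.ObjectMapper;

import io.druid.server.master.MasterDynamicConfig;

public class MasterDynamicConfigJsonSketch
{
  public static void main(String[] args) throws Exception
  {
    final MasterDynamicConfig config = new MasterDynamicConfig.Builder()
        .withMaxSegmentsToMove(5)
        .withReplicantLifetime(15)
        .withReplicationThrottleLimit(10)
        .build();

    // Roughly (other fields take the Builder defaults shown above; field order may differ):
    // {"millisToWaitBeforeDeleting":900000,"mergeBytesLimit":100000000,
    //  "mergeSegmentsLimit":2147483647,"maxSegmentsToMove":5,
    //  "emitBalancingStats":false,"replicantLifetime":15,"replicationThrottleLimit":10}
    System.out.println(new ObjectMapper().writeValueAsString(config));
  }
}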

View File

@ -0,0 +1,57 @@
<!DOCTYPE HTML>
<!--
~ Druid - a distributed column store.
~ Copyright (C) 2012 Metamarkets Group Inc.
~
~ This program is free software; you can redistribute it and/or
~ modify it under the terms of the GNU General Public License
~ as published by the Free Software Foundation; either version 2
~ of the License, or (at your option) any later version.
~
~ This program is distributed in the hope that it will be useful,
~ but WITHOUT ANY WARRANTY; without even the implied warranty of
~ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
~ GNU General Public License for more details.
~
~ You should have received a copy of the GNU General Public License
~ along with this program; if not, write to the Free Software
~ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-->
<html xmlns="http://www.w3.org/1999/html">
<head>
<title>Druid Master Console - Rule Configuration</title>
<link rel="shortcut icon" type="image/ico" href="images/favicon.ico">
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<meta name="Description" content="Druid Master Console Page"/>
<style type="text/css">@import "css/style.css";</style>
<style type="text/css">@import "css/jquery-ui-1.9.2.css";</style>
<style type="text/css">@import "css/config.css";</style>
<script type="text/javascript" src="js/underscore-1.2.2.js"></script>
<script type="text/javascript" src="js/jquery-1.8.3.js"></script>
<script type="text/javascript" src="js/jquery-ui-1.9.2.js"></script>
<script type="text/javascript" src="js/config-0.0.1.js"></script>
</head>
<body>
<div class="container">
<div class="heading">Master Configuration</div>
<div id="config">
<div id="config_list"></div>
<button type="button" id="update">Update</button>
<div id="update_dialog" title="Confirm Update">
<p>Are you sure you want to update?</p>
</div>
<button type="button" id="cancel">Cancel</button>
<div id="cancel_dialog" title="Confirm Cancel">
<p>Are you sure you want to cancel? All changes will be lost.</p>
</div>
<div id="error_dialog" title="Error!"></div>
</div>
</div>
</body>
</html>

View File

@ -0,0 +1,8 @@
#config_list {
margin: 20px 0px;
}
.config_label {
width: 250px;
float: left;
}

View File

@ -18,6 +18,7 @@
~ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. ~ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
--> -->
<!-- We plan to redo the master console at some point in the near future with a real front end framework in place. -->
<html> <html>
<head> <head>
<title>Druid Master Console</title> <title>Druid Master Console</title>
@ -38,7 +39,7 @@
<a href="rules.html">Configure Compute Node Rules</a> <a href="rules.html">Configure Compute Node Rules</a>
</div> </div>
<div> <div>
<a href="masterSegmentSettings.html">Configure Dynamic Master Parameters</a> <a href="config.html">Configure Master Parameters</a>
</div> </div>
<div> <div>
<a href="enable.html">Enable/Disable Datasources</a> <a href="enable.html">Enable/Disable Datasources</a>

View File

@ -0,0 +1,105 @@
var configs = [];
function makeConfigDiv(key, value) {
var retVal = "<div class='config'>";
retVal += "<span class='config_label'>" + key + "</span>";
retVal += "<input type='text' class='value' name='value' value='" + value + "'/>";
retVal += "</div>";
return retVal;
}
function domToConfig(configDiv) {
var retVal = {};
retVal.key = $($(configDiv).find(".config_label")).text();
retVal.value = $($(configDiv).find(".value")).val();
return retVal;
}
function getConfigs() {
$.getJSON("/master/config", function(data) {
$('#config_list').empty();
$.each(data, function (key, value) {
$('#config_list').append(makeConfigDiv(key, value));
});
});
}
$(document).ready(function() {
$("button").button();
$("#error_dialog").dialog({
autoOpen: false,
modal:true,
resizeable: false,
buttons: {
Ok : function() {
$(this).dialog("close");
}
}
});
$("#cancel_dialog").dialog({
autoOpen: false,
modal:true,
resizeable: false,
buttons: {
Yes : function() {
getConfigs();
$(this).dialog("close");
},
No: function() {
$(this).dialog("close");
}
}
});
$("#update_dialog").dialog({
autoOpen: false,
modal:true,
resizeable: false,
buttons: {
Yes : function() {
var configs = {};
$.each($("#config_list").children(), function(index, configDiv) {
var config = domToConfig(configDiv);
configs[config.key] = config.value;
});
$.ajax({
type: 'POST',
url:'/master/config',
data: JSON.stringify(configs),
contentType:"application/json; charset=utf-8",
dataType:"json",
error: function(xhr, status, error) {
$("#update_dialog").dialog("close");
$("#error_dialog").html(xhr.responseText);
$("#error_dialog").dialog("open");
},
success: function(data, status, xhr) {
getConfigs();
$("#update_dialog").dialog("close");
}
});
},
Cancel: function() {
$(this).dialog("close");
}
}
});
getConfigs();
$("#cancel").click(function() {
$("#cancel_dialog").dialog("open");
});
$('#update').click(function (){
$("#update_dialog").dialog("open")
});
});
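The new console script above builds a flat key/value object from the form and posts every value as a string. A hedged sketch of how that payload can still bind to the typed MasterDynamicConfig on the server, assuming Jackson's default coercion of JSON strings into numeric and boolean creator parameters (the payload below is illustrative):

import com.fasterxml.jackson.databind.ObjectMapper;

import io.druid.server.master.MasterDynamicConfig;

public class ConsolePayloadBindingSketch
{
  public static void main(String[] args) throws Exception
  {
    final String body = "{\"millisToWaitBeforeDeleting\":\"900000\","
                      + "\"mergeBytesLimit\":\"100000000\","
                      + "\"mergeSegmentsLimit\":\"2147483647\","
                      + "\"maxSegmentsToMove\":\"7\","
                      + "\"replicantLifetime\":\"15\","
                      + "\"replicationThrottleLimit\":\"10\","
                      + "\"emitBalancingStats\":\"false\"}";

    final MasterDynamicConfig config =
        new ObjectMapper().readValue(body, MasterDynamicConfig.class);
    System.out.println(config.getMaxSegmentsToMove()); // 7
  }
}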

View File

@ -1,26 +0,0 @@
$(function () {
$.get('../master/config', function (data) {
document.getElementById("millis").value=data["millisToWaitBeforeDeleting"];
document.getElementById("mergeBytes").value = data["mergeBytesLimit"];
document.getElementById("mergeSegments").value = data["mergeSegmentsLimit"];
document.getElementById("maxSegments").value = data["maxSegmentsToMove"];
document.getElementById("emitBalancingStats").value = data["emitBalancingStats"];
});
$("#submit").click( function ()
{
values = {};
list = $('form').serializeArray();
for (var i=0;i< list.length;i++)
{
values[list[i]["name"]]=list[i]["value"];
}
$.ajax({
url:'../master/config',
type:"POST",
data: JSON.stringify(values),
contentType:"application/json; charset=utf-8",
dataType:"json"
});
});
});

View File

@ -1,24 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<title>Configure Dynamic Master Parameters</title>
</head>
<body>
<form action="process_config.html" autocomplete="on" id ="configs">
millisToWaitBeforeDeleting:<input type="text" name="millisToWaitBeforeDeleting" id="millis">
<br>
mergeBytesLimit: <input type="text" name="mergeBytesLimit" id="mergeBytes">
<br>
mergeSegmentsLimit: <input type="text" name = "mergeSegmentsLimit" id="mergeSegments">
<br>
maxSegmentsToMove: <input type= "text" name ="maxSegmentsToMove" id ="maxSegments">
<br>
emitBalancingStats: <input type= "text" name ="emitBalancingStats" id="emitBalancingStats">
<button type="button" id="submit"> Submit </button>
</form>
</body>
<script type="text/javascript" src="js/jquery-1.8.3.js"></script>
<script type="text/javascript" src="js/masterSegmentSettings.js"></script>
</script>
</html>

View File

@ -53,7 +53,7 @@ public class DruidMasterBalancerProfiler
Map<String, DataSegment> segments = Maps.newHashMap(); Map<String, DataSegment> segments = Maps.newHashMap();
ServiceEmitter emitter; ServiceEmitter emitter;
DatabaseRuleManager manager; DatabaseRuleManager manager;
PeriodLoadRule loadRule = new PeriodLoadRule(new Period("P5000Y"),3,"normal"); PeriodLoadRule loadRule = new PeriodLoadRule(new Period("P5000Y"), 3, "normal");
List<Rule> rules = ImmutableList.<Rule>of(loadRule); List<Rule> rules = ImmutableList.<Rule>of(loadRule);
@Before @Before
@ -71,7 +71,7 @@ public class DruidMasterBalancerProfiler
{ {
Stopwatch watch = new Stopwatch(); Stopwatch watch = new Stopwatch();
int numSegments = 55000; int numSegments = 55000;
int numServers=50; int numServers = 50;
EasyMock.expect(manager.getAllRules()).andReturn(ImmutableMap.<String, List<Rule>>of("test", rules)).anyTimes(); EasyMock.expect(manager.getAllRules()).andReturn(ImmutableMap.<String, List<Rule>>of("test", rules)).anyTimes();
EasyMock.expect(manager.getRules(EasyMock.<String>anyObject())).andReturn(rules).anyTimes(); EasyMock.expect(manager.getRules(EasyMock.<String>anyObject())).andReturn(rules).anyTimes();
EasyMock.expect(manager.getRulesWithDefault(EasyMock.<String>anyObject())).andReturn(rules).anyTimes(); EasyMock.expect(manager.getRulesWithDefault(EasyMock.<String>anyObject())).andReturn(rules).anyTimes();
@ -89,9 +89,8 @@ public class DruidMasterBalancerProfiler
List<DruidServer> serverList = Lists.newArrayList(); List<DruidServer> serverList = Lists.newArrayList();
Map<String, LoadQueuePeon> peonMap = Maps.newHashMap(); Map<String, LoadQueuePeon> peonMap = Maps.newHashMap();
List<ServerHolder> serverHolderList = Lists.newArrayList(); List<ServerHolder> serverHolderList = Lists.newArrayList();
Map<String,DataSegment> segmentMap = Maps.newHashMap(); Map<String, DataSegment> segmentMap = Maps.newHashMap();
for (int i=0;i<numSegments;i++) for (int i = 0; i < numSegments; i++) {
{
segmentMap.put( segmentMap.put(
"segment" + i, "segment" + i,
new DataSegment( new DataSegment(
@ -108,30 +107,26 @@ public class DruidMasterBalancerProfiler
); );
} }
for (int i=0;i<numServers;i++) for (int i = 0; i < numServers; i++) {
{ DruidServer server = EasyMock.createMock(DruidServer.class);
DruidServer server =EasyMock.createMock(DruidServer.class);
EasyMock.expect(server.getMetadata()).andReturn(null).anyTimes(); EasyMock.expect(server.getMetadata()).andReturn(null).anyTimes();
EasyMock.expect(server.getCurrSize()).andReturn(30L).atLeastOnce(); EasyMock.expect(server.getCurrSize()).andReturn(30L).atLeastOnce();
EasyMock.expect(server.getMaxSize()).andReturn(100L).atLeastOnce(); EasyMock.expect(server.getMaxSize()).andReturn(100L).atLeastOnce();
EasyMock.expect(server.getTier()).andReturn("normal").anyTimes(); EasyMock.expect(server.getTier()).andReturn("normal").anyTimes();
EasyMock.expect(server.getName()).andReturn(Integer.toString(i)).atLeastOnce(); EasyMock.expect(server.getName()).andReturn(Integer.toString(i)).atLeastOnce();
EasyMock.expect(server.getHost()).andReturn(Integer.toString(i)).anyTimes(); EasyMock.expect(server.getHost()).andReturn(Integer.toString(i)).anyTimes();
if (i==0) if (i == 0) {
{
EasyMock.expect(server.getSegments()).andReturn(segmentMap).anyTimes(); EasyMock.expect(server.getSegments()).andReturn(segmentMap).anyTimes();
} } else {
else
{
EasyMock.expect(server.getSegments()).andReturn(new HashMap<String, DataSegment>()).anyTimes(); EasyMock.expect(server.getSegments()).andReturn(new HashMap<String, DataSegment>()).anyTimes();
} }
EasyMock.expect(server.getSegment(EasyMock.<String>anyObject())).andReturn(null).anyTimes(); EasyMock.expect(server.getSegment(EasyMock.<String>anyObject())).andReturn(null).anyTimes();
EasyMock.replay(server); EasyMock.replay(server);
LoadQueuePeon peon = new LoadQueuePeonTester(); LoadQueuePeon peon = new LoadQueuePeonTester();
peonMap.put(Integer.toString(i),peon); peonMap.put(Integer.toString(i), peon);
serverHolderList.add(new ServerHolder(server, peon)); serverHolderList.add(new ServerHolder(server, peon));
} }
DruidMasterRuntimeParams params = DruidMasterRuntimeParams params =
DruidMasterRuntimeParams.newBuilder() DruidMasterRuntimeParams.newBuilder()
@ -150,27 +145,34 @@ public class DruidMasterBalancerProfiler
peonMap peonMap
) )
.withAvailableSegments(segmentMap.values()) .withAvailableSegments(segmentMap.values())
.withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMaxSegmentsToMove(MAX_SEGMENTS_TO_MOVE).build()) .withDynamicConfigs(
new MasterDynamicConfig.Builder().withMaxSegmentsToMove(
MAX_SEGMENTS_TO_MOVE
).withReplicantLifetime(500)
.withReplicationThrottleLimit(5)
.build()
)
.withBalancerReferenceTimestamp(new DateTime("2013-01-01")) .withBalancerReferenceTimestamp(new DateTime("2013-01-01"))
.withEmitter(emitter) .withEmitter(emitter)
.withDatabaseRuleManager(manager) .withDatabaseRuleManager(manager)
.withReplicationManager(new ReplicationThrottler(2, 500)) .withReplicationManager(new ReplicationThrottler(2, 500))
.withSegmentReplicantLookup( .withSegmentReplicantLookup(
SegmentReplicantLookup.make(new DruidCluster( SegmentReplicantLookup.make(
ImmutableMap.<String, MinMaxPriorityQueue<ServerHolder>>of( new DruidCluster(
"normal", ImmutableMap.<String, MinMaxPriorityQueue<ServerHolder>>of(
MinMaxPriorityQueue.orderedBy(DruidMasterBalancerTester.percentUsedComparator) "normal",
.create( MinMaxPriorityQueue.orderedBy(DruidMasterBalancerTester.percentUsedComparator)
serverHolderList .create(
) serverHolderList
)
)
) )
) )
)
) )
.build(); .build();
DruidMasterBalancerTester tester = new DruidMasterBalancerTester(master); DruidMasterBalancerTester tester = new DruidMasterBalancerTester(master);
DruidMasterRuleRunner runner = new DruidMasterRuleRunner(master,500,5); DruidMasterRuleRunner runner = new DruidMasterRuleRunner(master);
watch.start(); watch.start();
DruidMasterRuntimeParams balanceParams = tester.run(params); DruidMasterRuntimeParams balanceParams = tester.run(params);
DruidMasterRuntimeParams assignParams = runner.run(params); DruidMasterRuntimeParams assignParams = runner.run(params);
@ -178,56 +180,68 @@ public class DruidMasterBalancerProfiler
} }
public void profileRun(){ public void profileRun()
Stopwatch watch = new Stopwatch(); {
LoadQueuePeonTester fromPeon = new LoadQueuePeonTester(); Stopwatch watch = new Stopwatch();
LoadQueuePeonTester toPeon = new LoadQueuePeonTester(); LoadQueuePeonTester fromPeon = new LoadQueuePeonTester();
LoadQueuePeonTester toPeon = new LoadQueuePeonTester();
EasyMock.expect(druidServer1.getName()).andReturn("from").atLeastOnce(); EasyMock.expect(druidServer1.getName()).andReturn("from").atLeastOnce();
EasyMock.expect(druidServer1.getCurrSize()).andReturn(30L).atLeastOnce(); EasyMock.expect(druidServer1.getCurrSize()).andReturn(30L).atLeastOnce();
EasyMock.expect(druidServer1.getMaxSize()).andReturn(100L).atLeastOnce(); EasyMock.expect(druidServer1.getMaxSize()).andReturn(100L).atLeastOnce();
EasyMock.expect(druidServer1.getSegments()).andReturn(segments).anyTimes(); EasyMock.expect(druidServer1.getSegments()).andReturn(segments).anyTimes();
EasyMock.expect(druidServer1.getSegment(EasyMock.<String>anyObject())).andReturn(null).anyTimes(); EasyMock.expect(druidServer1.getSegment(EasyMock.<String>anyObject())).andReturn(null).anyTimes();
EasyMock.replay(druidServer1); EasyMock.replay(druidServer1);
EasyMock.expect(druidServer2.getName()).andReturn("to").atLeastOnce(); EasyMock.expect(druidServer2.getName()).andReturn("to").atLeastOnce();
EasyMock.expect(druidServer2.getTier()).andReturn("normal").anyTimes(); EasyMock.expect(druidServer2.getTier()).andReturn("normal").anyTimes();
EasyMock.expect(druidServer2.getCurrSize()).andReturn(0L).atLeastOnce(); EasyMock.expect(druidServer2.getCurrSize()).andReturn(0L).atLeastOnce();
EasyMock.expect(druidServer2.getMaxSize()).andReturn(100L).atLeastOnce(); EasyMock.expect(druidServer2.getMaxSize()).andReturn(100L).atLeastOnce();
EasyMock.expect(druidServer2.getSegments()).andReturn(new HashMap<String, DataSegment>()).anyTimes(); EasyMock.expect(druidServer2.getSegments()).andReturn(new HashMap<String, DataSegment>()).anyTimes();
EasyMock.expect(druidServer2.getSegment(EasyMock.<String>anyObject())).andReturn(null).anyTimes(); EasyMock.expect(druidServer2.getSegment(EasyMock.<String>anyObject())).andReturn(null).anyTimes();
EasyMock.replay(druidServer2); EasyMock.replay(druidServer2);
master.moveSegment( master.moveSegment(
EasyMock.<String>anyObject(), EasyMock.<String>anyObject(),
EasyMock.<String>anyObject(), EasyMock.<String>anyObject(),
EasyMock.<String>anyObject(), EasyMock.<String>anyObject(),
EasyMock.<LoadPeonCallback>anyObject() EasyMock.<LoadPeonCallback>anyObject()
); );
EasyMock.expectLastCall().anyTimes(); EasyMock.expectLastCall().anyTimes();
EasyMock.replay(master); EasyMock.replay(master);
DruidMasterRuntimeParams params = DruidMasterRuntimeParams params =
DruidMasterRuntimeParams.newBuilder() DruidMasterRuntimeParams.newBuilder()
.withDruidCluster( .withDruidCluster(
new DruidCluster( new DruidCluster(
ImmutableMap.<String, MinMaxPriorityQueue<ServerHolder>>of( ImmutableMap.<String, MinMaxPriorityQueue<ServerHolder>>of(
"normal", "normal",
MinMaxPriorityQueue.orderedBy(DruidMasterBalancerTester.percentUsedComparator) MinMaxPriorityQueue.orderedBy(DruidMasterBalancerTester.percentUsedComparator)
.create( .create(
Arrays.asList( Arrays.asList(
new ServerHolder(druidServer1, fromPeon), new ServerHolder(druidServer1, fromPeon),
new ServerHolder(druidServer2, toPeon) new ServerHolder(druidServer2, toPeon)
) )
) )
) )
) )
) )
.withLoadManagementPeons(ImmutableMap.<String, LoadQueuePeon>of("from", fromPeon, "to", toPeon)) .withLoadManagementPeons(
.withAvailableSegments(segments.values()) ImmutableMap.<String, LoadQueuePeon>of(
.withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMaxSegmentsToMove(MAX_SEGMENTS_TO_MOVE).build()) "from",
.withBalancerReferenceTimestamp(new DateTime("2013-01-01")) fromPeon,
.build(); "to",
toPeon
)
)
.withAvailableSegments(segments.values())
.withDynamicConfigs(
new MasterDynamicConfig.Builder().withMaxSegmentsToMove(
MAX_SEGMENTS_TO_MOVE
).build()
)
.withBalancerReferenceTimestamp(new DateTime("2013-01-01"))
.build();
DruidMasterBalancerTester tester = new DruidMasterBalancerTester(master); DruidMasterBalancerTester tester = new DruidMasterBalancerTester(master);
watch.start(); watch.start();
DruidMasterRuntimeParams balanceParams = tester.run(params); DruidMasterRuntimeParams balanceParams = tester.run(params);

View File

@ -192,7 +192,11 @@ public class DruidMasterBalancerTest
) )
) )
.withAvailableSegments(segments.values()) .withAvailableSegments(segments.values())
.withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMaxSegmentsToMove(MAX_SEGMENTS_TO_MOVE).build()) .withDynamicConfigs(
new MasterDynamicConfig.Builder().withMaxSegmentsToMove(
MAX_SEGMENTS_TO_MOVE
).build()
)
.withBalancerReferenceTimestamp(new DateTime("2013-01-01")) .withBalancerReferenceTimestamp(new DateTime("2013-01-01"))
.build(); .build();
@ -264,9 +268,9 @@ public class DruidMasterBalancerTest
) )
) )
.withAvailableSegments(segments.values()) .withAvailableSegments(segments.values())
.withMasterSegmentSettings( .withDynamicConfigs(
new MasterSegmentSettings.Builder().withMaxSegmentsToMove(MAX_SEGMENTS_TO_MOVE) new MasterDynamicConfig.Builder().withMaxSegmentsToMove(MAX_SEGMENTS_TO_MOVE)
.build() .build()
) )
.withBalancerReferenceTimestamp(new DateTime("2013-01-01")) .withBalancerReferenceTimestamp(new DateTime("2013-01-01"))
.build(); .build();
@ -357,8 +361,8 @@ public class DruidMasterBalancerTest
) )
) )
.withAvailableSegments(segments.values()) .withAvailableSegments(segments.values())
.withMasterSegmentSettings( .withDynamicConfigs(
new MasterSegmentSettings.Builder().withMaxSegmentsToMove( new MasterDynamicConfig.Builder().withMaxSegmentsToMove(
MAX_SEGMENTS_TO_MOVE MAX_SEGMENTS_TO_MOVE
).build() ).build()
) )

View File

@ -86,7 +86,7 @@ public class DruidMasterRuleRunnerTest
start = start.plusHours(1); start = start.plusHours(1);
} }
ruleRunner = new DruidMasterRuleRunner(master, 1, 24); ruleRunner = new DruidMasterRuleRunner(new ReplicationThrottler(24, 1), master);
} }
@After @After
@ -177,7 +177,7 @@ public class DruidMasterRuleRunnerTest
.withAvailableSegments(availableSegments) .withAvailableSegments(availableSegments)
.withDatabaseRuleManager(databaseRuleManager) .withDatabaseRuleManager(databaseRuleManager)
.withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster())) .withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster()))
.withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMaxSegmentsToMove(5).build()) .withDynamicConfigs(new MasterDynamicConfig.Builder().withMaxSegmentsToMove(5).build())
.build(); .build();
DruidMasterRuntimeParams afterParams = ruleRunner.run(params); DruidMasterRuntimeParams afterParams = ruleRunner.run(params);
@ -522,7 +522,7 @@ public class DruidMasterRuleRunnerTest
DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder() DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder()
.withDruidCluster(druidCluster) .withDruidCluster(druidCluster)
.withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMillisToWaitBeforeDeleting(0L).build()) .withDynamicConfigs(new MasterDynamicConfig.Builder().withMillisToWaitBeforeDeleting(0L).build())
.withAvailableSegments(availableSegments) .withAvailableSegments(availableSegments)
.withDatabaseRuleManager(databaseRuleManager) .withDatabaseRuleManager(databaseRuleManager)
.withSegmentReplicantLookup(segmentReplicantLookup) .withSegmentReplicantLookup(segmentReplicantLookup)
@ -596,7 +596,7 @@ public class DruidMasterRuleRunnerTest
DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder() DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder()
.withDruidCluster(druidCluster) .withDruidCluster(druidCluster)
.withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMillisToWaitBeforeDeleting(0L).build()) .withDynamicConfigs(new MasterDynamicConfig.Builder().withMillisToWaitBeforeDeleting(0L).build())
.withAvailableSegments(availableSegments) .withAvailableSegments(availableSegments)
.withDatabaseRuleManager(databaseRuleManager) .withDatabaseRuleManager(databaseRuleManager)
.withSegmentReplicantLookup(segmentReplicantLookup) .withSegmentReplicantLookup(segmentReplicantLookup)
@ -677,7 +677,7 @@ public class DruidMasterRuleRunnerTest
DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder() DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder()
.withDruidCluster(druidCluster) .withDruidCluster(druidCluster)
.withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMillisToWaitBeforeDeleting(0L).build()) .withDynamicConfigs(new MasterDynamicConfig.Builder().withMillisToWaitBeforeDeleting(0L).build())
.withAvailableSegments(availableSegments) .withAvailableSegments(availableSegments)
.withDatabaseRuleManager(databaseRuleManager) .withDatabaseRuleManager(databaseRuleManager)
.withSegmentReplicantLookup(segmentReplicantLookup) .withSegmentReplicantLookup(segmentReplicantLookup)
@ -754,7 +754,7 @@ public class DruidMasterRuleRunnerTest
DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder() DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder()
.withDruidCluster(druidCluster) .withDruidCluster(druidCluster)
.withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMillisToWaitBeforeDeleting(0L).build()) .withDynamicConfigs(new MasterDynamicConfig.Builder().withMillisToWaitBeforeDeleting(0L).build())
.withAvailableSegments(availableSegments) .withAvailableSegments(availableSegments)
.withDatabaseRuleManager(databaseRuleManager) .withDatabaseRuleManager(databaseRuleManager)
.withSegmentReplicantLookup(segmentReplicantLookup) .withSegmentReplicantLookup(segmentReplicantLookup)
@ -843,7 +843,7 @@ public class DruidMasterRuleRunnerTest
DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder() DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder()
.withDruidCluster(druidCluster) .withDruidCluster(druidCluster)
.withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMillisToWaitBeforeDeleting(0L).build()) .withDynamicConfigs(new MasterDynamicConfig.Builder().withMillisToWaitBeforeDeleting(0L).build())
.withAvailableSegments(availableSegments) .withAvailableSegments(availableSegments)
.withDatabaseRuleManager(databaseRuleManager) .withDatabaseRuleManager(databaseRuleManager)
.withSegmentReplicantLookup(segmentReplicantLookup) .withSegmentReplicantLookup(segmentReplicantLookup)
@ -1029,7 +1029,7 @@ public class DruidMasterRuleRunnerTest
DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder() DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder()
.withDruidCluster(druidCluster) .withDruidCluster(druidCluster)
.withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMillisToWaitBeforeDeleting(0L).build()) .withDynamicConfigs(new MasterDynamicConfig.Builder().withMillisToWaitBeforeDeleting(0L).build())
.withAvailableSegments(longerAvailableSegments) .withAvailableSegments(longerAvailableSegments)
.withDatabaseRuleManager(databaseRuleManager) .withDatabaseRuleManager(databaseRuleManager)
.withSegmentReplicantLookup(segmentReplicantLookup) .withSegmentReplicantLookup(segmentReplicantLookup)

View File

@ -459,13 +459,13 @@ public class DruidMasterSegmentMergerTest
final DruidMasterSegmentMerger merger = new DruidMasterSegmentMerger(indexingServiceClient, whitelistRef); final DruidMasterSegmentMerger merger = new DruidMasterSegmentMerger(indexingServiceClient, whitelistRef);
final DruidMasterRuntimeParams params = DruidMasterRuntimeParams.newBuilder() final DruidMasterRuntimeParams params = DruidMasterRuntimeParams.newBuilder()
.withAvailableSegments(ImmutableSet.copyOf(segments)) .withAvailableSegments(ImmutableSet.copyOf(segments))
.withMasterSegmentSettings( .withDynamicConfigs(
new MasterSegmentSettings.Builder().withMergeBytesLimit( new MasterDynamicConfig.Builder().withMergeBytesLimit(
mergeBytesLimit mergeBytesLimit
).withMergeSegmentsLimit( ).withMergeSegmentsLimit(
mergeSegmentsLimit mergeSegmentsLimit
) )
.build() .build()
) )
.build(); .build();
merger.run(params); merger.run(params);

View File

@ -93,18 +93,6 @@ public class DruidMasterTest
{ {
return null; return null;
} }
@Override
public int getReplicantLifetime()
{
return 0;
}
@Override
public int getReplicantThrottleLimit()
{
return 0;
}
}, },
new ZkPathsConfig(){ new ZkPathsConfig(){

View File

@ -44,6 +44,7 @@ import io.druid.guice.ManageLifecycle;
import io.druid.guice.annotations.Self; import io.druid.guice.annotations.Self;
import io.druid.server.http.BackwardsCompatiableInfoResource; import io.druid.server.http.BackwardsCompatiableInfoResource;
import io.druid.server.http.InfoResource; import io.druid.server.http.InfoResource;
import io.druid.server.http.MasterDynamicConfigsResource;
import io.druid.server.http.MasterRedirectInfo; import io.druid.server.http.MasterRedirectInfo;
import io.druid.server.http.MasterResource; import io.druid.server.http.MasterResource;
import io.druid.server.http.RedirectFilter; import io.druid.server.http.RedirectFilter;
@ -111,6 +112,7 @@ public class CliCoordinator extends ServerRunnable
Jerseys.addResource(binder, BackwardsCompatiableInfoResource.class); Jerseys.addResource(binder, BackwardsCompatiableInfoResource.class);
Jerseys.addResource(binder, InfoResource.class); Jerseys.addResource(binder, InfoResource.class);
Jerseys.addResource(binder, MasterResource.class); Jerseys.addResource(binder, MasterResource.class);
Jerseys.addResource(binder, MasterDynamicConfigsResource.class);
LifecycleModule.register(binder, Server.class); LifecycleModule.register(binder, Server.class);
} }