mirror of https://github.com/apache/druid.git
Merge pull request #251 from metamx/fix-config
Make dynamic master resource configuration work again
commit 16558404d6
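In short, this change replaces MasterSegmentSettings with MasterDynamicConfig, folds the replication throttle settings (replicantLifetime, replicationThrottleLimit) into that dynamic config, and wires the master console and the /master/config resource back up to it. As a minimal sketch (not part of the diff itself; the numeric values mirror the Builder defaults shown further down), the new config can be built like this:

import io.druid.server.master.MasterDynamicConfig;

// Sketch only: builds the new dynamic config with the Builder added in this commit.
public class DynamicConfigExample
{
  public static MasterDynamicConfig defaults()
  {
    return new MasterDynamicConfig.Builder()
        .withMaxSegmentsToMove(5)
        .withReplicantLifetime(15)          // moved here from DruidMasterConfig by this PR
        .withReplicationThrottleLimit(10)   // moved here from DruidMasterConfig by this PR
        .build();
  }
}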
@@ -35,7 +35,6 @@ import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.metamx.common.Granularity;
import com.metamx.common.ISE;
import com.metamx.common.MapUtils;
import com.metamx.common.guava.FunctionalIterable;
import com.metamx.common.logger.Logger;
import io.druid.common.utils.JodaUtils;
@@ -52,8 +51,6 @@ import io.druid.indexer.rollup.DataRollupSpec;
import io.druid.indexer.updater.DbUpdaterJobSpec;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.segment.serde.Registererer;
import io.druid.segment.serde.Registererers;
import io.druid.timeline.DataSegment;
import io.druid.timeline.partition.ShardSpec;
import org.apache.hadoop.conf.Configuration;
@@ -65,11 +62,9 @@ import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.joda.time.format.ISODateTimeFormat;

import javax.annotation.Nullable;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@@ -1,37 +0,0 @@
/*
 * Druid - a distributed column store.
 * Copyright (C) 2012, 2013 Metamarkets Group Inc.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 */

package io.druid.segment.serde;

import com.fasterxml.jackson.databind.ObjectMapper;

/**
 * This is a "factory" interface for registering handlers in the system. It exists because I'm unaware of
 * another way to register the complex serdes in the MR jobs that run on Hadoop. As such, instances of this interface
 * must be instantiatable via a no argument default constructor (the MR jobs on Hadoop use reflection to instantiate
 * instances).
 *
 * The name is not a typo, I felt that it needed an extra "er" to make the pronunciation that much more difficult.
 */
public interface Registererer
{
  public void register();

  public void registerSubType(ObjectMapper jsonMapper);
}
@@ -1,42 +0,0 @@
/*
 * Druid - a distributed column store.
 * Copyright (C) 2012, 2013 Metamarkets Group Inc.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 */

package io.druid.segment.serde;

import com.fasterxml.jackson.databind.ObjectMapper;

/**
 */
public class Registererers
{
  public static void registerHandlers(Iterable<Registererer> registererers, Iterable<ObjectMapper> mappers)
  {
    for (Registererer registererer : registererers) {
      if (!doneRegister) {
        registererer.register();
      }
      for (ObjectMapper mapper : mappers) {
        registererer.registerSubType(mapper);
      }
    }
    doneRegister = true;
  }

  private static boolean doneRegister = false;
}
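For context, implementations of the deleted Registererer interface were plain classes with a no-argument constructor (the Hadoop MR jobs instantiated them via reflection) that registered complex serdes globally and added subtypes to each ObjectMapper. A hypothetical implementation, for illustration only:

package io.druid.segment.serde;

import com.fasterxml.jackson.databind.ObjectMapper;

// Hypothetical example implementation of the removed interface; the bodies are
// placeholders and not taken from the Druid codebase.
public class ExampleRegistererer implements Registererer
{
  @Override
  public void register()
  {
    // register the complex serde / handler globally (done once per process)
  }

  @Override
  public void registerSubType(ObjectMapper jsonMapper)
  {
    // add any polymorphic subtypes this handler contributes to the given mapper,
    // e.g. jsonMapper.registerSubtypes(...)
  }
}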
@@ -20,7 +20,7 @@
package io.druid.server.http;

import io.druid.common.config.JacksonConfigManager;
import io.druid.server.master.MasterSegmentSettings;
import io.druid.server.master.MasterDynamicConfig;

import javax.inject.Inject;
import javax.ws.rs.Consumes;
@@ -33,38 +33,38 @@ import javax.ws.rs.core.Response;
/**
 */
@Path("/master/config")
public class MasterSegmentSettingsResource
public class MasterDynamicConfigsResource
{
  private final JacksonConfigManager manager;

  @Inject
  public MasterSegmentSettingsResource(
  public MasterDynamicConfigsResource(
      JacksonConfigManager manager
  )
  {
    this.manager=manager;
    this.manager = manager;
  }

  @GET
  @Produces("application/json")
  public Response getDynamicConfigs()
  {
    Response.ResponseBuilder builder = Response.status(Response.Status.OK)
        .entity(
            manager.watch(MasterSegmentSettings.CONFIG_KEY,MasterSegmentSettings.class).get()
        );
    return builder.build();
    return Response.ok(
        manager.watch(
            MasterDynamicConfig.CONFIG_KEY,
            MasterDynamicConfig.class
        ).get()
    ).build();
  }

  @POST
  @Consumes("application/json")
  public Response setDynamicConfigs(
      final MasterSegmentSettings masterSegmentSettings
  )
  public Response setDynamicConfigs(final MasterDynamicConfig dynamicConfig)
  {
    if (!manager.set(MasterSegmentSettings.CONFIG_KEY, masterSegmentSettings)) {
    if (!manager.set(MasterDynamicConfig.CONFIG_KEY, dynamicConfig)) {
      return Response.status(Response.Status.BAD_REQUEST).build();
    }
    return Response.status(Response.Status.OK).build();
    return Response.ok().build();
  }

}
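The resource above accepts the whole dynamic config as a JSON document on POST /master/config, with field names matching the @JsonProperty annotations on MasterDynamicConfig later in this diff. A sketch (illustrative values only, not part of the commit) of producing such a body with the project's own ObjectMapper:

import com.fasterxml.jackson.databind.ObjectMapper;

import io.druid.jackson.DefaultObjectMapper;
import io.druid.server.master.MasterDynamicConfig;

// Sketch only: serialize a MasterDynamicConfig into the JSON body a client
// (such as the console page added below) could POST to /master/config.
public class PostDynamicConfigExample
{
  public static String jsonBody() throws Exception
  {
    ObjectMapper mapper = new DefaultObjectMapper();
    return mapper.writeValueAsString(
        new MasterDynamicConfig.Builder()
            .withMillisToWaitBeforeDeleting(15 * 60 * 1000L)  // illustrative value
            .withMergeBytesLimit(100000000L)                  // illustrative value
            .withReplicationThrottleLimit(10)                 // illustrative value
            .build()
    );
  }
}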
@@ -22,6 +22,7 @@ package io.druid.server.http;
import com.google.inject.Inject;
import io.druid.server.master.DruidMaster;
import io.druid.server.master.LoadPeonCallback;
import io.druid.server.master.MasterDynamicConfig;

import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
@@ -30,7 +31,6 @@ import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Response;
import java.util.List;
import java.util.Map;

/**
 */
@@ -113,8 +113,8 @@ public class MasterResource
  @GET
  @Path("/loadstatus")
  @Produces("application/json")
  public Map<String, Double> getLoadStatus()
  public Response getLoadStatus()
  {
    return master.getLoadStatus();
    return Response.ok(master.getLoadStatus()).build();
  }
}
@@ -84,6 +84,7 @@ public class DruidMaster

  private volatile boolean started = false;
  private volatile boolean master = false;
  private volatile AtomicReference<MasterDynamicConfig> dynamicConfigs;

  private final DruidMasterConfig config;
  private final ZkPathsConfig zkPaths;
@@ -98,7 +99,6 @@ public class DruidMaster
  private final LoadQueueTaskMaster taskMaster;
  private final Map<String, LoadQueuePeon> loadManagementPeons;
  private final AtomicReference<LeaderLatch> leaderLatch;
  private volatile AtomicReference<MasterSegmentSettings> segmentSettingsAtomicReference;

  @Inject
  public DruidMaster(
@@ -161,7 +161,7 @@ public class DruidMaster
    this.exec = scheduledExecutorFactory.create(1, "Master-Exec--%d");

    this.leaderLatch = new AtomicReference<>(null);
    this.segmentSettingsAtomicReference = new AtomicReference<>(null);
    this.dynamicConfigs = new AtomicReference<>(null);
    this.loadManagementPeons = loadQueuePeonMap;
  }

@@ -215,6 +215,11 @@ public class DruidMaster
    return loadStatus;
  }

  public MasterDynamicConfig getDynamicConfigs()
  {
    return dynamicConfigs.get();
  }

  public void removeSegment(DataSegment segment)
  {
    log.info("Removing Segment[%s]", segment);
@@ -471,10 +476,10 @@ public class DruidMaster
      serverInventoryView.start();

      final List<Pair<? extends MasterRunnable, Duration>> masterRunnables = Lists.newArrayList();
      segmentSettingsAtomicReference = configManager.watch(
          MasterSegmentSettings.CONFIG_KEY,
          MasterSegmentSettings.class,
          new MasterSegmentSettings.Builder().build()
      dynamicConfigs = configManager.watch(
          MasterDynamicConfig.CONFIG_KEY,
          MasterDynamicConfig.class,
          new MasterDynamicConfig.Builder().build()
      );
      masterRunnables.add(Pair.of(new MasterComputeManagerRunnable(), config.getMasterPeriod()));
      if (indexingServiceClient != null) {
@@ -663,7 +668,7 @@ public class DruidMaster
          DruidMasterRuntimeParams.newBuilder()
              .withStartTime(startTime)
              .withDatasources(databaseSegmentManager.getInventory())
              .withMasterSegmentSettings(segmentSettingsAtomicReference.get())
              .withDynamicConfigs(dynamicConfigs.get())
              .withEmitter(emitter)
              .build();

@@ -750,13 +755,11 @@ public class DruidMaster
              .withLoadManagementPeons(loadManagementPeons)
              .withSegmentReplicantLookup(segmentReplicantLookup)
              .withBalancerReferenceTimestamp(DateTime.now())
              .withMasterSegmentSettings(segmentSettingsAtomicReference.get())
              .withDynamicConfigs(dynamicConfigs.get())
              .build();
          }
        },
        new DruidMasterRuleRunner(
            DruidMaster.this, config.getReplicantLifetime(), config.getReplicantThrottleLimit()
        ),
        new DruidMasterRuleRunner(DruidMaster.this),
        new DruidMasterCleanup(DruidMaster.this),
        new DruidMasterBalancer(DruidMaster.this),
        new DruidMasterLogger()
@@ -79,7 +79,7 @@ public class DruidMasterBalancer implements DruidMasterHelper
    final MasterStats stats = new MasterStats();
    final DateTime referenceTimestamp = params.getBalancerReferenceTimestamp();
    final BalancerStrategy strategy = params.getBalancerStrategyFactory().createBalancerStrategy(referenceTimestamp);
    final int maxSegmentsToMove = params.getMasterSegmentSettings().getMaxSegmentsToMove();
    final int maxSegmentsToMove = params.getMasterDynamicConfig().getMaxSegmentsToMove();

    for (Map.Entry<String, MinMaxPriorityQueue<ServerHolder>> entry :
        params.getDruidCluster().getCluster().entrySet()) {
@@ -124,7 +124,7 @@ public class DruidMasterBalancer implements DruidMasterHelper
        }
      }
      stats.addToTieredStat("movedCount", tier, currentlyMovingSegments.get(tier).size());
      if (params.getMasterSegmentSettings().isEmitBalancingStats()) {
      if (params.getMasterDynamicConfig().emitBalancingStats()) {
        strategy.emitStats(tier, stats, serverHolderList);

      }
@@ -54,14 +54,6 @@ public abstract class DruidMasterConfig
    return false;
  }

  @Config("druid.master.replicant.lifetime")
  @Default("15")
  public abstract int getReplicantLifetime();

  @Config("druid.master.replicant.throttleLimit")
  @Default("10")
  public abstract int getReplicantThrottleLimit();

  @Config("druid.master.load.timeout")
  public Duration getLoadTimeoutDelay()
  {
@@ -37,10 +37,21 @@ public class DruidMasterRuleRunner implements DruidMasterHelper

  private final DruidMaster master;

  public DruidMasterRuleRunner(DruidMaster master, int replicantLifeTime, int replicantThrottleLimit)
  public DruidMasterRuleRunner(DruidMaster master)
  {
    this(
        new ReplicationThrottler(
            master.getDynamicConfigs().getReplicationThrottleLimit(),
            master.getDynamicConfigs().getReplicantLifetime()
        ),
        master
    );
  }

  public DruidMasterRuleRunner(ReplicationThrottler replicatorThrottler, DruidMaster master)
  {
    this.replicatorThrottler = replicatorThrottler;
    this.master = master;
    this.replicatorThrottler = new ReplicationThrottler(replicantThrottleLimit, replicantLifeTime);
  }

  @Override
@@ -46,7 +46,7 @@ public class DruidMasterRuntimeParams
  private final Map<String, LoadQueuePeon> loadManagementPeons;
  private final ReplicationThrottler replicationManager;
  private final ServiceEmitter emitter;
  private final MasterSegmentSettings masterSegmentSettings;
  private final MasterDynamicConfig masterDynamicConfig;
  private final MasterStats stats;
  private final DateTime balancerReferenceTimestamp;
  private final BalancerStrategyFactory strategyFactory;
@@ -61,7 +61,7 @@ public class DruidMasterRuntimeParams
      Map<String, LoadQueuePeon> loadManagementPeons,
      ReplicationThrottler replicationManager,
      ServiceEmitter emitter,
      MasterSegmentSettings masterSegmentSettings,
      MasterDynamicConfig masterDynamicConfig,
      MasterStats stats,
      DateTime balancerReferenceTimestamp,
      BalancerStrategyFactory strategyFactory
@@ -76,7 +76,7 @@ public class DruidMasterRuntimeParams
    this.loadManagementPeons = loadManagementPeons;
    this.replicationManager = replicationManager;
    this.emitter = emitter;
    this.masterSegmentSettings = masterSegmentSettings;
    this.masterDynamicConfig = masterDynamicConfig;
    this.stats = stats;
    this.balancerReferenceTimestamp = balancerReferenceTimestamp;
    this.strategyFactory = strategyFactory;
@@ -127,9 +127,9 @@ public class DruidMasterRuntimeParams
    return emitter;
  }

  public MasterSegmentSettings getMasterSegmentSettings()
  public MasterDynamicConfig getMasterDynamicConfig()
  {
    return masterSegmentSettings;
    return masterDynamicConfig;
  }

  public MasterStats getMasterStats()
@@ -149,7 +149,7 @@ public class DruidMasterRuntimeParams

  public boolean hasDeletionWaitTimeElapsed()
  {
    return (System.currentTimeMillis() - getStartTime() > masterSegmentSettings.getMillisToWaitBeforeDeleting());
    return (System.currentTimeMillis() - getStartTime() > masterDynamicConfig.getMillisToWaitBeforeDeleting());
  }

  public static Builder newBuilder()
@@ -169,7 +169,7 @@ public class DruidMasterRuntimeParams
        loadManagementPeons,
        replicationManager,
        emitter,
        masterSegmentSettings,
        masterDynamicConfig,
        stats,
        balancerReferenceTimestamp,
        strategyFactory
@@ -187,7 +187,7 @@ public class DruidMasterRuntimeParams
    private final Map<String, LoadQueuePeon> loadManagementPeons;
    private ReplicationThrottler replicationManager;
    private ServiceEmitter emitter;
    private MasterSegmentSettings masterSegmentSettings;
    private MasterDynamicConfig masterDynamicConfig;
    private MasterStats stats;
    private DateTime balancerReferenceTimestamp;
    private BalancerStrategyFactory strategyFactory;
@@ -204,7 +204,7 @@ public class DruidMasterRuntimeParams
      this.replicationManager = null;
      this.emitter = null;
      this.stats = new MasterStats();
      this.masterSegmentSettings = new MasterSegmentSettings.Builder().build();
      this.masterDynamicConfig = new MasterDynamicConfig.Builder().build();
      this.balancerReferenceTimestamp = null;
      this.strategyFactory = new CostBalancerStrategyFactory();
    }
@@ -219,7 +219,7 @@ public class DruidMasterRuntimeParams
        Map<String, LoadQueuePeon> loadManagementPeons,
        ReplicationThrottler replicationManager,
        ServiceEmitter emitter,
        MasterSegmentSettings masterSegmentSettings,
        MasterDynamicConfig masterDynamicConfig,
        MasterStats stats,
        DateTime balancerReferenceTimestamp,
        BalancerStrategyFactory strategyFactory
@@ -234,7 +234,7 @@ public class DruidMasterRuntimeParams
      this.loadManagementPeons = loadManagementPeons;
      this.replicationManager = replicationManager;
      this.emitter = emitter;
      this.masterSegmentSettings = masterSegmentSettings;
      this.masterDynamicConfig = masterDynamicConfig;
      this.stats = stats;
      this.balancerReferenceTimestamp = balancerReferenceTimestamp;
      this.strategyFactory=strategyFactory;
@@ -252,7 +252,7 @@ public class DruidMasterRuntimeParams
          loadManagementPeons,
          replicationManager,
          emitter,
          masterSegmentSettings,
          masterDynamicConfig,
          stats,
          balancerReferenceTimestamp,
          strategyFactory
@@ -319,9 +319,9 @@ public class DruidMasterRuntimeParams
      return this;
    }

    public Builder withMasterSegmentSettings(MasterSegmentSettings configs)
    public Builder withDynamicConfigs(MasterDynamicConfig configs)
    {
      this.masterSegmentSettings = configs;
      this.masterDynamicConfig = configs;
      return this;
    }

@@ -99,9 +99,9 @@ public class DruidMasterSegmentMerger implements DruidMasterHelper

      for (int i = 0; i < timelineObjects.size(); i++) {
        if (!segmentsToMerge.add(timelineObjects.get(i))
            || segmentsToMerge.getByteCount() > params.getMasterSegmentSettings().getMergeBytesLimit()
            || segmentsToMerge.getSegmentCount() >= params.getMasterSegmentSettings().getMergeSegmentsLimit()) {
          i -= segmentsToMerge.backtrack(params.getMasterSegmentSettings().getMergeBytesLimit());
            || segmentsToMerge.getByteCount() > params.getMasterDynamicConfig().getMergeBytesLimit()
            || segmentsToMerge.getSegmentCount() >= params.getMasterDynamicConfig().getMergeSegmentsLimit()) {
          i -= segmentsToMerge.backtrack(params.getMasterDynamicConfig().getMergeBytesLimit());

          if (segmentsToMerge.getSegmentCount() > 1) {
            stats.addToGlobalStat("mergedCount", mergeSegments(segmentsToMerge, entry.getKey()));
@@ -117,7 +117,7 @@ public class DruidMasterSegmentMerger implements DruidMasterHelper
      }

      // Finish any timelineObjects to merge that may have not hit threshold
      segmentsToMerge.backtrack(params.getMasterSegmentSettings().getMergeBytesLimit());
      segmentsToMerge.backtrack(params.getMasterDynamicConfig().getMergeBytesLimit());
      if (segmentsToMerge.getSegmentCount() > 1) {
        stats.addToGlobalStat("mergedCount", mergeSegments(segmentsToMerge, entry.getKey()));
      }
@@ -21,36 +21,38 @@ package io.druid.server.master;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;

public class MasterSegmentSettings
public class MasterDynamicConfig
{
  public static final String CONFIG_KEY = "master.dynamicConfigs";
  private long millisToWaitBeforeDeleting = 15 * 60 * 1000L;
  private long mergeBytesLimit = 100000000L;
  private int mergeSegmentsLimit = Integer.MAX_VALUE;
  private int maxSegmentsToMove = 5;
  private boolean emitBalancingStats = false;
  public static final String CONFIG_KEY = "master.config";

  private final long millisToWaitBeforeDeleting;
  private final long mergeBytesLimit;
  private final int mergeSegmentsLimit;
  private final int maxSegmentsToMove;
  private final int replicantLifetime;
  private final int replicationThrottleLimit;
  private final boolean emitBalancingStats;

  @JsonCreator
  public MasterSegmentSettings(
      @JsonProperty("millisToWaitBeforeDeleting") Long millisToWaitBeforeDeleting,
      @JsonProperty("mergeBytesLimit") Long mergeBytesLimit,
      @JsonProperty("mergeSegmentsLimit") Integer mergeSegmentsLimit,
      @JsonProperty("maxSegmentsToMove") Integer maxSegmentsToMove,
      @JsonProperty("emitBalancingStats") Boolean emitBalancingStats
  public MasterDynamicConfig(
      @JsonProperty("millisToWaitBeforeDeleting") long millisToWaitBeforeDeleting,
      @JsonProperty("mergeBytesLimit") long mergeBytesLimit,
      @JsonProperty("mergeSegmentsLimit") int mergeSegmentsLimit,
      @JsonProperty("maxSegmentsToMove") int maxSegmentsToMove,
      @JsonProperty("replicantLifetime") int replicantLifetime,
      @JsonProperty("replicationThrottleLimit") int replicationThrottleLimit,
      @JsonProperty("emitBalancingStats") boolean emitBalancingStats
  )
  {
    this.maxSegmentsToMove = maxSegmentsToMove;
    this.millisToWaitBeforeDeleting = millisToWaitBeforeDeleting;
    this.mergeSegmentsLimit = mergeSegmentsLimit;
    this.mergeBytesLimit = mergeBytesLimit;
    this.replicantLifetime = replicantLifetime;
    this.replicationThrottleLimit = replicationThrottleLimit;
    this.emitBalancingStats = emitBalancingStats;
  }

  public static String getConfigKey()
  {
    return CONFIG_KEY;
  }

  @JsonProperty
  public long getMillisToWaitBeforeDeleting()
  {
@@ -64,7 +66,7 @@ public class MasterSegmentSettings
  }

  @JsonProperty
  public boolean isEmitBalancingStats()
  public boolean emitBalancingStats()
  {
    return emitBalancingStats;
  }
@@ -81,25 +83,40 @@ public class MasterSegmentSettings
    return maxSegmentsToMove;
  }

  @JsonProperty
  public int getReplicantLifetime()
  {
    return replicantLifetime;
  }

  @JsonProperty
  public int getReplicationThrottleLimit()
  {
    return replicationThrottleLimit;
  }

  public static class Builder
  {
    public static final String CONFIG_KEY = "master.dynamicConfigs";
    private long millisToWaitBeforeDeleting;
    private long mergeBytesLimit;
    private int mergeSegmentsLimit;
    private int maxSegmentsToMove;
    private int replicantLifetime;
    private int replicationThrottleLimit;
    private boolean emitBalancingStats;

    public Builder()
    {
      this(15 * 60 * 1000L, 100000000L, Integer.MAX_VALUE, 5, false);
      this(15 * 60 * 1000L, 100000000L, Integer.MAX_VALUE, 5, 15, 10, false);
    }

    public Builder(
    private Builder(
        long millisToWaitBeforeDeleting,
        long mergeBytesLimit,
        int mergeSegmentsLimit,
        int maxSegmentsToMove,
        int replicantLifetime,
        int replicationThrottleLimit,
        boolean emitBalancingStats
    )
    {
@@ -107,6 +124,8 @@ public class MasterSegmentSettings
      this.mergeBytesLimit = mergeBytesLimit;
      this.mergeSegmentsLimit = mergeSegmentsLimit;
      this.maxSegmentsToMove = maxSegmentsToMove;
      this.replicantLifetime = replicantLifetime;
      this.replicationThrottleLimit = replicationThrottleLimit;
      this.emitBalancingStats = emitBalancingStats;
    }

@@ -134,13 +153,27 @@ public class MasterSegmentSettings
      return this;
    }

    public MasterSegmentSettings build()
    public Builder withReplicantLifetime(int replicantLifetime)
    {
      return new MasterSegmentSettings(
      this.replicantLifetime = replicantLifetime;
      return this;
    }

    public Builder withReplicationThrottleLimit(int replicationThrottleLimit)
    {
      this.replicationThrottleLimit = replicationThrottleLimit;
      return this;
    }

    public MasterDynamicConfig build()
    {
      return new MasterDynamicConfig(
          millisToWaitBeforeDeleting,
          mergeBytesLimit,
          mergeSegmentsLimit,
          maxSegmentsToMove,
          replicantLifetime,
          replicationThrottleLimit,
          emitBalancingStats
      );
    }
@@ -0,0 +1,57 @@
<!DOCTYPE HTML>
<!--
  ~ Druid - a distributed column store.
  ~ Copyright (C) 2012 Metamarkets Group Inc.
  ~
  ~ This program is free software; you can redistribute it and/or
  ~ modify it under the terms of the GNU General Public License
  ~ as published by the Free Software Foundation; either version 2
  ~ of the License, or (at your option) any later version.
  ~
  ~ This program is distributed in the hope that it will be useful,
  ~ but WITHOUT ANY WARRANTY; without even the implied warranty of
  ~ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  ~ GNU General Public License for more details.
  ~
  ~ You should have received a copy of the GNU General Public License
  ~ along with this program; if not, write to the Free Software
  ~ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
  -->

<html xmlns="http://www.w3.org/1999/html">
<head>
  <title>Druid Master Console - Rule Configuration</title>
  <link rel="shortcut icon" type="image/ico" href="images/favicon.ico">
  <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
  <meta name="Description" content="Druid Master Console Page"/>

  <style type="text/css">@import "css/style.css";</style>
  <style type="text/css">@import "css/jquery-ui-1.9.2.css";</style>
  <style type="text/css">@import "css/config.css";</style>

  <script type="text/javascript" src="js/underscore-1.2.2.js"></script>
  <script type="text/javascript" src="js/jquery-1.8.3.js"></script>
  <script type="text/javascript" src="js/jquery-ui-1.9.2.js"></script>
  <script type="text/javascript" src="js/config-0.0.1.js"></script>
</head>
<body>
<div class="container">
  <div class="heading">Master Configuration</div>

  <div id="config">
    <div id="config_list"></div>

    <button type="button" id="update">Update</button>
    <div id="update_dialog" title="Confirm Update">
      <p>Are you sure you want to update?</p>
    </div>
    <button type="button" id="cancel">Cancel</button>
    <div id="cancel_dialog" title="Confirm Cancel">
      <p>Are you sure you want to cancel? All changes will be lost.</p>
    </div>

    <div id="error_dialog" title="Error!"></div>
  </div>
</div>
</body>
</html>
@@ -0,0 +1,8 @@
#config_list {
  margin: 20px 0px;
}

.config_label {
  width: 250px;
  float: left;
}
@@ -18,6 +18,7 @@
  ~ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
  -->

<!-- We plan to redo the master console at some point in the near future with a real front end framework in place. -->
<html>
<head>
  <title>Druid Master Console</title>
@@ -38,7 +39,7 @@
  <a href="rules.html">Configure Compute Node Rules</a>
</div>
<div>
  <a href="masterSegmentSettings.html">Configure Dynamic Master Parameters</a>
  <a href="config.html">Configure Master Parameters</a>
</div>
<div>
  <a href="enable.html">Enable/Disable Datasources</a>
@@ -0,0 +1,105 @@
var configs = [];

function makeConfigDiv(key, value) {
  var retVal = "<div class='config'>";

  retVal += "<span class='config_label'>" + key + "</span>";
  retVal += "<input type='text' class='value' name='value' value='" + value + "'/>";

  retVal += "</div>";
  return retVal;
}

function domToConfig(configDiv) {
  var retVal = {};

  retVal.key = $($(configDiv).find(".config_label")).text();
  retVal.value = $($(configDiv).find(".value")).val();

  return retVal;
}

function getConfigs() {
  $.getJSON("/master/config", function(data) {
    $('#config_list').empty();

    $.each(data, function (key, value) {
      $('#config_list').append(makeConfigDiv(key, value));
    });
  });
}

$(document).ready(function() {
  $("button").button();

  $("#error_dialog").dialog({
    autoOpen: false,
    modal:true,
    resizeable: false,
    buttons: {
      Ok : function() {
        $(this).dialog("close");
      }
    }
  });

  $("#cancel_dialog").dialog({
    autoOpen: false,
    modal:true,
    resizeable: false,
    buttons: {
      Yes : function() {
        getConfigs();
        $(this).dialog("close");
      },
      No: function() {
        $(this).dialog("close");
      }
    }
  });

  $("#update_dialog").dialog({
    autoOpen: false,
    modal:true,
    resizeable: false,
    buttons: {
      Yes : function() {
        var configs = {};
        $.each($("#config_list").children(), function(index, configDiv) {
          var config = domToConfig(configDiv);
          configs[config.key] = config.value;
        });

        $.ajax({
          type: 'POST',
          url:'/master/config',
          data: JSON.stringify(configs),
          contentType:"application/json; charset=utf-8",
          dataType:"json",
          error: function(xhr, status, error) {
            $("#update_dialog").dialog("close");
            $("#error_dialog").html(xhr.responseText);
            $("#error_dialog").dialog("open");
          },
          success: function(data, status, xhr) {
            getConfigs();
            $("#update_dialog").dialog("close");
          }
        });
      },
      Cancel: function() {
        $(this).dialog("close");
      }
    }
  });

  getConfigs();

  $("#cancel").click(function() {
    $("#cancel_dialog").dialog("open");
  });

  $('#update').click(function (){
    $("#update_dialog").dialog("open")
  });
});
@@ -1,26 +0,0 @@
$(function () {
  $.get('../master/config', function (data) {
    document.getElementById("millis").value=data["millisToWaitBeforeDeleting"];
    document.getElementById("mergeBytes").value = data["mergeBytesLimit"];
    document.getElementById("mergeSegments").value = data["mergeSegmentsLimit"];
    document.getElementById("maxSegments").value = data["maxSegmentsToMove"];
    document.getElementById("emitBalancingStats").value = data["emitBalancingStats"];
  });

  $("#submit").click( function ()
  {
    values = {};
    list = $('form').serializeArray();
    for (var i=0;i< list.length;i++)
    {
      values[list[i]["name"]]=list[i]["value"];
    }
    $.ajax({
      url:'../master/config',
      type:"POST",
      data: JSON.stringify(values),
      contentType:"application/json; charset=utf-8",
      dataType:"json"
    });
  });
});
@@ -1,24 +0,0 @@
<!DOCTYPE html>
<html>
<head>
  <title>Configure Dynamic Master Parameters</title>
</head>
<body>
<form action="process_config.html" autocomplete="on" id ="configs">
  millisToWaitBeforeDeleting:<input type="text" name="millisToWaitBeforeDeleting" id="millis">
  <br>
  mergeBytesLimit: <input type="text" name="mergeBytesLimit" id="mergeBytes">
  <br>
  mergeSegmentsLimit: <input type="text" name = "mergeSegmentsLimit" id="mergeSegments">
  <br>
  maxSegmentsToMove: <input type= "text" name ="maxSegmentsToMove" id ="maxSegments">
  <br>
  emitBalancingStats: <input type= "text" name ="emitBalancingStats" id="emitBalancingStats">
  <button type="button" id="submit"> Submit </button>
</form>
</body>
<script type="text/javascript" src="js/jquery-1.8.3.js"></script>
<script type="text/javascript" src="js/masterSegmentSettings.js"></script>

</script>
</html>
@@ -53,7 +53,7 @@ public class DruidMasterBalancerProfiler
  Map<String, DataSegment> segments = Maps.newHashMap();
  ServiceEmitter emitter;
  DatabaseRuleManager manager;
  PeriodLoadRule loadRule = new PeriodLoadRule(new Period("P5000Y"),3,"normal");
  PeriodLoadRule loadRule = new PeriodLoadRule(new Period("P5000Y"), 3, "normal");
  List<Rule> rules = ImmutableList.<Rule>of(loadRule);

  @Before
@@ -71,7 +71,7 @@ public class DruidMasterBalancerProfiler
  {
    Stopwatch watch = new Stopwatch();
    int numSegments = 55000;
    int numServers=50;
    int numServers = 50;
    EasyMock.expect(manager.getAllRules()).andReturn(ImmutableMap.<String, List<Rule>>of("test", rules)).anyTimes();
    EasyMock.expect(manager.getRules(EasyMock.<String>anyObject())).andReturn(rules).anyTimes();
    EasyMock.expect(manager.getRulesWithDefault(EasyMock.<String>anyObject())).andReturn(rules).anyTimes();
@@ -89,9 +89,8 @@ public class DruidMasterBalancerProfiler
    List<DruidServer> serverList = Lists.newArrayList();
    Map<String, LoadQueuePeon> peonMap = Maps.newHashMap();
    List<ServerHolder> serverHolderList = Lists.newArrayList();
    Map<String,DataSegment> segmentMap = Maps.newHashMap();
    for (int i=0;i<numSegments;i++)
    {
    Map<String, DataSegment> segmentMap = Maps.newHashMap();
    for (int i = 0; i < numSegments; i++) {
      segmentMap.put(
          "segment" + i,
          new DataSegment(
@@ -108,30 +107,26 @@ public class DruidMasterBalancerProfiler
      );
    }

    for (int i=0;i<numServers;i++)
    {
      DruidServer server =EasyMock.createMock(DruidServer.class);
    for (int i = 0; i < numServers; i++) {
      DruidServer server = EasyMock.createMock(DruidServer.class);
      EasyMock.expect(server.getMetadata()).andReturn(null).anyTimes();
      EasyMock.expect(server.getCurrSize()).andReturn(30L).atLeastOnce();
      EasyMock.expect(server.getMaxSize()).andReturn(100L).atLeastOnce();
      EasyMock.expect(server.getTier()).andReturn("normal").anyTimes();
      EasyMock.expect(server.getName()).andReturn(Integer.toString(i)).atLeastOnce();
      EasyMock.expect(server.getHost()).andReturn(Integer.toString(i)).anyTimes();
      if (i==0)
      {
      if (i == 0) {
        EasyMock.expect(server.getSegments()).andReturn(segmentMap).anyTimes();
      }
      else
      {
      } else {
        EasyMock.expect(server.getSegments()).andReturn(new HashMap<String, DataSegment>()).anyTimes();
      }
      EasyMock.expect(server.getSegment(EasyMock.<String>anyObject())).andReturn(null).anyTimes();
      EasyMock.replay(server);

      LoadQueuePeon peon = new LoadQueuePeonTester();
      peonMap.put(Integer.toString(i),peon);
      peonMap.put(Integer.toString(i), peon);
      serverHolderList.add(new ServerHolder(server, peon));
    }
    }

    DruidMasterRuntimeParams params =
        DruidMasterRuntimeParams.newBuilder()
@@ -150,27 +145,34 @@ public class DruidMasterBalancerProfiler
                peonMap
            )
            .withAvailableSegments(segmentMap.values())
            .withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMaxSegmentsToMove(MAX_SEGMENTS_TO_MOVE).build())
            .withDynamicConfigs(
                new MasterDynamicConfig.Builder().withMaxSegmentsToMove(
                    MAX_SEGMENTS_TO_MOVE
                ).withReplicantLifetime(500)
                 .withReplicationThrottleLimit(5)
                 .build()
            )
            .withBalancerReferenceTimestamp(new DateTime("2013-01-01"))
            .withEmitter(emitter)
            .withDatabaseRuleManager(manager)
            .withReplicationManager(new ReplicationThrottler(2, 500))
            .withSegmentReplicantLookup(
                SegmentReplicantLookup.make(new DruidCluster(
                    ImmutableMap.<String, MinMaxPriorityQueue<ServerHolder>>of(
                        "normal",
                        MinMaxPriorityQueue.orderedBy(DruidMasterBalancerTester.percentUsedComparator)
                                           .create(
                                               serverHolderList
                                           )
                SegmentReplicantLookup.make(
                    new DruidCluster(
                        ImmutableMap.<String, MinMaxPriorityQueue<ServerHolder>>of(
                            "normal",
                            MinMaxPriorityQueue.orderedBy(DruidMasterBalancerTester.percentUsedComparator)
                                               .create(
                                                   serverHolderList
                                               )
                        )
                    )
                )
            )
            )
            )
            .build();

    DruidMasterBalancerTester tester = new DruidMasterBalancerTester(master);
    DruidMasterRuleRunner runner = new DruidMasterRuleRunner(master,500,5);
    DruidMasterRuleRunner runner = new DruidMasterRuleRunner(master);
    watch.start();
    DruidMasterRuntimeParams balanceParams = tester.run(params);
    DruidMasterRuntimeParams assignParams = runner.run(params);
@@ -178,56 +180,68 @@ public class DruidMasterBalancerProfiler
  }


  public void profileRun(){
    Stopwatch watch = new Stopwatch();
    LoadQueuePeonTester fromPeon = new LoadQueuePeonTester();
    LoadQueuePeonTester toPeon = new LoadQueuePeonTester();
  public void profileRun()
  {
    Stopwatch watch = new Stopwatch();
    LoadQueuePeonTester fromPeon = new LoadQueuePeonTester();
    LoadQueuePeonTester toPeon = new LoadQueuePeonTester();

    EasyMock.expect(druidServer1.getName()).andReturn("from").atLeastOnce();
    EasyMock.expect(druidServer1.getCurrSize()).andReturn(30L).atLeastOnce();
    EasyMock.expect(druidServer1.getMaxSize()).andReturn(100L).atLeastOnce();
    EasyMock.expect(druidServer1.getSegments()).andReturn(segments).anyTimes();
    EasyMock.expect(druidServer1.getSegment(EasyMock.<String>anyObject())).andReturn(null).anyTimes();
    EasyMock.replay(druidServer1);
    EasyMock.expect(druidServer1.getName()).andReturn("from").atLeastOnce();
    EasyMock.expect(druidServer1.getCurrSize()).andReturn(30L).atLeastOnce();
    EasyMock.expect(druidServer1.getMaxSize()).andReturn(100L).atLeastOnce();
    EasyMock.expect(druidServer1.getSegments()).andReturn(segments).anyTimes();
    EasyMock.expect(druidServer1.getSegment(EasyMock.<String>anyObject())).andReturn(null).anyTimes();
    EasyMock.replay(druidServer1);

    EasyMock.expect(druidServer2.getName()).andReturn("to").atLeastOnce();
    EasyMock.expect(druidServer2.getTier()).andReturn("normal").anyTimes();
    EasyMock.expect(druidServer2.getCurrSize()).andReturn(0L).atLeastOnce();
    EasyMock.expect(druidServer2.getMaxSize()).andReturn(100L).atLeastOnce();
    EasyMock.expect(druidServer2.getSegments()).andReturn(new HashMap<String, DataSegment>()).anyTimes();
    EasyMock.expect(druidServer2.getSegment(EasyMock.<String>anyObject())).andReturn(null).anyTimes();
    EasyMock.replay(druidServer2);
    EasyMock.expect(druidServer2.getName()).andReturn("to").atLeastOnce();
    EasyMock.expect(druidServer2.getTier()).andReturn("normal").anyTimes();
    EasyMock.expect(druidServer2.getCurrSize()).andReturn(0L).atLeastOnce();
    EasyMock.expect(druidServer2.getMaxSize()).andReturn(100L).atLeastOnce();
    EasyMock.expect(druidServer2.getSegments()).andReturn(new HashMap<String, DataSegment>()).anyTimes();
    EasyMock.expect(druidServer2.getSegment(EasyMock.<String>anyObject())).andReturn(null).anyTimes();
    EasyMock.replay(druidServer2);

    master.moveSegment(
        EasyMock.<String>anyObject(),
        EasyMock.<String>anyObject(),
        EasyMock.<String>anyObject(),
        EasyMock.<LoadPeonCallback>anyObject()
    );
    EasyMock.expectLastCall().anyTimes();
    EasyMock.replay(master);
    master.moveSegment(
        EasyMock.<String>anyObject(),
        EasyMock.<String>anyObject(),
        EasyMock.<String>anyObject(),
        EasyMock.<LoadPeonCallback>anyObject()
    );
    EasyMock.expectLastCall().anyTimes();
    EasyMock.replay(master);

    DruidMasterRuntimeParams params =
        DruidMasterRuntimeParams.newBuilder()
            .withDruidCluster(
                new DruidCluster(
                    ImmutableMap.<String, MinMaxPriorityQueue<ServerHolder>>of(
                        "normal",
                        MinMaxPriorityQueue.orderedBy(DruidMasterBalancerTester.percentUsedComparator)
                            .create(
                                Arrays.asList(
                                    new ServerHolder(druidServer1, fromPeon),
                                    new ServerHolder(druidServer2, toPeon)
                                )
                            )
                    )
                )
            )
            .withLoadManagementPeons(ImmutableMap.<String, LoadQueuePeon>of("from", fromPeon, "to", toPeon))
            .withAvailableSegments(segments.values())
            .withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMaxSegmentsToMove(MAX_SEGMENTS_TO_MOVE).build())
            .withBalancerReferenceTimestamp(new DateTime("2013-01-01"))
            .build();
    DruidMasterRuntimeParams params =
        DruidMasterRuntimeParams.newBuilder()
            .withDruidCluster(
                new DruidCluster(
                    ImmutableMap.<String, MinMaxPriorityQueue<ServerHolder>>of(
                        "normal",
                        MinMaxPriorityQueue.orderedBy(DruidMasterBalancerTester.percentUsedComparator)
                            .create(
                                Arrays.asList(
                                    new ServerHolder(druidServer1, fromPeon),
                                    new ServerHolder(druidServer2, toPeon)
                                )
                            )
                    )
                )
            )
            .withLoadManagementPeons(
                ImmutableMap.<String, LoadQueuePeon>of(
                    "from",
                    fromPeon,
                    "to",
                    toPeon
                )
            )
            .withAvailableSegments(segments.values())
            .withDynamicConfigs(
                new MasterDynamicConfig.Builder().withMaxSegmentsToMove(
                    MAX_SEGMENTS_TO_MOVE
                ).build()
            )
            .withBalancerReferenceTimestamp(new DateTime("2013-01-01"))
            .build();
    DruidMasterBalancerTester tester = new DruidMasterBalancerTester(master);
    watch.start();
    DruidMasterRuntimeParams balanceParams = tester.run(params);
@@ -192,7 +192,11 @@ public class DruidMasterBalancerTest
            )
        )
        .withAvailableSegments(segments.values())
        .withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMaxSegmentsToMove(MAX_SEGMENTS_TO_MOVE).build())
        .withDynamicConfigs(
            new MasterDynamicConfig.Builder().withMaxSegmentsToMove(
                MAX_SEGMENTS_TO_MOVE
            ).build()
        )
        .withBalancerReferenceTimestamp(new DateTime("2013-01-01"))
        .build();

@@ -264,9 +268,9 @@ public class DruidMasterBalancerTest
            )
        )
        .withAvailableSegments(segments.values())
        .withMasterSegmentSettings(
            new MasterSegmentSettings.Builder().withMaxSegmentsToMove(MAX_SEGMENTS_TO_MOVE)
                                               .build()
        .withDynamicConfigs(
            new MasterDynamicConfig.Builder().withMaxSegmentsToMove(MAX_SEGMENTS_TO_MOVE)
                                             .build()
        )
        .withBalancerReferenceTimestamp(new DateTime("2013-01-01"))
        .build();
@@ -357,8 +361,8 @@ public class DruidMasterBalancerTest
            )
        )
        .withAvailableSegments(segments.values())
        .withMasterSegmentSettings(
            new MasterSegmentSettings.Builder().withMaxSegmentsToMove(
        .withDynamicConfigs(
            new MasterDynamicConfig.Builder().withMaxSegmentsToMove(
                MAX_SEGMENTS_TO_MOVE
            ).build()
        )
@@ -86,7 +86,7 @@ public class DruidMasterRuleRunnerTest
      start = start.plusHours(1);
    }

    ruleRunner = new DruidMasterRuleRunner(master, 1, 24);
    ruleRunner = new DruidMasterRuleRunner(new ReplicationThrottler(24, 1), master);
  }

  @After
@@ -177,7 +177,7 @@ public class DruidMasterRuleRunnerTest
        .withAvailableSegments(availableSegments)
        .withDatabaseRuleManager(databaseRuleManager)
        .withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster()))
        .withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMaxSegmentsToMove(5).build())
        .withDynamicConfigs(new MasterDynamicConfig.Builder().withMaxSegmentsToMove(5).build())
        .build();

    DruidMasterRuntimeParams afterParams = ruleRunner.run(params);
@@ -522,7 +522,7 @@ public class DruidMasterRuleRunnerTest

    DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder()
        .withDruidCluster(druidCluster)
        .withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMillisToWaitBeforeDeleting(0L).build())
        .withDynamicConfigs(new MasterDynamicConfig.Builder().withMillisToWaitBeforeDeleting(0L).build())
        .withAvailableSegments(availableSegments)
        .withDatabaseRuleManager(databaseRuleManager)
        .withSegmentReplicantLookup(segmentReplicantLookup)
@@ -596,7 +596,7 @@ public class DruidMasterRuleRunnerTest

    DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder()
        .withDruidCluster(druidCluster)
        .withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMillisToWaitBeforeDeleting(0L).build())
        .withDynamicConfigs(new MasterDynamicConfig.Builder().withMillisToWaitBeforeDeleting(0L).build())
        .withAvailableSegments(availableSegments)
        .withDatabaseRuleManager(databaseRuleManager)
        .withSegmentReplicantLookup(segmentReplicantLookup)
@@ -677,7 +677,7 @@ public class DruidMasterRuleRunnerTest

    DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder()
        .withDruidCluster(druidCluster)
        .withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMillisToWaitBeforeDeleting(0L).build())
        .withDynamicConfigs(new MasterDynamicConfig.Builder().withMillisToWaitBeforeDeleting(0L).build())
        .withAvailableSegments(availableSegments)
        .withDatabaseRuleManager(databaseRuleManager)
        .withSegmentReplicantLookup(segmentReplicantLookup)
@@ -754,7 +754,7 @@ public class DruidMasterRuleRunnerTest

    DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder()
        .withDruidCluster(druidCluster)
        .withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMillisToWaitBeforeDeleting(0L).build())
        .withDynamicConfigs(new MasterDynamicConfig.Builder().withMillisToWaitBeforeDeleting(0L).build())
        .withAvailableSegments(availableSegments)
        .withDatabaseRuleManager(databaseRuleManager)
        .withSegmentReplicantLookup(segmentReplicantLookup)
@@ -843,7 +843,7 @@ public class DruidMasterRuleRunnerTest

    DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder()
        .withDruidCluster(druidCluster)
        .withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMillisToWaitBeforeDeleting(0L).build())
        .withDynamicConfigs(new MasterDynamicConfig.Builder().withMillisToWaitBeforeDeleting(0L).build())
        .withAvailableSegments(availableSegments)
        .withDatabaseRuleManager(databaseRuleManager)
        .withSegmentReplicantLookup(segmentReplicantLookup)
@@ -1029,7 +1029,7 @@ public class DruidMasterRuleRunnerTest

    DruidMasterRuntimeParams params = new DruidMasterRuntimeParams.Builder()
        .withDruidCluster(druidCluster)
        .withMasterSegmentSettings(new MasterSegmentSettings.Builder().withMillisToWaitBeforeDeleting(0L).build())
        .withDynamicConfigs(new MasterDynamicConfig.Builder().withMillisToWaitBeforeDeleting(0L).build())
        .withAvailableSegments(longerAvailableSegments)
        .withDatabaseRuleManager(databaseRuleManager)
        .withSegmentReplicantLookup(segmentReplicantLookup)
@@ -459,13 +459,13 @@ public class DruidMasterSegmentMergerTest
    final DruidMasterSegmentMerger merger = new DruidMasterSegmentMerger(indexingServiceClient, whitelistRef);
    final DruidMasterRuntimeParams params = DruidMasterRuntimeParams.newBuilder()
        .withAvailableSegments(ImmutableSet.copyOf(segments))
        .withMasterSegmentSettings(
            new MasterSegmentSettings.Builder().withMergeBytesLimit(
        .withDynamicConfigs(
            new MasterDynamicConfig.Builder().withMergeBytesLimit(
                mergeBytesLimit
            ).withMergeSegmentsLimit(
                mergeSegmentsLimit
            )
            .build()
            .build()
        )
        .build();
    merger.run(params);
@@ -93,18 +93,6 @@ public class DruidMasterTest
          {
            return null;
          }

          @Override
          public int getReplicantLifetime()
          {
            return 0;
          }

          @Override
          public int getReplicantThrottleLimit()
          {
            return 0;
          }
        },
        new ZkPathsConfig(){
@@ -44,6 +44,7 @@ import io.druid.guice.ManageLifecycle;
import io.druid.guice.annotations.Self;
import io.druid.server.http.BackwardsCompatiableInfoResource;
import io.druid.server.http.InfoResource;
import io.druid.server.http.MasterDynamicConfigsResource;
import io.druid.server.http.MasterRedirectInfo;
import io.druid.server.http.MasterResource;
import io.druid.server.http.RedirectFilter;
@@ -111,6 +112,7 @@ public class CliCoordinator extends ServerRunnable
      Jerseys.addResource(binder, BackwardsCompatiableInfoResource.class);
      Jerseys.addResource(binder, InfoResource.class);
      Jerseys.addResource(binder, MasterResource.class);
      Jerseys.addResource(binder, MasterDynamicConfigsResource.class);

      LifecycleModule.register(binder, Server.class);
    }