Mirror of https://github.com/apache/druid.git

Commit 84233238b1: Merge branch 'master' of github.com:metamx/druid

The merge touches five files. ServerManager.loadSegment now returns a boolean: false when the segment is already loaded, where it previously threw a SegmentLoadingException. ZkCoordinator uses that flag so that only newly loaded segments get a segment-info cache file and a ZooKeeper announcement. DruidCoordinator gains an injected ServiceAnnouncer and @Self DruidNode, announcing itself when it becomes leader and unannouncing when it stops. DruidCoordinatorTest passes a NoopServiceAnnouncer and a dummy DruidNode to the new constructor, and CliCoordinator drops the DiscoveryModule.register(binder, Self.class) call along with its now-unused imports.
ServerManager.java

@@ -116,7 +116,13 @@ public class ServerManager implements QuerySegmentWalker
     return segmentLoader.isSegmentLoaded(segment);
   }
 
-  public void loadSegment(final DataSegment segment) throws SegmentLoadingException
+  /**
+   * Load a single segment.
+   * @param segment segment to load
+   * @return true if the segment was newly loaded, false if it was already loaded
+   * @throws SegmentLoadingException if the segment cannot be loaded
+   */
+  public boolean loadSegment(final DataSegment segment) throws SegmentLoadingException
   {
     final Segment adapter;
     try {
@@ -150,8 +156,8 @@ public class ServerManager implements QuerySegmentWalker
         segment.getVersion()
     );
     if ((entry != null) && (entry.getChunk(segment.getShardSpec().getPartitionNum()) != null)) {
-      log.info("Told to load a adapter for a segment[%s] that already exists", segment.getIdentifier());
-      throw new SegmentLoadingException("Segment already exists[%s]", segment.getIdentifier());
+      log.warn("Told to load a adapter for a segment[%s] that already exists", segment.getIdentifier());
+      return false;
     }
 
     loadedIntervals.add(
@@ -165,6 +171,7 @@ public class ServerManager implements QuerySegmentWalker
       synchronized (dataSourceCounts) {
        dataSourceCounts.add(dataSource, 1L);
      }
+      return true;
    }
  }
 
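For readers skimming the hunks above, a minimal self-contained sketch of the changed contract follows. Everything here (ServerManagerSketch, the String segment id, the map standing in for the loaded-segment timeline) is a hypothetical stand-in rather than Druid's classes; the only point being illustrated is that an already-loaded segment now yields false instead of an exception.

// Sketch only, under the assumptions stated above.
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

class SegmentLoadingException extends Exception
{
  SegmentLoadingException(String message)
  {
    super(message);
  }

  SegmentLoadingException(String message, Throwable cause)
  {
    super(message, cause);
  }
}

class ServerManagerSketch
{
  private final Map<String, Object> loadedSegments = new ConcurrentHashMap<>();

  // Returns true if the segment was newly loaded, false if it was already loaded.
  // The pre-change behavior was to throw in the "already loaded" case.
  public boolean loadSegment(String segmentId) throws SegmentLoadingException
  {
    if (segmentId == null) {
      throw new SegmentLoadingException("null segment id");
    }
    if (loadedSegments.containsKey(segmentId)) {
      return false; // already serving this segment: warn-and-skip instead of failing
    }
    loadedSegments.put(segmentId, new Object()); // stand-in for building the storage adapter
    return true;
  }
}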
ZkCoordinator.java

@@ -230,14 +230,16 @@ public class ZkCoordinator implements DataSegmentChangeHandler
     try {
       log.info("Loading segment %s", segment.getIdentifier());
 
+      final boolean loaded;
       try {
-        serverManager.loadSegment(segment);
+        loaded = serverManager.loadSegment(segment);
       }
       catch (Exception e) {
         removeSegment(segment);
         throw new SegmentLoadingException(e, "Exception loading segment[%s]", segment.getIdentifier());
       }
 
+      if (loaded) {
       File segmentInfoCacheFile = new File(config.getInfoDir(), segment.getIdentifier());
       if (!segmentInfoCacheFile.exists()) {
         try {
@@ -257,6 +259,7 @@ public class ZkCoordinator implements DataSegmentChangeHandler
       catch (IOException e) {
         throw new SegmentLoadingException(e, "Failed to announce segment[%s]", segment.getIdentifier());
       }
+      }
 
     }
     catch (SegmentLoadingException e) {
@@ -275,8 +278,9 @@ public class ZkCoordinator implements DataSegmentChangeHandler
       for (DataSegment segment : segments) {
         log.info("Loading segment %s", segment.getIdentifier());
 
+        final boolean loaded;
         try {
-          serverManager.loadSegment(segment);
+          loaded = serverManager.loadSegment(segment);
         }
         catch (Exception e) {
           log.error(e, "Exception loading segment[%s]", segment.getIdentifier());
@@ -285,6 +289,7 @@ public class ZkCoordinator implements DataSegmentChangeHandler
           continue;
         }
 
+        if (loaded) {
         File segmentInfoCacheFile = new File(config.getInfoDir(), segment.getIdentifier());
         if (!segmentInfoCacheFile.exists()) {
           try {
@@ -300,6 +305,7 @@ public class ZkCoordinator implements DataSegmentChangeHandler
 
         validSegments.add(segment);
+        }
       }
 
       try {
         announcer.announceSegments(validSegments);
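On the caller side, the returned flag gates the per-segment bookkeeping. A sketch of that pattern, building on ServerManagerSketch above and again using hypothetical names rather than Druid's ZkCoordinator API:

// Sketch only; ZkCoordinatorSketch and the String segment id are assumptions.
class ZkCoordinatorSketch
{
  private final ServerManagerSketch serverManager;

  ZkCoordinatorSketch(ServerManagerSketch serverManager)
  {
    this.serverManager = serverManager;
  }

  public void addSegment(String segmentId) throws SegmentLoadingException
  {
    final boolean loaded;
    try {
      loaded = serverManager.loadSegment(segmentId);
    }
    catch (Exception e) {
      // mirrors the diff: surface the failure as a SegmentLoadingException
      throw new SegmentLoadingException("Exception loading segment[" + segmentId + "]", e);
    }

    if (loaded) {
      // only a newly loaded segment gets its info-cache file written and is announced;
      // a segment that was already being served is silently skipped
    }
  }
}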
DruidCoordinator.java

@@ -46,10 +46,13 @@ import io.druid.client.ServerInventoryView;
 import io.druid.client.indexing.IndexingServiceClient;
 import io.druid.common.config.JacksonConfigManager;
 import io.druid.concurrent.Execs;
+import io.druid.curator.discovery.ServiceAnnouncer;
 import io.druid.db.DatabaseRuleManager;
 import io.druid.db.DatabaseSegmentManager;
 import io.druid.guice.ManageLifecycle;
+import io.druid.guice.annotations.Self;
 import io.druid.segment.IndexIO;
+import io.druid.server.DruidNode;
 import io.druid.server.initialization.ZkPathsConfig;
 import io.druid.timeline.DataSegment;
 import org.apache.curator.framework.CuratorFramework;
@@ -99,6 +102,8 @@ public class DruidCoordinator
   private final LoadQueueTaskMaster taskMaster;
   private final Map<String, LoadQueuePeon> loadManagementPeons;
   private final AtomicReference<LeaderLatch> leaderLatch;
+  private final ServiceAnnouncer serviceAnnouncer;
+  private final DruidNode self;
 
   @Inject
   public DruidCoordinator(
@@ -112,7 +117,9 @@ public class DruidCoordinator
       ServiceEmitter emitter,
       ScheduledExecutorFactory scheduledExecutorFactory,
       IndexingServiceClient indexingServiceClient,
-      LoadQueueTaskMaster taskMaster
+      LoadQueueTaskMaster taskMaster,
+      ServiceAnnouncer serviceAnnouncer,
+      @Self DruidNode self
   )
   {
     this(
@@ -127,6 +134,8 @@ public class DruidCoordinator
         scheduledExecutorFactory,
         indexingServiceClient,
         taskMaster,
+        serviceAnnouncer,
+        self,
         Maps.<String, LoadQueuePeon>newConcurrentMap()
     );
   }
@@ -143,6 +152,8 @@ public class DruidCoordinator
       ScheduledExecutorFactory scheduledExecutorFactory,
       IndexingServiceClient indexingServiceClient,
       LoadQueueTaskMaster taskMaster,
+      ServiceAnnouncer serviceAnnouncer,
+      DruidNode self,
       ConcurrentMap<String, LoadQueuePeon> loadQueuePeonMap
   )
   {
@@ -157,6 +168,8 @@ public class DruidCoordinator
     this.emitter = emitter;
     this.indexingServiceClient = indexingServiceClient;
     this.taskMaster = taskMaster;
+    this.serviceAnnouncer = serviceAnnouncer;
+    this.self = self;
 
     this.exec = scheduledExecutorFactory.create(1, "Coordinator-Exec--%d");
 
@@ -474,6 +487,7 @@ public class DruidCoordinator
         databaseSegmentManager.start();
         databaseRuleManager.start();
         serverInventoryView.start();
+        serviceAnnouncer.announce(self);
 
         final List<Pair<? extends CoordinatorRunnable, Duration>> coordinatorRunnables = Lists.newArrayList();
         dynamicConfigs = configManager.watch(
@@ -554,8 +568,10 @@ public class DruidCoordinator
         }
         loadManagementPeons.clear();
 
-        databaseSegmentManager.stop();
+        serviceAnnouncer.unannounce(self);
         serverInventoryView.stop();
+        databaseRuleManager.stop();
+        databaseSegmentManager.stop();
         leader = false;
       }
       catch (Exception e) {
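The coordinator hunks wire self-announcement into the leadership lifecycle. A sketch of that ordering follows, with hypothetical stand-ins for ServiceAnnouncer and DruidNode; in the diff the real objects are injected via Guice, and the announce/unannounce calls sit inside the coordinator's actual leader callbacks.

// Sketch only; interface and class names are assumptions, not Druid's types.
interface ServiceAnnouncerSketch
{
  void announce(String node);
  void unannounce(String node);
}

class DruidCoordinatorSketch
{
  private final ServiceAnnouncerSketch serviceAnnouncer;
  private final String self; // stand-in for the injected @Self DruidNode

  DruidCoordinatorSketch(ServiceAnnouncerSketch serviceAnnouncer, String self)
  {
    this.serviceAnnouncer = serviceAnnouncer;
    this.self = self;
  }

  void becomeLeader()
  {
    // dependencies (segment/rule managers, inventory view) start first in the diff,
    // then the coordinator advertises itself for discovery
    serviceAnnouncer.announce(self);
  }

  void stopBeingLeader()
  {
    // unannounce before stopping the inventory view and managers, mirroring the new stop order
    serviceAnnouncer.unannounce(self);
  }
}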
DruidCoordinatorTest.java

@@ -23,8 +23,10 @@ import com.google.common.collect.MapMaker;
 import com.metamx.common.concurrent.ScheduledExecutorFactory;
 import io.druid.client.DruidServer;
 import io.druid.client.SingleServerInventoryView;
+import io.druid.curator.discovery.NoopServiceAnnouncer;
 import io.druid.curator.inventory.InventoryManagerConfig;
 import io.druid.db.DatabaseSegmentManager;
+import io.druid.server.DruidNode;
 import io.druid.server.initialization.ZkPathsConfig;
 import io.druid.server.metrics.NoopServiceEmitter;
 import io.druid.timeline.DataSegment;
@@ -111,6 +113,8 @@ public class DruidCoordinatorTest
         scheduledExecutorFactory,
         null,
         taskMaster,
+        new NoopServiceAnnouncer(),
+        new DruidNode("hey", "what", 1234),
         loadManagementPeons
     );
   }
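The test hunk satisfies the two new constructor parameters without touching discovery. A small sketch of that pattern, implementing the hypothetical ServiceAnnouncerSketch interface from the previous sketch; this is not Druid's NoopServiceAnnouncer.

// Do-nothing announcer for unit tests (sketch only).
class NoopAnnouncerSketch implements ServiceAnnouncerSketch
{
  @Override
  public void announce(String node)
  {
    // intentionally empty: no service discovery in unit tests
  }

  @Override
  public void unannounce(String node)
  {
    // intentionally empty
  }
}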
CliCoordinator.java

@@ -28,7 +28,6 @@ import com.metamx.common.concurrent.ScheduledExecutorFactory;
 import com.metamx.common.logger.Logger;
 import io.airlift.command.Command;
 import io.druid.client.indexing.IndexingServiceClient;
-import io.druid.curator.discovery.DiscoveryModule;
 import io.druid.db.DatabaseRuleManager;
 import io.druid.db.DatabaseRuleManagerConfig;
 import io.druid.db.DatabaseRuleManagerProvider;
@@ -41,7 +40,6 @@ import io.druid.guice.JsonConfigProvider;
 import io.druid.guice.LazySingleton;
 import io.druid.guice.LifecycleModule;
 import io.druid.guice.ManageLifecycle;
-import io.druid.guice.annotations.Self;
 import io.druid.server.coordinator.DruidCoordinator;
 import io.druid.server.coordinator.DruidCoordinatorConfig;
 import io.druid.server.coordinator.LoadQueueTaskMaster;
@@ -103,7 +101,6 @@ public class CliCoordinator extends ServerRunnable
         binder.bind(DruidCoordinator.class);
 
         LifecycleModule.register(binder, DruidCoordinator.class);
-        DiscoveryModule.register(binder, Self.class);
 
         binder.bind(JettyServerInitializer.class).toInstance(new CoordinatorJettyServerInitializer());
         Jerseys.addResource(binder, BackwardsCompatibleInfoResource.class);