mirror of https://github.com/apache/druid.git
Merge pull request #329 from metamx/segment-load-idempotent
ZkCoordinator: Make addSegment, addSegments idempotent
commit 09f73e2bf2
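Summary of the change: ServerManager.loadSegment now returns a boolean (true only when the segment was newly loaded) instead of throwing when the segment is already present, and ZkCoordinator uses that flag to skip the info-cache write and announcement for segments it already serves. The sketch below illustrates the resulting idempotent-load pattern in isolation; the class and identifiers are hypothetical stand-ins for illustration, not Druid's actual API.

import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

public class IdempotentLoadSketch
{
  // Tracks which segment ids are already served.
  private final Set<String> loaded = ConcurrentHashMap.newKeySet();

  // Returns true only on the first load of a given segment id; a repeated
  // call is a benign no-op, mirroring the new loadSegment contract.
  public boolean loadSegment(String segmentId)
  {
    return loaded.add(segmentId);
  }

  // Mirrors the new ZkCoordinator.addSegment flow: cache-file write and
  // announcement happen only for newly loaded segments.
  public void addSegment(String segmentId)
  {
    if (loadSegment(segmentId)) {
      System.out.println("announcing segment " + segmentId);
    } else {
      System.out.println("segment already loaded, skipping " + segmentId);
    }
  }

  public static void main(String[] args)
  {
    IdempotentLoadSketch coordinator = new IdempotentLoadSketch();
    coordinator.addSegment("datasource_2013-12-01_v1"); // announces
    coordinator.addSegment("datasource_2013-12-01_v1"); // second call is a no-op
  }
}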
ServerManager.java

@@ -116,7 +116,13 @@ public class ServerManager implements QuerySegmentWalker
     return segmentLoader.isSegmentLoaded(segment);
   }
 
-  public void loadSegment(final DataSegment segment) throws SegmentLoadingException
+  /**
+   * Load a single segment.
+   * @param segment segment to load
+   * @return true if the segment was newly loaded, false if it was already loaded
+   * @throws SegmentLoadingException if the segment cannot be loaded
+   */
+  public boolean loadSegment(final DataSegment segment) throws SegmentLoadingException
   {
     final Segment adapter;
     try {
@@ -150,8 +156,8 @@ public class ServerManager implements QuerySegmentWalker
         segment.getVersion()
     );
     if ((entry != null) && (entry.getChunk(segment.getShardSpec().getPartitionNum()) != null)) {
-      log.info("Told to load a adapter for a segment[%s] that already exists", segment.getIdentifier());
-      throw new SegmentLoadingException("Segment already exists[%s]", segment.getIdentifier());
+      log.warn("Told to load a adapter for a segment[%s] that already exists", segment.getIdentifier());
+      return false;
     }
 
     loadedIntervals.add(
@@ -165,6 +171,7 @@ public class ServerManager implements QuerySegmentWalker
       synchronized (dataSourceCounts) {
         dataSourceCounts.add(dataSource, 1L);
       }
+      return true;
     }
   }
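The return-value contract makes duplicate loads directly testable. A test sketch of that contract (illustrative only, not part of this PR; makeServerManager and makeSegment are hypothetical fixture helpers):

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import org.junit.Test;

public class LoadSegmentIdempotencyTest
{
  @Test
  public void testRepeatedLoadReturnsFalse() throws SegmentLoadingException
  {
    ServerManager serverManager = makeServerManager(); // hypothetical fixture
    DataSegment segment = makeSegment();               // hypothetical fixture

    assertTrue("first load should be a real load", serverManager.loadSegment(segment));
    assertFalse("second load should be a no-op", serverManager.loadSegment(segment));
  }
}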
ZkCoordinator.java

@@ -230,34 +230,37 @@ public class ZkCoordinator implements DataSegmentChangeHandler
     try {
       log.info("Loading segment %s", segment.getIdentifier());
 
+      final boolean loaded;
       try {
-        serverManager.loadSegment(segment);
+        loaded = serverManager.loadSegment(segment);
       }
       catch (Exception e) {
         removeSegment(segment);
         throw new SegmentLoadingException(e, "Exception loading segment[%s]", segment.getIdentifier());
       }
 
-      File segmentInfoCacheFile = new File(config.getInfoDir(), segment.getIdentifier());
-      if (!segmentInfoCacheFile.exists()) {
+      if (loaded) {
+        File segmentInfoCacheFile = new File(config.getInfoDir(), segment.getIdentifier());
+        if (!segmentInfoCacheFile.exists()) {
+          try {
+            jsonMapper.writeValue(segmentInfoCacheFile, segment);
+          }
+          catch (IOException e) {
+            removeSegment(segment);
+            throw new SegmentLoadingException(
+                e, "Failed to write to disk segment info cache file[%s]", segmentInfoCacheFile
+            );
+          }
+        }
+
         try {
-          jsonMapper.writeValue(segmentInfoCacheFile, segment);
+          announcer.announceSegment(segment);
         }
         catch (IOException e) {
-          removeSegment(segment);
-          throw new SegmentLoadingException(
-              e, "Failed to write to disk segment info cache file[%s]", segmentInfoCacheFile
-          );
+          throw new SegmentLoadingException(e, "Failed to announce segment[%s]", segment.getIdentifier());
        }
      }
-
-      try {
-        announcer.announceSegment(segment);
-      }
-      catch (IOException e) {
-        throw new SegmentLoadingException(e, "Failed to announce segment[%s]", segment.getIdentifier());
-      }
-
     }
     catch (SegmentLoadingException e) {
       log.makeAlert(e, "Failed to load segment for dataSource")
@@ -275,8 +278,9 @@ public class ZkCoordinator implements DataSegmentChangeHandler
       for (DataSegment segment : segments) {
         log.info("Loading segment %s", segment.getIdentifier());
 
+        final boolean loaded;
         try {
-          serverManager.loadSegment(segment);
+          loaded = serverManager.loadSegment(segment);
         }
         catch (Exception e) {
           log.error(e, "Exception loading segment[%s]", segment.getIdentifier());
@@ -285,20 +289,22 @@ public class ZkCoordinator implements DataSegmentChangeHandler
           continue;
         }
 
-        File segmentInfoCacheFile = new File(config.getInfoDir(), segment.getIdentifier());
-        if (!segmentInfoCacheFile.exists()) {
-          try {
-            jsonMapper.writeValue(segmentInfoCacheFile, segment);
+        if (loaded) {
+          File segmentInfoCacheFile = new File(config.getInfoDir(), segment.getIdentifier());
+          if (!segmentInfoCacheFile.exists()) {
+            try {
+              jsonMapper.writeValue(segmentInfoCacheFile, segment);
+            }
+            catch (IOException e) {
+              log.error(e, "Failed to write to disk segment info cache file[%s]", segmentInfoCacheFile);
+              removeSegment(segment);
+              segmentFailures.add(segment.getIdentifier());
+              continue;
+            }
          }
-          catch (IOException e) {
-            log.error(e, "Failed to write to disk segment info cache file[%s]", segmentInfoCacheFile);
-            removeSegment(segment);
-            segmentFailures.add(segment.getIdentifier());
-            continue;
-          }
-        }
 
-        validSegments.add(segment);
+          validSegments.add(segment);
+        }
       }
 
       try {
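Design note: both call sites now treat "already loaded" as a benign outcome rather than an error. In addSegment, the info-cache write and the announcement are guarded by the loaded flag; in addSegments, a segment that was already being served is skipped rather than re-added to validSegments. Re-delivered load requests therefore no longer surface as SegmentLoadingException alerts, which is what makes the handlers idempotent.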