fix broken UT: swap NoopSegmentLoader for a CacheTestSegmentLoader and rename ServerManager.isSegmentLoaded() to isSegmentCached(), so ZkCoordinatorTest can control which segments appear in the local segment cache

Fangjin Yang 2013-04-01 17:16:19 -07:00
parent 258f18471e
commit 0730976c80
4 changed files with 12 additions and 7 deletions

ServerManager.java

@@ -104,7 +104,7 @@ public class ServerManager implements QuerySegmentWalker
     }
   }
 
-  public boolean isSegmentLoaded(final DataSegment segment) throws SegmentLoadingException
+  public boolean isSegmentCached(final DataSegment segment) throws SegmentLoadingException
   {
     return segmentLoader.isSegmentLoaded(segment);
   }

ZkCoordinator.java

@@ -216,7 +216,7 @@ public class ZkCoordinator implements DataSegmentChangeHandler
       log.info("Loading segment cache file [%s]", file);
       try {
         DataSegment segment = jsonMapper.readValue(file, DataSegment.class);
-        if (serverManager.isSegmentLoaded(segment)) {
+        if (serverManager.isSegmentCached(segment)) {
           addSegment(segment);
         }
       }
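For context, the changed call sits in ZkCoordinator's startup path that replays segment descriptor files from the local segment-info cache and only announces segments whose data is actually present. The sketch below shows the approximate shape of that surrounding loop; the loop itself is not part of this diff, and the baseDir parameter and method name are assumptions, while log, jsonMapper, serverManager, and addSegment() are the members visible above.

  // Sketch only: approximate cache-replay loop around the hunk above.
  // `baseDir` (the segment-info directory) and the method name are hypothetical;
  // log, jsonMapper, serverManager and addSegment() are the members used in the diff.
  private void replaySegmentCache(File baseDir)
  {
    File[] files = baseDir.listFiles();
    if (files == null) {
      return;
    }
    for (File file : files) {
      log.info("Loading segment cache file [%s]", file);
      try {
        DataSegment segment = jsonMapper.readValue(file, DataSegment.class);
        if (serverManager.isSegmentCached(segment)) {
          // Announce/serve only segments whose data files exist in the local cache.
          addSegment(segment);
        }
      }
      catch (Exception e) {
        log.error(e, "Failed to load segment descriptor [%s]", file);
      }
    }
  }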

ZkCoordinatorTest.java

@@ -30,7 +30,7 @@ import com.metamx.druid.client.DruidServerConfig;
 import com.metamx.druid.client.ZKPhoneBook;
 import com.metamx.druid.index.v1.IndexIO;
 import com.metamx.druid.jackson.DefaultObjectMapper;
-import com.metamx.druid.loading.NoopSegmentLoader;
+import com.metamx.druid.loading.CacheTestSegmentLoader;
 import com.metamx.druid.metrics.NoopServiceEmitter;
 import com.metamx.druid.query.NoopQueryRunnerFactoryConglomerate;
 import com.metamx.druid.shard.NoneShardSpec;
@@ -74,7 +74,7 @@ public class ZkCoordinatorTest
     }
 
     serverManager = new ServerManager(
-        new NoopSegmentLoader(),
+        new CacheTestSegmentLoader(),
         new NoopQueryRunnerFactoryConglomerate(),
         new NoopServiceEmitter(),
         MoreExecutors.sameThreadExecutor()
@@ -194,7 +194,7 @@ public class ZkCoordinatorTest
         dataSource,
         interval,
         version,
-        ImmutableMap.<String, Object>of("version", version, "interval", interval),
+        ImmutableMap.<String, Object>of("version", version, "interval", interval, "cacheDir", cacheDir),
         Arrays.asList("dim1", "dim2", "dim3"),
         Arrays.asList("metric1", "metric2"),
         new NoneShardSpec(),
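The new "cacheDir" entry is what lets the test drive CacheTestSegmentLoader: a segment counts as cached exactly while that directory exists on disk. Below is a small self-contained sketch of that mechanic; the class name, directory location, and literal version/interval strings are illustrative and not the actual ZkCoordinatorTest code.

import com.google.common.collect.ImmutableMap;

import java.io.File;
import java.util.Map;

// Hypothetical demo class, not part of the commit.
public class CacheDirToggleSketch
{
  public static void main(String[] args)
  {
    File cacheDir = new File(System.getProperty("java.io.tmpdir"), "zk-coordinator-test-segment");

    // Mirrors the load spec built in the test hunk above, with cacheDir pointing at a real path.
    Map<String, Object> loadSpec = ImmutableMap.<String, Object>of(
        "version", "2013-01-01T00:00:00.000Z",
        "interval", "2013-01-01/2013-01-02",
        "cacheDir", cacheDir.toString()
    );

    // This is the check CacheTestSegmentLoader performs on the load spec.
    System.out.println("cached? " + new File(String.valueOf(loadSpec.get("cacheDir"))).exists());

    cacheDir.mkdirs();   // segment now "appears" in the local cache
    System.out.println("cached? " + new File(String.valueOf(loadSpec.get("cacheDir"))).exists());

    cacheDir.delete();   // and now it does not
  }
}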

CacheTestSegmentLoader.java (renamed from NoopSegmentLoader.java)

@@ -19,20 +19,25 @@
 
 package com.metamx.druid.loading;
 
+import com.metamx.common.MapUtils;
 import com.metamx.druid.StorageAdapter;
 import com.metamx.druid.client.DataSegment;
 import com.metamx.druid.index.QueryableIndex;
 import com.metamx.druid.index.Segment;
 import org.joda.time.Interval;
 
+import java.io.File;
+import java.util.Map;
+
 /**
  */
-public class NoopSegmentLoader implements SegmentLoader
+public class CacheTestSegmentLoader implements SegmentLoader
 {
   @Override
   public boolean isSegmentLoaded(DataSegment segment) throws SegmentLoadingException
   {
-    return false;
+    Map<String, Object> loadSpec = segment.getLoadSpec();
+    return new File(MapUtils.getString(loadSpec, "cacheDir")).exists();
   }
 
   @Override
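Taken together: the test wires the new loader into ServerManager as shown earlier, so ZkCoordinator's isSegmentCached() check is answered purely by whether each segment's cacheDir exists. A condensed recap follows, using only the constructor calls visible in this diff; the surrounding test scaffolding is assumed.

// Recap sketch, not the literal test code.
ServerManager serverManager = new ServerManager(
    new CacheTestSegmentLoader(),                 // "cached" iff the segment's cacheDir exists
    new NoopQueryRunnerFactoryConglomerate(),
    new NoopServiceEmitter(),
    MoreExecutors.sameThreadExecutor()
);
// serverManager.isSegmentCached(segment) delegates to the loader, so a test can make a
// segment look cached by creating its cacheDir up front, or uncached by leaving it absent.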