review comments

This commit is contained in:
nishantmonu51 2014-03-25 17:02:36 +05:30
parent 0ba72ffb2b
commit fe7a184096
7 changed files with 13 additions and 13 deletions

View File

@@ -25,7 +25,7 @@ import javax.validation.constraints.Min;
/**
*/
public class LocalCacheProvider extends CacheConfig implements CacheProvider
public class LocalCacheProvider implements CacheProvider
{
@JsonProperty
@Min(0)
@@ -42,6 +42,6 @@ public class LocalCacheProvider extends CacheConfig implements CacheProvider
@Override
public Cache get()
{
return new MapCache(new ByteCountingLRUMap(initialSize, logEvictionCount, sizeInBytes), this);
return new MapCache(new ByteCountingLRUMap(initialSize, logEvictionCount, sizeInBytes));
}
}

View File

@@ -33,9 +33,9 @@ import java.util.concurrent.atomic.AtomicLong;
*/
public class MapCache implements Cache
{
public static Cache create(long sizeInBytes, CacheConfig config)
public static Cache create(long sizeInBytes)
{
return new MapCache(new ByteCountingLRUMap(sizeInBytes),config);
return new MapCache(new ByteCountingLRUMap(sizeInBytes));
}
private final Map<ByteBuffer, byte[]> baseMap;
@@ -48,16 +48,12 @@ public class MapCache implements Cache
private final AtomicLong hitCount = new AtomicLong(0);
private final AtomicLong missCount = new AtomicLong(0);
private final CacheConfig config;
MapCache(
ByteCountingLRUMap byteCountingLRUMap,
CacheConfig config
ByteCountingLRUMap byteCountingLRUMap
)
{
this.byteCountingLRUMap = byteCountingLRUMap;
this.config = config;
this.baseMap = Collections.synchronizedMap(byteCountingLRUMap);
namespaceId = Maps.newHashMap();

View File

@@ -24,7 +24,7 @@ import net.spy.memcached.DefaultConnectionFactory;
import javax.validation.constraints.NotNull;
public class MemcachedCacheConfig extends CacheConfig
public class MemcachedCacheConfig
{
// default to 30 day expiration for cache entries
// values greater than 30 days are interpreted by memcached as absolute POSIX timestamps instead of duration

View File

@@ -196,7 +196,7 @@ public class CachingClusteredClientTest
{
timeline = new VersionedIntervalTimeline<>(Ordering.<String>natural());
serverView = EasyMock.createStrictMock(TimelineServerView.class);
cache = MapCache.create(100000, new CacheConfig());
cache = MapCache.create(100000);
client = makeClient();

View File

@@ -37,7 +37,7 @@ public class MapCacheTest
public void setUp() throws Exception
{
baseMap = new ByteCountingLRUMap(1024 * 1024);
cache = new MapCache(baseMap, new CacheConfig());
cache = new MapCache(baseMap);
}
@Test

View File

@@ -28,6 +28,7 @@ import io.druid.client.BrokerServerView;
import io.druid.client.CachingClusteredClient;
import io.druid.client.TimelineServerView;
import io.druid.client.cache.Cache;
import io.druid.client.cache.CacheConfig;
import io.druid.client.cache.CacheMonitor;
import io.druid.client.cache.CacheProvider;
import io.druid.client.selector.ServerSelectorStrategy;
@@ -81,7 +82,7 @@ public class CliBroker extends ServerRunnable
binder.bind(Cache.class).toProvider(CacheProvider.class).in(ManageLifecycle.class);
JsonConfigProvider.bind(binder, "druid.broker.cache", CacheProvider.class);
JsonConfigProvider.bind(binder, "druid.broker.cache", CacheConfig.class);
JsonConfigProvider.bind(binder, "druid.broker.balancer", ServerSelectorStrategy.class);
binder.bind(QuerySegmentWalker.class).to(ClientQuerySegmentWalker.class).in(LazySingleton.class);

View File

@@ -25,6 +25,7 @@ import com.google.inject.Module;
import com.metamx.common.logger.Logger;
import io.airlift.command.Command;
import io.druid.client.cache.Cache;
import io.druid.client.cache.CacheConfig;
import io.druid.client.cache.CacheProvider;
import io.druid.guice.Jerseys;
import io.druid.guice.JsonConfigProvider;
@@ -78,6 +79,8 @@ public class CliHistorical extends ServerRunnable
LifecycleModule.register(binder, Server.class);
binder.bind(Cache.class).toProvider(CacheProvider.class).in(ManageLifecycle.class);
JsonConfigProvider.bind(binder, "druid.historical.cache", CacheProvider.class);
JsonConfigProvider.bind(binder, "druid.historical.cache", CacheConfig.class);
}
}
);