fix according to code review

fjy 2013-07-02 16:11:12 -07:00
parent 9818d28a26
commit fde874ea56
10 changed files with 18 additions and 25 deletions

View File

@@ -288,10 +288,6 @@ public class CuratorInventoryManager<ContainerClass, InventoryClass>
final String inventoryKey = ZKPaths.getNodeFromPath(child.getPath());
if (inventoryKey == null) {
return;
}
switch (event.getType()) {
case CHILD_ADDED:
case CHILD_UPDATED:

View File

@@ -41,7 +41,7 @@ public class CountAggregatorFactory implements AggregatorFactory
@JsonProperty("name") String name
)
{
Preconditions.checkNotNull(name, "Must have a valid, non null aggregator name");
Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
this.name = name;
}

View File

@@ -45,8 +45,8 @@ public class DoubleSumAggregatorFactory implements AggregatorFactory
@JsonProperty("fieldName") final String fieldName
)
{
Preconditions.checkNotNull(name, "Must have a valid, nonl null aggregator name");
Preconditions.checkNotNull(fieldName, "Must have a valid, non null fieldName");
Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName");
this.name = name;
this.fieldName = fieldName;

View File

@@ -51,8 +51,8 @@ public class HistogramAggregatorFactory implements AggregatorFactory
@JsonProperty("breaks") final List<Float> breaksList
)
{
Preconditions.checkNotNull(name, "Must have a valid, nonl null aggregator name");
Preconditions.checkNotNull(fieldName, "Must have a valid, non null fieldName");
Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName");
this.name = name;
this.fieldName = fieldName;

View File

@@ -63,11 +63,11 @@ public class JavaScriptAggregatorFactory implements AggregatorFactory
@JsonProperty("fnCombine") final String fnCombine
)
{
Preconditions.checkNotNull(name, "Must have a valid, non null aggregator name");
Preconditions.checkNotNull(fieldNames, "Must have a valid, non null fieldNames");
Preconditions.checkNotNull(fnAggregate, "Must have a valid, non null fnAggregate");
Preconditions.checkNotNull(fnReset, "Must have a valid, non null fnReset");
Preconditions.checkNotNull(fnCombine, "Must have a valid, non null fnCombine");
Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
Preconditions.checkNotNull(fieldNames, "Must have a valid, non-null fieldNames");
Preconditions.checkNotNull(fnAggregate, "Must have a valid, non-null fnAggregate");
Preconditions.checkNotNull(fnReset, "Must have a valid, non-null fnReset");
Preconditions.checkNotNull(fnCombine, "Must have a valid, non-null fnCombine");
this.name = name;
this.fieldNames = fieldNames;

View File

@@ -45,8 +45,8 @@ public class LongSumAggregatorFactory implements AggregatorFactory
@JsonProperty("fieldName") final String fieldName
)
{
Preconditions.checkNotNull(name, "Must have a valid, nonl null aggregator name");
Preconditions.checkNotNull(fieldName, "Must have a valid, non null fieldName");
Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName");
this.name = name;
this.fieldName = fieldName;

View File

@@ -45,8 +45,8 @@ public class MaxAggregatorFactory implements AggregatorFactory
@JsonProperty("fieldName") final String fieldName
)
{
Preconditions.checkNotNull(name, "Must have a valid, non null aggregator name");
Preconditions.checkNotNull(fieldName, "Must have a valid, non null fieldName");
Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName");
this.name = name;
this.fieldName = fieldName;

View File

@@ -45,8 +45,8 @@ public class MinAggregatorFactory implements AggregatorFactory
@JsonProperty("fieldName") final String fieldName
)
{
Preconditions.checkNotNull(name, "Must have a valid, non null aggregator name");
Preconditions.checkNotNull(fieldName, "Must have a valid, non null fieldName");
Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName");
this.name = name;
this.fieldName = fieldName;

View File

@@ -43,7 +43,7 @@ public class HdfsDataSegmentPusher implements DataSegmentPusher
public DataSegment push(File inDir, DataSegment segment) throws IOException
{
final String storageDir = DataSegmentPusherUtil.getStorageDir(segment);
Path outFile = config.getStorageDirectory().suffix(String.format("/%s/index.zip", storageDir));
Path outFile = new Path(String.format("%s/%s/index.zip", config.getStorageDirectory(), storageDir));
FileSystem fs = outFile.getFileSystem(hadoopConfig);
fs.mkdirs(outFile.getParent());

View File

@@ -19,15 +19,12 @@
package com.metamx.druid.loading;
import org.apache.hadoop.fs.Path;
import org.skife.config.Config;
import java.io.File;
/**
*/
public abstract class HdfsDataSegmentPusherConfig
{
@Config("druid.pusher.hdfs.storageDirectory")
public abstract Path getStorageDirectory();
public abstract String getStorageDirectory();
}
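
Taken together with the HdfsDataSegmentPusher hunk above, the config now hands back a plain String and the pusher constructs the Hadoop Path itself via new Path(String.format(...)). A rough sketch of the resulting output path, with hypothetical values standing in for config.getStorageDirectory() and DataSegmentPusherUtil.getStorageDir(segment):

import org.apache.hadoop.fs.Path;

public class HdfsPushPathSketch
{
  public static void main(String[] args)
  {
    // Hypothetical stand-ins for druid.pusher.hdfs.storageDirectory and the
    // per-segment directory computed by DataSegmentPusherUtil.getStorageDir().
    String storageDirectory = "hdfs://namenode:8020/druid/segments";
    String storageDir = "wikipedia/20130701_20130702/v1/0";

    // Same construction as in HdfsDataSegmentPusher.push() above.
    Path outFile = new Path(String.format("%s/%s/index.zip", storageDirectory, storageDir));
    System.out.println(outFile);
    // prints hdfs://namenode:8020/druid/segments/wikipedia/20130701_20130702/v1/0/index.zip
  }
}

Returning a String from the config also keeps the Hadoop-specific Path type out of the config class itself, which appears to be the point of the review feedback this commit addresses.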