mirror of https://github.com/apache/druid.git
fix according to code review
This commit is contained in:
parent 9818d28a26
commit fde874ea56
@@ -288,10 +288,6 @@ public class CuratorInventoryManager<ContainerClass, InventoryClass>
 
       final String inventoryKey = ZKPaths.getNodeFromPath(child.getPath());
 
-      if (inventoryKey == null) {
-        return;
-      }
-
       switch (event.getType()) {
         case CHILD_ADDED:
         case CHILD_UPDATED:
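Note on the hunk above: ZKPaths.getNodeFromPath simply returns the last path component of its argument and does not return null for a non-null path, which is presumably why the guard could be dropped. A minimal illustrative sketch, not from the patch; the path literal is made up, and the Curator package name varies by version (older releases used com.netflix.curator.utils):

import org.apache.curator.utils.ZKPaths;

public class InventoryKeyExample
{
  public static void main(String[] args)
  {
    // getNodeFromPath returns everything after the last '/' in the path,
    // so a non-null child path always yields a non-null inventory key.
    final String inventoryKey = ZKPaths.getNodeFromPath("/druid/segments/host:8083");
    System.out.println(inventoryKey); // prints "host:8083"
  }
}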
@@ -41,7 +41,7 @@ public class CountAggregatorFactory implements AggregatorFactory
       @JsonProperty("name") String name
   )
   {
-    Preconditions.checkNotNull(name, "Must have a valid, non null aggregator name");
+    Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
 
     this.name = name;
   }
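The same message cleanup ("non null" / "nonl null" becomes "non-null") repeats in the aggregator factory hunks below. For reference, a small sketch of how these Guava checks surface to a caller, since the message text is the only thing these hunks change (the class and values here are illustrative only):

import com.google.common.base.Preconditions;

public class CheckNotNullExample
{
  public static void main(String[] args)
  {
    String name = null; // simulate a missing "name" property
    try {
      // Guava throws a NullPointerException whose message is the second argument.
      Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
    }
    catch (NullPointerException e) {
      System.out.println(e.getMessage()); // Must have a valid, non-null aggregator name
    }
  }
}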
@@ -45,8 +45,8 @@ public class DoubleSumAggregatorFactory implements AggregatorFactory
       @JsonProperty("fieldName") final String fieldName
   )
   {
-    Preconditions.checkNotNull(name, "Must have a valid, nonl null aggregator name");
-    Preconditions.checkNotNull(fieldName, "Must have a valid, non null fieldName");
+    Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
+    Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName");
 
     this.name = name;
     this.fieldName = fieldName;
@@ -51,8 +51,8 @@ public class HistogramAggregatorFactory implements AggregatorFactory
       @JsonProperty("breaks") final List<Float> breaksList
   )
   {
-    Preconditions.checkNotNull(name, "Must have a valid, nonl null aggregator name");
-    Preconditions.checkNotNull(fieldName, "Must have a valid, non null fieldName");
+    Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
+    Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName");
 
     this.name = name;
     this.fieldName = fieldName;
@@ -63,11 +63,11 @@ public class JavaScriptAggregatorFactory implements AggregatorFactory
       @JsonProperty("fnCombine") final String fnCombine
   )
   {
-    Preconditions.checkNotNull(name, "Must have a valid, non null aggregator name");
-    Preconditions.checkNotNull(fieldNames, "Must have a valid, non null fieldNames");
-    Preconditions.checkNotNull(fnAggregate, "Must have a valid, non null fnAggregate");
-    Preconditions.checkNotNull(fnReset, "Must have a valid, non null fnReset");
-    Preconditions.checkNotNull(fnCombine, "Must have a valid, non null fnCombine");
+    Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
+    Preconditions.checkNotNull(fieldNames, "Must have a valid, non-null fieldNames");
+    Preconditions.checkNotNull(fnAggregate, "Must have a valid, non-null fnAggregate");
+    Preconditions.checkNotNull(fnReset, "Must have a valid, non-null fnReset");
+    Preconditions.checkNotNull(fnCombine, "Must have a valid, non-null fnCombine");
 
     this.name = name;
     this.fieldNames = fieldNames;
@@ -45,8 +45,8 @@ public class LongSumAggregatorFactory implements AggregatorFactory
       @JsonProperty("fieldName") final String fieldName
   )
   {
-    Preconditions.checkNotNull(name, "Must have a valid, nonl null aggregator name");
-    Preconditions.checkNotNull(fieldName, "Must have a valid, non null fieldName");
+    Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
+    Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName");
 
     this.name = name;
     this.fieldName = fieldName;
@@ -45,8 +45,8 @@ public class MaxAggregatorFactory implements AggregatorFactory
       @JsonProperty("fieldName") final String fieldName
   )
   {
-    Preconditions.checkNotNull(name, "Must have a valid, non null aggregator name");
-    Preconditions.checkNotNull(fieldName, "Must have a valid, non null fieldName");
+    Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
+    Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName");
 
     this.name = name;
     this.fieldName = fieldName;
@@ -45,8 +45,8 @@ public class MinAggregatorFactory implements AggregatorFactory
       @JsonProperty("fieldName") final String fieldName
   )
   {
-    Preconditions.checkNotNull(name, "Must have a valid, non null aggregator name");
-    Preconditions.checkNotNull(fieldName, "Must have a valid, non null fieldName");
+    Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
+    Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName");
 
     this.name = name;
     this.fieldName = fieldName;
@@ -43,7 +43,7 @@ public class HdfsDataSegmentPusher implements DataSegmentPusher
   public DataSegment push(File inDir, DataSegment segment) throws IOException
   {
     final String storageDir = DataSegmentPusherUtil.getStorageDir(segment);
-    Path outFile = config.getStorageDirectory().suffix(String.format("/%s/index.zip", storageDir));
+    Path outFile = new Path(String.format("%s/%s/index.zip", config.getStorageDirectory(), storageDir));
     FileSystem fs = outFile.getFileSystem(hadoopConfig);
 
     fs.mkdirs(outFile.getParent());
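The hunk above switches from calling Path.suffix() on a configured Hadoop Path to formatting a plain String and wrapping it in a Path, which lines up with the config change in the next hunk. A rough sketch of the new construction, with made-up directory values standing in for config.getStorageDirectory() and the segment's storage dir:

import org.apache.hadoop.fs.Path;

public class OutputPathExample
{
  public static void main(String[] args)
  {
    // Hypothetical stand-ins for the configured storage directory and segment dir.
    final String storageDirectory = "hdfs://namenode:8020/druid/segments";
    final String storageDir = "someDataSource/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/v1/0";

    Path outFile = new Path(String.format("%s/%s/index.zip", storageDirectory, storageDir));
    System.out.println(outFile); // full HDFS path ending in /index.zip
  }
}

Keeping the configured directory as a String means the config class no longer needs Hadoop types, which is what the following hunk removes.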
@@ -19,15 +19,12 @@
 
 package com.metamx.druid.loading;
 
-import org.apache.hadoop.fs.Path;
 import org.skife.config.Config;
 
-import java.io.File;
-
 /**
  */
 public abstract class HdfsDataSegmentPusherConfig
 {
   @Config("druid.pusher.hdfs.storageDirectory")
-  public abstract Path getStorageDirectory();
+  public abstract String getStorageDirectory();
 }
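For context, a minimal sketch of how an abstract @Config class like this one is typically bound from properties; this assumes the skife config-magic ConfigurationObjectFactory API and is not part of the patch:

import java.util.Properties;

import org.skife.config.ConfigurationObjectFactory;

public class ConfigBindingExample
{
  public static void main(String[] args)
  {
    Properties props = new Properties();
    props.setProperty("druid.pusher.hdfs.storageDirectory", "hdfs://namenode:8020/druid/segments");

    // config-magic generates an implementation whose getStorageDirectory()
    // now returns the raw property value as a plain String.
    HdfsDataSegmentPusherConfig config =
        new ConfigurationObjectFactory(props).build(HdfsDataSegmentPusherConfig.class);
    System.out.println(config.getStorageDirectory());
  }
}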