Merge pull request #735 from metamx/misc-cleanup

Misc cleanup
fjy 2014-09-15 14:18:41 -06:00
commit 4b4de83f21
54 changed files with 109 additions and 75 deletions

View File

@@ -75,7 +75,7 @@ public class OrderedMergeIterator<T> implements Iterator<T>
 new Predicate<Iterator<T>>()
 {
   @Override
-  public boolean apply(@Nullable Iterator<T> input)
+  public boolean apply(Iterator<T> input)
   {
     return input.hasNext();
   }
@@ -85,7 +85,7 @@ public class OrderedMergeIterator<T> implements Iterator<T>
 new Function<Iterator<T>, PeekingIterator<T>>()
 {
   @Override
-  public PeekingIterator<T> apply(@Nullable Iterator<T> input)
+  public PeekingIterator<T> apply(Iterator<T> input)
   {
     return Iterators.peekingIterator(input);
   }
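A note on the @Nullable removals that recur throughout this commit: the annotation advertised a null tolerance the method bodies never had, since each apply() dereferences its input unconditionally. Dropping it makes the signature match the behavior and lets null-analysis tools flag a bad call site instead of waiting for an NPE at runtime. A minimal sketch of the distinction (the class name is illustrative, not from this codebase):

import com.google.common.base.Predicate;
import java.util.Iterator;
import javax.annotation.Nullable;

class NullableContractSketch
{
  // Before: claims null is acceptable, then dereferences input anyway.
  static final Predicate<Iterator<String>> misleading = new Predicate<Iterator<String>>()
  {
    @Override
    public boolean apply(@Nullable Iterator<String> input)
    {
      return input.hasNext(); // NPE if the "allowed" null ever arrives
    }
  };

  // After: the signature is honest, so analyzers check the caller instead.
  static final Predicate<Iterator<String>> honest = new Predicate<Iterator<String>>()
  {
    @Override
    public boolean apply(Iterator<String> input)
    {
      return input.hasNext();
    }
  };
}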

View File

@@ -83,7 +83,7 @@ public class OrderedMergeSequence<T> implements Sequence<T>
 new Function<Yielder<T>, T>()
 {
   @Override
-  public T apply(@Nullable Yielder<T> input)
+  public T apply(Yielder<T> input)
   {
     return input.get();
   }

View File

@ -91,7 +91,7 @@ public class JodaUtils
intervals, new Predicate<Interval>() intervals, new Predicate<Interval>()
{ {
@Override @Override
public boolean apply(@Nullable Interval input) public boolean apply(Interval input)
{ {
return input.overlaps(i); return input.overlaps(i);
} }

View File

@@ -20,6 +20,7 @@
 package io.druid.examples.web;
 import com.google.api.client.repackaged.com.google.common.base.Throwables;
+import com.google.common.base.Charsets;
 import com.google.common.base.Preconditions;
 import com.google.common.io.InputSupplier;
 import com.metamx.emitter.EmittingLogger;
@@ -55,6 +56,6 @@ public class WebJsonSupplier implements InputSupplier<BufferedReader>
 {
   URLConnection connection = url.openConnection();
   connection.setDoInput(true);
-  return new BufferedReader(new InputStreamReader(url.openStream()));
+  return new BufferedReader(new InputStreamReader(url.openStream(), Charsets.UTF_8));
 }
 }
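Context for the reader change above: InputStreamReader without a charset argument decodes with the JVM's default charset, so the same response bytes could parse differently across machines. A hedged sketch of the two constructions (class and method names are illustrative):

import com.google.common.base.Charsets;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;

class CharsetReaderSketch
{
  static Reader platformDependent(InputStream in)
  {
    return new InputStreamReader(in); // decodes with Charset.defaultCharset()
  }

  static Reader deterministic(InputStream in)
  {
    return new InputStreamReader(in, Charsets.UTF_8); // same decoding on every JVM
  }
}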

View File

@@ -26,7 +26,7 @@ import java.nio.ByteBuffer;
 public class Bucket
 {
-  public static int PREAMBLE_BYTES = 16;
+  public static final int PREAMBLE_BYTES = 16;
   /** ID for this bucket, unique for this indexer run. Used for grouping and partitioning. */
   private final int shardNum;
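The added final matters more than it looks: a public static field without final is a mutable global that any class may reassign at runtime, and the compiler cannot treat it as a constant. A small sketch of the difference (illustrative class, not from the diff):

class ConstantSketch
{
  public static int mutablePreamble = 16;         // legal elsewhere: ConstantSketch.mutablePreamble = 0;
  public static final int CONSTANT_PREAMBLE = 16; // reassignment is a compile-time error
}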

View File

@@ -170,7 +170,6 @@ public class HadoopDruidIndexerConfig
   private volatile HadoopIngestionSpec schema;
   private volatile PathSpec pathSpec;
-  private volatile ColumnConfig columnConfig;
   private volatile Map<DateTime,ShardSpecLookup> shardSpecLookups = Maps.newHashMap();
   private volatile Map<ShardSpec, HadoopyShardSpec> hadoopShardSpecLookup = Maps.newHashMap();
@@ -179,7 +178,6 @@ public class HadoopDruidIndexerConfig
     final @JsonProperty("schema") HadoopIngestionSpec schema
 )
 {
-  this.columnConfig = columnConfig;
   this.schema = schema;
   this.pathSpec = jsonMapper.convertValue(schema.getIOConfig().getPathSpec(), PathSpec.class);
   for (Map.Entry<DateTime, List<HadoopyShardSpec>> entry : schema.getTuningConfig().getShardSpecs().entrySet()) {
@@ -213,11 +211,6 @@ public class HadoopDruidIndexerConfig
   return schema;
 }
-public ColumnConfig getColumnConfig()
-{
-  return columnConfig;
-}
 public String getDataSource()
 {
   return schema.getDataSchema().getDataSource();

View File

@@ -28,7 +28,7 @@ import java.util.List;
 public class TaskConfig
 {
-  public static List<String> DEFAULT_DEFAULT_HADOOP_COORDINATES = ImmutableList.of(
+  public static final List<String> DEFAULT_DEFAULT_HADOOP_COORDINATES = ImmutableList.of(
       "org.apache.hadoop:hadoop-client:2.3.0"
   );

View File

@@ -97,7 +97,7 @@ public class AppendTask extends MergeTaskBase
 new Predicate<Rowboat>()
 {
   @Override
-  public boolean apply(@Nullable Rowboat input)
+  public boolean apply(Rowboat input)
   {
     return holder.getInterval().contains(input.getTimestamp());
   }
@@ -115,7 +115,7 @@ public class AppendTask extends MergeTaskBase
   return "append";
 }
-private class SegmentToMergeHolder
+private static class SegmentToMergeHolder
 {
   private final DataSegment segment;
   private final Interval interval;
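The static added to SegmentToMergeHolder (and to several other nested classes later in this commit) removes the implicit reference every inner class keeps to its enclosing instance. A holder that never touches outer state gains nothing from that reference, which can pin the outer object in memory and complicates serialization. A minimal illustration:

class Outer
{
  private class Inner {}         // each instance carries a hidden Outer.this reference
  private static class Nested {} // no link to any Outer instance
}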

View File

@@ -133,7 +133,7 @@ public abstract class MergeTaskBase extends AbstractFixedIntervalTask
 new Function<DataSegment, String>()
 {
   @Override
-  public String apply(@Nullable DataSegment input)
+  public String apply(DataSegment input)
   {
     return input.getIdentifier();
   }

View File

@@ -236,7 +236,7 @@ public class IngestSegmentFirehoseFactory implements FirehoseFactory<InputRowPar
 {
   @Nullable
   @Override
-  public Sequence<InputRow> apply(@Nullable StorageAdapter adapter)
+  public Sequence<InputRow> apply(StorageAdapter adapter)
   {
     return Sequences.concat(
         Sequences.map(
@@ -248,7 +248,7 @@ public class IngestSegmentFirehoseFactory implements FirehoseFactory<InputRowPar
 {
   @Nullable
   @Override
-  public Sequence<InputRow> apply(@Nullable final Cursor cursor)
+  public Sequence<InputRow> apply(final Cursor cursor)
   {
     final TimestampColumnSelector timestampColumnSelector = cursor.makeTimestampColumnSelector();

View File

@@ -799,7 +799,7 @@ public class RemoteTaskRunner implements TaskRunner, TaskLogStreamer
     ZkWorker zkWorker, ZkWorker zkWorker2
 )
 {
-  int retVal = -Ints.compare(zkWorker.getCurrCapacityUsed(), zkWorker2.getCurrCapacityUsed());
+  int retVal = Ints.compare(zkWorker2.getCurrCapacityUsed(), zkWorker.getCurrCapacityUsed());
   if (retVal == 0) {
     retVal = zkWorker.getWorker().getHost().compareTo(zkWorker2.getWorker().getHost());
   }
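The comparator rewrite above swaps arguments instead of negating the result. With Guava's Ints.compare the two are equivalent, since it only returns -1, 0, or 1, but argument-swapping is the generally safe idiom: a Comparator is allowed to return Integer.MIN_VALUE, whose arithmetic negation overflows back to Integer.MIN_VALUE. A sketch of the hazard (comparator, a, and b are placeholders):

int r = comparator.compare(a, b);            // may legally be Integer.MIN_VALUE
int reversedBroken = -r;                     // -Integer.MIN_VALUE == Integer.MIN_VALUE: order not reversed
int reversedSafe = comparator.compare(b, a); // reverses the order without arithmetic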

View File

@@ -21,6 +21,7 @@ package io.druid.granularity;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import org.joda.time.Chronology;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
@@ -325,7 +326,7 @@ public class PeriodGranularity extends BaseQueryGranularity
 @Override
 public byte[] cacheKey()
 {
-  return (period.toString() + ":" + chronology.getZone().toString()).getBytes();
+  return (period.toString() + ":" + chronology.getZone().toString()).getBytes(Charsets.UTF_8);
 }
 @Override
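Why the getBytes(Charsets.UTF_8) changes, here and in the many cache-key methods below: String.getBytes() with no argument encodes using the platform default charset, so two nodes started with different file.encoding settings would compute different cache keys for identical queries. A short sketch (the key string is illustrative):

import com.google.common.base.Charsets;

String key = "P1D:UTC";
byte[] unstable = key.getBytes();             // depends on the JVM's default charset
byte[] stable = key.getBytes(Charsets.UTF_8); // byte-identical on every JVM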

View File

@@ -33,6 +33,7 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.io.Reader;
 import java.util.Properties;
 /**
@@ -68,8 +69,8 @@ public class PropertiesModule implements Module
 if (stream != null) {
   log.info("Loading properties from %s", propertiesFile);
-  try {
-    fileProps.load(new InputStreamReader(stream, Charsets.UTF_8));
+  try (Reader reader = new InputStreamReader(stream, Charsets.UTF_8)) {
+    fileProps.load(reader);
   }
   catch (IOException e) {
     throw Throwables.propagate(e);
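The try-with-resources form above also fixes a leak: the old code created an InputStreamReader that was never closed. Any resource declared in the try header is closed automatically, even when the body throws. In sketch form:

try (Reader reader = new InputStreamReader(stream, Charsets.UTF_8)) {
  fileProps.load(reader);
} // reader.close() runs here on both the normal and the exceptional path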

View File

@ -36,7 +36,7 @@ import java.util.Map;
*/ */
public abstract class BaseQuery<T> implements Query<T> public abstract class BaseQuery<T> implements Query<T>
{ {
public static String QUERYID = "queryId"; public static final String QUERYID = "queryId";
private final DataSource dataSource; private final DataSource dataSource;
private final Map<String, Object> context; private final Map<String, Object> context;
private final QuerySegmentSpec querySegmentSpec; private final QuerySegmentSpec querySegmentSpec;

View File

@@ -61,7 +61,7 @@ public class IntervalChunkingQueryRunner<T> implements QueryRunner<T>
 new Function<Interval, Iterable<Interval>>()
 {
   @Override
-  public Iterable<Interval> apply(@Nullable Interval input)
+  public Iterable<Interval> apply(Interval input)
   {
     return splitInterval(input);
   }

View File

@@ -249,7 +249,7 @@ public class PrioritizedExecutorService extends AbstractExecutorService implemen
 @Override
 public int compareTo(PrioritizedListenableFutureTask otherTask)
 {
-  return -Ints.compare(getPriority(), otherTask.getPriority());
+  return Ints.compare(otherTask.getPriority(), getPriority());
 }
 }
 }

View File

@@ -49,7 +49,7 @@ public class Queries
 new Function<AggregatorFactory, String>()
 {
   @Override
-  public String apply(@Nullable AggregatorFactory input)
+  public String apply(AggregatorFactory input)
   {
     return input.getName();
   }

View File

@@ -71,7 +71,7 @@ public class QueryRunnerHelper
 new Function<Cursor, Result<T>>()
 {
   @Override
-  public Result<T> apply(@Nullable Cursor input)
+  public Result<T> apply(Cursor input)
   {
     log.debug("Running over cursor[%s]", adapter.getInterval(), input.getTime());
     return mapFn.apply(input);

View File

@@ -21,6 +21,7 @@ package io.druid.query.aggregation;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import com.google.common.base.Preconditions;
 import com.google.common.primitives.Doubles;
 import io.druid.segment.ColumnSelectorFactory;
@@ -129,7 +130,7 @@ public class DoubleSumAggregatorFactory implements AggregatorFactory
 @Override
 public byte[] getCacheKey()
 {
-  byte[] fieldNameBytes = fieldName.getBytes();
+  byte[] fieldNameBytes = fieldName.getBytes(Charsets.UTF_8);
   return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array();
 }

View File

@@ -154,7 +154,7 @@ public class HistogramAggregatorFactory implements AggregatorFactory
 @Override
 public byte[] getCacheKey()
 {
-  byte[] fieldNameBytes = fieldName.getBytes();
+  byte[] fieldNameBytes = fieldName.getBytes(Charsets.UTF_8);
   return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array();
 }

View File

@@ -21,6 +21,7 @@ package io.druid.query.aggregation;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import com.google.common.base.Preconditions;
 import com.google.common.primitives.Longs;
 import io.druid.segment.ColumnSelectorFactory;
@@ -125,7 +126,7 @@ public class LongSumAggregatorFactory implements AggregatorFactory
 @Override
 public byte[] getCacheKey()
 {
-  byte[] fieldNameBytes = fieldName.getBytes();
+  byte[] fieldNameBytes = fieldName.getBytes(Charsets.UTF_8);
   return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array();
 }

View File

@@ -21,6 +21,7 @@ package io.druid.query.aggregation;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import com.google.common.base.Preconditions;
 import com.google.common.primitives.Doubles;
 import io.druid.segment.ColumnSelectorFactory;
@@ -126,7 +127,7 @@ public class MaxAggregatorFactory implements AggregatorFactory
 @Override
 public byte[] getCacheKey()
 {
-  byte[] fieldNameBytes = fieldName.getBytes();
+  byte[] fieldNameBytes = fieldName.getBytes(Charsets.UTF_8);
   return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array();
 }

View File

@@ -21,6 +21,7 @@ package io.druid.query.aggregation;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import com.google.common.base.Preconditions;
 import com.google.common.primitives.Doubles;
 import io.druid.segment.ColumnSelectorFactory;
@@ -126,7 +127,7 @@ public class MinAggregatorFactory implements AggregatorFactory
 @Override
 public byte[] getCacheKey()
 {
-  byte[] fieldNameBytes = fieldName.getBytes();
+  byte[] fieldNameBytes = fieldName.getBytes(Charsets.UTF_8);
   return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array();
 }

View File

@@ -21,6 +21,7 @@ package io.druid.query.dimension;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import io.druid.query.extraction.DimExtractionFn;
 import java.nio.ByteBuffer;
@@ -68,7 +69,7 @@ public class DefaultDimensionSpec implements DimensionSpec
 @Override
 public byte[] getCacheKey()
 {
-  byte[] dimensionBytes = dimension.getBytes();
+  byte[] dimensionBytes = dimension.getBytes(Charsets.UTF_8);
   return ByteBuffer.allocate(1 + dimensionBytes.length)
       .put(CACHE_TYPE_ID)

View File

@@ -21,6 +21,7 @@ package io.druid.query.dimension;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import io.druid.query.extraction.DimExtractionFn;
 import java.nio.ByteBuffer;
@@ -73,7 +74,7 @@ public class ExtractionDimensionSpec implements DimensionSpec
 @Override
 public byte[] getCacheKey()
 {
-  byte[] dimensionBytes = dimension.getBytes();
+  byte[] dimensionBytes = dimension.getBytes(Charsets.UTF_8);
   byte[] dimExtractionFnBytes = dimExtractionFn.getCacheKey();
   return ByteBuffer.allocate(1 + dimensionBytes.length + dimExtractionFnBytes.length)

View File

@@ -21,6 +21,7 @@ package io.druid.query.extraction;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import com.google.common.base.Function;
 import org.mozilla.javascript.Context;
 import org.mozilla.javascript.ContextFactory;
@@ -80,7 +81,7 @@ public class JavascriptDimExtractionFn implements DimExtractionFn
 @Override
 public byte[] getCacheKey()
 {
-  byte[] bytes = function.getBytes();
+  byte[] bytes = function.getBytes(Charsets.UTF_8);
   return ByteBuffer.allocate(1 + bytes.length)
       .put(CACHE_TYPE_ID)
       .put(bytes)

View File

@@ -21,6 +21,7 @@ package io.druid.query.extraction;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import java.nio.ByteBuffer;
 import java.util.regex.Matcher;
@@ -47,7 +48,7 @@ public class PartialDimExtractionFn implements DimExtractionFn
 @Override
 public byte[] getCacheKey()
 {
-  byte[] exprBytes = expr.getBytes();
+  byte[] exprBytes = expr.getBytes(Charsets.UTF_8);
   return ByteBuffer.allocate(1 + exprBytes.length)
       .put(CACHE_TYPE_ID)
       .put(exprBytes)

View File

@@ -21,6 +21,7 @@ package io.druid.query.extraction;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import java.nio.ByteBuffer;
 import java.util.regex.Matcher;
@@ -47,7 +48,7 @@ public class RegexDimExtractionFn implements DimExtractionFn
 @Override
 public byte[] getCacheKey()
 {
-  byte[] exprBytes = expr.getBytes();
+  byte[] exprBytes = expr.getBytes(Charsets.UTF_8);
   return ByteBuffer.allocate(1 + exprBytes.length)
       .put(CACHE_TYPE_ID)
      .put(exprBytes)

View File

@@ -21,6 +21,7 @@ package io.druid.query.extraction;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import com.ibm.icu.text.SimpleDateFormat;
 import java.nio.ByteBuffer;
@@ -55,7 +56,7 @@ public class TimeDimExtractionFn implements DimExtractionFn
 @Override
 public byte[] getCacheKey()
 {
-  byte[] timeFormatBytes = timeFormat.getBytes();
+  byte[] timeFormatBytes = timeFormat.getBytes(Charsets.UTF_8);
   return ByteBuffer.allocate(1 + timeFormatBytes.length)
       .put(CACHE_TYPE_ID)
       .put(timeFormatBytes)

View File

@@ -21,6 +21,7 @@ package io.druid.query.filter;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import com.google.common.base.Preconditions;
 import io.druid.query.extraction.DimExtractionFn;
@@ -71,8 +72,8 @@ public class ExtractionDimFilter implements DimFilter
 @Override
 public byte[] getCacheKey()
 {
-  byte[] dimensionBytes = dimension.getBytes();
-  byte[] valueBytes = value.getBytes();
+  byte[] dimensionBytes = dimension.getBytes(Charsets.UTF_8);
+  byte[] valueBytes = value.getBytes(Charsets.UTF_8);
   return ByteBuffer.allocate(1 + dimensionBytes.length + valueBytes.length)
       .put(DimFilterCacheHelper.EXTRACTION_CACHE_ID)

View File

@@ -21,6 +21,7 @@ package io.druid.query.filter;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import com.google.common.base.Preconditions;
 import java.nio.ByteBuffer;
@@ -47,8 +48,8 @@ public class SelectorDimFilter implements DimFilter
 @Override
 public byte[] getCacheKey()
 {
-  byte[] dimensionBytes = dimension.getBytes();
-  byte[] valueBytes = (value == null) ? new byte[]{} : value.getBytes();
+  byte[] dimensionBytes = dimension.getBytes(Charsets.UTF_8);
+  byte[] valueBytes = (value == null) ? new byte[]{} : value.getBytes(Charsets.UTF_8);
   return ByteBuffer.allocate(1 + dimensionBytes.length + valueBytes.length)
       .put(DimFilterCacheHelper.SELECTOR_CACHE_ID)

View File

@@ -20,6 +20,7 @@ package io.druid.query.filter;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import com.google.common.base.Preconditions;
 import com.metamx.collections.spatial.search.Bound;
@@ -48,7 +49,7 @@ public class SpatialDimFilter implements DimFilter
 @Override
 public byte[] getCacheKey()
 {
-  byte[] dimBytes = dimension.getBytes();
+  byte[] dimBytes = dimension.getBytes(Charsets.UTF_8);
   byte[] boundBytes = bound.getCacheKey();
   return ByteBuffer.allocate(1 + dimBytes.length + boundBytes.length)

View File

@@ -236,7 +236,7 @@ public class GroupByQueryEngine
 }
 }
-private class PositionMaintainer
+private static class PositionMaintainer
 {
   private final int[] increments;
   private final int increment;
@@ -284,7 +284,7 @@ public class GroupByQueryEngine
 }
 }
-private class RowIterator implements CloseableIterator<Row>
+private static class RowIterator implements CloseableIterator<Row>
 {
   private final GroupByQuery query;
   private final Cursor cursor;

View File

@@ -21,6 +21,7 @@ package io.druid.query.groupby.having;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import com.google.common.primitives.Bytes;
 import io.druid.data.input.Row;
@@ -71,7 +72,7 @@ public class EqualToHavingSpec implements HavingSpec
 @Override
 public byte[] getCacheKey()
 {
-  final byte[] aggBytes = aggregationName.getBytes();
+  final byte[] aggBytes = aggregationName.getBytes(Charsets.UTF_8);
   final byte[] valBytes = Bytes.toArray(Arrays.asList(value));
   return ByteBuffer.allocate(1 + aggBytes.length + valBytes.length)
       .put(CACHE_KEY)

View File

@@ -21,6 +21,7 @@ package io.druid.query.groupby.having;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import com.google.common.primitives.Bytes;
 import io.druid.data.input.Row;
@@ -71,7 +72,7 @@ public class GreaterThanHavingSpec implements HavingSpec
 @Override
 public byte[] getCacheKey()
 {
-  final byte[] aggBytes = aggregationName.getBytes();
+  final byte[] aggBytes = aggregationName.getBytes(Charsets.UTF_8);
   final byte[] valBytes = Bytes.toArray(Arrays.asList(value));
   return ByteBuffer.allocate(1 + aggBytes.length + valBytes.length)
       .put(CACHE_KEY)

View File

@@ -20,6 +20,7 @@
 package io.druid.query.groupby.having;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import com.google.common.primitives.Bytes;
 import io.druid.data.input.Row;
@@ -69,7 +70,7 @@ public class LessThanHavingSpec implements HavingSpec
 @Override
 public byte[] getCacheKey()
 {
-  final byte[] aggBytes = aggregationName.getBytes();
+  final byte[] aggBytes = aggregationName.getBytes(Charsets.UTF_8);
   final byte[] valBytes = Bytes.toArray(Arrays.asList(value));
   return ByteBuffer.allocate(1 + aggBytes.length + valBytes.length)
       .put(CACHE_KEY)

View File

@@ -21,6 +21,7 @@ package io.druid.query.groupby.orderby;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableMap;
@@ -182,8 +183,8 @@ public class OrderByColumnSpec
 public byte[] getCacheKey()
 {
-  final byte[] dimensionBytes = dimension.getBytes();
-  final byte[] directionBytes = direction.name().getBytes();
+  final byte[] dimensionBytes = dimension.getBytes(Charsets.UTF_8);
+  final byte[] directionBytes = direction.name().getBytes(Charsets.UTF_8);
   return ByteBuffer.allocate(dimensionBytes.length + directionBytes.length)
       .put(dimensionBytes)

View File

@@ -20,6 +20,7 @@
 package io.druid.query.search;
 import com.fasterxml.jackson.core.type.TypeReference;
+import com.google.common.base.Charsets;
 import com.google.common.base.Function;
 import com.google.common.base.Functions;
 import com.google.common.base.Joiner;
@@ -172,7 +173,7 @@ public class SearchQueryQueryToolChest extends QueryToolChest<Result<SearchResul
 int dimensionsBytesSize = 0;
 int index = 0;
 for (String dimension : dimensions) {
-  dimensionsBytes[index] = dimension.getBytes();
+  dimensionsBytes[index] = dimension.getBytes(Charsets.UTF_8);
   dimensionsBytesSize += dimensionsBytes[index].length;
   ++index;
 }

View File

@@ -21,6 +21,7 @@ package io.druid.query.search.search;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import com.google.common.base.Function;
 import com.google.common.collect.Lists;
@@ -77,7 +78,7 @@ public class FragmentSearchQuerySpec implements SearchQuerySpec
 int valuesBytesSize = 0;
 int index = 0;
 for (String value : values) {
-  valuesBytes[index] = value.getBytes();
+  valuesBytes[index] = value.getBytes(Charsets.UTF_8);
   valuesBytesSize += valuesBytes[index].length;
   ++index;
 }

View File

@@ -21,6 +21,7 @@ package io.druid.query.search.search;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import java.nio.ByteBuffer;
@@ -58,7 +59,7 @@ public class InsensitiveContainsSearchQuerySpec implements SearchQuerySpec
 @Override
 public byte[] getCacheKey()
 {
-  byte[] valueBytes = value.getBytes();
+  byte[] valueBytes = value.getBytes(Charsets.UTF_8);
   return ByteBuffer.allocate(1 + valueBytes.length)
       .put(CACHE_TYPE_ID)

View File

@@ -21,6 +21,7 @@ package io.druid.query.select;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import com.google.common.primitives.Ints;
 import java.nio.ByteBuffer;
@@ -65,7 +66,7 @@ public class PagingSpec
 int pagingKeysSize = 0;
 int pagingValuesSize = 0;
 for (Map.Entry<String, Integer> entry : pagingIdentifiers.entrySet()) {
-  pagingKeys[index] = entry.getKey().getBytes();
+  pagingKeys[index] = entry.getKey().getBytes(Charsets.UTF_8);
   pagingValues[index] = ByteBuffer.allocate(Ints.BYTES).putInt(entry.getValue()).array();
   pagingKeysSize += pagingKeys[index].length;
   pagingValuesSize += Ints.BYTES;

View File

@@ -21,6 +21,7 @@ package io.druid.query.select;
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Charsets;
 import com.google.common.base.Function;
 import com.google.common.base.Functions;
 import com.google.common.base.Joiner;
@@ -173,7 +174,7 @@ public class SelectQueryQueryToolChest extends QueryToolChest<Result<SelectResul
 int dimensionsBytesSize = 0;
 int index = 0;
 for (String dimension : dimensions) {
-  dimensionsBytes[index] = dimension.getBytes();
+  dimensionsBytes[index] = dimension.getBytes(Charsets.UTF_8);
   dimensionsBytesSize += dimensionsBytes[index].length;
   ++index;
 }
@@ -187,7 +188,7 @@ public class SelectQueryQueryToolChest extends QueryToolChest<Result<SelectResul
 int metricBytesSize = 0;
 index = 0;
 for (String metric : metrics) {
-  metricBytes[index] = metric.getBytes();
+  metricBytes[index] = metric.getBytes(Charsets.UTF_8);
   metricBytesSize += metricBytes[index].length;
   ++index;
 }

View File

@@ -21,6 +21,7 @@ package io.druid.firehose.rabbitmq;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Charsets;
 import com.metamx.common.logger.Logger;
 import com.rabbitmq.client.AMQP;
 import com.rabbitmq.client.Channel;
@@ -227,7 +228,7 @@ public class RabbitMQFirehoseFactory implements FirehoseFactory<StringInputRowPa
   return null;
 }
-return stringParser.parse(new String(delivery.getBody()));
+return stringParser.parse(new String(delivery.getBody(), Charsets.UTF_8));
 }
 @Override

View File

@@ -20,6 +20,7 @@
 package io.druid.client;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Charsets;
 import com.google.common.base.Throwables;
 import com.google.common.collect.Lists;
 import io.druid.client.cache.Cache;
@@ -39,7 +40,7 @@ public class CacheUtil
 )
 {
   final Interval segmentQueryInterval = descriptor.getInterval();
-  final byte[] versionBytes = descriptor.getVersion().getBytes();
+  final byte[] versionBytes = descriptor.getVersion().getBytes(Charsets.UTF_8);
   return new Cache.NamedKey(
       segmentIdentifier, ByteBuffer

View File

@@ -27,8 +27,8 @@ import java.util.List;
 public class CacheConfig
 {
-  public static String USE_CACHE = "useCache";
-  public static String POPULATE_CACHE = "populateCache";
+  public static final String USE_CACHE = "useCache";
+  public static final String POPULATE_CACHE = "populateCache";
   @JsonProperty
   private boolean useCache = true;

View File

@@ -34,7 +34,7 @@ public class HighestPriorityTierSelectorStrategy extends AbstractTierSelectorStr
 @Override
 public int compare(Integer o1, Integer o2)
 {
-  return -Ints.compare(o1, o2);
+  return Ints.compare(o2, o1);
 }
 };

View File

@@ -20,6 +20,7 @@
 package io.druid.initialization;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Charsets;
 import com.google.common.base.Throwables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
@@ -74,6 +75,7 @@ import org.eclipse.aether.util.filter.DependencyFilterUtils;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.io.PrintStream;
+import java.io.UnsupportedEncodingException;
 import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -294,13 +296,17 @@ public class Initialization
 }
 }
-          )
+          , false, Charsets.UTF_8.name())
 );
 return new DefaultTeslaAether(
     config.getLocalRepository(),
     remoteRepositories.toArray(new Repository[remoteRepositories.size()])
 );
 }
+catch(UnsupportedEncodingException e) {
+  // should never happen
+  throw new IllegalStateException(e);
+}
 finally {
   System.setOut(oldOut);
 }
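Background for this hunk: the three-argument PrintStream constructor takes a charset name and declares the checked UnsupportedEncodingException, even though UTF-8 support is mandatory on every conforming JVM. Hence the new catch block that rethrows as an unchecked IllegalStateException. The pattern in isolation, as a hedged sketch (the class and method names are illustrative):

import com.google.common.base.Charsets;
import java.io.OutputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;

class Utf8PrintStreamSketch
{
  static PrintStream wrap(OutputStream out)
  {
    try {
      return new PrintStream(out, false, Charsets.UTF_8.name());
    }
    catch (UnsupportedEncodingException e) {
      throw new IllegalStateException(e); // unreachable: UTF-8 is always available
    }
  }
}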

View File

@@ -384,7 +384,7 @@ public class LoadQueuePeon
 }
 }
-private class SegmentHolder
+private static class SegmentHolder
 {
   private final DataSegment segment;
   private final DataSegmentChangeRequest changeRequest;

View File

@@ -74,7 +74,7 @@ public class DBResource
 new Function<DruidDataSource, String>()
 {
   @Override
-  public String apply(@Nullable DruidDataSource dataSource)
+  public String apply(DruidDataSource dataSource)
   {
     return dataSource.getName();
   }
@@ -126,7 +126,7 @@ public class DBResource
 new Function<DataSegment, Object>()
 {
   @Override
-  public Object apply(@Nullable DataSegment segment)
+  public Object apply(DataSegment segment)
   {
     return segment.getIdentifier();
   }

View File

@@ -261,7 +261,7 @@ public class DatasourcesResource
 final DruidDataSource dataSource = getDataSource(dataSourceName.toLowerCase());
 final Interval theInterval = new Interval(interval.replace("_", "/"));
-if (dataSource == null || interval == null) {
+if (dataSource == null) {
   return Response.noContent().build();
 }
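One line of reasoning behind this simplification: the interval == null half of the old condition was unreachable, because the preceding statement already calls interval.replace("_", "/") and would have thrown a NullPointerException first. Annotated:

final Interval theInterval = new Interval(interval.replace("_", "/")); // NPE here if interval is null
if (dataSource == null) { // so only the dataSource check can still fire
  return Response.noContent().build();
}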

View File

@@ -158,7 +158,7 @@ public class ServersResource
 new Function<DataSegment, String>()
 {
   @Override
-  public String apply(@Nullable DataSegment segment)
+  public String apply(DataSegment segment)
   {
     return segment.getIdentifier();
   }

View File

@@ -125,7 +125,7 @@ public class EmittingRequestLogger implements RequestLogger
 }
 }
-private class RequestLogEventBuilder implements ServiceEventBuilder
+private static class RequestLogEventBuilder implements ServiceEventBuilder
 {
   private final String feed;
   private final RequestLogLine requestLogLine;

View File

@@ -20,6 +20,7 @@
 package io.druid.server.log;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Charsets;
 import com.google.common.base.Throwables;
 import com.metamx.common.concurrent.ScheduledExecutors;
 import com.metamx.common.guava.CloseQuietly;
@@ -31,8 +32,9 @@ import org.joda.time.Duration;
 import org.joda.time.MutableDateTime;
 import java.io.File;
-import java.io.FileWriter;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.OutputStreamWriter;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ScheduledExecutorService;
@@ -47,7 +49,7 @@ public class FileRequestLogger implements RequestLogger
 private final Object lock = new Object();
 private volatile DateTime currentDay;
-private volatile FileWriter fileWriter;
+private volatile OutputStreamWriter fileWriter;
 public FileRequestLogger(ObjectMapper objectMapper, ScheduledExecutorService exec, File baseDir)
 {
@@ -66,7 +68,10 @@ public class FileRequestLogger implements RequestLogger
 mutableDateTime.setMillisOfDay(0);
 currentDay = mutableDateTime.toDateTime();
-fileWriter = new FileWriter(new File(baseDir, currentDay.toString("yyyy-MM-dd'.log'")), true);
+fileWriter = new OutputStreamWriter(
+    new FileOutputStream(new File(baseDir, currentDay.toString("yyyy-MM-dd'.log'")), true),
+    Charsets.UTF_8
+);
 long nextDay = currentDay.plusDays(1).getMillis();
 Duration delay = new Duration(nextDay - new DateTime().getMillis());
@@ -84,7 +89,10 @@ public class FileRequestLogger implements RequestLogger
 try {
   synchronized (lock) {
     CloseQuietly.close(fileWriter);
-    fileWriter = new FileWriter(new File(baseDir, currentDay.toString()), true);
+    fileWriter = new OutputStreamWriter(
+        new FileOutputStream(new File(baseDir, currentDay.toString()), true),
+        Charsets.UTF_8
+    );
   }
 }
 catch (Exception e) {
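The writer swap in this file exists because FileWriter (before Java 11) offers no way to specify a charset; it always encodes with the platform default. Wrapping a FileOutputStream in an OutputStreamWriter keeps the append behavior while pinning the encoding. A minimal sketch, with an illustrative class name:

import com.google.common.base.Charsets;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;

class LogWriterSketch
{
  static Writer openUtf8Appender(File logFile) throws IOException
  {
    // new FileWriter(logFile, true) would also append, but only in the platform-default encoding.
    return new OutputStreamWriter(new FileOutputStream(logFile, true), Charsets.UTF_8);
  }
}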

View File

@@ -145,7 +145,7 @@ public class SQLRunner
 final URLConnection urlConnection = url.openConnection();
 urlConnection.addRequestProperty("content-type", "application/json");
 urlConnection.getOutputStream().write(queryStr.getBytes(Charsets.UTF_8));
-BufferedReader stdInput = new BufferedReader(new InputStreamReader(urlConnection.getInputStream()));
+BufferedReader stdInput = new BufferedReader(new InputStreamReader(urlConnection.getInputStream(), Charsets.UTF_8));
 Object res = objectMapper.readValue(stdInput, typeRef);