mirror of https://github.com/apache/druid.git
Enforce brace formatting with Checkstyle (#4564)
This commit is contained in:
parent 38b03f56b4
commit c0beb78ffd
@@ -21,7 +21,6 @@ package io.druid.data.input;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
import io.druid.java.util.common.parsers.ParseException;
import org.joda.time.DateTime;
@@ -35,13 +34,11 @@ import java.util.regex.Pattern;
 */
public class MapBasedRow implements Row
{
private static final Function<Object, String> TO_STRING_INCLUDING_NULL = String::valueOf;
private static final Pattern LONG_PAT = Pattern.compile("[-|+]?\\d+");

private final DateTime timestamp;
private final Map<String, Object> event;

private static final Pattern LONG_PAT = Pattern.compile("[-|+]?\\d+");

@JsonCreator
public MapBasedRow(
@JsonProperty("timestamp") DateTime timestamp,
@@ -88,9 +85,7 @@ public class MapBasedRow implements Row
return Collections.emptyList();
} else if (dimValue instanceof List) {
// guava's toString function fails on null objects, so please do not use it
return Lists.transform(
(List) dimValue,
TO_STRING_INCLUDING_NULL);
return Lists.transform((List) dimValue, String::valueOf);
} else {
return Collections.singletonList(String.valueOf(dimValue));
}
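The String::valueOf substitution above is about null handling, as the in-code comment warns: Guava's toString function dereferences its argument, while String.valueOf maps null to the string "null". A minimal standalone sketch of the difference (the demo class is ours, not part of this commit):

import com.google.common.base.Functions;

public class ToStringNullDemo
{
  public static void main(String[] args)
  {
    Object dimValue = null;
    // String.valueOf maps a null reference to the four-character string "null".
    System.out.println(String.valueOf(dimValue)); // prints "null"
    // Guava's toString function calls toString() on its argument, so a null
    // list element throws NullPointerException here.
    System.out.println(Functions.toStringFunction().apply(dimValue)); // throws NPE
  }
}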

@@ -77,7 +77,10 @@ public abstract class DimensionSchema
SORTED_SET,
ARRAY {
@Override
public boolean needSorting() { return false;}
public boolean needSorting()
{
return false;
}
};

public boolean needSorting()

@@ -222,12 +222,10 @@ public abstract class PrefetchableTextFilesFirehoseFactory<ObjectType>
{
if ((fetchFuture == null || fetchFuture.isDone())
&& remainingBytes <= prefetchTriggerBytes) {
fetchFuture = fetchExecutor.submit(
() -> {
fetch();
return null;
}
);
fetchFuture = fetchExecutor.submit(() -> {
fetch();
return null;
});
}
}
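One detail of the compacted submit call above: a block lambda ending in "return null;" cannot be a Runnable, so it resolves to the Callable overload of ExecutorService.submit, and exceptions thrown inside the task are rethrown by Future.get(). A self-contained sketch under assumed names (doWork stands in for fetch):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class SubmitOverloadDemo
{
  public static void main(String[] args) throws Exception
  {
    ExecutorService pool = Executors.newSingleThreadExecutor();
    // A lambda body that returns a value is not void-compatible, so this
    // selects submit(Callable<T>) rather than submit(Runnable).
    Future<?> future = pool.submit(() -> {
      doWork();
      return null;
    });
    future.get(); // propagates any exception thrown inside doWork()
    pool.shutdown();
  }

  private static void doWork()
  {
    System.out.println("pretend to prefetch a file");
  }
}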

@@ -155,7 +155,8 @@ public class TimestampSpec

//simple merge strategy on timestampSpec that checks if all are equal or else
//returns null. this can be improved in future but is good enough for most use-cases.
public static TimestampSpec mergeTimestampSpec(List<TimestampSpec> toMerge) {
public static TimestampSpec mergeTimestampSpec(List<TimestampSpec> toMerge)
{
if (toMerge == null || toMerge.size() == 0) {
return null;
}

@@ -146,11 +146,9 @@ public class JsonConfigProvider<T> implements Provider<Supplier<T>>

if (bindKey.getAnnotationType() != null) {
supplierKey = Key.get(supType, bindKey.getAnnotationType());
}
else if (bindKey.getAnnotation() != null) {
} else if (bindKey.getAnnotation() != null) {
supplierKey = Key.get(supType, bindKey.getAnnotation());
}
else {
} else {
supplierKey = Key.get(supType);
}

@@ -112,7 +112,7 @@ public class JsonConfigurator
List<String> messages = Lists.newArrayList();

for (ConstraintViolation<T> violation : violations) {
String path = "";
StringBuilder path = new StringBuilder();
try {
Class<?> beanClazz = violation.getRootBeanClass();
final Iterator<Path.Node> iter = violation.getPropertyPath().iterator();
@@ -123,18 +123,17 @@ public class JsonConfigurator
final Field theField = beanClazz.getDeclaredField(fieldName);

if (theField.getAnnotation(JacksonInject.class) != null) {
path = StringUtils.format(" -- Injected field[%s] not bound!?", fieldName);
path = new StringBuilder(StringUtils.format(" -- Injected field[%s] not bound!?", fieldName));
break;
}

JsonProperty annotation = theField.getAnnotation(JsonProperty.class);
final boolean noAnnotationValue = annotation == null || Strings.isNullOrEmpty(annotation.value());
final String pathPart = noAnnotationValue ? fieldName : annotation.value();
if (path.isEmpty()) {
path += pathPart;
}
else {
path += "." + pathPart;
if (path.length() == 0) {
path.append(pathPart);
} else {
path.append(".").append(pathPart);
}
}
}
@@ -143,7 +142,7 @@ public class JsonConfigurator
throw Throwables.propagate(e);
}

messages.add(StringUtils.format("%s - %s", path, violation.getMessage()));
messages.add(StringUtils.format("%s - %s", path.toString(), violation.getMessage()));
}

throw new ProvisionException(
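The path accumulation above also switches from String "+=" to StringBuilder: each "+=" on a String re-copies the accumulated prefix, so building a dotted path in a loop is quadratic, while append works in place. A small sketch of the same pattern (the sample parts are ours):

public class PathBuilderDemo
{
  public static void main(String[] args)
  {
    // StringBuilder appends in place and materializes the String once at the end.
    StringBuilder path = new StringBuilder();
    for (String pathPart : new String[]{"druid", "emitter", "http"}) {
      if (path.length() == 0) {
        path.append(pathPart);
      } else {
        path.append(".").append(pathPart);
      }
    }
    System.out.println(path); // prints "druid.emitter.http"
  }
}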

@@ -146,7 +146,8 @@ public class LifecycleModule implements Module
final Key<Set<KeyHolder>> keyHolderKey = Key.get(new TypeLiteral<Set<KeyHolder>>(){}, Names.named("lifecycle"));
final Set<KeyHolder> eagerClasses = injector.getInstance(keyHolderKey);

Lifecycle lifecycle = new Lifecycle(){
Lifecycle lifecycle = new Lifecycle()
{
@Override
public void start() throws Exception
{

@@ -71,8 +71,7 @@ public class LifecycleScope implements Scope
synchronized (instances) {
if (lifecycle == null) {
instances.add(retVal);
}
else {
} else {
try {
lifecycle.addMaybeStartManagedInstance(retVal, stage);
}

@@ -123,13 +123,11 @@ public class PolyBind
return MapBinder.newMapBinder(
binder, TypeLiteral.get(String.class), interfaceType, interfaceKey.getAnnotation()
);
}
else if (interfaceKey.getAnnotationType() != null) {
} else if (interfaceKey.getAnnotationType() != null) {
return MapBinder.newMapBinder(
binder, TypeLiteral.get(String.class), interfaceType, interfaceKey.getAnnotationType()
);
}
else {
} else {
return MapBinder.newMapBinder(binder, TypeLiteral.get(String.class), interfaceType);
}
}
@@ -177,11 +175,9 @@ public class PolyBind
final Map<String, Provider<T>> implsMap;
if (key.getAnnotation() != null) {
implsMap = (Map<String, Provider<T>>) injector.getInstance(Key.get(mapType, key.getAnnotation()));
}
else if (key.getAnnotationType() != null) {
} else if (key.getAnnotationType() != null) {
implsMap = (Map<String, Provider<T>>) injector.getInstance(Key.get(mapType, key.getAnnotation()));
}
else {
} else {
implsMap = (Map<String, Provider<T>>) injector.getInstance(Key.get(mapType));
}

@@ -40,10 +40,14 @@ public interface DataSegmentPusher
DataSegment push(File file, DataSegment segment) throws IOException;
//use map instead of LoadSpec class to avoid dependency pollution.
Map<String, Object> makeLoadSpec(URI finalIndexZipFilePath);
default String getStorageDir(DataSegment dataSegment) {

default String getStorageDir(DataSegment dataSegment)
{
return getDefaultStorageDir(dataSegment);
}
default String makeIndexPathName(DataSegment dataSegment, String indexName) {

default String makeIndexPathName(DataSegment dataSegment, String indexName)
{
return StringUtils.format("./%s/%s", getStorageDir(dataSegment), indexName);
}

@@ -60,7 +64,8 @@ public interface DataSegmentPusher
// If above format is ever changed, make sure to change it appropriately in other places
// e.g. HDFSDataSegmentKiller uses this information to clean the version, interval and dataSource directories
// on segment deletion if segment being deleted was the only segment
static String getDefaultStorageDir(DataSegment segment) {
static String getDefaultStorageDir(DataSegment segment)
{
return JOINER.join(
segment.getDataSource(),
StringUtils.format("%s_%s", segment.getInterval().getStart(), segment.getInterval().getEnd()),

@@ -37,12 +37,17 @@ public interface LoadSpec
public LoadSpecResult loadSegment(File destDir) throws SegmentLoadingException;

// Hold interesting data about the results of the segment load
public static class LoadSpecResult{
public static class LoadSpecResult
{
private final long size;
public LoadSpecResult(long size){

public LoadSpecResult(long size)
{
this.size = size;
}
public long getSize(){

public long getSize()
{
return this.size;
}
}

@@ -95,7 +95,8 @@ public class DataSegmentUtils
version,
trail
);
} catch (IllegalArgumentException e) {
}
catch (IllegalArgumentException e) {
return null;
}
}

@@ -35,12 +35,18 @@ public class NoneShardSpec implements ShardSpec
private final static NoneShardSpec INSTANCE = new NoneShardSpec();

@JsonCreator
public static NoneShardSpec instance() { return INSTANCE; }
public static NoneShardSpec instance()
{
return INSTANCE;
}

/**
* @deprecated use {@link #instance()} instead
*/
@Deprecated
// Use NoneShardSpec.instance() instead
public NoneShardSpec(){

public NoneShardSpec()
{
// empty
}

@Override

@@ -23,10 +23,8 @@ package io.druid.utils;
 */
public class Runnables
{
public static Runnable getNoopRunnable(){
return new Runnable(){
@Override
public void run(){}
};
public static Runnable getNoopRunnable()
{
return () -> {};
}
}

@@ -19,19 +19,18 @@

package io.druid.data.input.impl;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import io.druid.TestObjectMapper;
import org.junit.Assert;
import org.junit.Test;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;

import org.junit.Assert;
import org.junit.Test;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;

public class JSONParseSpecTest {
public class JSONParseSpecTest
{
private final ObjectMapper jsonMapper = new TestObjectMapper();

@Test

@@ -44,5 +44,8 @@ public class AWSCredentialsConfig
return secretKey;
}

public String getFileSessionCredentials() { return fileSessionCredentials; }
public String getFileSessionCredentials()
{
return fileSessionCredentials;
}
}

@@ -27,7 +27,8 @@ import com.amazonaws.auth.profile.ProfileCredentialsProvider;

public class AWSCredentialsUtils
{
public static AWSCredentialsProviderChain defaultAWSCredentialsProviderChain(final AWSCredentialsConfig config) {
public static AWSCredentialsProviderChain defaultAWSCredentialsProviderChain(final AWSCredentialsConfig config)
{
return new AWSCredentialsProviderChain(
new ConfigDrivenAwsCredentialsConfigProvider(config),
new LazyFileSessionCredentialsProvider(config),

@@ -28,7 +28,8 @@ public class ConfigDrivenAwsCredentialsConfigProvider implements AWSCredentialsP
{
private AWSCredentialsConfig config;

public ConfigDrivenAwsCredentialsConfigProvider(AWSCredentialsConfig config) {
public ConfigDrivenAwsCredentialsConfigProvider(AWSCredentialsConfig config)
{
this.config = config;
}

@@ -36,14 +37,17 @@ public class ConfigDrivenAwsCredentialsConfigProvider implements AWSCredentialsP
public AWSCredentials getCredentials()
{
if (!Strings.isNullOrEmpty(config.getAccessKey()) && !Strings.isNullOrEmpty(config.getSecretKey())) {
return new AWSCredentials() {
return new AWSCredentials()
{
@Override
public String getAWSAccessKeyId() {
public String getAWSAccessKeyId()
{
return config.getAccessKey();
}

@Override
public String getAWSSecretKey() {
public String getAWSSecretKey()
{
return config.getSecretKey();
}
};

@@ -32,48 +32,52 @@ import java.util.Properties;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class FileSessionCredentialsProvider implements AWSCredentialsProvider {
public class FileSessionCredentialsProvider implements AWSCredentialsProvider
{
private final String sessionCredentials;
private volatile String sessionToken;
private volatile String accessKey;
private volatile String secretKey;

private final ScheduledExecutorService scheduler = Execs.scheduledSingleThreaded("FileSessionCredentialsProviderRefresh-%d");
private final ScheduledExecutorService scheduler =
Execs.scheduledSingleThreaded("FileSessionCredentialsProviderRefresh-%d");

public FileSessionCredentialsProvider(String sessionCredentials) {
public FileSessionCredentialsProvider(String sessionCredentials)
{
this.sessionCredentials = sessionCredentials;
refresh();

scheduler.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
refresh();
}
}, 1, 1, TimeUnit.HOURS); // refresh every hour
scheduler.scheduleAtFixedRate(this::refresh, 1, 1, TimeUnit.HOURS); // refresh every hour
}

@Override
public AWSCredentials getCredentials() {
return new AWSSessionCredentials() {
public AWSCredentials getCredentials()
{
return new AWSSessionCredentials()
{
@Override
public String getSessionToken() {
public String getSessionToken()
{
return sessionToken;
}

@Override
public String getAWSAccessKeyId() {
public String getAWSAccessKeyId()
{
return accessKey;
}

@Override
public String getAWSSecretKey() {
public String getAWSSecretKey()
{
return secretKey;
}
};
}

@Override
public void refresh() {
public void refresh()
{
try {
Properties props = new Properties();
InputStream is = new FileInputStream(new File(sessionCredentials));

@@ -83,7 +87,8 @@ public class FileSessionCredentialsProvider implements AWSCredentialsProvider {
sessionToken = props.getProperty("sessionToken");
accessKey = props.getProperty("accessKey");
secretKey = props.getProperty("secretKey");
} catch (IOException e) {
}
catch (IOException e) {
throw new RuntimeException("cannot refresh AWS credentials", e);
}
}
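The anonymous Runnable wrapping refresh() collapses to "this::refresh" above because Runnable is a functional interface and a zero-argument instance method reference matches run() exactly. A minimal sketch of the same shape (class and method names are ours):

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class MethodRefDemo
{
  private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();

  private void refresh()
  {
    System.out.println("refreshed");
  }

  public void start()
  {
    // this::refresh supplies the Runnable; it is equivalent to
    // new Runnable() { public void run() { refresh(); } }.
    scheduler.scheduleAtFixedRate(this::refresh, 1, 1, TimeUnit.HOURS);
  }
}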

@@ -27,11 +27,13 @@ public class LazyFileSessionCredentialsProvider implements AWSCredentialsProvide
private AWSCredentialsConfig config;
private FileSessionCredentialsProvider provider;

public LazyFileSessionCredentialsProvider(AWSCredentialsConfig config) {
public LazyFileSessionCredentialsProvider(AWSCredentialsConfig config)
{
this.config = config;
}

private FileSessionCredentialsProvider getUnderlyingProvider() {
private FileSessionCredentialsProvider getUnderlyingProvider()
{
if (provider == null) {
synchronized (config) {
if (provider == null) {
@@ -49,7 +51,8 @@ public class LazyFileSessionCredentialsProvider implements AWSCredentialsProvide
}

@Override
public void refresh() {
public void refresh()
{
getUnderlyingProvider().refresh();
}
}

@@ -77,7 +77,8 @@ public class BitmapIterationBenchmark
@Param({"1000000"})
public int size;

private BitmapFactory makeFactory() {
private BitmapFactory makeFactory()
{
switch (bitmapAlgo) {
case "bitset":
return new BitSetBitmapFactory();

@@ -111,7 +111,8 @@ public class FlattenJSONBenchmark
return parsed;
}

public static void main(String[] args) throws RunnerException {
public static void main(String[] args) throws RunnerException
{
Options opt = new OptionsBuilder()
.include(FlattenJSONBenchmark.class.getSimpleName())
.warmupIterations(1)
@@ -240,7 +240,7 @@ public class BenchmarkColumnSchema
schema.startInt = startInt;
schema.endInt = endInt;
return schema;
};
}

public static BenchmarkColumnSchema makeEnumeratedSequential(
String name,
@@ -261,7 +261,7 @@ public class BenchmarkColumnSchema
);
schema.enumeratedValues = enumeratedValues;
return schema;
};
}

public static BenchmarkColumnSchema makeDiscreteUniform(
String name,
@@ -284,7 +284,7 @@ public class BenchmarkColumnSchema
schema.startInt = startInt;
schema.endInt = endInt;
return schema;
};
}

public static BenchmarkColumnSchema makeEnumeratedDiscreteUniform(
String name,
@@ -305,7 +305,7 @@ public class BenchmarkColumnSchema
);
schema.enumeratedValues = enumeratedValues;
return schema;
};
}

public static BenchmarkColumnSchema makeContinuousUniform(
String name,
@@ -328,8 +328,7 @@ public class BenchmarkColumnSchema
schema.startDouble = startDouble;
schema.endDouble = endDouble;
return schema;
};

}

public static BenchmarkColumnSchema makeNormal(
String name,
@@ -353,7 +352,7 @@ public class BenchmarkColumnSchema
schema.mean = mean;
schema.standardDeviation = standardDeviation;
return schema;
};
}

public static BenchmarkColumnSchema makeZipf(
String name,
@@ -378,7 +377,7 @@ public class BenchmarkColumnSchema
schema.endInt = endInt;
schema.zipfExponent = zipfExponent;
return schema;
};
}

public static BenchmarkColumnSchema makeEnumeratedZipf(
String name,
@@ -401,8 +400,7 @@ public class BenchmarkColumnSchema
schema.enumeratedValues = enumeratedValues;
schema.zipfExponent = zipfExponent;
return schema;
};

}

public static BenchmarkColumnSchema makeEnumerated(
String name,
@@ -425,5 +423,5 @@ public class BenchmarkColumnSchema
schema.enumeratedValues = enumeratedValues;
schema.enumeratedProbabilities = enumeratedProbabilities;
return schema;
};
}
}

@@ -55,8 +55,8 @@ public class QueryBenchmarkUtil
{
return new IntervalChunkingQueryRunnerDecorator(null, null, null) {
@Override
public <T> QueryRunner<T> decorate(final QueryRunner<T> delegate,
QueryToolChest<T, ? extends Query<T>> toolChest) {
public <T> QueryRunner<T> decorate(final QueryRunner<T> delegate, QueryToolChest<T, ? extends Query<T>> toolChest)
{
return new QueryRunner<T>() {
@Override
public Sequence<T> run(QueryPlus<T> queryPlus, Map<String, Object> responseContext)

@@ -37,7 +37,8 @@ import java.util.Map;
public class BenchmarkDataGeneratorTest
{
@Test
public void testSequential() throws Exception {
public void testSequential() throws Exception
{
List<BenchmarkColumnSchema> schemas = new ArrayList<>();
RowValueTracker tracker = new RowValueTracker();

@@ -86,7 +87,8 @@ public class BenchmarkDataGeneratorTest
}

@Test
public void testDiscreteUniform() throws Exception {
public void testDiscreteUniform() throws Exception
{
List<BenchmarkColumnSchema> schemas = new ArrayList<>();
RowValueTracker tracker = new RowValueTracker();

@@ -150,7 +152,8 @@ public class BenchmarkDataGeneratorTest


@Test
public void testRoundedNormal() throws Exception {
public void testRoundedNormal() throws Exception
{
List<BenchmarkColumnSchema> schemas = new ArrayList<>();
RowValueTracker tracker = new RowValueTracker();

@@ -192,7 +195,8 @@ public class BenchmarkDataGeneratorTest
}

@Test
public void testZipf() throws Exception {
public void testZipf() throws Exception
{
List<BenchmarkColumnSchema> schemas = new ArrayList<>();
RowValueTracker tracker = new RowValueTracker();

@@ -246,7 +250,8 @@ public class BenchmarkDataGeneratorTest
}

@Test
public void testEnumerated() throws Exception {
public void testEnumerated() throws Exception
{
List<BenchmarkColumnSchema> schemas = new ArrayList<>();
RowValueTracker tracker = new RowValueTracker();

@@ -274,7 +279,8 @@ public class BenchmarkDataGeneratorTest
}

@Test
public void testNormal() throws Exception {
public void testNormal() throws Exception
{
List<BenchmarkColumnSchema> schemas = new ArrayList<>();
RowValueTracker tracker = new RowValueTracker();

@@ -316,7 +322,8 @@ public class BenchmarkDataGeneratorTest
}

@Test
public void testRealUniform() throws Exception {
public void testRealUniform() throws Exception
{
List<BenchmarkColumnSchema> schemas = new ArrayList<>();
RowValueTracker tracker = new RowValueTracker();

@@ -356,7 +363,8 @@ public class BenchmarkDataGeneratorTest
}

@Test
public void testIntervalBasedTimeGeneration() throws Exception {
public void testIntervalBasedTimeGeneration() throws Exception
{
List<BenchmarkColumnSchema> schemas = new ArrayList<>();

schemas.add(
@@ -388,11 +396,13 @@ public class BenchmarkDataGeneratorTest
{
private Map<String, Map<Object, Integer>> dimensionMap;

public RowValueTracker() {
public RowValueTracker()
{
dimensionMap = new HashMap<>();
}

public void addRow(InputRow row) {
public void addRow(InputRow row)
{
for (String dim : row.getDimensions()) {
if (dimensionMap.get(dim) == null) {
dimensionMap.put(dim, new HashMap<Object, Integer>());

@@ -29,7 +29,8 @@ import java.util.Map;
public class FlattenJSONBenchmarkUtilTest
{
@Test
public void testOne() throws Exception {
public void testOne() throws Exception
{
FlattenJSONBenchmarkUtil eventGen = new FlattenJSONBenchmarkUtil();

String newEvent = eventGen.generateFlatEvent();
@@ -45,7 +46,8 @@ public class FlattenJSONBenchmarkUtilTest
checkEvent2(event2);
}

public void checkEvent1(Map<String, Object> event) {
public void checkEvent1(Map<String, Object> event)
{
Assert.assertEquals("2015-09-12T12:10:53.155Z", event.get("ts").toString());
Assert.assertEquals("-1170723877", event.get("d1").toString());
Assert.assertEquals("238976084", event.get("d2").toString());
@@ -74,7 +76,8 @@ public class FlattenJSONBenchmarkUtilTest
Assert.assertEquals("1414285347", event.get("ae1[2].e1.d2").toString());
}

public void checkEvent2(Map<String, Object> event2) {
public void checkEvent2(Map<String, Object> event2)
{
Assert.assertEquals("728062074", event2.get("ae1[0].d1").toString());
Assert.assertEquals("1701675101", event2.get("ae1[1].d1").toString());
Assert.assertEquals("1887775139", event2.get("ae1[2].e1.d2").toString());
@@ -37,6 +37,8 @@
<suppress checks="UnusedImports" files="[\\/]target[\\/]generated-test-sources[\\/]" />
<suppress checks="AvoidStarImport" files="[\\/]target[\\/]generated-test-sources[\\/]" />
<suppress checks="FileTabCharacter" files="[\\/]target[\\/]generated-test-sources[\\/]" />
<suppress checks="LeftCurly" files="[\\/]target[\\/]generated-test-sources[\\/]" />
<suppress checks="RightCurly" files="[\\/]target[\\/]generated-test-sources[\\/]" />

<!-- extendedset is a fork of Alessandro Colantonio's CONCISE (COmpressed 'N' Composable Integer SEt) repository and licensed to Metamarkets under a CLA is not true. -->
<suppress checks="Header" files="[\\/]extendedset[\\/]" />

@@ -51,6 +51,23 @@
<module name="RedundantImport"/>
<module name="UnusedImports" />
<module name="NeedBraces"/>
<module name="LeftCurly">
<property name="option" value="nl"/>
<property name="tokens" value="INTERFACE_DEF,CLASS_DEF,ANNOTATION_DEF,ENUM_DEF,CTOR_DEF,METHOD_DEF"/>
</module>
<module name="LeftCurly">
<property name="option" value="eol"/>
<property name="tokens" value="LITERAL_WHILE,LITERAL_TRY,LITERAL_CATCH,LITERAL_FINALLY,LITERAL_SYNCHRONIZED,LITERAL_SWITCH,LITERAL_DO,LITERAL_IF,LITERAL_ELSE,LITERAL_FOR,STATIC_INIT,LAMBDA,ENUM_CONSTANT_DEF"/>
</module>
<module name="RightCurly">
<property name="option" value="same"/>
<property name="tokens" value="LITERAL_IF,LITERAL_ELSE,LITERAL_DO"/>
</module>
<module name="RightCurly">
<property name="option" value="alone"/>
<property name="tokens" value="LITERAL_TRY,LITERAL_CATCH,LITERAL_FINALLY,CLASS_DEF,METHOD_DEF,CTOR_DEF,LITERAL_FOR,LITERAL_WHILE,STATIC_INIT,INSTANCE_INIT"/>
</module>

<module name="Regexp">
<property name="format" value="com\.google\.common\.io\.Closer"/>
<property name="illegalPattern" value="true"/>
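These LeftCurly/RightCurly modules encode the brace placement this whole commit enforces: opening braces of types, constructors, and methods go on their own line ("nl"), control-flow braces stay at end of line ("eol"), "else" shares a line with the closing brace ("same"), and the brace before catch/finally or at the end of a method stands alone. A small illustration of a class that passes those checks (the class itself is ours, not from the repository):

public class BraceStyleExample
{
  public int coerce(String value)
  {
    try {
      if (value == null) {
        return 0;
      } else {
        return Integer.parseInt(value);
      }
    }
    catch (NumberFormatException e) {
      return -1;
    }
  }
}

Assuming the build wires these checks in through the standard maven-checkstyle-plugin (an assumption about this repository's build, not stated in the diff), something like mvn checkstyle:check would then fail on violations.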

@@ -120,7 +120,8 @@ public class DefaultBlockingPool<T> implements BlockingPool<T>
lock.lock();
try {
return objects.isEmpty() ? null : objects.pop();
} finally {
}
finally {
lock.unlock();
}
}
@@ -138,7 +139,8 @@ public class DefaultBlockingPool<T> implements BlockingPool<T>
nanos = notEnough.awaitNanos(nanos);
}
return objects.pop();
} finally {
}
finally {
lock.unlock();
}
}
@@ -152,7 +154,8 @@ public class DefaultBlockingPool<T> implements BlockingPool<T>
notEnough.await();
}
return objects.pop();
} finally {
}
finally {
lock.unlock();
}
}
@@ -211,7 +214,8 @@ public class DefaultBlockingPool<T> implements BlockingPool<T>
}
return list;
}
} finally {
}
finally {
lock.unlock();
}
}
@@ -233,7 +237,8 @@ public class DefaultBlockingPool<T> implements BlockingPool<T>
list.add(objects.pop());
}
return list;
} finally {
}
finally {
lock.unlock();
}
}
@@ -251,7 +256,8 @@ public class DefaultBlockingPool<T> implements BlockingPool<T>
list.add(objects.pop());
}
return list;
} finally {
}
finally {
lock.unlock();
}
}
@@ -272,7 +278,8 @@ public class DefaultBlockingPool<T> implements BlockingPool<T>
} else {
throw new ISE("Cannot exceed pre-configured maximum size");
}
} finally {
}
finally {
lock.unlock();
}
}
@@ -290,7 +297,8 @@ public class DefaultBlockingPool<T> implements BlockingPool<T>
} else {
throw new ISE("Cannot exceed pre-configured maximum size");
}
} finally {
}
finally {
lock.unlock();
}
}
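Every hunk in this file only moves "finally" onto its own line; the underlying idiom — unlock in finally so the lock is released on every exit path, including exceptions — is unchanged. A minimal sketch of that idiom in the newly enforced style (the class is ours, not the pool itself):

import java.util.ArrayDeque;
import java.util.concurrent.locks.ReentrantLock;

public class LockedStack<T>
{
  private final ReentrantLock lock = new ReentrantLock();
  private final ArrayDeque<T> objects = new ArrayDeque<>();

  public T poll()
  {
    lock.lock();
    try {
      // the lock is held for the whole critical section...
      return objects.isEmpty() ? null : objects.pop();
    }
    finally {
      // ...and released on every path out, including exceptions
      lock.unlock();
    }
  }
}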

@@ -119,8 +119,7 @@ public class OrderedMergeSequence<T> implements Sequence<T>
throw Throwables.propagate(e);
}
return null;
}
else {
} else {
yield();
}

@@ -144,19 +143,16 @@ public class OrderedMergeSequence<T> implements Sequence<T>
Yielder<T> yielder;
if (oldDudeAtCrosswalk.isDone()) {
yielder = pQueue.remove();
}
else if (pQueue.isEmpty()) {
} else if (pQueue.isEmpty()) {
yielder = oldDudeAtCrosswalk.get();
oldDudeAtCrosswalk = oldDudeAtCrosswalk.next(null);
}
else {
} else {
Yielder<T> queueYielder = pQueue.peek();
Yielder<T> iterYielder = oldDudeAtCrosswalk.get();

if (ordering.compare(queueYielder.get(), iterYielder.get()) <= 0) {
yielder = pQueue.remove();
}
else {
} else {
yielder = oldDudeAtCrosswalk.get();
oldDudeAtCrosswalk = oldDudeAtCrosswalk.next(null);
}
@@ -171,8 +167,7 @@ public class OrderedMergeSequence<T> implements Sequence<T>
catch (IOException e) {
throw Throwables.propagate(e);
}
}
else {
} else {
pQueue.add(yielder);
}
}

@@ -118,7 +118,8 @@ public class StupidPool<T> implements NonBlockingPool<T>
}

@VisibleForTesting
long poolSize() {
long poolSize()
{
return poolSize.get();
}

@@ -150,7 +150,8 @@ public class Log4jShutdown implements ShutdownCallbackRegistry, LifeCycle
@GuardedBy("this")
private State current;

private SynchronizedStateHolder(State initial) {
private SynchronizedStateHolder(State initial)
{
current = initial;
}

@@ -107,8 +107,7 @@ public class CombiningSequence<T> implements Sequence<T>
} else {
finalYielder = Yielders.done(null, yielder);
}
}
else {
} else {
return Yielders.done(combiningAccumulator.getRetVal(), yielder);
}
}

@@ -19,10 +19,10 @@

package io.druid.common.utils;

import java.util.Properties;

import io.druid.java.util.common.ISE;

import java.util.Properties;

/**
 */
public class PropUtils
@@ -50,8 +50,7 @@ public class PropUtils
if (retVal == null) {
if (defaultValue == null) {
throw new ISE("Property[%s] not specified.", property);
}
else {
} else {
return defaultValue;
}
}

@@ -19,12 +19,12 @@

package io.druid.common.utils;

import io.druid.java.util.common.ISE;

import java.io.IOException;
import java.net.ServerSocket;
import java.util.Random;

import io.druid.java.util.common.ISE;

/**
 */
public class SocketUtil
@@ -38,7 +38,8 @@ public class SocketUtil
return findOpenPortFrom(startPort);
}

public static int findOpenPortFrom(int startPort) {
public static int findOpenPortFrom(int startPort)
{
int currPort = startPort;

while (currPort < 0xffff) {

@@ -85,7 +85,8 @@ public class DruidSecondaryModule implements Module
return smileMapper;
}

private void setupJackson(Injector injector, final ObjectMapper mapper) {
private void setupJackson(Injector injector, final ObjectMapper mapper)
{
final GuiceAnnotationIntrospector guiceIntrospector = new GuiceAnnotationIntrospector();

mapper.setInjectableValues(new GuiceInjectableValues(injector));

@@ -33,11 +33,17 @@ public class GuiceInjectableValues extends InjectableValues
{
private final Injector injector;

public GuiceInjectableValues(Injector injector) {this.injector = injector;}
public GuiceInjectableValues(Injector injector)
{
this.injector = injector;
}

@Override
public Object findInjectableValue(
Object valueId, DeserializationContext ctxt, BeanProperty forProperty, Object beanInstance
Object valueId,
DeserializationContext ctxt,
BeanProperty forProperty,
Object beanInstance
)
{
// From the docs: "Object that identifies value to inject; may be a simple name or more complex identifier object,

@@ -85,8 +85,7 @@ public class JacksonConfigProvider<T> implements Provider<Supplier<T>>
{
if (clazz == null) {
return DSuppliers.of(configManager.watch(key, typeRef, defaultVal));
}
else {
} else {
return DSuppliers.of(configManager.watch(key, clazz, defaultVal));
}
}

@@ -46,7 +46,8 @@ public class DefaultPasswordProvider implements PasswordProvider
}

@Override
public String toString() {
public String toString()
{
return this.getClass().getCanonicalName();
}


@@ -45,20 +45,16 @@ public class ReferenceCountingResourceHolderTest
final ReferenceCountingResourceHolder<Closeable> resourceHolder = makeReleasingHandler(released);
List<Thread> threads = new ArrayList<>();
for (int i = 0; i < 100; i++) {
Thread thread = new Thread() {
@Override
public void run()
{
try (Releaser r = resourceHolder.increment()) {
try {
Thread.sleep(1);
}
catch (InterruptedException e) {
throw new RuntimeException(e);
}
Thread thread = new Thread(() -> {
try (Releaser r = resourceHolder.increment()) {
try {
Thread.sleep(1);
}
catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
};
});
thread.start();
threads.add(thread);
}

@@ -30,9 +30,11 @@ public class VMUtilsTest
try {
long maxMemory = VMUtils.getMaxDirectMemory();
Assert.assertTrue((maxMemory > 0));
} catch (UnsupportedOperationException expected) {
}
catch (UnsupportedOperationException expected) {
Assert.assertTrue(true);
} catch (RuntimeException expected) {
}
catch (RuntimeException expected) {
Assert.assertTrue(true);
}
}

@@ -29,26 +29,30 @@ public class DefaultPasswordProviderTest
private static final ObjectMapper jsonMapper = new ObjectMapper();

@Test
public void testExplicitConstruction() {
public void testExplicitConstruction()
{
DefaultPasswordProvider pp = new DefaultPasswordProvider(pwd);
Assert.assertEquals(pwd, pp.getPassword());
}

@Test
public void testFromStringConstruction() {
public void testFromStringConstruction()
{
DefaultPasswordProvider pp = DefaultPasswordProvider.fromString(pwd);
Assert.assertEquals(pwd, pp.getPassword());
}

@Test
public void testDeserializationFromJsonString() throws Exception {
public void testDeserializationFromJsonString() throws Exception
{
PasswordProvider pp = jsonMapper.readValue("\"" + pwd + "\"",
PasswordProvider.class);
Assert.assertEquals(pwd, pp.getPassword());
}

@Test
public void testDeserializationFromJson() throws Exception {
public void testDeserializationFromJson() throws Exception
{
PasswordProvider pp = jsonMapper.readValue(
"{\"type\": \"default\", \"password\": \"" + pwd + "\"}",
PasswordProvider.class);

@@ -716,16 +716,14 @@ public class ConciseSet extends AbstractIntSet implements Serializable
int minCount = Math.min(thisItr.count, otherItr.count);
res.appendFill(minCount, operator.combineLiterals(thisItr.word, otherItr.word));
//noinspection NonShortCircuitBooleanExpression
if (!thisItr.prepareNext(minCount) | !otherItr.prepareNext(minCount)) // NOT ||
{
if (!thisItr.prepareNext(minCount) | /* NOT || */ !otherItr.prepareNext(minCount)) {
break;
}
} else {
res.appendLiteral(operator.combineLiterals(thisItr.toLiteral(), otherItr.word));
thisItr.word--;
//noinspection NonShortCircuitBooleanExpression
if (!thisItr.prepareNext(1) | !otherItr.prepareNext()) // do NOT use "||"
{
if (!thisItr.prepareNext(1) | /* do NOT use "||" */ !otherItr.prepareNext()) {
break;
}
}
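The inline "/* NOT || */" comments these hunks introduce preserve a deliberate choice: "|" evaluates both operands, so both word iterators advance on every pass, whereas "||" would skip prepareNext() on the right iterator whenever the left one reports exhaustion, leaving it stuck on a stale word. A tiny self-contained demonstration of the operator difference (not from the repository):

public class NonShortCircuitDemo
{
  private static int calls = 0;

  private static boolean advance()
  {
    calls++;
    return false; // pretend this iterator is exhausted
  }

  public static void main(String[] args)
  {
    // With "|" both operands run, mirroring how ConciseSet must advance
    // both iterators; with "||" the second call is skipped.
    if (!advance() | !advance()) {
      System.out.println("calls with |:  " + calls); // 2
    }
    calls = 0;
    if (!advance() || !advance()) {
      System.out.println("calls with ||: " + calls); // 1
    }
  }
}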
@@ -733,15 +731,13 @@ public class ConciseSet extends AbstractIntSet implements Serializable
res.appendLiteral(operator.combineLiterals(thisItr.word, otherItr.toLiteral()));
otherItr.word--;
//noinspection NonShortCircuitBooleanExpression
if (!thisItr.prepareNext() | !otherItr.prepareNext(1)) // do NOT use "||"
{
if (!thisItr.prepareNext() | /* do NOT use "||" */ !otherItr.prepareNext(1)) {
break;
}
} else {
res.appendLiteral(operator.combineLiterals(thisItr.word, otherItr.word));
//noinspection NonShortCircuitBooleanExpression
if (!thisItr.prepareNext() | !otherItr.prepareNext()) // do NOT use "||"
{
if (!thisItr.prepareNext() | /* do NOT use "||" */ !otherItr.prepareNext()) {
break;
}
}
@@ -841,29 +837,25 @@ public class ConciseSet extends AbstractIntSet implements Serializable
if ((ConciseSetUtils.SEQUENCE_BIT & thisItr.word & otherItr.word) != 0) {
res += maxLiteralLengthMultiplication(minCount);
}
if (!thisItr.prepareNext(minCount) | !otherItr.prepareNext(minCount)) // NOT ||
{
if (!thisItr.prepareNext(minCount) | /* NOT || */ !otherItr.prepareNext(minCount)) {
break;
}
} else {
res += getLiteralBitCount(thisItr.toLiteral() & otherItr.word);
thisItr.word--;
if (!thisItr.prepareNext(1) | !otherItr.prepareNext()) // do NOT use "||"
{
if (!thisItr.prepareNext(1) | /* do NOT use "||" */ !otherItr.prepareNext()) {
break;
}
}
} else if (!otherItr.isLiteral) {
res += getLiteralBitCount(thisItr.word & otherItr.toLiteral());
otherItr.word--;
if (!thisItr.prepareNext() | !otherItr.prepareNext(1)) // do NOT use "||"
{
if (!thisItr.prepareNext() | /* do NOT use "||" */ !otherItr.prepareNext(1)) {
break;
}
} else {
res += getLiteralBitCount(thisItr.word & otherItr.word);
if (!thisItr.prepareNext() | !otherItr.prepareNext()) // do NOT use "||"
{
if (!thisItr.prepareNext() | /* do NOT use "||" */ !otherItr.prepareNext()) {
break;
}
}
@@ -937,10 +929,9 @@ public class ConciseSet extends AbstractIntSet implements Serializable
}
} else {
setBitsInCurrentWord = sequenceLength - 1;
if (position < setBitsInCurrentWord)
// check whether the desired set bit is after the
// flipped bit (or after the first block)
{
if (position < setBitsInCurrentWord) {
// check whether the desired set bit is after the
// flipped bit (or after the first block)
return firstSetBitInWord + position + (position < getFlippedBit(w) ? 0 : 1);
}
}
@@ -1132,13 +1123,11 @@ public class ConciseSet extends AbstractIntSet implements Serializable
// complement each word
for (int i = 0; i <= lastWordIndex; i++) {
int w = words[i];
if (isLiteral(w))
// negate the bits and set the most significant bit to 1
{
if (isLiteral(w)) {
// negate the bits and set the most significant bit to 1
words[i] = ConciseSetUtils.ALL_ZEROS_LITERAL | ~w;
} else
// switch the sequence type
{
} else {
// switch the sequence type
words[i] ^= ConciseSetUtils.SEQUENCE_BIT;
}
}
@@ -1724,8 +1713,7 @@ public class ConciseSet extends AbstractIntSet implements Serializable
return true;
}
//noinspection NonShortCircuitBooleanExpression
if (!thisItr.prepareNext(minCount) | !otherItr.prepareNext(minCount)) // NOT ||
{
if (!thisItr.prepareNext(minCount) | /* NOT || */ !otherItr.prepareNext(minCount)) {
return false;
}
} else {
@@ -1734,8 +1722,7 @@ public class ConciseSet extends AbstractIntSet implements Serializable
}
thisItr.word--;
//noinspection NonShortCircuitBooleanExpression
if (!thisItr.prepareNext(1) | !otherItr.prepareNext()) // do NOT use "||"
{
if (!thisItr.prepareNext(1) | /* do NOT use "||" */ !otherItr.prepareNext()) {
return false;
}
}
@@ -1745,8 +1732,7 @@ public class ConciseSet extends AbstractIntSet implements Serializable
}
otherItr.word--;
//noinspection NonShortCircuitBooleanExpression
if (!thisItr.prepareNext() | !otherItr.prepareNext(1)) // do NOT use "||"
{
if (!thisItr.prepareNext() | /* do NOT use "||" */ !otherItr.prepareNext(1)) {
return false;
}
} else {
@@ -1754,8 +1740,7 @@ public class ConciseSet extends AbstractIntSet implements Serializable
return true;
}
//noinspection NonShortCircuitBooleanExpression
if (!thisItr.prepareNext() | !otherItr.prepareNext()) // do NOT use "||"
{
if (!thisItr.prepareNext() | /* do NOT use "||" */ !otherItr.prepareNext()) {
return false;
}
}
@@ -1817,8 +1802,7 @@ public class ConciseSet extends AbstractIntSet implements Serializable
}
}
//noinspection NonShortCircuitBooleanExpression
if (!thisItr.prepareNext(minCount) | !otherItr.prepareNext(minCount)) // NOT ||
{
if (!thisItr.prepareNext(minCount) | /* NOT || */ !otherItr.prepareNext(minCount)) {
return false;
}
} else {
@@ -1828,8 +1812,7 @@ public class ConciseSet extends AbstractIntSet implements Serializable
}
thisItr.word--;
//noinspection NonShortCircuitBooleanExpression
if (!thisItr.prepareNext(1) | !otherItr.prepareNext()) // do NOT use "||"
{
if (!thisItr.prepareNext(1) | /* do NOT use "||" */ !otherItr.prepareNext()) {
return false;
}
}
@@ -1840,8 +1823,7 @@ public class ConciseSet extends AbstractIntSet implements Serializable
}
otherItr.word--;
//noinspection NonShortCircuitBooleanExpression
if (!thisItr.prepareNext() | !otherItr.prepareNext(1)) // do NOT use "||"
{
if (!thisItr.prepareNext() | /* do NOT use "||" */ !otherItr.prepareNext(1)) {
return false;
}
} else {
@@ -1850,8 +1832,7 @@ public class ConciseSet extends AbstractIntSet implements Serializable
return true;
}
//noinspection NonShortCircuitBooleanExpression
if (!thisItr.prepareNext() | !otherItr.prepareNext()) // do NOT use "||"
{
if (!thisItr.prepareNext() | /* do NOT use "||" */ !otherItr.prepareNext()) {
return false;
}
}
@@ -2063,9 +2044,8 @@ public class ConciseSet extends AbstractIntSet implements Serializable
// start comparing from the end, that is at blocks with no
// (un)set bits
if (isZeroSequence(thisWord)) {
if (isOneSequence(otherWord))
// zeros < ones
{
if (isOneSequence(otherWord)) {
// zeros < ones
return -1;
}
// compare two sequences of zeros
@@ -2074,9 +2054,8 @@ public class ConciseSet extends AbstractIntSet implements Serializable
return res < 0 ? -1 : 1;
}
} else {
if (isZeroSequence(otherWord))
// ones > zeros
{
if (isZeroSequence(otherWord)) {
// ones > zeros
return 1;
}
// compare two sequences of ones
@@ -2318,9 +2297,8 @@ public class ConciseSet extends AbstractIntSet implements Serializable
 */
private void writeObject(ObjectOutputStream s) throws IOException
{
if (words != null && lastWordIndex < words.length - 1)
// compact before serializing
{
if (words != null && lastWordIndex < words.length - 1) {
// compact before serializing
words = Arrays.copyOf(words, lastWordIndex + 1);
}
s.defaultWriteObject();
@@ -2562,7 +2540,7 @@ public class ConciseSet extends AbstractIntSet implements Serializable
}
return null;
}
},;
};

/**
* Performs the operation on the given literals

@@ -928,10 +928,9 @@ public class ImmutableConciseSet
}
} else {
setBitsInCurrentWord = sequenceLength - 1;
if (position < setBitsInCurrentWord)
// check whether the desired set bit is after the
// flipped bit (or after the first block)
{
if (position < setBitsInCurrentWord) {
// check whether the desired set bit is after the
// flipped bit (or after the first block)
return firstSetBitInWord + position + (position < ConciseSetUtils.getFlippedBit(w) ? 0 : 1);
}
}
@@ -54,7 +54,8 @@ public class AmbariMetricsEmitterModule implements DruidModule
@Provides
@ManageLifecycle
@Named(EMITTER_TYPE)
public Emitter getEmitter(AmbariMetricsEmitterConfig emitterConfig, final Injector injector){
public Emitter getEmitter(AmbariMetricsEmitterConfig emitterConfig, final Injector injector)
{
List<Emitter> emitters = Lists.transform(
emitterConfig.getAlertEmitters(),
new Function<String, Emitter>()

@@ -24,7 +24,8 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import javax.validation.constraints.NotNull;


public class AzureBlob {
public class AzureBlob
{
@JsonProperty
@NotNull
private String container = null;
@@ -33,19 +34,23 @@ public class AzureBlob {
@NotNull
private String path = null;

public AzureBlob() {
public AzureBlob()
{
}

public AzureBlob(String container, String path) {
public AzureBlob(String container, String path)
{
this.container = container;
this.path = path;
}

public String getContainer() {
public String getContainer()
{
return container;
}

public String getPath() {
public String getPath()
{
return path;
}

@@ -70,13 +70,28 @@ public class AzureAccountConfig
this.container = container;
}

public String getProtocol() { return protocol; }
public String getProtocol()
{
return protocol;
}

public int getMaxTries() { return maxTries; }
public int getMaxTries()
{
return maxTries;
}

public String getAccount() { return account; }
public String getAccount()
{
return account;
}

public String getKey() { return key;}
public String getKey()
{
return key;
}

public String getContainer() { return container; }
public String getContainer()
{
return container;
}
}
@@ -93,12 +93,14 @@ public class AzureStorage
}

public CloudBlockBlob getBlob(final String containerName, final String blobPath)
throws URISyntaxException, StorageException {
throws URISyntaxException, StorageException
{
return getCloudBlobContainer(containerName).getBlockBlobReference(blobPath);
}

public long getBlobLength(final String containerName, final String blobPath)
throws URISyntaxException, StorageException {
throws URISyntaxException, StorageException
{
return getCloudBlobContainer(containerName).getBlockBlobReference(blobPath).getProperties().getLength();
}

@@ -109,8 +111,8 @@ public class AzureStorage
return container.getBlockBlobReference(blobPath).openInputStream();
}

public boolean getBlobExists(String container, String blobPath)
throws URISyntaxException, StorageException {
public boolean getBlobExists(String container, String blobPath) throws URISyntaxException, StorageException
{
return getCloudBlobContainer(container).getBlockBlobReference(blobPath).exists();
}
}

@@ -35,7 +35,8 @@ import java.io.InputStream;
import java.net.URISyntaxException;
import java.util.concurrent.Callable;

public class AzureTaskLogs implements TaskLogs {
public class AzureTaskLogs implements TaskLogs
{

private static final Logger log = new Logger(AzureTaskLogs.class);

@@ -43,34 +44,35 @@ public class AzureTaskLogs {
private final AzureStorage azureStorage;

@Inject
public AzureTaskLogs(AzureTaskLogsConfig config, AzureStorage azureStorage) {
public AzureTaskLogs(AzureTaskLogsConfig config, AzureStorage azureStorage)
{
this.config = config;
this.azureStorage = azureStorage;
}

@Override
public void pushTaskLog(final String taskid, final File logFile) throws IOException {
public void pushTaskLog(final String taskid, final File logFile) throws IOException
{
final String taskKey = getTaskLogKey(taskid);
log.info("Pushing task log %s to: %s", logFile, taskKey);

try {
AzureUtils.retryAzureOperation(
new Callable<Void>() {
@Override
public Void call() throws Exception {
azureStorage.uploadBlob(logFile, config.getContainer(), taskKey);
return null;
}
(Callable<Void>) () -> {
azureStorage.uploadBlob(logFile, config.getContainer(), taskKey);
return null;
},
config.getMaxTries()
);
} catch (Exception e) {
}
catch (Exception e) {
throw Throwables.propagate(e);
}
}

@Override
public Optional<ByteSource> streamTaskLog(final String taskid, final long offset) throws IOException {
public Optional<ByteSource> streamTaskLog(final String taskid, final long offset) throws IOException
{
final String container = config.getContainer();
final String taskKey = getTaskLogKey(taskid);

@@ -80,9 +82,11 @@ public class AzureTaskLogs {
}

return Optional.<ByteSource>of(
new ByteSource() {
new ByteSource()
{
@Override
public InputStream openStream() throws IOException {
public InputStream openStream() throws IOException
{
try {
final long start;
final long length = azureStorage.getBlobLength(container, taskKey);
@@ -100,19 +104,22 @@ public class AzureTaskLogs {

return stream;

} catch(Exception e) {
}
catch(Exception e) {
throw new IOException(e);
}
}
}
);
} catch (StorageException | URISyntaxException e) {
}
catch (StorageException | URISyntaxException e) {
throw new IOE(e, "Failed to stream logs from: %s", taskKey);
}
}


private String getTaskLogKey(String taskid) {
private String getTaskLogKey(String taskid)
{
return StringUtils.format("%s/%s/log", config.getPrefix(), taskid);
}

@@ -24,7 +24,8 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;

public class AzureTaskLogsConfig {
public class AzureTaskLogsConfig
{
@JsonProperty
@NotNull
private String container = null;
@@ -37,25 +38,29 @@ public class AzureTaskLogsConfig {
@Min(1)
private int maxTries = 3;

public AzureTaskLogsConfig() {

public AzureTaskLogsConfig()
{
}

public AzureTaskLogsConfig(String container, String prefix, int maxTries) {
public AzureTaskLogsConfig(String container, String prefix, int maxTries)
{
this.container = container;
this.prefix = prefix;
this.maxTries = maxTries;
}

public String getContainer() {
public String getContainer()
{
return container;
}

public String getPrefix() {
public String getPrefix()
{
return prefix;
}

public int getMaxTries() {
public int getMaxTries()
{
return maxTries;
}
}

@@ -53,7 +53,8 @@ public class AzureUtils
}
};

public static <T> T retryAzureOperation(Callable<T> f, int maxTries) throws Exception {
public static <T> T retryAzureOperation(Callable<T> f, int maxTries) throws Exception
{
return RetryUtils.retry(f, AZURE_RETRY, maxTries);
}
}

@@ -38,7 +38,8 @@ import java.io.StringWriter;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;

public class AzureTaskLogsTest extends EasyMockSupport {
public class AzureTaskLogsTest extends EasyMockSupport
{

private static final String container = "test";
private static final String prefix = "test/log";
@@ -49,14 +50,16 @@ public class AzureTaskLogsTest extends EasyMockSupport {
private AzureTaskLogs azureTaskLogs;

@Before
public void before() {
public void before()
{
azureStorage = createMock(AzureStorage.class);
azureTaskLogs = new AzureTaskLogs(azureTaskLogsConfig, azureStorage);
}


@Test
public void testPushTaskLog() throws Exception {
public void testPushTaskLog() throws Exception
{
final File tmpDir = Files.createTempDir();

try {
@@ -70,13 +73,15 @@ public class AzureTaskLogsTest extends EasyMockSupport {
azureTaskLogs.pushTaskLog(taskid, logFile);

verifyAll();
} finally {
}
finally {
FileUtils.deleteDirectory(tmpDir);
}
}

@Test
public void testStreamTaskLogWithoutOffset() throws Exception {
public void testStreamTaskLogWithoutOffset() throws Exception
{
final String testLog = "hello this is a log";

final String blobPath = prefix + "/" + taskid + "/log";
@@ -98,7 +103,8 @@ public class AzureTaskLogsTest extends EasyMockSupport {
}

@Test
public void testStreamTaskLogWithPositiveOffset() throws Exception {
public void testStreamTaskLogWithPositiveOffset() throws Exception
{
final String testLog = "hello this is a log";

final String blobPath = prefix + "/" + taskid + "/log";
@@ -120,7 +126,8 @@ public class AzureTaskLogsTest extends EasyMockSupport {
}

@Test
public void testStreamTaskLogWithNegative() throws Exception {
public void testStreamTaskLogWithNegative() throws Exception
{
final String testLog = "hello this is a log";

final String blobPath = prefix + "/" + taskid + "/log";
@@ -23,15 +23,14 @@ import com.google.common.base.Predicates;
import com.google.inject.Inject;
import com.netflix.astyanax.recipes.storage.ChunkedStorage;
import com.netflix.astyanax.recipes.storage.ObjectMetadata;

import io.druid.java.util.common.CompressionUtils;
import io.druid.java.util.common.FileUtils;
import io.druid.java.util.common.ISE;
import io.druid.java.util.common.RetryUtils;
import io.druid.java.util.common.logger.Logger;
import io.druid.segment.loading.DataSegmentPuller;
import io.druid.segment.loading.SegmentLoadingException;
import io.druid.timeline.DataSegment;
import org.apache.commons.io.FileUtils;

import java.io.File;
import java.io.FileOutputStream;

@@ -60,7 +59,9 @@ public class CassandraDataSegmentPuller extends CassandraStorage implements Data
String key = (String) segment.getLoadSpec().get("key");
getSegmentFiles(key, outDir);
}
public io.druid.java.util.common.FileUtils.FileCopyResult getSegmentFiles(final String key, final File outDir) throws SegmentLoadingException{
public FileUtils.FileCopyResult getSegmentFiles(final String key, final File outDir)
throws SegmentLoadingException
{
log.info("Pulling index from C* at path[%s] to outDir[%s]", key, outDir);
if (!outDir.exists()) {
outDir.mkdirs();

@@ -74,13 +75,13 @@ public class CassandraDataSegmentPuller extends CassandraStorage implements Data
final File tmpFile = new File(outDir, "index.zip");
log.info("Pulling to temporary local cache [%s]", tmpFile.getAbsolutePath());

final io.druid.java.util.common.FileUtils.FileCopyResult localResult;
final FileUtils.FileCopyResult localResult;
try {
localResult = RetryUtils.retry(
new Callable<io.druid.java.util.common.FileUtils.FileCopyResult>()
new Callable<FileUtils.FileCopyResult>()
{
@Override
public io.druid.java.util.common.FileUtils.FileCopyResult call() throws Exception
public FileUtils.FileCopyResult call() throws Exception
{
try (OutputStream os = new FileOutputStream(tmpFile)) {
final ObjectMetadata meta = ChunkedStorage

@@ -89,17 +90,18 @@ public class CassandraDataSegmentPuller extends CassandraStorage implements Data
.withConcurrencyLevel(CONCURRENCY)
.call();
}
return new io.druid.java.util.common.FileUtils.FileCopyResult(tmpFile);
return new FileUtils.FileCopyResult(tmpFile);
}
},
Predicates.<Throwable>alwaysTrue(),
10
);
}catch (Exception e){
}
catch (Exception e) {
throw new SegmentLoadingException(e, "Unable to copy key [%s] to file [%s]", key, tmpFile.getAbsolutePath());
}
try{
final io.druid.java.util.common.FileUtils.FileCopyResult result = CompressionUtils.unzip(tmpFile, outDir);
final FileUtils.FileCopyResult result = CompressionUtils.unzip(tmpFile, outDir);
log.info(
"Pull of file[%s] completed in %,d millis (%s bytes)", key, System.currentTimeMillis() - startTime,
result.size()

@@ -108,15 +110,16 @@ public class CassandraDataSegmentPuller extends CassandraStorage implements Data
}
catch (Exception e) {
try {
FileUtils.deleteDirectory(outDir);
org.apache.commons.io.FileUtils.deleteDirectory(outDir);
}
catch (IOException e1) {
log.error(e1, "Error clearing segment directory [%s]", outDir.getAbsolutePath());
e.addSuppressed(e1);
}
throw new SegmentLoadingException(e, e.getMessage());
} finally {
if(!tmpFile.delete()){
}
finally {
|
||||
if (!tmpFile.delete()) {
|
||||
log.warn("Could not delete cache file at [%s]", tmpFile.getAbsolutePath());
|
||||
}
|
||||
}
|
||||
|
|
|

@ -88,8 +88,7 @@ public class CassandraDataSegmentPusher extends CassandraStorage implements Data

int version = SegmentUtils.getVersionFromDir(indexFilesDir);

try
{
try {
long start = System.currentTimeMillis();
ChunkedStorage.newWriter(indexStorage, key, new FileInputStream(compressedIndexFile))
.withConcurrencyLevel(CONCURRENCY).call();

@ -100,8 +99,8 @@ public class CassandraDataSegmentPusher extends CassandraStorage implements Data
.putColumn("descriptor", json, null);
mutation.execute();
log.info("Wrote index to C* in [%s] ms", System.currentTimeMillis() - start);
} catch (Exception e)
{
}
catch (Exception e) {
throw new IOException(e);
}

@ -22,23 +22,27 @@ package io.druid.firehose.google;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;

public class GoogleBlob {
public class GoogleBlob
{
private final String bucket;
private final String path;

@JsonCreator
public GoogleBlob(@JsonProperty("bucket") String bucket, @JsonProperty("path") String path) {
public GoogleBlob(@JsonProperty("bucket") String bucket, @JsonProperty("path") String path)
{
this.bucket = bucket;
this.path = path;
}

@JsonProperty
public String getBucket() {
public String getBucket()
{
return bucket;
}

@JsonProperty
public String getPath() {
public String getPath()
{
return path;
}

@ -54,7 +54,8 @@ public class StaticGoogleBlobStoreFirehoseFactory extends PrefetchableTextFilesF
}

@JsonProperty
public List<GoogleBlob> getBlobs() {
public List<GoogleBlob> getBlobs()
{
return blobs;
}

@ -166,7 +166,8 @@ public class GoogleDataSegmentPusher implements DataSegmentPusher
return makeLoadSpec(config.getBucket(),finalIndexZipFilePath.getPath().substring(1));
}

private Map<String, Object> makeLoadSpec(String bucket, String path) {
private Map<String, Object> makeLoadSpec(String bucket, String path)
{
return ImmutableMap.<String, Object>of(
"type", GoogleStorageDruidModule.SCHEME,
"bucket", bucket,

@ -58,7 +58,8 @@ public class GoogleStorage
{
try {
return storage.objects().get(bucket, path).executeUsingHead().isSuccessStatusCode();
} catch (Exception e) {
}
catch (Exception e) {
return false;
}
}

@ -32,20 +32,23 @@ import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class GoogleTaskLogs implements TaskLogs {
public class GoogleTaskLogs implements TaskLogs
{
private static final Logger LOG = new Logger(GoogleTaskLogs.class);

private final GoogleTaskLogsConfig config;
private final GoogleStorage storage;

@Inject
public GoogleTaskLogs(GoogleTaskLogsConfig config, GoogleStorage storage) {
public GoogleTaskLogs(GoogleTaskLogsConfig config, GoogleStorage storage)
{
this.config = config;
this.storage = storage;
}

@Override
public void pushTaskLog(final String taskid, final File logFile) throws IOException {
public void pushTaskLog(final String taskid, final File logFile) throws IOException
{
final String taskKey = getTaskLogKey(taskid);
LOG.info("Pushing task log %s to: %s", logFile, taskKey);

@ -58,7 +61,8 @@ public class GoogleTaskLogs implements TaskLogs {
}

@Override
public Optional<ByteSource> streamTaskLog(final String taskid, final long offset) throws IOException {
public Optional<ByteSource> streamTaskLog(final String taskid, final long offset) throws IOException
{
final String taskKey = getTaskLogKey(taskid);

try {

@ -69,9 +73,11 @@ public class GoogleTaskLogs implements TaskLogs {
final long length = storage.size(config.getBucket(), taskKey);

return Optional.<ByteSource>of(
new ByteSource() {
new ByteSource()
{
@Override
public InputStream openStream() throws IOException {
public InputStream openStream() throws IOException
{
try {
final long start;

@ -87,18 +93,21 @@ public class GoogleTaskLogs implements TaskLogs {
stream.skip(start);

return stream;
} catch(Exception e) {
}
catch(Exception e) {
throw new IOException(e);
}
}
}
);
} catch (IOException e) {
}
catch (IOException e) {
throw new IOE(e, "Failed to stream logs from: %s", taskKey);
}
}

private String getTaskLogKey(String taskid) {
private String getTaskLogKey(String taskid)
{
return config.getPrefix() + "/" + taskid.replaceAll(":", "_");
}

@ -23,7 +23,8 @@ import com.fasterxml.jackson.annotation.JsonProperty;

import javax.validation.constraints.NotNull;

public class GoogleTaskLogsConfig {
public class GoogleTaskLogsConfig
{
@JsonProperty
@NotNull
private final String bucket;

@ -32,16 +33,19 @@ public class GoogleTaskLogsConfig {
@NotNull
private final String prefix;

public GoogleTaskLogsConfig(@JsonProperty("bucket") String bucket, @JsonProperty("prefix") String prefix) {
public GoogleTaskLogsConfig(@JsonProperty("bucket") String bucket, @JsonProperty("prefix") String prefix)
{
this.bucket = bucket;
this.prefix = prefix;
}

public String getBucket() {
public String getBucket()
{
return bucket;
}

public String getPrefix() {
public String getPrefix()
{
return prefix;
}
}

@ -41,7 +41,8 @@ import java.nio.charset.StandardCharsets;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;

public class GoogleTaskLogsTest extends EasyMockSupport {
public class GoogleTaskLogsTest extends EasyMockSupport
{
private static final String bucket = "test";
private static final String prefix = "test/log";
private static final String taskid = "taskid";

@ -50,14 +51,16 @@ public class GoogleTaskLogsTest extends EasyMockSupport {
private GoogleTaskLogs googleTaskLogs;

@Before
public void before() {
public void before()
{
storage = createMock(GoogleStorage.class);
GoogleTaskLogsConfig config = new GoogleTaskLogsConfig(bucket, prefix);
googleTaskLogs = new GoogleTaskLogs(config, storage);
}

@Test
public void testPushTaskLog() throws Exception {
public void testPushTaskLog() throws Exception
{
final File tmpDir = Files.createTempDir();

try {

@ -74,13 +77,15 @@ public class GoogleTaskLogsTest extends EasyMockSupport {
googleTaskLogs.pushTaskLog(taskid, logFile);

verifyAll();
} finally {
}
finally {
FileUtils.deleteDirectory(tmpDir);
}
}

@Test
public void testStreamTaskLogWithoutOffset() throws Exception {
public void testStreamTaskLogWithoutOffset() throws Exception
{
final String testLog = "hello this is a log";

final String logPath = prefix + "/" + taskid;

@ -100,7 +105,8 @@ public class GoogleTaskLogsTest extends EasyMockSupport {
}

@Test
public void testStreamTaskLogWithPositiveOffset() throws Exception {
public void testStreamTaskLogWithPositiveOffset() throws Exception
{
final String testLog = "hello this is a log";

final String logPath = prefix + "/" + taskid;

@ -120,7 +126,8 @@ public class GoogleTaskLogsTest extends EasyMockSupport {
}

@Test
public void testStreamTaskLogWithNegative() throws Exception {
public void testStreamTaskLogWithNegative() throws Exception
{
final String testLog = "hello this is a log";

final String logPath = prefix + "/" + taskid;

@ -55,7 +55,8 @@ public class GraphiteEmitterModule implements DruidModule
@Provides
@ManageLifecycle
@Named(EMITTER_TYPE)
public Emitter getEmitter(GraphiteEmitterConfig graphiteEmitterConfig, ObjectMapper mapper, final Injector injector){
public Emitter getEmitter(GraphiteEmitterConfig graphiteEmitterConfig, ObjectMapper mapper, final Injector injector)
{
List<Emitter> emitters = Lists.transform(
graphiteEmitterConfig.getAlertEmitters(),
new Function<String, Emitter>()

@ -30,7 +30,8 @@ import java.util.List;
public class OrcExtensionsModule implements DruidModule
{
@Override
public List<? extends Module> getJacksonModules() {
public List<? extends Module> getJacksonModules()
{
return Collections.singletonList(
new SimpleModule("OrcInputRowParserModule")
.registerSubtypes(

@ -40,7 +41,7 @@ public class OrcExtensionsModule implements DruidModule
}

@Override
public void configure(Binder binder) {
public void configure(Binder binder)
{
}
}

@ -141,8 +141,7 @@ public class DruidOrcInputFormatTest
ListColumnVector listColumnVector = (ListColumnVector) batch.cols[2];
listColumnVector.childCount = col2.length;
listColumnVector.lengths[0] = 3;
for (int idx = 0; idx < col2.length; idx++)
{
for (int idx = 0; idx < col2.length; idx++) {
((BytesColumnVector) listColumnVector.child).setRef(
idx,
StringUtils.toUtf8(col2[idx]),

@ -302,8 +302,7 @@ public class OrcIndexGeneratorJobTest
QueryableIndex index = HadoopDruidIndexerConfig.INDEX_IO.loadIndex(dir);
QueryableIndexIndexableAdapter adapter = new QueryableIndexIndexableAdapter(index);

for(Rowboat row: adapter.getRows())
{
for (Rowboat row: adapter.getRows()) {
Object[] metrics = row.getMetrics();

rowCount++;

@ -29,9 +29,11 @@ import io.druid.initialization.DruidModule;
import java.util.Arrays;
import java.util.List;

public class ScanQueryDruidModule implements DruidModule {
public class ScanQueryDruidModule implements DruidModule
{
@Override
public void configure(Binder binder) {
public void configure(Binder binder)
{
DruidBinders.queryToolChestBinder(binder)
.addBinding(ScanQuery.class)
.to(ScanQueryQueryToolChest.class)

@ -44,7 +46,8 @@ public class ScanQueryDruidModule implements DruidModule {
}

@Override
public List<? extends Module> getJacksonModules() {
public List<? extends Module> getJacksonModules()
{
return Arrays.<Module>asList(
new SimpleModule("ScanQueryDruidModule")
.registerSubtypes(

@ -87,8 +87,7 @@ public class ScanQueryEngine
allColumns.addAll(query.getColumns());
allDims.retainAll(query.getColumns());
allMetrics.retainAll(query.getColumns());
}
else {
} else {
if (!allDims.contains(ScanResultValue.timestampKey)) {
allColumns.add(ScanResultValue.timestampKey);
}

@ -606,7 +606,8 @@ public class ScanQueryRunnerTest
return results;
}

private Iterable<ScanResultValue> compactedListToRow(Iterable<ScanResultValue> results) {
private Iterable<ScanResultValue> compactedListToRow(Iterable<ScanResultValue> results)
{
return Iterables.transform(results, new Function<ScanResultValue, ScanResultValue>()
{
@Override

@ -194,7 +194,8 @@ public class SQLServerConnector extends SQLMetadataConnector
}

@Override
public String getQuoteString() {
public String getQuoteString()
{
return QUOTE_STRING;
}

@ -85,8 +85,9 @@ public class CustomStatementRewriterTest
// -
// https://en.wikipedia.org/wiki/List_of_Unicode_characters
Assert.fail("Expected 'UnableToCreateStatementException'");
} catch (UnableToCreateStatementException e) {
//
}
catch (UnableToCreateStatementException e) {
// expected
}

}

@ -59,7 +59,8 @@ public class TimestampAggregator implements Aggregator
}

@Override
public void aggregate() {
public void aggregate()
{
Long value = TimestampAggregatorFactory.convertLong(timestampSpec, selector.get());

if (value != null) {

@ -22,8 +22,7 @@ package io.druid.query.aggregation;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;

import java.util.Comparator;
import com.google.common.collect.Ordering;

public class TimestampMaxAggregatorFactory extends TimestampAggregatorFactory
{

@ -34,12 +33,7 @@ public class TimestampMaxAggregatorFactory extends TimestampAggregatorFactory
@JsonProperty("timeFormat") String timeFormat
)
{
super(name, fieldName, timeFormat, new Comparator<Long>() {
@Override
public int compare(Long o1, Long o2) {
return Long.compare(o1, o2);
}
}, Long.MIN_VALUE);
super(name, fieldName, timeFormat, Ordering.natural(), Long.MIN_VALUE);
Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName");
}

@ -22,8 +22,7 @@ package io.druid.query.aggregation;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;

import java.util.Comparator;
import com.google.common.collect.Ordering;

public class TimestampMinAggregatorFactory extends TimestampAggregatorFactory
{

@ -34,12 +33,7 @@ public class TimestampMinAggregatorFactory extends TimestampAggregatorFactory
@JsonProperty("timeFormat") String timeFormat
)
{
super(name, fieldName, timeFormat, new Comparator<Long>() {
@Override
public int compare(Long o1, Long o2) {
return -(Long.compare(o1, o2));
}
}, Long.MAX_VALUE);
super(name, fieldName, timeFormat, Ordering.natural().reverse(), Long.MAX_VALUE);
Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName");
}
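
The two hunks above swap hand-rolled Long comparators for Guava's Ordering singletons. A minimal, self-contained sketch of the equivalence (the class name is mine, not from the commit):

import com.google.common.collect.Ordering;
import java.util.Comparator;

class OrderingEquivalenceSketch
{
  public static void main(String[] args)
  {
    // Ordering.natural() compares Longs ascending, like the removed
    // Long.compare(o1, o2); reverse() flips it, like -(Long.compare(o1, o2)).
    Comparator<Long> max = Ordering.natural();
    Comparator<Long> min = Ordering.natural().reverse();
    System.out.println(max.compare(1L, 2L) < 0);  // true
    System.out.println(min.compare(1L, 2L) > 0);  // true
  }
}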

@ -80,12 +80,14 @@ public class SchemaRepoBasedAvroBytesDecoder<SUBJECT, ID> implements AvroBytesDe
DatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
try (ByteBufferInputStream inputStream = new ByteBufferInputStream(Collections.singletonList(bytes))) {
return reader.read(null, DecoderFactory.get().binaryDecoder(inputStream, null));
} catch (EOFException eof) {
}
catch (EOFException eof) {
// waiting for avro v1.9.0 (#AVRO-813)
throw new ParseException(
eof, "Avro's unnecessary EOFException, detail: [%s]", "https://issues.apache.org/jira/browse/AVRO-813"
);
} catch (IOException e) {
}
catch (IOException e) {
throw new ParseException(e, "Fail to decode avro message!");
}
}

@ -95,7 +95,10 @@ public class AvroStreamInputRowParserTest
{
@Nullable
@Override
public Integer apply(@Nullable CharSequence input) { return Integer.parseInt(input.toString()); }
public Integer apply(@Nullable CharSequence input)
{
return Integer.parseInt(input.toString());
}
}
);
public static final Map<CharSequence, CharSequence> SOME_STRING_VALUE_MAP_VALUE = Maps.asMap(

@ -103,16 +106,14 @@ public class AvroStreamInputRowParserTest
{
@Nullable
@Override
public CharSequence apply(@Nullable CharSequence input) { return input.toString(); }
public CharSequence apply(@Nullable CharSequence input)
{
return input.toString();
}
}
);
public static final String SOME_UNION_VALUE = "string as union";
public static final ByteBuffer SOME_BYTES_VALUE = ByteBuffer.allocate(8);
private static final Function<Object, String> TO_STRING_INCLUDING_NULL = new Function<Object, String>()
{
@Override
public String apply(Object o) { return String.valueOf(o); }
};

private final ObjectMapper jsonMapper = new ObjectMapper();

@ -195,11 +196,11 @@ public class AvroStreamInputRowParserTest
assertEquals(Collections.singletonList(String.valueOf(SOME_OTHER_ID_VALUE)), inputRow.getDimension(SOME_OTHER_ID));
assertEquals(Collections.singletonList(String.valueOf(true)), inputRow.getDimension(IS_VALID));
assertEquals(
Lists.transform(SOME_INT_ARRAY_VALUE, TO_STRING_INCLUDING_NULL),
Lists.transform(SOME_INT_ARRAY_VALUE, String::valueOf),
inputRow.getDimension("someIntArray")
);
assertEquals(
Lists.transform(SOME_STRING_ARRAY_VALUE, TO_STRING_INCLUDING_NULL),
Lists.transform(SOME_STRING_ARRAY_VALUE, String::valueOf),
inputRow.getDimension("someStringArray")
);
// towards Map avro field as druid dimension, need to convert its toString() back to HashMap to check equality

@ -113,7 +113,8 @@ public class DruidKerberosUtil
}
}

public static boolean needToSendCredentials(CookieStore cookieStore, URI uri){
public static boolean needToSendCredentials(CookieStore cookieStore, URI uri)
{
return getAuthCookie(cookieStore, uri) == null;
}

@ -31,7 +31,8 @@ public class HdfsKerberosConfig
private final String keytab;

@JsonCreator
public HdfsKerberosConfig(@JsonProperty("principal") String principal,@JsonProperty("keytab") String keytab) {
public HdfsKerberosConfig(@JsonProperty("principal") String principal, @JsonProperty("keytab") String keytab)
{
this.principal = principal;
this.keytab = keytab;
}

@ -245,8 +245,7 @@ public class HdfsDataSegmentFinderTest
updatedSegment4_1 = dataSegment;
} else if (dataSegment.getIdentifier().equals(SEGMENT_5.getIdentifier())) {
updatedSegment5 = dataSegment;
}
else {
} else {
Assert.fail("Unexpected segment");
}
}

@ -191,17 +191,35 @@ public class ApproximateHistogram
);
}

public long count() { return count; }
public long count()
{
return count;
}

public float min() { return min; }
public float min()
{
return min;
}

public float max() { return max; }
public float max()
{
return max;
}

public int binCount() { return binCount; }
public int binCount()
{
return binCount;
}

public int capacity() { return size; }
public int capacity()
{
return size;
}

public float[] positions() { return Arrays.copyOfRange(positions, 0, binCount); }
public float[] positions()
{
return Arrays.copyOfRange(positions, 0, binCount);
}

public long[] bins()
{

@ -239,9 +257,15 @@ public class ApproximateHistogram
return exactCount;
}

public float getMin() { return this.min;}
public float getMin()
{
return this.min;
}

public float getMax() { return this.max;}
public float getMax()
{
return this.max;
}

private static long sumBins(long[] bins, int binCount)
{

@ -1491,10 +1515,8 @@ public class ApproximateHistogram
// add full bin count if left bin count is exact
if (exact0) {
return (s + m0);
}

// otherwise add only the left half of the bin
else {
} else {
// otherwise add only the left half of the bin
return (s + 0.5 * m0);
}
}

@ -578,7 +578,9 @@ public class ApproximateHistogramTest
);
}

@Test public void testEmptyHistogram() {
@Test
public void testEmptyHistogram()
{
ApproximateHistogram h = new ApproximateHistogram(50);
Assert.assertArrayEquals(
new float[]{Float.NaN, Float.NaN},

@ -28,7 +28,10 @@ public class KafkaLookupExtractorIntrospectionHandler implements LookupIntrospec
{
private KafkaLookupExtractorFactory kafkaLookupExtractorFactory;

public KafkaLookupExtractorIntrospectionHandler(KafkaLookupExtractorFactory kafkaLookupExtractorFactory) {this.kafkaLookupExtractorFactory = kafkaLookupExtractorFactory;}
public KafkaLookupExtractorIntrospectionHandler(KafkaLookupExtractorFactory kafkaLookupExtractorFactory)
{
this.kafkaLookupExtractorFactory = kafkaLookupExtractorFactory;
}

@GET
public Response getActive()

@ -299,7 +299,8 @@ public class KafkaLookupExtractorFactoryTest
EasyMock.expectLastCall().andAnswer(new IAnswer<Object>()
{
@Override
public Object answer() throws Throwable {
public Object answer() throws Throwable
{
threadWasInterrupted.set(Thread.currentThread().isInterrupted());
return null;
}

@ -982,6 +982,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
}

@Override
void checkConnection(String host, int port) throws IOException { }
void checkConnection(String host, int port) throws IOException
{
}
}
}

@ -137,8 +137,8 @@ public final class CacheScheduler
* that would be a leak preventing the Entry to be collected by GC, and therefore {@link #entryCleaner} to be run by
* the JVM. Also, {@link #entryCleaner} must not reference the Entry through it's Runnable hunk.
*/
public class EntryImpl<T extends ExtractionNamespace> implements AutoCloseable {
public class EntryImpl<T extends ExtractionNamespace> implements AutoCloseable
{
private final T namespace;
private final String asString;
private final AtomicReference<CacheState> cacheStateHolder = new AtomicReference<CacheState>(NoCache.CACHE_NOT_INITIALIZED);

@ -45,7 +45,8 @@ public abstract class NamespaceExtractionCacheManager

private final ScheduledThreadPoolExecutor scheduledExecutorService;

public NamespaceExtractionCacheManager(final Lifecycle lifecycle, final ServiceEmitter serviceEmitter) {
public NamespaceExtractionCacheManager(final Lifecycle lifecycle, final ServiceEmitter serviceEmitter)
{
this.scheduledExecutorService = new ScheduledThreadPoolExecutor(
1,
new ThreadFactoryBuilder()

@ -70,7 +70,7 @@ public class LoadingLookup extends LookupExtractor
}
final String presentVal;
try {
presentVal = loadingCache.get(key, new applyCallable(key));
presentVal = loadingCache.get(key, new ApplyCallable(key));
return Strings.emptyToNull(presentVal);
}
catch (ExecutionException e) {

@ -88,7 +88,7 @@ public class LoadingLookup extends LookupExtractor
}
final List<String> retList;
try {
retList = reverseLoadingCache.get(value, new unapplyCallable(value));
retList = reverseLoadingCache.get(value, new UnapplyCallable(value));
return retList;
}
catch (ExecutionException e) {

@ -120,11 +120,14 @@ public class LoadingLookup extends LookupExtractor
return LookupExtractionModule.getRandomCacheKey();
}

private class applyCallable implements Callable<String>
private class ApplyCallable implements Callable<String>
{
private final String key;

public applyCallable(String key) {this.key = key;}
public ApplyCallable(String key)
{
this.key = key;
}

@Override
public String call() throws Exception

@ -134,11 +137,14 @@ public class LoadingLookup extends LookupExtractor
}
}

private class unapplyCallable implements Callable<List<String>>
private class UnapplyCallable implements Callable<List<String>>
{
private final String value;

public unapplyCallable(String value) {this.value = value;}
public UnapplyCallable(String value)
{
this.value = value;
}

@Override
public List<String> call() throws Exception

@ -215,7 +215,10 @@ public class PollingLookup extends LookupExtractor
private final PollingCache pollingCache;
private final AtomicLong refCounts = new AtomicLong(0L);

CacheRefKeeper(PollingCache pollingCache) {this.pollingCache = pollingCache;}
CacheRefKeeper(PollingCache pollingCache)
{
this.pollingCache = pollingCache;
}

PollingCache getAndIncrementRef()
{

@ -63,7 +63,8 @@ public class PollingLookupFactoryTest
}

@Test
public void testGet(){
public void testGet()
{
Assert.assertEquals(pollingLookup, pollingLookupFactory.get());
}

@ -52,9 +52,14 @@ public class PollingLookupSerDeserTest
});
}

public PollingLookupSerDeserTest(PollingCacheFactory cacheFactory) {this.cacheFactory = cacheFactory;}
private final PollingCacheFactory cacheFactory ;
private final PollingCacheFactory cacheFactory;
private DataFetcher dataFetcher = new MockDataFetcher();

public PollingLookupSerDeserTest(PollingCacheFactory cacheFactory)
{
this.cacheFactory = cacheFactory;
}

@Test
public void testSerDeser() throws IOException
{

@ -69,7 +69,10 @@ public class PollingLookupTest
@Override
public Iterable fetchAll()
{
if (callNumber == 0) {callNumber +=1; return firstLookupMap.entrySet();}
if (callNumber == 0) {
callNumber++;
return firstLookupMap.entrySet();
}
return secondLookupMap.entrySet();
}

@ -108,12 +111,14 @@ public class PollingLookupTest
});
}

public PollingLookupTest(PollingCacheFactory pollingCacheFactory) {this.pollingCacheFactory = pollingCacheFactory;}

private final PollingCacheFactory pollingCacheFactory;
private final DataFetcher dataFetcher = new MockDataFetcher();
private PollingLookup pollingLookup;

public PollingLookupTest(PollingCacheFactory pollingCacheFactory)
{
this.pollingCacheFactory = pollingCacheFactory;
}

@Before
public void setUp() throws InterruptedException

@ -59,7 +59,10 @@ public class LoadingCacheTest

private final LoadingCache loadingCache;

public LoadingCacheTest(LoadingCache loadingCache) {this.loadingCache = loadingCache;}
public LoadingCacheTest(LoadingCache loadingCache)
{
this.loadingCache = loadingCache;
}

@Before
public void setUp() throws InterruptedException

@ -79,7 +79,8 @@ public class MySQLConnector extends SQLMetadataConnector
}

@Override
public String getQuoteString() {
public String getQuoteString()
{
return QUOTE_STRING;
}

@ -153,5 +154,8 @@ public class MySQLConnector extends SQLMetadataConnector
}

@Override
public DBI getDBI() { return dbi; }
public DBI getDBI()
{
return dbi;
}
}

@ -65,7 +65,8 @@ public class PostgreSQLConnector extends SQLMetadataConnector
}

@Override
protected String getPayloadType() {
protected String getPayloadType()
{
return PAYLOAD_TYPE;
}

@ -76,7 +77,8 @@ public class PostgreSQLConnector extends SQLMetadataConnector
}

@Override
public String getQuoteString() {
public String getQuoteString()
{
return QUOTE_STRING;
}

@ -161,7 +163,10 @@ public class PostgreSQLConnector extends SQLMetadataConnector
}

@Override
public DBI getDBI() { return dbi; }
public DBI getDBI()
{
return dbi;
}

@Override
protected boolean connectorIsTransientException(Throwable e)

@ -819,7 +819,10 @@ public final class ProtoTestEventWrapper
.parseWithIOException(PARSER, input, extensionRegistry);
}

public Builder newBuilderForType() { return newBuilder(); }
public Builder newBuilderForType()
{
return newBuilder();
}

public static Builder newBuilder()
{

@ -1848,7 +1851,10 @@ public final class ProtoTestEventWrapper
.parseWithIOException(PARSER, input, extensionRegistry);
}

public Builder newBuilderForType() { return newBuilder(); }
public Builder newBuilderForType()
{
return newBuilder();
}

public static Builder newBuilder()
{

@ -22,12 +22,14 @@ package io.druid.storage.s3;
import com.amazonaws.auth.AWSCredentialsProvider;
import org.jets3t.service.security.AWSSessionCredentials;

public class AWSSessionCredentialsAdapter extends AWSSessionCredentials {
public class AWSSessionCredentialsAdapter extends AWSSessionCredentials
{
private final AWSCredentialsProvider provider;

public AWSSessionCredentialsAdapter(AWSCredentialsProvider provider) {
public AWSSessionCredentialsAdapter(AWSCredentialsProvider provider)
{
super(null, null, null);
if(provider.getCredentials() instanceof com.amazonaws.auth.AWSSessionCredentials) {
if (provider.getCredentials() instanceof com.amazonaws.auth.AWSSessionCredentials) {
this.provider = provider;
} else {
throw new IllegalArgumentException("provider does not contain session credentials");

@ -35,27 +37,32 @@ public class AWSSessionCredentialsAdapter extends AWSSessionCredentials {
}

@Override
protected String getTypeName() {
protected String getTypeName()
{
return "AWSSessionCredentialsAdapter";
}

@Override
public String getVersionPrefix() {
public String getVersionPrefix()
{
return "AWSSessionCredentialsAdapter, version: ";
}

@Override
public String getAccessKey() {
public String getAccessKey()
{
return provider.getCredentials().getAWSAccessKeyId();
}

@Override
public String getSecretKey() {
public String getSecretKey()
{
return provider.getCredentials().getAWSSecretKey();
}

@Override
public String getSessionToken() {
public String getSessionToken()
{
com.amazonaws.auth.AWSSessionCredentials sessionCredentials =
(com.amazonaws.auth.AWSSessionCredentials) provider.getCredentials();
return sessionCredentials.getSessionToken();

@ -113,11 +113,14 @@ public class S3DataSegmentFinder implements DataSegmentFinder
}
}
}
} catch (ServiceException e) {
}
catch (ServiceException e) {
throw new SegmentLoadingException(e, "Problem interacting with S3");
} catch (IOException e) {
}
catch (IOException e) {
throw new SegmentLoadingException(e, "IO exception");
} catch (Exception e) {
}
catch (Exception e) {
Throwables.propagateIfInstanceOf(e, SegmentLoadingException.class);
Throwables.propagate(e);
}
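
As a side note on the Guava rethrow idiom in the catch chain above, here is a minimal, self-contained sketch; the class and method names are mine, with IOException standing in for SegmentLoadingException.

import com.google.common.base.Throwables;
import java.io.IOException;

class RethrowSketch
{
  static void run() throws IOException
  {
    try {
      mayFail();
    }
    catch (Exception e) {
      // Rethrows e unchanged if it is already an IOException...
      Throwables.propagateIfInstanceOf(e, IOException.class);
      // ...otherwise wraps it in a RuntimeException and throws that.
      throw Throwables.propagate(e);
    }
  }

  static void mayFail() throws Exception
  {
    throw new IOException("hypothetical failure");
  }
}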

@ -296,7 +296,8 @@ public class S3DataSegmentFinderTest
}

@Test
public void testFindSegmentsUpdateLoadSpec() throws Exception {
public void testFindSegmentsUpdateLoadSpec() throws Exception
{
config.setBucket("amazing");
final DataSegment segmentMissingLoadSpec = DataSegment.builder(SEGMENT_1)
.loadSpec(ImmutableMap.of())

@ -158,7 +158,8 @@ public class S3DataSegmentMoverTest
), ImmutableMap.<String, Object>of("bucket", "DOES NOT EXIST", "baseKey", "baseKey2"));
}

private static class MockStorageService extends RestS3Service {
private static class MockStorageService extends RestS3Service
{
Map<String, Set<String>> storage = Maps.newHashMap();
boolean moved = false;

@ -167,7 +168,8 @@ public class S3DataSegmentMoverTest
super(null);
}

public boolean didMove() {
public boolean didMove()
{
return moved;
}

@ -187,7 +189,8 @@ public class S3DataSegmentMoverTest
object.setStorageClass(S3Object.STORAGE_CLASS_STANDARD);
return new S3Object[]{object};
}
} catch (ServiceException e) {
}
catch (ServiceException e) {
// return empty list
}
return new S3Object[]{};

@ -44,14 +44,17 @@ import java.io.File;
*/
public class S3DataSegmentPusherTest
{
private static class ValueContainer<T> {
private static class ValueContainer<T>
{
private T value;

public T getValue() {
public T getValue()
{
return value;
}

public void setValue(T value) {
public void setValue(T value)
{
this.value = value;
}
}

@ -68,9 +71,11 @@ public class S3DataSegmentPusherTest
ValueContainer<String> capturedS3SegmentJson = new ValueContainer<>();
EasyMock.expect(s3Client.putObject(EasyMock.anyString(), EasyMock.capture(capturedS3Object)))
.andAnswer(
new IAnswer<S3Object>() {
new IAnswer<S3Object>()
{
@Override
public S3Object answer() throws Throwable {
public S3Object answer() throws Throwable
{
capturedS3SegmentJson.setValue(
IOUtils.toString(capturedS3Object.getValue().getDataInputStream(), "utf-8")
);

@ -33,12 +33,14 @@ import java.nio.charset.StandardCharsets;

import static org.junit.Assert.assertEquals;

public class TestFileSessionCredentialsProvider {
public class TestFileSessionCredentialsProvider
{
@Rule
public TemporaryFolder folder = new TemporaryFolder();

@Test
public void test() throws IOException {
public void test() throws IOException
{
File file = folder.newFile();
try (BufferedWriter out = Files.newWriter(file, StandardCharsets.UTF_8)) {
out.write("sessionToken=sessionTokenSample\nsecretKey=secretKeySample\naccessKey=accessKeySample\n");