mirror of https://github.com/apache/druid.git

Enforce modifier order with Checkstyle (#5246)

parent 3cc4a0ab19
commit 8877ce38d6
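The commit rewrites declarations such as `final static`, `static private`, and `volatile private` into the modifier order recommended by the Java Language Specification, which Checkstyle's ModifierOrder check enforces. A minimal sketch of the pattern, using hypothetical names rather than code from the diff:

public class ModifierOrderExample
{
  // Before: public final static int MAX_RETRIES = 3;  (legal Java, but flagged)
  public static final int MAX_RETRIES = 3;  // access modifier, then static, then final

  // Before: final private String name;
  private final String name = "example";

  // Before: static abstract class Base {}
  abstract static class Base {}  // abstract comes before static
}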
@@ -32,7 +32,7 @@ import java.util.Map;
  */
 public class NoneShardSpec implements ShardSpec
 {
-  private final static NoneShardSpec INSTANCE = new NoneShardSpec();
+  private static final NoneShardSpec INSTANCE = new NoneShardSpec();
 
   @JsonCreator
   public static NoneShardSpec instance()
@@ -312,7 +312,7 @@ public class ConditionalMultibindTest
     Assert.assertEquals(ImmutableSet.of(zoo1, zoo2), actualZooSet);
   }
 
-  static abstract class Animal
+  abstract static class Animal
   {
     private final String type;
 
@@ -50,8 +50,8 @@ import java.util.Set;
  */
 public class DataSegmentTest
 {
-  private final static ObjectMapper mapper = new TestObjectMapper();
-  private final static int TEST_VERSION = 0x7;
+  private static final ObjectMapper mapper = new TestObjectMapper();
+  private static final int TEST_VERSION = 0x7;
 
   private static ShardSpec getShardSpec(final int partitionNum)
   {
@@ -40,7 +40,7 @@ import java.util.concurrent.TimeUnit;
 @State(Scope.Benchmark)
 public class CostBalancerStrategyBenchmark
 {
-  private final static DateTime t0 = DateTimes.of("2016-01-01T01:00:00Z");
+  private static final DateTime t0 = DateTimes.of("2016-01-01T01:00:00Z");
 
   private List<DataSegment> segments;
   private DataSegment segment;
@@ -45,15 +45,15 @@ public class BitmapBenchmark
 {
   public static final int LENGTH = 500_000;
   public static final int SIZE = 10_000;
-  final static ImmutableConciseSet concise[] = new ImmutableConciseSet[SIZE];
-  final static ImmutableConciseSet offheapConcise[] = new ImmutableConciseSet[SIZE];
-  final static ImmutableRoaringBitmap roaring[] = new ImmutableRoaringBitmap[SIZE];
-  final static ImmutableRoaringBitmap immutableRoaring[] = new ImmutableRoaringBitmap[SIZE];
-  final static ImmutableRoaringBitmap offheapRoaring[] = new ImmutableRoaringBitmap[SIZE];
-  final static ImmutableBitmap genericConcise[] = new ImmutableBitmap[SIZE];
-  final static ImmutableBitmap genericRoaring[] = new ImmutableBitmap[SIZE];
-  final static ConciseBitmapFactory conciseFactory = new ConciseBitmapFactory();
-  final static RoaringBitmapFactory roaringFactory = new RoaringBitmapFactory();
+  static final ImmutableConciseSet concise[] = new ImmutableConciseSet[SIZE];
+  static final ImmutableConciseSet offheapConcise[] = new ImmutableConciseSet[SIZE];
+  static final ImmutableRoaringBitmap roaring[] = new ImmutableRoaringBitmap[SIZE];
+  static final ImmutableRoaringBitmap immutableRoaring[] = new ImmutableRoaringBitmap[SIZE];
+  static final ImmutableRoaringBitmap offheapRoaring[] = new ImmutableRoaringBitmap[SIZE];
+  static final ImmutableBitmap genericConcise[] = new ImmutableBitmap[SIZE];
+  static final ImmutableBitmap genericRoaring[] = new ImmutableBitmap[SIZE];
+  static final ConciseBitmapFactory conciseFactory = new ConciseBitmapFactory();
+  static final RoaringBitmapFactory roaringFactory = new RoaringBitmapFactory();
   static Random rand = new Random(0);
   static long totalConciseBytes = 0;
   static long totalRoaringBytes = 0;
@@ -99,6 +99,8 @@
     <property name="caseIndent" value="2"/>
   </module>
 
+  <module name="ModifierOrder" />
+
   <module name="Regexp">
     <property name="format" value="com\.google\.common\.io\.Closer"/>
     <property name="illegalPattern" value="true"/>
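The ModifierOrder module added above checks every declaration against the modifier order suggested in the Java Language Specification (sections 8.1.1, 8.3.1, and 8.4.3). A hedged sketch of that canonical order, in a hypothetical class:

public class JlsModifierOrder
{
  // Canonical order: public/protected/private, abstract, default, static,
  // final, transient, volatile, synchronized, native, strictfp.
  private static final Object LOCK = new Object();  // access, then static, then final
  private transient volatile int hits;              // transient before volatile

  public static synchronized void reset()           // static before synchronized
  {
    // body elided in this sketch
  }
}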
@@ -119,7 +119,7 @@ public abstract class ExprEval<T>
 
   public abstract Expr toExpr();
 
-  private static abstract class NumericExprEval extends ExprEval<Number>
+  private abstract static class NumericExprEval extends ExprEval<Number>
   {
 
     private NumericExprEval(Number value)
@@ -30,7 +30,7 @@ import java.io.UnsupportedEncodingException;
 public class StringUtilsTest
 {
   // copied from https://github.com/druid-io/druid/pull/2612
-  public final static String[] TEST_STRINGS = new String[]{
+  public static final String[] TEST_STRINGS = new String[]{
       "peach", "péché", "pêche", "sin", "",
       "☃", "C", "c", "Ç", "ç", "G", "g", "Ğ", "ğ", "I", "ı", "İ", "i",
       "O", "o", "Ö", "ö", "S", "s", "Ş", "ş", "U", "u", "Ü", "ü", "ä",
@@ -22,37 +22,37 @@ public class ConciseSetUtils
    * <tt>31 * (1 << 25)</tt>, followed by a literal with 30 0's and the
    * MSB (31<sup>st</sup> bit) equal to 1
    */
-  public final static int MAX_ALLOWED_INTEGER = 31 * (1 << 25) + 30; // 1040187422
+  public static final int MAX_ALLOWED_INTEGER = 31 * (1 << 25) + 30; // 1040187422
 
   /**
    * The lowest representable integer.
    */
-  public final static int MIN_ALLOWED_SET_BIT = 0;
+  public static final int MIN_ALLOWED_SET_BIT = 0;
 
   /**
    * Maximum number of representable bits within a literal
    */
-  public final static int MAX_LITERAL_LENGTH = 31;
+  public static final int MAX_LITERAL_LENGTH = 31;
 
   /**
    * Literal that represents all bits set to 1 (and MSB = 1)
    */
-  public final static int ALL_ONES_LITERAL = 0xFFFFFFFF;
+  public static final int ALL_ONES_LITERAL = 0xFFFFFFFF;
 
   /**
    * Literal that represents all bits set to 0 (and MSB = 1)
    */
-  public final static int ALL_ZEROS_LITERAL = 0x80000000;
+  public static final int ALL_ZEROS_LITERAL = 0x80000000;
 
   /**
    * All bits set to 1 and MSB = 0
    */
-  public final static int ALL_ONES_WITHOUT_MSB = 0x7FFFFFFF;
+  public static final int ALL_ONES_WITHOUT_MSB = 0x7FFFFFFF;
 
   /**
    * Sequence bit
    */
-  public final static int SEQUENCE_BIT = 0x40000000;
+  public static final int SEQUENCE_BIT = 0x40000000;
 
   /**
    * Calculates the modulus division by 31 in a faster way than using <code>n % 31</code>
@@ -34,7 +34,7 @@ import java.util.PriorityQueue;
 
 public class ImmutableConciseSet
 {
-  private final static int CHUNK_SIZE = 10000;
+  private static final int CHUNK_SIZE = 10000;
 
   private static final Comparator<WordIterator> UNION_COMPARATOR = new Comparator<WordIterator>()
   {
@@ -29,10 +29,10 @@ import java.util.List;
 
 public class AmbariMetricsEmitterConfig
 {
-  private final static int DEFAULT_BATCH_SIZE = 100;
-  private final static Long DEFAULT_FLUSH_PERIOD_MILLIS = (long) (60 * 1000); // flush every one minute
-  private final static long DEFAULT_GET_TIMEOUT = 1000; // default wait for get operations on the queue 1 sec
-  private final static String DEFAULT_PROTOCOL = "http";
+  private static final int DEFAULT_BATCH_SIZE = 100;
+  private static final Long DEFAULT_FLUSH_PERIOD_MILLIS = (long) (60 * 1000); // flush every one minute
+  private static final long DEFAULT_GET_TIMEOUT = 1000; // default wait for get operations on the queue 1 sec
+  private static final String DEFAULT_PROTOCOL = "http";
 
   @JsonProperty
   private final String hostname;
@@ -37,8 +37,8 @@ import org.junit.runner.RunWith;
 @RunWith(JUnitParamsRunner.class)
 public class WhiteListBasedDruidToTimelineEventConverterTest
 {
-  final private String prefix = "druid";
-  final private WhiteListBasedDruidToTimelineEventConverter defaultWhiteListBasedDruidToTimelineEventConverter = new WhiteListBasedDruidToTimelineEventConverter(
+  private final String prefix = "druid";
+  private final WhiteListBasedDruidToTimelineEventConverter defaultWhiteListBasedDruidToTimelineEventConverter = new WhiteListBasedDruidToTimelineEventConverter(
       prefix,
       "druid",
       null,
@@ -30,9 +30,9 @@ import java.net.URISyntaxException;
 public class AzureByteSource extends ByteSource
 {
 
-  final private AzureStorage azureStorage;
-  final private String containerName;
-  final private String blobPath;
+  private final AzureStorage azureStorage;
+  private final String containerName;
+  private final String blobPath;
 
   public AzureByteSource(
       AzureStorage azureStorage,
@@ -29,8 +29,8 @@ import java.io.InputStream;
 public class CloudFilesByteSource extends ByteSource
 {
 
-  final private CloudFilesObjectApiProxy objectApi;
-  final private String path;
+  private final CloudFilesObjectApiProxy objectApi;
+  private final String path;
   private Payload payload;
 
   public CloudFilesByteSource(CloudFilesObjectApiProxy objectApi, String path)
@@ -29,36 +29,36 @@ import java.util.List;
 
 public class GraphiteEmitterConfig
 {
-  public final static String PLAINTEXT_PROTOCOL = "plaintext";
-  public final static String PICKLE_PROTOCOL = "pickle";
-  private final static int DEFAULT_BATCH_SIZE = 100;
+  public static final String PLAINTEXT_PROTOCOL = "plaintext";
+  public static final String PICKLE_PROTOCOL = "pickle";
+  private static final int DEFAULT_BATCH_SIZE = 100;
   private static final Long DEFAULT_FLUSH_PERIOD = (long) (60 * 1000); // flush every one minute
-  private final static long DEFAULT_GET_TIMEOUT = 1000; // default wait for get operations on the queue 1 sec
+  private static final long DEFAULT_GET_TIMEOUT = 1000; // default wait for get operations on the queue 1 sec
 
   @JsonProperty
-  final private String hostname;
+  private final String hostname;
   @JsonProperty
-  final private int port;
+  private final int port;
   @JsonProperty
-  final private int batchSize;
+  private final int batchSize;
   @JsonProperty
-  final private String protocol;
+  private final String protocol;
   @JsonProperty
-  final private Long flushPeriod;
+  private final Long flushPeriod;
   @JsonProperty
-  final private Integer maxQueueSize;
+  private final Integer maxQueueSize;
   @JsonProperty("eventConverter")
-  final private DruidToGraphiteEventConverter druidToGraphiteEventConverter;
+  private final DruidToGraphiteEventConverter druidToGraphiteEventConverter;
   @JsonProperty
-  final private List<String> alertEmitters;
+  private final List<String> alertEmitters;
   @JsonProperty
-  final private List<String> requestLogEmitters;
+  private final List<String> requestLogEmitters;
 
   @JsonProperty
-  final private Long emitWaitTime;
+  private final Long emitWaitTime;
   //waiting up to the specified wait time if necessary for an event to become available.
   @JsonProperty
-  final private Long waitForEventTime;
+  private final Long waitForEventTime;
 
   @Override
   public boolean equals(Object o)
@@ -36,8 +36,8 @@ import org.junit.runner.RunWith;
 @RunWith(JUnitParamsRunner.class)
 public class WhiteListBasedConverterTest
 {
-  final private String prefix = "druid";
-  final private WhiteListBasedConverter defaultWhiteListBasedConverter = new WhiteListBasedConverter(
+  private final String prefix = "druid";
+  private final WhiteListBasedConverter defaultWhiteListBasedConverter = new WhiteListBasedConverter(
       prefix,
       false,
       false,
@@ -50,7 +50,7 @@ public class KafkaEmitter implements Emitter
 {
   private static Logger log = new Logger(KafkaEmitter.class);
 
-  private final static int DEFAULT_RETRIES = 3;
+  private static final int DEFAULT_RETRIES = 3;
   private final AtomicLong metricLost;
   private final AtomicLong alertLost;
   private final AtomicLong invalidLost;
@@ -32,13 +32,13 @@ public class KafkaEmitterConfig
 {
 
   @JsonProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG)
-  final private String bootstrapServers;
+  private final String bootstrapServers;
   @JsonProperty("metric.topic")
-  final private String metricTopic;
+  private final String metricTopic;
   @JsonProperty("alert.topic")
-  final private String alertTopic;
+  private final String alertTopic;
   @JsonProperty
-  final private String clusterName;
+  private final String clusterName;
   @JsonProperty("producer.config")
   private Map<String, String> kafkaProducerConfig;
 
@@ -85,7 +85,7 @@ import java.util.zip.ZipInputStream;
 
 public class OrcIndexGeneratorJobTest
 {
-  static private final AggregatorFactory[] aggs = {
+  private static final AggregatorFactory[] aggs = {
       new LongSumAggregatorFactory("visited_num", "visited_num"),
       new HyperUniquesAggregatorFactory("unique_hosts", "host")
   };
@@ -38,7 +38,7 @@ import java.util.Map;
 public class DimensionConverter
 {
 
-  private final static Logger log = new Logger(DimensionConverter.class);
+  private static final Logger log = new Logger(DimensionConverter.class);
   private Map<String, StatsDMetric> metricMap;
 
   public DimensionConverter(ObjectMapper mapper, String dimensionMapPath)
@@ -38,10 +38,10 @@ import java.util.Map;
 public class StatsDEmitter implements Emitter
 {
 
-  private final static Logger log = new Logger(StatsDEmitter.class);
-  private final static String DRUID_METRIC_SEPARATOR = "\\/";
-  private final static String STATSD_SEPARATOR = ":|\\|";
-  private final static String BLANK = "\\s+";
+  private static final Logger log = new Logger(StatsDEmitter.class);
+  private static final String DRUID_METRIC_SEPARATOR = "\\/";
+  private static final String STATSD_SEPARATOR = ":|\\|";
+  private static final String BLANK = "\\s+";
 
   static final StatsDEmitter of(StatsDEmitterConfig config, ObjectMapper mapper)
   {
@@ -29,19 +29,19 @@ public class StatsDEmitterConfig
 {
 
   @JsonProperty
-  final private String hostname;
+  private final String hostname;
   @JsonProperty
-  final private Integer port;
+  private final Integer port;
   @JsonProperty
-  final private String prefix;
+  private final String prefix;
   @JsonProperty
-  final private String separator;
+  private final String separator;
   @JsonProperty
-  final private Boolean includeHost;
+  private final Boolean includeHost;
   @JsonProperty
-  final private String dimensionMapPath;
+  private final String dimensionMapPath;
   @JsonProperty
-  final private String blankHolder;
+  private final String blankHolder;
 
   @JsonCreator
   public StatsDEmitterConfig(
@@ -54,7 +54,7 @@ public class ThriftInputRowParser implements InputRowParser<Object>
   private final String thriftClassName;
 
   private Parser<String, Object> parser;
-  volatile private Class<TBase> thriftClass = null;
+  private volatile Class<TBase> thriftClass = null;
 
   @JsonCreator
   public ThriftInputRowParser(
@@ -163,7 +163,7 @@ public class MapVirtualColumn implements VirtualColumn
     }
   }
 
-  private static abstract class MapVirtualColumnValueSelector<T> implements ColumnValueSelector<T>
+  private abstract static class MapVirtualColumnValueSelector<T> implements ColumnValueSelector<T>
   {
     final DimensionSelector keySelector;
     final DimensionSelector valueSelector;
@@ -27,10 +27,10 @@ import com.fasterxml.jackson.annotation.JsonProperty;
  */
 public class SketchEstimateWithErrorBounds
 {
-  final private double estimate;
-  final private double highBound;
-  final private double lowBound;
-  final private int numStdDev;
+  private final double estimate;
+  private final double highBound;
+  private final double lowBound;
+  private final int numStdDev;
 
   @JsonCreator
   public SketchEstimateWithErrorBounds(
@@ -419,7 +419,7 @@ public class SketchAggregationTest
     );
   }
 
-  public final static String readFileFromClasspathAsString(String fileName) throws IOException
+  public static final String readFileFromClasspathAsString(String fileName) throws IOException
   {
     return Files.asCharSource(
         new File(SketchAggregationTest.class.getClassLoader().getResource(fileName).getFile()),
@@ -277,7 +277,7 @@ public class SketchAggregationWithSimpleDataTest
     Assert.assertEquals("AgMDAAAazJMCAAAAAACAPzz9j7pWTMdROWGf15uY1nI=", result.getValue().getEvents().get(0).getEvent().get("pty_country"));
   }
 
-  public final static String readFileFromClasspathAsString(String fileName) throws IOException
+  public static final String readFileFromClasspathAsString(String fileName) throws IOException
   {
     return Files.asCharSource(
         new File(SketchAggregationTest.class.getClassLoader().getResource(fileName).getFile()),
@@ -226,7 +226,7 @@ public class OldApiSketchAggregationTest
     );
   }
 
-  public final static String readFileFromClasspathAsString(String fileName) throws IOException
+  public static final String readFileFromClasspathAsString(String fileName) throws IOException
   {
     return Files.asCharSource(
         new File(OldApiSketchAggregationTest.class.getClassLoader().getResource(fileName).getFile()),
@@ -47,18 +47,18 @@ public class BasicAuthUtils
 
   private static final Logger log = new Logger(BasicAuthUtils.class);
   private static final SecureRandom SECURE_RANDOM = new SecureRandom();
-  public final static String ADMIN_NAME = "admin";
-  public final static String INTERNAL_USER_NAME = "druid_system";
+  public static final String ADMIN_NAME = "admin";
+  public static final String INTERNAL_USER_NAME = "druid_system";
 
   // PBKDF2WithHmacSHA512 is chosen since it has built-in support in Java8.
   // Argon2 (https://github.com/p-h-c/phc-winner-argon2) is newer but the only presently
   // available Java binding is LGPLv3 licensed.
   // Key length is 512-bit to match the PBKDF2WithHmacSHA512 algorithm.
   // 256-bit salt should be more than sufficient for uniqueness, expected user count is on the order of thousands.
-  public final static int SALT_LENGTH = 32;
-  public final static int DEFAULT_KEY_ITERATIONS = 10000;
-  public final static int KEY_LENGTH = 512;
-  public final static String ALGORITHM = "PBKDF2WithHmacSHA512";
+  public static final int SALT_LENGTH = 32;
+  public static final int DEFAULT_KEY_ITERATIONS = 10000;
+  public static final int KEY_LENGTH = 512;
+  public static final String ALGORITHM = "PBKDF2WithHmacSHA512";
 
   public static final TypeReference AUTHENTICATOR_USER_MAP_TYPE_REFERENCE =
       new TypeReference<Map<String, BasicAuthenticatorUser>>()
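The comment block in the hunk above explains why PBKDF2WithHmacSHA512 and the 32-byte salt, 10000 iterations, and 512-bit key length were chosen. As a hedged illustration (BasicAuthUtils' own hashing helpers are elided from this hunk; the class and method below are hypothetical), the constants map onto the JDK API like this:

import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.security.spec.InvalidKeySpecException;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;

public class PasswordHashSketch
{
  public static byte[] hashPassword(char[] password, byte[] salt, int iterations)
      throws NoSuchAlgorithmException, InvalidKeySpecException
  {
    // KEY_LENGTH (512) is in bits, matching PBKDF2WithHmacSHA512's output size.
    PBEKeySpec keySpec = new PBEKeySpec(password, salt, iterations, 512);
    SecretKeyFactory keyFactory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA512");
    return keyFactory.generateSecret(keySpec).getEncoded();
  }

  public static void main(String[] args) throws Exception
  {
    byte[] salt = new byte[32];  // SALT_LENGTH bytes = a 256-bit salt
    new SecureRandom().nextBytes(salt);
    byte[] hash = hashPassword("secret".toCharArray(), salt, 10000);
    System.out.println("hash length in bytes: " + hash.length);  // 64 bytes = 512 bits
  }
}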
@@ -56,7 +56,7 @@ import java.util.Map;
 
 public class CoordinatorBasicAuthenticatorMetadataStorageUpdaterTest
 {
-  private final static String AUTHENTICATOR_NAME = "test";
+  private static final String AUTHENTICATOR_NAME = "test";
   @Rule
   public ExpectedException expectedException = ExpectedException.none();
 
@@ -62,8 +62,8 @@ import java.util.Set;
 
 public class CoordinatorBasicAuthenticatorResourceTest
 {
-  private final static String AUTHENTICATOR_NAME = "test";
-  private final static String AUTHENTICATOR_NAME2 = "test2";
+  private static final String AUTHENTICATOR_NAME = "test";
+  private static final String AUTHENTICATOR_NAME2 = "test2";
 
   @Rule
   public ExpectedException expectedException = ExpectedException.none();
@@ -51,9 +51,9 @@ import java.util.Map;
 
 public class CoordinatorBasicAuthorizerMetadataStorageUpdaterTest
 {
-  private final static String AUTHORIZER_NAME = "test";
+  private static final String AUTHORIZER_NAME = "test";
 
-  private final static Map<String, BasicAuthorizerUser> BASE_USER_MAP = ImmutableMap.of(
+  private static final Map<String, BasicAuthorizerUser> BASE_USER_MAP = ImmutableMap.of(
       BasicAuthUtils.ADMIN_NAME,
       new BasicAuthorizerUser(BasicAuthUtils.ADMIN_NAME, ImmutableSet.of(BasicAuthUtils.ADMIN_NAME)),
       BasicAuthUtils.INTERNAL_USER_NAME,
@@ -61,7 +61,7 @@ public class CoordinatorBasicAuthorizerMetadataStorageUpdaterTest
           BasicAuthUtils.INTERNAL_USER_NAME))
   );
 
-  private final static Map<String, BasicAuthorizerRole> BASE_ROLE_MAP = ImmutableMap.of(
+  private static final Map<String, BasicAuthorizerRole> BASE_ROLE_MAP = ImmutableMap.of(
       BasicAuthUtils.ADMIN_NAME,
       new BasicAuthorizerRole(
           BasicAuthUtils.ADMIN_NAME,
@@ -59,8 +59,8 @@ import java.util.Set;
 
 public class CoordinatorBasicAuthorizerResourceTest
 {
-  private final static String AUTHORIZER_NAME = "test";
-  private final static String AUTHORIZER_NAME2 = "test2";
+  private static final String AUTHORIZER_NAME = "test";
+  private static final String AUTHORIZER_NAME2 = "test2";
 
   @Rule
   public ExpectedException expectedException = ExpectedException.none();
@@ -44,7 +44,7 @@ import java.util.Random;
 
 public class TestBroker implements Closeable
 {
-  private final static Random RANDOM = new Random();
+  private static final Random RANDOM = new Random();
 
   private final String zookeeperConnect;
   private final File directory;
@@ -35,7 +35,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 @JsonTypeName("loadingLookup")
 public class LoadingLookupFactory implements LookupExtractorFactory
 {
-  private final static Logger LOGGER = new Logger(LoadingLookupFactory.class);
+  private static final Logger LOGGER = new Logger(LoadingLookupFactory.class);
 
   @JsonProperty("dataFetcher")
   private final DataFetcher<String, String> dataFetcher;
@@ -36,7 +36,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 
 public class OnHeapLoadingCache<K, V> implements LoadingCache<K, V>
 {
-  private final static Logger log = new Logger(OnHeapLoadingCache.class);
+  private static final Logger log = new Logger(OnHeapLoadingCache.class);
   private static final int DEFAULT_INITIAL_CAPACITY = 16;
   //See com.google.common.cache.CacheBuilder#DEFAULT_CONCURRENCY_LEVEL
   private static final int DEFAULT_CONCURRENCY_LEVEL = 4;
@@ -69,7 +69,7 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
   private static final int bitsPerBucket = 4;
   private static final int range = (int) Math.pow(2, bitsPerBucket) - 1;
 
-  private final static double[][] minNumRegisterLookup = new double[64][256];
+  private static final double[][] minNumRegisterLookup = new double[64][256];
 
   static {
     for (int registerOffset = 0; registerOffset < 64; ++registerOffset) {
@@ -82,7 +82,7 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
   }
 
   // we have to keep track of the number of zeroes in each of the two halves of the byte register (0, 1, or 2)
-  private final static int[] numZeroLookup = new int[256];
+  private static final int[] numZeroLookup = new int[256];
 
   static {
     for (int i = 0; i < numZeroLookup.length; ++i) {
@@ -468,7 +468,7 @@ public class DeterminePartitionsJob implements Jobby
     }
   }
 
-  private static abstract class DeterminePartitionsDimSelectionBaseReducer
+  private abstract static class DeterminePartitionsDimSelectionBaseReducer
       extends Reducer<BytesWritable, Text, BytesWritable, Text>
   {
     protected volatile HadoopDruidIndexerConfig config = null;
@@ -111,7 +111,7 @@ public abstract class HadoopDruidIndexerMapper<KEYOUT, VALUEOUT> extends Mapper<
     }
   }
 
-  abstract protected void innerMap(InputRow inputRow, Context context, boolean reportParseExceptions)
+  protected abstract void innerMap(InputRow inputRow, Context context, boolean reportParseExceptions)
       throws IOException, InterruptedException;
 
 }
@@ -80,12 +80,12 @@ import java.util.Map;
 @RunWith(Parameterized.class)
 public class IndexGeneratorJobTest
 {
-  final private static AggregatorFactory[] aggs1 = {
+  private static final AggregatorFactory[] aggs1 = {
       new LongSumAggregatorFactory("visited_num", "visited_num"),
      new HyperUniquesAggregatorFactory("unique_hosts", "host")
   };
 
-  final private static AggregatorFactory[] aggs2 = {
+  private static final AggregatorFactory[] aggs2 = {
       new CountAggregatorFactory("count")
   };
 
@@ -51,7 +51,7 @@ public abstract class HadoopTask extends AbstractTask
   private static final Logger log = new Logger(HadoopTask.class);
   private static final ExtensionsConfig extensionsConfig;
 
-  final static Injector injector = GuiceInjectors.makeStartupInjector();
+  static final Injector injector = GuiceInjectors.makeStartupInjector();
 
   static {
     extensionsConfig = injector.getInstance(ExtensionsConfig.class);
@@ -87,7 +87,7 @@ public class RealtimeIndexTask extends AbstractTask
   public static final String CTX_KEY_LOOKUP_TIER = "lookupTier";
 
   private static final EmittingLogger log = new EmittingLogger(RealtimeIndexTask.class);
-  private final static Random random = new Random();
+  private static final Random random = new Random();
 
   private static String makeTaskId(FireDepartment fireDepartment)
   {
@@ -863,8 +863,8 @@ public class TaskLockbox
 
   static class TaskLockPosse
   {
-    final private TaskLock taskLock;
-    final private Set<String> taskIds;
+    private final TaskLock taskLock;
+    private final Set<String> taskIds;
 
     TaskLockPosse(TaskLock taskLock)
     {
@@ -174,8 +174,8 @@ public class IngestSegmentFirehoseFactoryTest
 
     final IndexerSQLMetadataStorageCoordinator mdc = new IndexerSQLMetadataStorageCoordinator(null, null, null)
     {
-      final private Set<DataSegment> published = Sets.newHashSet();
-      final private Set<DataSegment> nuked = Sets.newHashSet();
+      private final Set<DataSegment> published = Sets.newHashSet();
+      private final Set<DataSegment> nuked = Sets.newHashSet();
 
       @Override
       public List<DataSegment> getUsedSegmentsForInterval(String dataSource, Interval interval) throws IOException
@@ -68,8 +68,8 @@ public class PendingTaskBasedProvisioningStrategyTest
   private PendingTaskBasedWorkerProvisioningStrategy strategy;
   private AtomicReference<WorkerBehaviorConfig> workerConfig;
   private ScheduledExecutorService executorService = Execs.scheduledSingleThreaded("test service");
-  private final static String MIN_VERSION = "2014-01-00T00:01:00Z";
-  private final static String INVALID_VERSION = "0";
+  private static final String MIN_VERSION = "2014-01-00T00:01:00Z";
+  private static final String INVALID_VERSION = "0";
 
   @Before
   public void setUp() throws Exception
@@ -36,9 +36,9 @@ import java.util.Set;
 
 public class TestIndexerMetadataStorageCoordinator implements IndexerMetadataStorageCoordinator
 {
-  final private Set<DataSegment> published = Sets.newConcurrentHashSet();
-  final private Set<DataSegment> nuked = Sets.newConcurrentHashSet();
-  final private List<DataSegment> unusedSegments;
+  private final Set<DataSegment> published = Sets.newConcurrentHashSet();
+  private final Set<DataSegment> nuked = Sets.newConcurrentHashSet();
+  private final List<DataSegment> unusedSegments;
 
   public TestIndexerMetadataStorageCoordinator()
   {
@@ -32,7 +32,7 @@ import java.util.Map;
 
 public class ConfigFileConfigProvider implements IntegrationTestingConfigProvider
 {
-  private final static Logger LOG = new Logger(ConfigFileConfigProvider.class);
+  private static final Logger LOG = new Logger(ConfigFileConfigProvider.class);
   private String routerUrl;
   private String brokerUrl;
   private String historicalUrl;
@@ -49,7 +49,7 @@ import java.util.concurrent.Callable;
 
 public class OverlordResourceTestClient
 {
-  private final static Logger LOG = new Logger(OverlordResourceTestClient.class);
+  private static final Logger LOG = new Logger(OverlordResourceTestClient.class);
   private final ObjectMapper jsonMapper;
   private final HttpClient httpClient;
   private final String indexer;
@@ -28,19 +28,19 @@ import java.util.List;
  */
 public class DruidMetrics
 {
-  public final static String DATASOURCE = "dataSource";
-  public final static String TYPE = "type";
-  public final static String INTERVAL = "interval";
-  public final static String ID = "id";
-  public final static String TASK_ID = "taskId";
-  public final static String STATUS = "status";
+  public static final String DATASOURCE = "dataSource";
+  public static final String TYPE = "type";
+  public static final String INTERVAL = "interval";
+  public static final String ID = "id";
+  public static final String TASK_ID = "taskId";
+  public static final String STATUS = "status";
 
   // task metrics
-  public final static String TASK_TYPE = "taskType";
-  public final static String TASK_STATUS = "taskStatus";
+  public static final String TASK_TYPE = "taskType";
+  public static final String TASK_STATUS = "taskStatus";
 
-  public final static String SERVER = "server";
-  public final static String TIER = "tier";
+  public static final String SERVER = "server";
+  public static final String TIER = "tier";
 
   public static int findNumComplexAggs(List<AggregatorFactory> aggs)
   {
@@ -27,11 +27,11 @@ import java.util.Arrays;
 
 public class HistogramVisual
 {
-  @JsonProperty final public double[] breaks;
+  @JsonProperty public final double[] breaks;
   @JsonProperty
-  final public double[] counts;
+  public final double[] counts;
   // an array of the quantiles including the min. and max.
-  @JsonProperty final public double[] quantiles;
+  @JsonProperty public final double[] quantiles;
 
   @JsonCreator
   public HistogramVisual(
@@ -38,7 +38,7 @@ public class AndDimFilter implements DimFilter
 {
   private static final Joiner AND_JOINER = Joiner.on(" && ");
 
-  final private List<DimFilter> fields;
+  private final List<DimFilter> fields;
 
   @JsonCreator
   public AndDimFilter(
@@ -34,7 +34,7 @@ import java.util.List;
 public class NotDimFilter implements DimFilter
 {
 
-  final private DimFilter field;
+  private final DimFilter field;
 
   @JsonCreator
   public NotDimFilter(
@@ -39,7 +39,7 @@ public class OrDimFilter implements DimFilter
 {
   private static final Joiner OR_JOINER = Joiner.on(" || ");
 
-  final private List<DimFilter> fields;
+  private final List<DimFilter> fields;
 
   @JsonCreator
   public OrDimFilter(
@@ -80,9 +80,9 @@ import java.util.stream.Collectors;
  */
 public class GroupByQuery extends BaseQuery<Row>
 {
-  public final static String CTX_KEY_SORT_BY_DIMS_FIRST = "sortByDimsFirst";
+  public static final String CTX_KEY_SORT_BY_DIMS_FIRST = "sortByDimsFirst";
 
-  private final static Comparator<Row> NON_GRANULAR_TIME_COMP = (Row lhs, Row rhs) -> Longs.compare(
+  private static final Comparator<Row> NON_GRANULAR_TIME_COMP = (Row lhs, Row rhs) -> Longs.compare(
      lhs.getTimestampFromEpoch(),
      rhs.getTimestampFromEpoch()
   );
@@ -55,7 +55,7 @@ import java.util.concurrent.ConcurrentLinkedQueue;
 
 public class GroupByQueryHelper
 {
-  public final static String CTX_KEY_SORT_RESULTS = "sortResults";
+  public static final String CTX_KEY_SORT_RESULTS = "sortResults";
 
   public static <T> Pair<IncrementalIndex, Accumulator<IncrementalIndex, T>> createIndexAccumulatorPair(
       final GroupByQuery query,
@@ -227,7 +227,7 @@ public abstract class BaseTopNAlgorithm<DimValSelector, DimValAggregateStore, Pa
     }
   }
 
-  protected static abstract class BaseArrayProvider<T> implements TopNMetricSpecBuilder<T>
+  protected abstract static class BaseArrayProvider<T> implements TopNMetricSpecBuilder<T>
   {
     private volatile String previousStop;
     private volatile boolean ignoreAfterThreshold;
@@ -50,7 +50,7 @@ public final class DimensionHandlerUtils
 
   private DimensionHandlerUtils() {}
 
-  public final static ColumnCapabilities DEFAULT_STRING_CAPABILITIES =
+  public static final ColumnCapabilities DEFAULT_STRING_CAPABILITIES =
       new ColumnCapabilitiesImpl().setType(ValueType.STRING)
                                   .setDictionaryEncoded(true)
                                   .setHasBitmapIndexes(true);
@@ -163,7 +163,7 @@ public enum CompressionStrategy
     void decompress(ByteBuffer in, int numBytes, ByteBuffer out);
   }
 
-  public static abstract class Compressor
+  public abstract static class Compressor
   {
     /**
      * Allocates a buffer that should be passed to {@link #compress} method as input buffer. Different Compressors
@@ -26,7 +26,7 @@ import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
  */
 public class ZeroIndexedInts implements IndexedInts
 {
-  private final static ZeroIndexedInts INSTANCE = new ZeroIndexedInts();
+  private static final ZeroIndexedInts INSTANCE = new ZeroIndexedInts();
 
   private ZeroIndexedInts()
   {
@@ -40,7 +40,7 @@ import java.util.concurrent.TimeoutException;
 public class AsyncQueryRunnerTest
 {
 
-  private final static long TEST_TIMEOUT = 60000;
+  private static final long TEST_TIMEOUT = 60000;
 
   private final ExecutorService executor;
   private final Query query;
@@ -201,7 +201,7 @@ public class QueryRunnerTestHelper
       qualityUniques
   );
 
-  public final static List<AggregatorFactory> commonFloatAggregators = Arrays.asList(
+  public static final List<AggregatorFactory> commonFloatAggregators = Arrays.asList(
       new FloatSumAggregatorFactory("index", "indexFloat"),
       new CountAggregatorFactory("rows"),
       new HyperUniquesAggregatorFactory(
@@ -43,7 +43,7 @@ import java.util.List;
  */
 public class CardinalityAggregatorBenchmark extends SimpleBenchmark
 {
-  private final static int MAX = 5_000_000;
+  private static final int MAX = 5_000_000;
 
   CardinalityBufferAggregator agg;
   List<DimensionSelector> selectorList;
@@ -35,11 +35,11 @@ import java.util.Random;
 
 public class HyperUniquesAggregatorFactoryTest
 {
-  final static HyperUniquesAggregatorFactory aggregatorFactory = new HyperUniquesAggregatorFactory(
+  static final HyperUniquesAggregatorFactory aggregatorFactory = new HyperUniquesAggregatorFactory(
      "hyperUnique",
      "uniques"
   );
-  final static String V0_BASE64 = "AAYbEyQwFyQVASMCVFEQQgEQIxIhM4ISAQMhUkICEDFDIBMhMgFQFAFAMjAAEhEREyVAEiUBAhIjISATMCECMiERIRIiVRFRAyIAEgFCQSMEJAITATAAEAMQgCEBEjQiAyUTAyEQASJyAGURAAISAwISATETQhAREBYDIVIlFTASAzJgERIgRCcmUyAwNAMyEJMjIhQXQhEWECABQDETATEREjIRAgEyIiMxMBQiAkBBMDYAMEQQACMzMhIkMTQSkYIRABIBADMBAhIEISAENkEBQDAxETMAIEEwEzQiQSEVQSFBBAQDICIiAVIAMTAQIQYBIRABADMDEzEAQSMkEiAYFBAQI0AmECEyQSARRTIVMhEkMiKAMCUBxUghAkIBI3EmMAQiACEAJDJCAAADOzESEDBCRjMgEUQQETQwEWIhA6MlAiAAZDI1AgEIIDUyFDIHMQEEAwIRBRABBStCZCQhAgJSMQIiQEEURTBmM1MxACIAETGhMgQnBRICNiIREyIUNAEAAkABAwQSEBJBIhIhIRERAiIRACUhEUAVMkQGEVMjECYjACBwEQQSIRIgAAEyExQUFSEAIBJCIDIDYTAgMiNBIUADUiETADMoFEADETMCIwUEQkIAESMSIzIABDERIXEhIiACQgUSEgJiQCAUARIRAREDQiEUAkQgAgQiIEAzIxRCARIgBAAVAzMAECEwE0Qh8gAAASEhEiAiMhUxcRImIVABATYyUBAwIoE1QhRDIiYBIBEBEiQSQyERAAADMAARAEACFYUwQSQBIRIgURITARFSEzEHEBACOTMREBIAMjIgEhU0cxEQIRIhIi1wEgMRUBEgMQIRAnAVASURMHQBAiEyBSAAEBQTAWQ5EQA0IUMSISAUEiASIjIhMhMFJBBSEjEAECEwACASEQFBAjARITEQIgYTEKEAeAAiMkEyARowARFBAicRISIBIxAQAgEBARMCIRQgMSIVIAkjMxIAIEMyADASMgFRIjEyKjEjBBIEQCUAARYBEQMxMCIBACNCACRCMlEzUUAAUDM1MhAjEgAxAAISAVFQECAhQAMBMhEzEgASNxAhFRIxECMRJBQAERAToBgQMhJSRQFAEhAwMiIhMQAwAgQiBQJiIGMQQhEiQxR1MiAjIAIEEiAkARECEzQlMjECIRATBgIhEBQAIQAEATEjBCMwAgMBMhAhIyFBIxQAARI1AAEABCIDFBIRUzMBIgAgEiARQCASMQQDQCFBAQAUJwMUElAyIAIRBSIRITICEAIxMAEUBEYTcBMBEEIxMREwIRIDAGIAEgYxBAEANCAhBAI2UhIiIgIRABIEVRAwNEIQERQgEFMhFCQSIAEhQDMTEQMiAjJyEQ==";
+  static final String V0_BASE64 = "AAYbEyQwFyQVASMCVFEQQgEQIxIhM4ISAQMhUkICEDFDIBMhMgFQFAFAMjAAEhEREyVAEiUBAhIjISATMCECMiERIRIiVRFRAyIAEgFCQSMEJAITATAAEAMQgCEBEjQiAyUTAyEQASJyAGURAAISAwISATETQhAREBYDIVIlFTASAzJgERIgRCcmUyAwNAMyEJMjIhQXQhEWECABQDETATEREjIRAgEyIiMxMBQiAkBBMDYAMEQQACMzMhIkMTQSkYIRABIBADMBAhIEISAENkEBQDAxETMAIEEwEzQiQSEVQSFBBAQDICIiAVIAMTAQIQYBIRABADMDEzEAQSMkEiAYFBAQI0AmECEyQSARRTIVMhEkMiKAMCUBxUghAkIBI3EmMAQiACEAJDJCAAADOzESEDBCRjMgEUQQETQwEWIhA6MlAiAAZDI1AgEIIDUyFDIHMQEEAwIRBRABBStCZCQhAgJSMQIiQEEURTBmM1MxACIAETGhMgQnBRICNiIREyIUNAEAAkABAwQSEBJBIhIhIRERAiIRACUhEUAVMkQGEVMjECYjACBwEQQSIRIgAAEyExQUFSEAIBJCIDIDYTAgMiNBIUADUiETADMoFEADETMCIwUEQkIAESMSIzIABDERIXEhIiACQgUSEgJiQCAUARIRAREDQiEUAkQgAgQiIEAzIxRCARIgBAAVAzMAECEwE0Qh8gAAASEhEiAiMhUxcRImIVABATYyUBAwIoE1QhRDIiYBIBEBEiQSQyERAAADMAARAEACFYUwQSQBIRIgURITARFSEzEHEBACOTMREBIAMjIgEhU0cxEQIRIhIi1wEgMRUBEgMQIRAnAVASURMHQBAiEyBSAAEBQTAWQ5EQA0IUMSISAUEiASIjIhMhMFJBBSEjEAECEwACASEQFBAjARITEQIgYTEKEAeAAiMkEyARowARFBAicRISIBIxAQAgEBARMCIRQgMSIVIAkjMxIAIEMyADASMgFRIjEyKjEjBBIEQCUAARYBEQMxMCIBACNCACRCMlEzUUAAUDM1MhAjEgAxAAISAVFQECAhQAMBMhEzEgASNxAhFRIxECMRJBQAERAToBgQMhJSRQFAEhAwMiIhMQAwAgQiBQJiIGMQQhEiQxR1MiAjIAIEEiAkARECEzQlMjECIRATBgIhEBQAIQAEATEjBCMwAgMBMhAhIyFBIxQAARI1AAEABCIDFBIRUzMBIgAgEiARQCASMQQDQCFBAQAUJwMUElAyIAIRBSIRITICEAIxMAEUBEYTcBMBEEIxMREwIRIDAGIAEgYxBAEANCAhBAI2UhIiIgIRABIEVRAwNEIQERQgEFMhFCQSIAEhQDMTEQMiAjJyEQ==";
 
   private final HashFunction fn = Hashing.murmur3_128();
 
@@ -37,7 +37,7 @@ import javax.annotation.Nullable;
 */
 class TestDimensionSelector implements DimensionSelector
 {
-  public final static TestDimensionSelector instance = new TestDimensionSelector();
+  public static final TestDimensionSelector instance = new TestDimensionSelector();
 
   private TestDimensionSelector()
   {
@@ -176,7 +176,7 @@ public class GroupByQueryMergeBufferTest
     );
   }
 
-  private final static TestBlockingPool mergeBufferPool = new TestBlockingPool(
+  private static final TestBlockingPool mergeBufferPool = new TestBlockingPool(
       new Supplier<ByteBuffer>()
       {
        @Override
@@ -140,7 +140,7 @@ public class GroupByQueryRunnerFailureTest
     );
   }
 
-  private final static BlockingPool<ByteBuffer> mergeBufferPool = new DefaultBlockingPool<>(
+  private static final BlockingPool<ByteBuffer> mergeBufferPool = new DefaultBlockingPool<>(
       new Supplier<ByteBuffer>()
      {
        @Override
@@ -52,10 +52,10 @@ public class BatchServerInventoryView extends AbstractCuratorServerInventoryView
 {
   private static final EmittingLogger log = new EmittingLogger(BatchServerInventoryView.class);
 
-  final private ConcurrentMap<String, Set<DataSegment>> zNodes = new ConcurrentHashMap<>();
-  final private ConcurrentMap<SegmentCallback, Predicate<Pair<DruidServerMetadata, DataSegment>>> segmentPredicates =
+  private final ConcurrentMap<String, Set<DataSegment>> zNodes = new ConcurrentHashMap<>();
+  private final ConcurrentMap<SegmentCallback, Predicate<Pair<DruidServerMetadata, DataSegment>>> segmentPredicates =
       new ConcurrentHashMap<>();
-  final private Predicate<Pair<DruidServerMetadata, DataSegment>> defaultFilter;
+  private final Predicate<Pair<DruidServerMetadata, DataSegment>> defaultFilter;
 
   @Inject
   public BatchServerInventoryView(
@@ -46,7 +46,7 @@ public class SingleServerInventoryView extends AbstractCuratorServerInventoryVie
 {
   private static final EmittingLogger log = new EmittingLogger(SingleServerInventoryView.class);
 
-  final private ConcurrentMap<SegmentCallback, Predicate<Pair<DruidServerMetadata, DataSegment>>> segmentPredicates =
+  private final ConcurrentMap<SegmentCallback, Predicate<Pair<DruidServerMetadata, DataSegment>>> segmentPredicates =
       new ConcurrentHashMap<>();
   private final Predicate<Pair<DruidServerMetadata, DataSegment>> defaultFilter;
 
@@ -59,8 +59,8 @@ public interface Cache
 
   class NamedKey
   {
-    final public String namespace;
-    final public byte[] key;
+    public final String namespace;
+    public final byte[] key;
 
     public NamedKey(String namespace, byte[] key)
     {
@@ -80,7 +80,7 @@ public class MemcachedCache implements Cache
    * If some other algorithms are considered as the default algorithm instead of this one, the cache distribution for
    * those hash algorithms should be checked and compared using {@code CacheDistributionTest}.
    */
-  final static HashAlgorithm MURMUR3_128 = new HashAlgorithm()
+  static final HashAlgorithm MURMUR3_128 = new HashAlgorithm()
   {
     final HashFunction fn = Hashing.murmur3_128();
 
@@ -98,7 +98,7 @@ public class Initialization
   private static final Logger log = new Logger(Initialization.class);
   private static final ConcurrentMap<File, URLClassLoader> loadersMap = new ConcurrentHashMap<>();
 
-  private final static Map<Class, Collection> extensionsMap = Maps.newHashMap();
+  private static final Map<Class, Collection> extensionsMap = Maps.newHashMap();
 
   /**
    * @param clazz service class
@@ -140,7 +140,7 @@ public class Initialization
    * elements in the returned collection is not specified and not guaranteed to be the same for different calls to
    * getFromExtensions().
    */
-  public synchronized static <T> Collection<T> getFromExtensions(ExtensionsConfig config, Class<T> serviceClass)
+  public static synchronized <T> Collection<T> getFromExtensions(ExtensionsConfig config, Class<T> serviceClass)
   {
     Collection<T> modulesToLoad = new ServiceLoadingFromExtensions<>(config, serviceClass).implsToLoad;
     extensionsMap.put(serviceClass, modulesToLoad);
@@ -122,7 +122,7 @@ public class MapLookupExtractorFactory implements LookupExtractorFactory
 
   public static class MapLookupIntrospectionHandler implements LookupIntrospectHandler
   {
-    final private Map<String, String> map;
+    private final Map<String, String> map;
     public MapLookupIntrospectionHandler(Map<String, String> map)
     {
       this.map = map;
@@ -753,8 +753,8 @@ public class SegmentLoadDropHandler implements DataSegmentChangeHandler
     private final STATE state;
     private final String failureCause;
 
-    public final static Status SUCCESS = new Status(STATE.SUCCESS, null);
-    public final static Status PENDING = new Status(STATE.PENDING, null);
+    public static final Status SUCCESS = new Status(STATE.SUCCESS, null);
+    public static final Status PENDING = new Status(STATE.PENDING, null);
 
     @JsonCreator
     Status(
@@ -277,16 +277,16 @@ public class CoordinatorDynamicConfig
 
   public static class Builder
   {
-    private final static long DEFAULT_MILLIS_TO_WAIT_BEFORE_DELETING = TimeUnit.MINUTES.toMillis(15);
-    private final static long DEFAULT_MERGE_BYTES_LIMIT = 524288000L;
-    private final static int DEFAULT_MERGE_SEGMENTS_LIMIT = 100;
-    private final static int DEFAULT_MAX_SEGMENTS_TO_MOVE = 5;
-    private final static int DEFAULT_REPLICANT_LIFETIME = 15;
-    private final static int DEFAULT_REPLICATION_THROTTLE_LIMIT = 10;
-    private final static int DEFAULT_BALANCER_COMPUTE_THREADS = 1;
-    private final static boolean DEFAULT_EMIT_BALANCING_STATS = false;
-    private final static boolean DEFAULT_KILL_ALL_DATA_SOURCES = false;
-    private final static int DEFAULT_MAX_SEGMENTS_IN_NODE_LOADING_QUEUE = 0;
+    private static final long DEFAULT_MILLIS_TO_WAIT_BEFORE_DELETING = TimeUnit.MINUTES.toMillis(15);
+    private static final long DEFAULT_MERGE_BYTES_LIMIT = 524288000L;
+    private static final int DEFAULT_MERGE_SEGMENTS_LIMIT = 100;
+    private static final int DEFAULT_MAX_SEGMENTS_TO_MOVE = 5;
+    private static final int DEFAULT_REPLICANT_LIFETIME = 15;
+    private static final int DEFAULT_REPLICATION_THROTTLE_LIMIT = 10;
+    private static final int DEFAULT_BALANCER_COMPUTE_THREADS = 1;
+    private static final boolean DEFAULT_EMIT_BALANCING_STATS = false;
+    private static final boolean DEFAULT_KILL_ALL_DATA_SOURCES = false;
+    private static final int DEFAULT_MAX_SEGMENTS_IN_NODE_LOADING_QUEUE = 0;
 
     private Long millisToWaitBeforeDeleting;
     private Long mergeBytesLimit;
@@ -39,7 +39,7 @@ import java.util.List;
 */
 public class DruidCoordinatorSegmentKiller implements DruidCoordinatorHelper
 {
-  private final static Logger log = new Logger(DruidCoordinatorSegmentKiller.class);
+  private static final Logger log = new Logger(DruidCoordinatorSegmentKiller.class);
 
   private final long period;
   private final long retainDuration;
@@ -29,7 +29,7 @@ import io.druid.java.util.common.logger.Logger;
 
 public class JettyRequestLog extends AbstractLifeCycle implements RequestLog
 {
-  private final static Logger logger = new Logger("io.druid.jetty.RequestLog");
+  private static final Logger logger = new Logger("io.druid.jetty.RequestLog");
 
   @Override
   public void log(Request request, Response response)
@@ -35,7 +35,7 @@ import java.util.Properties;
 */
 public class MonitorsConfig
 {
-  public final static String METRIC_DIMENSION_PREFIX = "druid.metrics.emitter.dimension.";
+  public static final String METRIC_DIMENSION_PREFIX = "druid.metrics.emitter.dimension.";
 
   @JsonProperty("monitors")
   @NotNull
@@ -23,7 +23,7 @@ import io.druid.java.util.common.StringUtils;
 
 public class Access
 {
-  public final static Access OK = new Access(true);
+  public static final Access OK = new Access(true);
 
   private final boolean allowed;
   private final String message;
@@ -40,10 +40,10 @@ import java.util.Map;
 public class NumberedShardSpec implements ShardSpec
 {
   @JsonIgnore
-  final private int partitionNum;
+  private final int partitionNum;
 
   @JsonIgnore
-  final private int partitions;
+  private final int partitions;
 
   @JsonCreator
   public NumberedShardSpec(
@@ -32,7 +32,7 @@ import org.junit.Test;
 */
 public class PeriodDropRuleTest
 {
-  private final static DataSegment.Builder builder = DataSegment.builder()
+  private static final DataSegment.Builder builder = DataSegment.builder()
       .dataSource("test")
       .version(DateTimes.of("2012-12-31T01:00:00").toString())
       .shardSpec(NoneShardSpec.instance());
@@ -37,7 +37,7 @@ import org.junit.Test;
 */
 public class PeriodLoadRuleTest
 {
-  private final static DataSegment.Builder builder = DataSegment.builder()
+  private static final DataSegment.Builder builder = DataSegment.builder()
       .dataSource("test")
       .version(DateTimes.nowUtc().toString())
       .shardSpec(NoneShardSpec.instance());
@@ -129,7 +129,7 @@ public class DruidRules
     }
   }
 
-  public static abstract class DruidOuterQueryRule extends RelOptRule
+  public abstract static class DruidOuterQueryRule extends RelOptRule
   {
     public static RelOptRule AGGREGATE = new DruidOuterQueryRule(
         operand(Aggregate.class, operand(DruidRel.class, any())),