Enforce modifier order with Checkstyle (#5246)

Roman Leventov 2018-01-11 09:50:42 +01:00 committed by GitHub
parent 3cc4a0ab19
commit 8877ce38d6
82 changed files with 179 additions and 177 deletions
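
The pattern repeated across all 82 files is the same: Java modifier keywords are reordered to match the order recommended by the Java Language Specification, which the newly enabled Checkstyle ModifierOrder check enforces. A minimal sketch of that before/after pattern, using hypothetical names rather than code taken from the diff:

// Hypothetical example; illustrates the reordering applied throughout this commit.
public class ModifierOrderExample
{
  // Before: "private final static", flagged by the ModifierOrder check.
  // After:  "private static final", the JLS-recommended order.
  private static final int DEFAULT_TIMEOUT_MILLIS = 1000;

  // "final private" becomes "private final": the access modifier comes first.
  private final String name;

  // "static abstract" becomes "abstract static" on nested classes.
  abstract static class Base
  {
    abstract void run();
  }

  public ModifierOrderExample(String name)
  {
    this.name = name;
  }
}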

View File

@ -32,7 +32,7 @@ import java.util.Map;
*/
public class NoneShardSpec implements ShardSpec
{
private final static NoneShardSpec INSTANCE = new NoneShardSpec();
private static final NoneShardSpec INSTANCE = new NoneShardSpec();
@JsonCreator
public static NoneShardSpec instance()

View File

@ -312,7 +312,7 @@ public class ConditionalMultibindTest
Assert.assertEquals(ImmutableSet.of(zoo1, zoo2), actualZooSet);
}
static abstract class Animal
abstract static class Animal
{
private final String type;

View File

@ -50,8 +50,8 @@ import java.util.Set;
*/
public class DataSegmentTest
{
private final static ObjectMapper mapper = new TestObjectMapper();
private final static int TEST_VERSION = 0x7;
private static final ObjectMapper mapper = new TestObjectMapper();
private static final int TEST_VERSION = 0x7;
private static ShardSpec getShardSpec(final int partitionNum)
{

View File

@ -40,7 +40,7 @@ import java.util.concurrent.TimeUnit;
@State(Scope.Benchmark)
public class CostBalancerStrategyBenchmark
{
private final static DateTime t0 = DateTimes.of("2016-01-01T01:00:00Z");
private static final DateTime t0 = DateTimes.of("2016-01-01T01:00:00Z");
private List<DataSegment> segments;
private DataSegment segment;

View File

@ -45,15 +45,15 @@ public class BitmapBenchmark
{
public static final int LENGTH = 500_000;
public static final int SIZE = 10_000;
final static ImmutableConciseSet concise[] = new ImmutableConciseSet[SIZE];
final static ImmutableConciseSet offheapConcise[] = new ImmutableConciseSet[SIZE];
final static ImmutableRoaringBitmap roaring[] = new ImmutableRoaringBitmap[SIZE];
final static ImmutableRoaringBitmap immutableRoaring[] = new ImmutableRoaringBitmap[SIZE];
final static ImmutableRoaringBitmap offheapRoaring[] = new ImmutableRoaringBitmap[SIZE];
final static ImmutableBitmap genericConcise[] = new ImmutableBitmap[SIZE];
final static ImmutableBitmap genericRoaring[] = new ImmutableBitmap[SIZE];
final static ConciseBitmapFactory conciseFactory = new ConciseBitmapFactory();
final static RoaringBitmapFactory roaringFactory = new RoaringBitmapFactory();
static final ImmutableConciseSet concise[] = new ImmutableConciseSet[SIZE];
static final ImmutableConciseSet offheapConcise[] = new ImmutableConciseSet[SIZE];
static final ImmutableRoaringBitmap roaring[] = new ImmutableRoaringBitmap[SIZE];
static final ImmutableRoaringBitmap immutableRoaring[] = new ImmutableRoaringBitmap[SIZE];
static final ImmutableRoaringBitmap offheapRoaring[] = new ImmutableRoaringBitmap[SIZE];
static final ImmutableBitmap genericConcise[] = new ImmutableBitmap[SIZE];
static final ImmutableBitmap genericRoaring[] = new ImmutableBitmap[SIZE];
static final ConciseBitmapFactory conciseFactory = new ConciseBitmapFactory();
static final RoaringBitmapFactory roaringFactory = new RoaringBitmapFactory();
static Random rand = new Random(0);
static long totalConciseBytes = 0;
static long totalRoaringBytes = 0;

View File

@ -99,6 +99,8 @@
<property name="caseIndent" value="2"/>
</module>
<module name="ModifierOrder" />
<module name="Regexp">
<property name="format" value="com\.google\.common\.io\.Closer"/>
<property name="illegalPattern" value="true"/>

View File

@ -119,7 +119,7 @@ public abstract class ExprEval<T>
public abstract Expr toExpr();
private static abstract class NumericExprEval extends ExprEval<Number>
private abstract static class NumericExprEval extends ExprEval<Number>
{
private NumericExprEval(Number value)

View File

@ -30,7 +30,7 @@ import java.io.UnsupportedEncodingException;
public class StringUtilsTest
{
// copied from https://github.com/druid-io/druid/pull/2612
public final static String[] TEST_STRINGS = new String[]{
public static final String[] TEST_STRINGS = new String[]{
"peach", "péché", "pêche", "sin", "",
"", "C", "c", "Ç", "ç", "G", "g", "Ğ", "ğ", "I", "ı", "İ", "i",
"O", "o", "Ö", "ö", "S", "s", "Ş", "ş", "U", "u", "Ü", "ü", "ä",

View File

@ -22,37 +22,37 @@ public class ConciseSetUtils
* <tt>31 * (1 << 25)</tt>, followed by a literal with 30 0's and the
* MSB (31<sup>st</sup> bit) equal to 1
*/
public final static int MAX_ALLOWED_INTEGER = 31 * (1 << 25) + 30; // 1040187422
public static final int MAX_ALLOWED_INTEGER = 31 * (1 << 25) + 30; // 1040187422
/**
* The lowest representable integer.
*/
public final static int MIN_ALLOWED_SET_BIT = 0;
public static final int MIN_ALLOWED_SET_BIT = 0;
/**
* Maximum number of representable bits within a literal
*/
public final static int MAX_LITERAL_LENGTH = 31;
public static final int MAX_LITERAL_LENGTH = 31;
/**
* Literal that represents all bits set to 1 (and MSB = 1)
*/
public final static int ALL_ONES_LITERAL = 0xFFFFFFFF;
public static final int ALL_ONES_LITERAL = 0xFFFFFFFF;
/**
* Literal that represents all bits set to 0 (and MSB = 1)
*/
public final static int ALL_ZEROS_LITERAL = 0x80000000;
public static final int ALL_ZEROS_LITERAL = 0x80000000;
/**
* All bits set to 1 and MSB = 0
*/
public final static int ALL_ONES_WITHOUT_MSB = 0x7FFFFFFF;
public static final int ALL_ONES_WITHOUT_MSB = 0x7FFFFFFF;
/**
* Sequence bit
*/
public final static int SEQUENCE_BIT = 0x40000000;
public static final int SEQUENCE_BIT = 0x40000000;
/**
* Calculates the modulus division by 31 in a faster way than using <code>n % 31</code>

View File

@ -34,7 +34,7 @@ import java.util.PriorityQueue;
public class ImmutableConciseSet
{
private final static int CHUNK_SIZE = 10000;
private static final int CHUNK_SIZE = 10000;
private static final Comparator<WordIterator> UNION_COMPARATOR = new Comparator<WordIterator>()
{

View File

@ -29,10 +29,10 @@ import java.util.List;
public class AmbariMetricsEmitterConfig
{
private final static int DEFAULT_BATCH_SIZE = 100;
private final static Long DEFAULT_FLUSH_PERIOD_MILLIS = (long) (60 * 1000); // flush every one minute
private final static long DEFAULT_GET_TIMEOUT = 1000; // default wait for get operations on the queue 1 sec
private final static String DEFAULT_PROTOCOL = "http";
private static final int DEFAULT_BATCH_SIZE = 100;
private static final Long DEFAULT_FLUSH_PERIOD_MILLIS = (long) (60 * 1000); // flush every one minute
private static final long DEFAULT_GET_TIMEOUT = 1000; // default wait for get operations on the queue 1 sec
private static final String DEFAULT_PROTOCOL = "http";
@JsonProperty
private final String hostname;

View File

@ -37,8 +37,8 @@ import org.junit.runner.RunWith;
@RunWith(JUnitParamsRunner.class)
public class WhiteListBasedDruidToTimelineEventConverterTest
{
final private String prefix = "druid";
final private WhiteListBasedDruidToTimelineEventConverter defaultWhiteListBasedDruidToTimelineEventConverter = new WhiteListBasedDruidToTimelineEventConverter(
private final String prefix = "druid";
private final WhiteListBasedDruidToTimelineEventConverter defaultWhiteListBasedDruidToTimelineEventConverter = new WhiteListBasedDruidToTimelineEventConverter(
prefix,
"druid",
null,

View File

@ -30,9 +30,9 @@ import java.net.URISyntaxException;
public class AzureByteSource extends ByteSource
{
final private AzureStorage azureStorage;
final private String containerName;
final private String blobPath;
private final AzureStorage azureStorage;
private final String containerName;
private final String blobPath;
public AzureByteSource(
AzureStorage azureStorage,

View File

@ -29,8 +29,8 @@ import java.io.InputStream;
public class CloudFilesByteSource extends ByteSource
{
final private CloudFilesObjectApiProxy objectApi;
final private String path;
private final CloudFilesObjectApiProxy objectApi;
private final String path;
private Payload payload;
public CloudFilesByteSource(CloudFilesObjectApiProxy objectApi, String path)

View File

@ -29,36 +29,36 @@ import java.util.List;
public class GraphiteEmitterConfig
{
public final static String PLAINTEXT_PROTOCOL = "plaintext";
public final static String PICKLE_PROTOCOL = "pickle";
private final static int DEFAULT_BATCH_SIZE = 100;
public static final String PLAINTEXT_PROTOCOL = "plaintext";
public static final String PICKLE_PROTOCOL = "pickle";
private static final int DEFAULT_BATCH_SIZE = 100;
private static final Long DEFAULT_FLUSH_PERIOD = (long) (60 * 1000); // flush every one minute
private final static long DEFAULT_GET_TIMEOUT = 1000; // default wait for get operations on the queue 1 sec
private static final long DEFAULT_GET_TIMEOUT = 1000; // default wait for get operations on the queue 1 sec
@JsonProperty
final private String hostname;
private final String hostname;
@JsonProperty
final private int port;
private final int port;
@JsonProperty
final private int batchSize;
private final int batchSize;
@JsonProperty
final private String protocol;
private final String protocol;
@JsonProperty
final private Long flushPeriod;
private final Long flushPeriod;
@JsonProperty
final private Integer maxQueueSize;
private final Integer maxQueueSize;
@JsonProperty("eventConverter")
final private DruidToGraphiteEventConverter druidToGraphiteEventConverter;
private final DruidToGraphiteEventConverter druidToGraphiteEventConverter;
@JsonProperty
final private List<String> alertEmitters;
private final List<String> alertEmitters;
@JsonProperty
final private List<String> requestLogEmitters;
private final List<String> requestLogEmitters;
@JsonProperty
final private Long emitWaitTime;
private final Long emitWaitTime;
//waiting up to the specified wait time if necessary for an event to become available.
@JsonProperty
final private Long waitForEventTime;
private final Long waitForEventTime;
@Override
public boolean equals(Object o)

View File

@ -36,8 +36,8 @@ import org.junit.runner.RunWith;
@RunWith(JUnitParamsRunner.class)
public class WhiteListBasedConverterTest
{
final private String prefix = "druid";
final private WhiteListBasedConverter defaultWhiteListBasedConverter = new WhiteListBasedConverter(
private final String prefix = "druid";
private final WhiteListBasedConverter defaultWhiteListBasedConverter = new WhiteListBasedConverter(
prefix,
false,
false,

View File

@ -50,7 +50,7 @@ public class KafkaEmitter implements Emitter
{
private static Logger log = new Logger(KafkaEmitter.class);
private final static int DEFAULT_RETRIES = 3;
private static final int DEFAULT_RETRIES = 3;
private final AtomicLong metricLost;
private final AtomicLong alertLost;
private final AtomicLong invalidLost;

View File

@ -32,13 +32,13 @@ public class KafkaEmitterConfig
{
@JsonProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG)
final private String bootstrapServers;
private final String bootstrapServers;
@JsonProperty("metric.topic")
final private String metricTopic;
private final String metricTopic;
@JsonProperty("alert.topic")
final private String alertTopic;
private final String alertTopic;
@JsonProperty
final private String clusterName;
private final String clusterName;
@JsonProperty("producer.config")
private Map<String, String> kafkaProducerConfig;

View File

@ -85,7 +85,7 @@ import java.util.zip.ZipInputStream;
public class OrcIndexGeneratorJobTest
{
static private final AggregatorFactory[] aggs = {
private static final AggregatorFactory[] aggs = {
new LongSumAggregatorFactory("visited_num", "visited_num"),
new HyperUniquesAggregatorFactory("unique_hosts", "host")
};

View File

@ -38,7 +38,7 @@ import java.util.Map;
public class DimensionConverter
{
private final static Logger log = new Logger(DimensionConverter.class);
private static final Logger log = new Logger(DimensionConverter.class);
private Map<String, StatsDMetric> metricMap;
public DimensionConverter(ObjectMapper mapper, String dimensionMapPath)

View File

@ -38,10 +38,10 @@ import java.util.Map;
public class StatsDEmitter implements Emitter
{
private final static Logger log = new Logger(StatsDEmitter.class);
private final static String DRUID_METRIC_SEPARATOR = "\\/";
private final static String STATSD_SEPARATOR = ":|\\|";
private final static String BLANK = "\\s+";
private static final Logger log = new Logger(StatsDEmitter.class);
private static final String DRUID_METRIC_SEPARATOR = "\\/";
private static final String STATSD_SEPARATOR = ":|\\|";
private static final String BLANK = "\\s+";
static final StatsDEmitter of(StatsDEmitterConfig config, ObjectMapper mapper)
{

View File

@ -29,19 +29,19 @@ public class StatsDEmitterConfig
{
@JsonProperty
final private String hostname;
private final String hostname;
@JsonProperty
final private Integer port;
private final Integer port;
@JsonProperty
final private String prefix;
private final String prefix;
@JsonProperty
final private String separator;
private final String separator;
@JsonProperty
final private Boolean includeHost;
private final Boolean includeHost;
@JsonProperty
final private String dimensionMapPath;
private final String dimensionMapPath;
@JsonProperty
final private String blankHolder;
private final String blankHolder;
@JsonCreator
public StatsDEmitterConfig(

View File

@ -54,7 +54,7 @@ public class ThriftInputRowParser implements InputRowParser<Object>
private final String thriftClassName;
private Parser<String, Object> parser;
volatile private Class<TBase> thriftClass = null;
private volatile Class<TBase> thriftClass = null;
@JsonCreator
public ThriftInputRowParser(

View File

@ -163,7 +163,7 @@ public class MapVirtualColumn implements VirtualColumn
}
}
private static abstract class MapVirtualColumnValueSelector<T> implements ColumnValueSelector<T>
private abstract static class MapVirtualColumnValueSelector<T> implements ColumnValueSelector<T>
{
final DimensionSelector keySelector;
final DimensionSelector valueSelector;

View File

@ -27,10 +27,10 @@ import com.fasterxml.jackson.annotation.JsonProperty;
*/
public class SketchEstimateWithErrorBounds
{
final private double estimate;
final private double highBound;
final private double lowBound;
final private int numStdDev;
private final double estimate;
private final double highBound;
private final double lowBound;
private final int numStdDev;
@JsonCreator
public SketchEstimateWithErrorBounds(

View File

@ -419,7 +419,7 @@ public class SketchAggregationTest
);
}
public final static String readFileFromClasspathAsString(String fileName) throws IOException
public static final String readFileFromClasspathAsString(String fileName) throws IOException
{
return Files.asCharSource(
new File(SketchAggregationTest.class.getClassLoader().getResource(fileName).getFile()),

View File

@ -277,7 +277,7 @@ public class SketchAggregationWithSimpleDataTest
Assert.assertEquals("AgMDAAAazJMCAAAAAACAPzz9j7pWTMdROWGf15uY1nI=", result.getValue().getEvents().get(0).getEvent().get("pty_country"));
}
public final static String readFileFromClasspathAsString(String fileName) throws IOException
public static final String readFileFromClasspathAsString(String fileName) throws IOException
{
return Files.asCharSource(
new File(SketchAggregationTest.class.getClassLoader().getResource(fileName).getFile()),

View File

@ -226,7 +226,7 @@ public class OldApiSketchAggregationTest
);
}
public final static String readFileFromClasspathAsString(String fileName) throws IOException
public static final String readFileFromClasspathAsString(String fileName) throws IOException
{
return Files.asCharSource(
new File(OldApiSketchAggregationTest.class.getClassLoader().getResource(fileName).getFile()),

View File

@ -47,18 +47,18 @@ public class BasicAuthUtils
private static final Logger log = new Logger(BasicAuthUtils.class);
private static final SecureRandom SECURE_RANDOM = new SecureRandom();
public final static String ADMIN_NAME = "admin";
public final static String INTERNAL_USER_NAME = "druid_system";
public static final String ADMIN_NAME = "admin";
public static final String INTERNAL_USER_NAME = "druid_system";
// PBKDF2WithHmacSHA512 is chosen since it has built-in support in Java8.
// Argon2 (https://github.com/p-h-c/phc-winner-argon2) is newer but the only presently
// available Java binding is LGPLv3 licensed.
// Key length is 512-bit to match the PBKDF2WithHmacSHA512 algorithm.
// 256-bit salt should be more than sufficient for uniqueness, expected user count is on the order of thousands.
public final static int SALT_LENGTH = 32;
public final static int DEFAULT_KEY_ITERATIONS = 10000;
public final static int KEY_LENGTH = 512;
public final static String ALGORITHM = "PBKDF2WithHmacSHA512";
public static final int SALT_LENGTH = 32;
public static final int DEFAULT_KEY_ITERATIONS = 10000;
public static final int KEY_LENGTH = 512;
public static final String ALGORITHM = "PBKDF2WithHmacSHA512";
public static final TypeReference AUTHENTICATOR_USER_MAP_TYPE_REFERENCE =
new TypeReference<Map<String, BasicAuthenticatorUser>>()

View File

@ -56,7 +56,7 @@ import java.util.Map;
public class CoordinatorBasicAuthenticatorMetadataStorageUpdaterTest
{
private final static String AUTHENTICATOR_NAME = "test";
private static final String AUTHENTICATOR_NAME = "test";
@Rule
public ExpectedException expectedException = ExpectedException.none();

View File

@ -62,8 +62,8 @@ import java.util.Set;
public class CoordinatorBasicAuthenticatorResourceTest
{
private final static String AUTHENTICATOR_NAME = "test";
private final static String AUTHENTICATOR_NAME2 = "test2";
private static final String AUTHENTICATOR_NAME = "test";
private static final String AUTHENTICATOR_NAME2 = "test2";
@Rule
public ExpectedException expectedException = ExpectedException.none();

View File

@ -51,9 +51,9 @@ import java.util.Map;
public class CoordinatorBasicAuthorizerMetadataStorageUpdaterTest
{
private final static String AUTHORIZER_NAME = "test";
private static final String AUTHORIZER_NAME = "test";
private final static Map<String, BasicAuthorizerUser> BASE_USER_MAP = ImmutableMap.of(
private static final Map<String, BasicAuthorizerUser> BASE_USER_MAP = ImmutableMap.of(
BasicAuthUtils.ADMIN_NAME,
new BasicAuthorizerUser(BasicAuthUtils.ADMIN_NAME, ImmutableSet.of(BasicAuthUtils.ADMIN_NAME)),
BasicAuthUtils.INTERNAL_USER_NAME,
@ -61,7 +61,7 @@ public class CoordinatorBasicAuthorizerMetadataStorageUpdaterTest
BasicAuthUtils.INTERNAL_USER_NAME))
);
private final static Map<String, BasicAuthorizerRole> BASE_ROLE_MAP = ImmutableMap.of(
private static final Map<String, BasicAuthorizerRole> BASE_ROLE_MAP = ImmutableMap.of(
BasicAuthUtils.ADMIN_NAME,
new BasicAuthorizerRole(
BasicAuthUtils.ADMIN_NAME,

View File

@ -59,8 +59,8 @@ import java.util.Set;
public class CoordinatorBasicAuthorizerResourceTest
{
private final static String AUTHORIZER_NAME = "test";
private final static String AUTHORIZER_NAME2 = "test2";
private static final String AUTHORIZER_NAME = "test";
private static final String AUTHORIZER_NAME2 = "test2";
@Rule
public ExpectedException expectedException = ExpectedException.none();

View File

@ -44,7 +44,7 @@ import java.util.Random;
public class TestBroker implements Closeable
{
private final static Random RANDOM = new Random();
private static final Random RANDOM = new Random();
private final String zookeeperConnect;
private final File directory;

View File

@ -35,7 +35,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
@JsonTypeName("loadingLookup")
public class LoadingLookupFactory implements LookupExtractorFactory
{
private final static Logger LOGGER = new Logger(LoadingLookupFactory.class);
private static final Logger LOGGER = new Logger(LoadingLookupFactory.class);
@JsonProperty("dataFetcher")
private final DataFetcher<String, String> dataFetcher;

View File

@ -36,7 +36,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
public class OnHeapLoadingCache<K, V> implements LoadingCache<K, V>
{
private final static Logger log = new Logger(OnHeapLoadingCache.class);
private static final Logger log = new Logger(OnHeapLoadingCache.class);
private static final int DEFAULT_INITIAL_CAPACITY = 16;
//See com.google.common.cache.CacheBuilder#DEFAULT_CONCURRENCY_LEVEL
private static final int DEFAULT_CONCURRENCY_LEVEL = 4;

View File

@ -69,7 +69,7 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
private static final int bitsPerBucket = 4;
private static final int range = (int) Math.pow(2, bitsPerBucket) - 1;
private final static double[][] minNumRegisterLookup = new double[64][256];
private static final double[][] minNumRegisterLookup = new double[64][256];
static {
for (int registerOffset = 0; registerOffset < 64; ++registerOffset) {
@ -82,7 +82,7 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
}
// we have to keep track of the number of zeroes in each of the two halves of the byte register (0, 1, or 2)
private final static int[] numZeroLookup = new int[256];
private static final int[] numZeroLookup = new int[256];
static {
for (int i = 0; i < numZeroLookup.length; ++i) {

View File

@ -468,7 +468,7 @@ public class DeterminePartitionsJob implements Jobby
}
}
private static abstract class DeterminePartitionsDimSelectionBaseReducer
private abstract static class DeterminePartitionsDimSelectionBaseReducer
extends Reducer<BytesWritable, Text, BytesWritable, Text>
{
protected volatile HadoopDruidIndexerConfig config = null;

View File

@ -111,7 +111,7 @@ public abstract class HadoopDruidIndexerMapper<KEYOUT, VALUEOUT> extends Mapper<
}
}
abstract protected void innerMap(InputRow inputRow, Context context, boolean reportParseExceptions)
protected abstract void innerMap(InputRow inputRow, Context context, boolean reportParseExceptions)
throws IOException, InterruptedException;
}

View File

@ -80,12 +80,12 @@ import java.util.Map;
@RunWith(Parameterized.class)
public class IndexGeneratorJobTest
{
final private static AggregatorFactory[] aggs1 = {
private static final AggregatorFactory[] aggs1 = {
new LongSumAggregatorFactory("visited_num", "visited_num"),
new HyperUniquesAggregatorFactory("unique_hosts", "host")
};
final private static AggregatorFactory[] aggs2 = {
private static final AggregatorFactory[] aggs2 = {
new CountAggregatorFactory("count")
};

View File

@ -51,7 +51,7 @@ public abstract class HadoopTask extends AbstractTask
private static final Logger log = new Logger(HadoopTask.class);
private static final ExtensionsConfig extensionsConfig;
final static Injector injector = GuiceInjectors.makeStartupInjector();
static final Injector injector = GuiceInjectors.makeStartupInjector();
static {
extensionsConfig = injector.getInstance(ExtensionsConfig.class);

View File

@ -87,7 +87,7 @@ public class RealtimeIndexTask extends AbstractTask
public static final String CTX_KEY_LOOKUP_TIER = "lookupTier";
private static final EmittingLogger log = new EmittingLogger(RealtimeIndexTask.class);
private final static Random random = new Random();
private static final Random random = new Random();
private static String makeTaskId(FireDepartment fireDepartment)
{

View File

@ -863,8 +863,8 @@ public class TaskLockbox
static class TaskLockPosse
{
final private TaskLock taskLock;
final private Set<String> taskIds;
private final TaskLock taskLock;
private final Set<String> taskIds;
TaskLockPosse(TaskLock taskLock)
{

View File

@ -174,8 +174,8 @@ public class IngestSegmentFirehoseFactoryTest
final IndexerSQLMetadataStorageCoordinator mdc = new IndexerSQLMetadataStorageCoordinator(null, null, null)
{
final private Set<DataSegment> published = Sets.newHashSet();
final private Set<DataSegment> nuked = Sets.newHashSet();
private final Set<DataSegment> published = Sets.newHashSet();
private final Set<DataSegment> nuked = Sets.newHashSet();
@Override
public List<DataSegment> getUsedSegmentsForInterval(String dataSource, Interval interval) throws IOException

View File

@ -68,8 +68,8 @@ public class PendingTaskBasedProvisioningStrategyTest
private PendingTaskBasedWorkerProvisioningStrategy strategy;
private AtomicReference<WorkerBehaviorConfig> workerConfig;
private ScheduledExecutorService executorService = Execs.scheduledSingleThreaded("test service");
private final static String MIN_VERSION = "2014-01-00T00:01:00Z";
private final static String INVALID_VERSION = "0";
private static final String MIN_VERSION = "2014-01-00T00:01:00Z";
private static final String INVALID_VERSION = "0";
@Before
public void setUp() throws Exception

View File

@ -36,9 +36,9 @@ import java.util.Set;
public class TestIndexerMetadataStorageCoordinator implements IndexerMetadataStorageCoordinator
{
final private Set<DataSegment> published = Sets.newConcurrentHashSet();
final private Set<DataSegment> nuked = Sets.newConcurrentHashSet();
final private List<DataSegment> unusedSegments;
private final Set<DataSegment> published = Sets.newConcurrentHashSet();
private final Set<DataSegment> nuked = Sets.newConcurrentHashSet();
private final List<DataSegment> unusedSegments;
public TestIndexerMetadataStorageCoordinator()
{

View File

@ -32,7 +32,7 @@ import java.util.Map;
public class ConfigFileConfigProvider implements IntegrationTestingConfigProvider
{
private final static Logger LOG = new Logger(ConfigFileConfigProvider.class);
private static final Logger LOG = new Logger(ConfigFileConfigProvider.class);
private String routerUrl;
private String brokerUrl;
private String historicalUrl;

View File

@ -49,7 +49,7 @@ import java.util.concurrent.Callable;
public class OverlordResourceTestClient
{
private final static Logger LOG = new Logger(OverlordResourceTestClient.class);
private static final Logger LOG = new Logger(OverlordResourceTestClient.class);
private final ObjectMapper jsonMapper;
private final HttpClient httpClient;
private final String indexer;

View File

@ -28,19 +28,19 @@ import java.util.List;
*/
public class DruidMetrics
{
public final static String DATASOURCE = "dataSource";
public final static String TYPE = "type";
public final static String INTERVAL = "interval";
public final static String ID = "id";
public final static String TASK_ID = "taskId";
public final static String STATUS = "status";
public static final String DATASOURCE = "dataSource";
public static final String TYPE = "type";
public static final String INTERVAL = "interval";
public static final String ID = "id";
public static final String TASK_ID = "taskId";
public static final String STATUS = "status";
// task metrics
public final static String TASK_TYPE = "taskType";
public final static String TASK_STATUS = "taskStatus";
public static final String TASK_TYPE = "taskType";
public static final String TASK_STATUS = "taskStatus";
public final static String SERVER = "server";
public final static String TIER = "tier";
public static final String SERVER = "server";
public static final String TIER = "tier";
public static int findNumComplexAggs(List<AggregatorFactory> aggs)
{

View File

@ -27,11 +27,11 @@ import java.util.Arrays;
public class HistogramVisual
{
@JsonProperty final public double[] breaks;
@JsonProperty public final double[] breaks;
@JsonProperty
final public double[] counts;
public final double[] counts;
// an array of the quantiles including the min. and max.
@JsonProperty final public double[] quantiles;
@JsonProperty public final double[] quantiles;
@JsonCreator
public HistogramVisual(

View File

@ -38,7 +38,7 @@ public class AndDimFilter implements DimFilter
{
private static final Joiner AND_JOINER = Joiner.on(" && ");
final private List<DimFilter> fields;
private final List<DimFilter> fields;
@JsonCreator
public AndDimFilter(

View File

@ -34,7 +34,7 @@ import java.util.List;
public class NotDimFilter implements DimFilter
{
final private DimFilter field;
private final DimFilter field;
@JsonCreator
public NotDimFilter(

View File

@ -39,7 +39,7 @@ public class OrDimFilter implements DimFilter
{
private static final Joiner OR_JOINER = Joiner.on(" || ");
final private List<DimFilter> fields;
private final List<DimFilter> fields;
@JsonCreator
public OrDimFilter(

View File

@ -80,9 +80,9 @@ import java.util.stream.Collectors;
*/
public class GroupByQuery extends BaseQuery<Row>
{
public final static String CTX_KEY_SORT_BY_DIMS_FIRST = "sortByDimsFirst";
public static final String CTX_KEY_SORT_BY_DIMS_FIRST = "sortByDimsFirst";
private final static Comparator<Row> NON_GRANULAR_TIME_COMP = (Row lhs, Row rhs) -> Longs.compare(
private static final Comparator<Row> NON_GRANULAR_TIME_COMP = (Row lhs, Row rhs) -> Longs.compare(
lhs.getTimestampFromEpoch(),
rhs.getTimestampFromEpoch()
);

View File

@ -55,7 +55,7 @@ import java.util.concurrent.ConcurrentLinkedQueue;
public class GroupByQueryHelper
{
public final static String CTX_KEY_SORT_RESULTS = "sortResults";
public static final String CTX_KEY_SORT_RESULTS = "sortResults";
public static <T> Pair<IncrementalIndex, Accumulator<IncrementalIndex, T>> createIndexAccumulatorPair(
final GroupByQuery query,

View File

@ -227,7 +227,7 @@ public abstract class BaseTopNAlgorithm<DimValSelector, DimValAggregateStore, Pa
}
}
protected static abstract class BaseArrayProvider<T> implements TopNMetricSpecBuilder<T>
protected abstract static class BaseArrayProvider<T> implements TopNMetricSpecBuilder<T>
{
private volatile String previousStop;
private volatile boolean ignoreAfterThreshold;

View File

@ -50,7 +50,7 @@ public final class DimensionHandlerUtils
private DimensionHandlerUtils() {}
public final static ColumnCapabilities DEFAULT_STRING_CAPABILITIES =
public static final ColumnCapabilities DEFAULT_STRING_CAPABILITIES =
new ColumnCapabilitiesImpl().setType(ValueType.STRING)
.setDictionaryEncoded(true)
.setHasBitmapIndexes(true);

View File

@ -163,7 +163,7 @@ public enum CompressionStrategy
void decompress(ByteBuffer in, int numBytes, ByteBuffer out);
}
public static abstract class Compressor
public abstract static class Compressor
{
/**
* Allocates a buffer that should be passed to {@link #compress} method as input buffer. Different Compressors

View File

@ -26,7 +26,7 @@ import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
*/
public class ZeroIndexedInts implements IndexedInts
{
private final static ZeroIndexedInts INSTANCE = new ZeroIndexedInts();
private static final ZeroIndexedInts INSTANCE = new ZeroIndexedInts();
private ZeroIndexedInts()
{

View File

@ -40,7 +40,7 @@ import java.util.concurrent.TimeoutException;
public class AsyncQueryRunnerTest
{
private final static long TEST_TIMEOUT = 60000;
private static final long TEST_TIMEOUT = 60000;
private final ExecutorService executor;
private final Query query;

View File

@ -201,7 +201,7 @@ public class QueryRunnerTestHelper
qualityUniques
);
public final static List<AggregatorFactory> commonFloatAggregators = Arrays.asList(
public static final List<AggregatorFactory> commonFloatAggregators = Arrays.asList(
new FloatSumAggregatorFactory("index", "indexFloat"),
new CountAggregatorFactory("rows"),
new HyperUniquesAggregatorFactory(

View File

@ -43,7 +43,7 @@ import java.util.List;
*/
public class CardinalityAggregatorBenchmark extends SimpleBenchmark
{
private final static int MAX = 5_000_000;
private static final int MAX = 5_000_000;
CardinalityBufferAggregator agg;
List<DimensionSelector> selectorList;

View File

@ -35,11 +35,11 @@ import java.util.Random;
public class HyperUniquesAggregatorFactoryTest
{
final static HyperUniquesAggregatorFactory aggregatorFactory = new HyperUniquesAggregatorFactory(
static final HyperUniquesAggregatorFactory aggregatorFactory = new HyperUniquesAggregatorFactory(
"hyperUnique",
"uniques"
);
final static String V0_BASE64 = "AAYbEyQwFyQVASMCVFEQQgEQIxIhM4ISAQMhUkICEDFDIBMhMgFQFAFAMjAAEhEREyVAEiUBAhIjISATMCECMiERIRIiVRFRAyIAEgFCQSMEJAITATAAEAMQgCEBEjQiAyUTAyEQASJyAGURAAISAwISATETQhAREBYDIVIlFTASAzJgERIgRCcmUyAwNAMyEJMjIhQXQhEWECABQDETATEREjIRAgEyIiMxMBQiAkBBMDYAMEQQACMzMhIkMTQSkYIRABIBADMBAhIEISAENkEBQDAxETMAIEEwEzQiQSEVQSFBBAQDICIiAVIAMTAQIQYBIRABADMDEzEAQSMkEiAYFBAQI0AmECEyQSARRTIVMhEkMiKAMCUBxUghAkIBI3EmMAQiACEAJDJCAAADOzESEDBCRjMgEUQQETQwEWIhA6MlAiAAZDI1AgEIIDUyFDIHMQEEAwIRBRABBStCZCQhAgJSMQIiQEEURTBmM1MxACIAETGhMgQnBRICNiIREyIUNAEAAkABAwQSEBJBIhIhIRERAiIRACUhEUAVMkQGEVMjECYjACBwEQQSIRIgAAEyExQUFSEAIBJCIDIDYTAgMiNBIUADUiETADMoFEADETMCIwUEQkIAESMSIzIABDERIXEhIiACQgUSEgJiQCAUARIRAREDQiEUAkQgAgQiIEAzIxRCARIgBAAVAzMAECEwE0Qh8gAAASEhEiAiMhUxcRImIVABATYyUBAwIoE1QhRDIiYBIBEBEiQSQyERAAADMAARAEACFYUwQSQBIRIgURITARFSEzEHEBACOTMREBIAMjIgEhU0cxEQIRIhIi1wEgMRUBEgMQIRAnAVASURMHQBAiEyBSAAEBQTAWQ5EQA0IUMSISAUEiASIjIhMhMFJBBSEjEAECEwACASEQFBAjARITEQIgYTEKEAeAAiMkEyARowARFBAicRISIBIxAQAgEBARMCIRQgMSIVIAkjMxIAIEMyADASMgFRIjEyKjEjBBIEQCUAARYBEQMxMCIBACNCACRCMlEzUUAAUDM1MhAjEgAxAAISAVFQECAhQAMBMhEzEgASNxAhFRIxECMRJBQAERAToBgQMhJSRQFAEhAwMiIhMQAwAgQiBQJiIGMQQhEiQxR1MiAjIAIEEiAkARECEzQlMjECIRATBgIhEBQAIQAEATEjBCMwAgMBMhAhIyFBIxQAARI1AAEABCIDFBIRUzMBIgAgEiARQCASMQQDQCFBAQAUJwMUElAyIAIRBSIRITICEAIxMAEUBEYTcBMBEEIxMREwIRIDAGIAEgYxBAEANCAhBAI2UhIiIgIRABIEVRAwNEIQERQgEFMhFCQSIAEhQDMTEQMiAjJyEQ==";
static final String V0_BASE64 = "AAYbEyQwFyQVASMCVFEQQgEQIxIhM4ISAQMhUkICEDFDIBMhMgFQFAFAMjAAEhEREyVAEiUBAhIjISATMCECMiERIRIiVRFRAyIAEgFCQSMEJAITATAAEAMQgCEBEjQiAyUTAyEQASJyAGURAAISAwISATETQhAREBYDIVIlFTASAzJgERIgRCcmUyAwNAMyEJMjIhQXQhEWECABQDETATEREjIRAgEyIiMxMBQiAkBBMDYAMEQQACMzMhIkMTQSkYIRABIBADMBAhIEISAENkEBQDAxETMAIEEwEzQiQSEVQSFBBAQDICIiAVIAMTAQIQYBIRABADMDEzEAQSMkEiAYFBAQI0AmECEyQSARRTIVMhEkMiKAMCUBxUghAkIBI3EmMAQiACEAJDJCAAADOzESEDBCRjMgEUQQETQwEWIhA6MlAiAAZDI1AgEIIDUyFDIHMQEEAwIRBRABBStCZCQhAgJSMQIiQEEURTBmM1MxACIAETGhMgQnBRICNiIREyIUNAEAAkABAwQSEBJBIhIhIRERAiIRACUhEUAVMkQGEVMjECYjACBwEQQSIRIgAAEyExQUFSEAIBJCIDIDYTAgMiNBIUADUiETADMoFEADETMCIwUEQkIAESMSIzIABDERIXEhIiACQgUSEgJiQCAUARIRAREDQiEUAkQgAgQiIEAzIxRCARIgBAAVAzMAECEwE0Qh8gAAASEhEiAiMhUxcRImIVABATYyUBAwIoE1QhRDIiYBIBEBEiQSQyERAAADMAARAEACFYUwQSQBIRIgURITARFSEzEHEBACOTMREBIAMjIgEhU0cxEQIRIhIi1wEgMRUBEgMQIRAnAVASURMHQBAiEyBSAAEBQTAWQ5EQA0IUMSISAUEiASIjIhMhMFJBBSEjEAECEwACASEQFBAjARITEQIgYTEKEAeAAiMkEyARowARFBAicRISIBIxAQAgEBARMCIRQgMSIVIAkjMxIAIEMyADASMgFRIjEyKjEjBBIEQCUAARYBEQMxMCIBACNCACRCMlEzUUAAUDM1MhAjEgAxAAISAVFQECAhQAMBMhEzEgASNxAhFRIxECMRJBQAERAToBgQMhJSRQFAEhAwMiIhMQAwAgQiBQJiIGMQQhEiQxR1MiAjIAIEEiAkARECEzQlMjECIRATBgIhEBQAIQAEATEjBCMwAgMBMhAhIyFBIxQAARI1AAEABCIDFBIRUzMBIgAgEiARQCASMQQDQCFBAQAUJwMUElAyIAIRBSIRITICEAIxMAEUBEYTcBMBEEIxMREwIRIDAGIAEgYxBAEANCAhBAI2UhIiIgIRABIEVRAwNEIQERQgEFMhFCQSIAEhQDMTEQMiAjJyEQ==";
private final HashFunction fn = Hashing.murmur3_128();

View File

@ -37,7 +37,7 @@ import javax.annotation.Nullable;
*/
class TestDimensionSelector implements DimensionSelector
{
public final static TestDimensionSelector instance = new TestDimensionSelector();
public static final TestDimensionSelector instance = new TestDimensionSelector();
private TestDimensionSelector()
{

View File

@ -176,7 +176,7 @@ public class GroupByQueryMergeBufferTest
);
}
private final static TestBlockingPool mergeBufferPool = new TestBlockingPool(
private static final TestBlockingPool mergeBufferPool = new TestBlockingPool(
new Supplier<ByteBuffer>()
{
@Override

View File

@ -140,7 +140,7 @@ public class GroupByQueryRunnerFailureTest
);
}
private final static BlockingPool<ByteBuffer> mergeBufferPool = new DefaultBlockingPool<>(
private static final BlockingPool<ByteBuffer> mergeBufferPool = new DefaultBlockingPool<>(
new Supplier<ByteBuffer>()
{
@Override

View File

@ -52,10 +52,10 @@ public class BatchServerInventoryView extends AbstractCuratorServerInventoryView
{
private static final EmittingLogger log = new EmittingLogger(BatchServerInventoryView.class);
final private ConcurrentMap<String, Set<DataSegment>> zNodes = new ConcurrentHashMap<>();
final private ConcurrentMap<SegmentCallback, Predicate<Pair<DruidServerMetadata, DataSegment>>> segmentPredicates =
private final ConcurrentMap<String, Set<DataSegment>> zNodes = new ConcurrentHashMap<>();
private final ConcurrentMap<SegmentCallback, Predicate<Pair<DruidServerMetadata, DataSegment>>> segmentPredicates =
new ConcurrentHashMap<>();
final private Predicate<Pair<DruidServerMetadata, DataSegment>> defaultFilter;
private final Predicate<Pair<DruidServerMetadata, DataSegment>> defaultFilter;
@Inject
public BatchServerInventoryView(

View File

@ -46,7 +46,7 @@ public class SingleServerInventoryView extends AbstractCuratorServerInventoryVie
{
private static final EmittingLogger log = new EmittingLogger(SingleServerInventoryView.class);
final private ConcurrentMap<SegmentCallback, Predicate<Pair<DruidServerMetadata, DataSegment>>> segmentPredicates =
private final ConcurrentMap<SegmentCallback, Predicate<Pair<DruidServerMetadata, DataSegment>>> segmentPredicates =
new ConcurrentHashMap<>();
private final Predicate<Pair<DruidServerMetadata, DataSegment>> defaultFilter;

View File

@ -59,8 +59,8 @@ public interface Cache
class NamedKey
{
final public String namespace;
final public byte[] key;
public final String namespace;
public final byte[] key;
public NamedKey(String namespace, byte[] key)
{

View File

@ -80,7 +80,7 @@ public class MemcachedCache implements Cache
* If some other algorithms are considered as the default algorithm instead of this one, the cache distribution for
* those hash algorithms should be checked and compared using {@code CacheDistributionTest}.
*/
final static HashAlgorithm MURMUR3_128 = new HashAlgorithm()
static final HashAlgorithm MURMUR3_128 = new HashAlgorithm()
{
final HashFunction fn = Hashing.murmur3_128();

View File

@ -98,7 +98,7 @@ public class Initialization
private static final Logger log = new Logger(Initialization.class);
private static final ConcurrentMap<File, URLClassLoader> loadersMap = new ConcurrentHashMap<>();
private final static Map<Class, Collection> extensionsMap = Maps.newHashMap();
private static final Map<Class, Collection> extensionsMap = Maps.newHashMap();
/**
* @param clazz service class
@ -140,7 +140,7 @@ public class Initialization
* elements in the returned collection is not specified and not guaranteed to be the same for different calls to
* getFromExtensions().
*/
public synchronized static <T> Collection<T> getFromExtensions(ExtensionsConfig config, Class<T> serviceClass)
public static synchronized <T> Collection<T> getFromExtensions(ExtensionsConfig config, Class<T> serviceClass)
{
Collection<T> modulesToLoad = new ServiceLoadingFromExtensions<>(config, serviceClass).implsToLoad;
extensionsMap.put(serviceClass, modulesToLoad);

View File

@ -122,7 +122,7 @@ public class MapLookupExtractorFactory implements LookupExtractorFactory
public static class MapLookupIntrospectionHandler implements LookupIntrospectHandler
{
final private Map<String, String> map;
private final Map<String, String> map;
public MapLookupIntrospectionHandler(Map<String, String> map)
{
this.map = map;

View File

@ -753,8 +753,8 @@ public class SegmentLoadDropHandler implements DataSegmentChangeHandler
private final STATE state;
private final String failureCause;
public final static Status SUCCESS = new Status(STATE.SUCCESS, null);
public final static Status PENDING = new Status(STATE.PENDING, null);
public static final Status SUCCESS = new Status(STATE.SUCCESS, null);
public static final Status PENDING = new Status(STATE.PENDING, null);
@JsonCreator
Status(

View File

@ -277,16 +277,16 @@ public class CoordinatorDynamicConfig
public static class Builder
{
private final static long DEFAULT_MILLIS_TO_WAIT_BEFORE_DELETING = TimeUnit.MINUTES.toMillis(15);
private final static long DEFAULT_MERGE_BYTES_LIMIT = 524288000L;
private final static int DEFAULT_MERGE_SEGMENTS_LIMIT = 100;
private final static int DEFAULT_MAX_SEGMENTS_TO_MOVE = 5;
private final static int DEFAULT_REPLICANT_LIFETIME = 15;
private final static int DEFAULT_REPLICATION_THROTTLE_LIMIT = 10;
private final static int DEFAULT_BALANCER_COMPUTE_THREADS = 1;
private final static boolean DEFAULT_EMIT_BALANCING_STATS = false;
private final static boolean DEFAULT_KILL_ALL_DATA_SOURCES = false;
private final static int DEFAULT_MAX_SEGMENTS_IN_NODE_LOADING_QUEUE = 0;
private static final long DEFAULT_MILLIS_TO_WAIT_BEFORE_DELETING = TimeUnit.MINUTES.toMillis(15);
private static final long DEFAULT_MERGE_BYTES_LIMIT = 524288000L;
private static final int DEFAULT_MERGE_SEGMENTS_LIMIT = 100;
private static final int DEFAULT_MAX_SEGMENTS_TO_MOVE = 5;
private static final int DEFAULT_REPLICANT_LIFETIME = 15;
private static final int DEFAULT_REPLICATION_THROTTLE_LIMIT = 10;
private static final int DEFAULT_BALANCER_COMPUTE_THREADS = 1;
private static final boolean DEFAULT_EMIT_BALANCING_STATS = false;
private static final boolean DEFAULT_KILL_ALL_DATA_SOURCES = false;
private static final int DEFAULT_MAX_SEGMENTS_IN_NODE_LOADING_QUEUE = 0;
private Long millisToWaitBeforeDeleting;
private Long mergeBytesLimit;

View File

@ -39,7 +39,7 @@ import java.util.List;
*/
public class DruidCoordinatorSegmentKiller implements DruidCoordinatorHelper
{
private final static Logger log = new Logger(DruidCoordinatorSegmentKiller.class);
private static final Logger log = new Logger(DruidCoordinatorSegmentKiller.class);
private final long period;
private final long retainDuration;

View File

@ -29,7 +29,7 @@ import io.druid.java.util.common.logger.Logger;
public class JettyRequestLog extends AbstractLifeCycle implements RequestLog
{
private final static Logger logger = new Logger("io.druid.jetty.RequestLog");
private static final Logger logger = new Logger("io.druid.jetty.RequestLog");
@Override
public void log(Request request, Response response)

View File

@ -35,7 +35,7 @@ import java.util.Properties;
*/
public class MonitorsConfig
{
public final static String METRIC_DIMENSION_PREFIX = "druid.metrics.emitter.dimension.";
public static final String METRIC_DIMENSION_PREFIX = "druid.metrics.emitter.dimension.";
@JsonProperty("monitors")
@NotNull

View File

@ -23,7 +23,7 @@ import io.druid.java.util.common.StringUtils;
public class Access
{
public final static Access OK = new Access(true);
public static final Access OK = new Access(true);
private final boolean allowed;
private final String message;

View File

@ -40,10 +40,10 @@ import java.util.Map;
public class NumberedShardSpec implements ShardSpec
{
@JsonIgnore
final private int partitionNum;
private final int partitionNum;
@JsonIgnore
final private int partitions;
private final int partitions;
@JsonCreator
public NumberedShardSpec(

View File

@ -32,7 +32,7 @@ import org.junit.Test;
*/
public class PeriodDropRuleTest
{
private final static DataSegment.Builder builder = DataSegment.builder()
private static final DataSegment.Builder builder = DataSegment.builder()
.dataSource("test")
.version(DateTimes.of("2012-12-31T01:00:00").toString())
.shardSpec(NoneShardSpec.instance());

View File

@ -37,7 +37,7 @@ import org.junit.Test;
*/
public class PeriodLoadRuleTest
{
private final static DataSegment.Builder builder = DataSegment.builder()
private static final DataSegment.Builder builder = DataSegment.builder()
.dataSource("test")
.version(DateTimes.nowUtc().toString())
.shardSpec(NoneShardSpec.instance());

View File

@ -129,7 +129,7 @@ public class DruidRules
}
}
public static abstract class DruidOuterQueryRule extends RelOptRule
public abstract static class DruidOuterQueryRule extends RelOptRule
{
public static RelOptRule AGGREGATE = new DruidOuterQueryRule(
operand(Aggregate.class, operand(DruidRel.class, any())),