mirror of https://github.com/apache/druid.git
Add MethodParamPad, OneStatementPerLine and EmptyStatement Checkstyle checks (#5272)
commit 87c744ac1d
parent d6932c1621
@@ -148,7 +148,6 @@ public class IncrementalIndexRowTypeBenchmark
   @Setup(Level.Iteration)
   public void setup2() throws IOException
   {
-    ;
     incIndex = makeIncIndex();
     incFloatIndex = makeIncIndex();
     incStrIndex = makeIncIndex();
@@ -31,7 +31,7 @@ public class BenchmarkSchemaInfo
   private Interval dataInterval;
   private boolean withRollup;
 
-  public BenchmarkSchemaInfo (
+  public BenchmarkSchemaInfo(
       List<BenchmarkColumnSchema> columnSchemas,
       List<AggregatorFactory> aggs,
       Interval dataInterval,
@@ -51,6 +51,8 @@
   <suppress checks="Indentation" files="[\\/]target[\\/]generated-test-sources[\\/]" />
   <suppress checks="Indentation" files="ProtoTestEventWrapper.java" />
 
+  <suppress checks="OneStatementPerLine" files="[\\/]target[\\/]generated-test-sources[\\/]" />
+
   <!-- extendedset is a fork of Alessandro Colantonio's CONCISE (COmpressed 'N' Composable Integer SEt) repository and licensed to Metamarkets under a CLA is not true. -->
   <suppress checks="Header" files="[\\/]extendedset[\\/]" />
 </suppressions>
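The new suppression mirrors the existing Indentation ones: sources generated under target/generated-test-sources (e.g. protobuf output) routinely pack several statements onto one line, so OneStatementPerLine is silenced there instead of reformatting machine-written files. A hypothetical sketch of the generated style being exempted:

    // Hypothetical generated-style code; hand-written files must split such lines.
    class GeneratedStub
    {
      private int pos;

      int skip(int n) { pos += n; return pos; }  // would trip OneStatementPerLine
    }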
@@ -99,6 +99,12 @@
       <property name="caseIndent" value="2"/>
     </module>
+
+    <module name="MethodParamPad">
+      <property name="allowLineBreaks" value="true"/>
+    </module>
+    <module name="OneStatementPerLine"/>
+    <module name="EmptyStatement"/>
 
     <module name="ModifierOrder" />
 
     <module name="Regexp">
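With allowLineBreaks set to true, MethodParamPad only rejects horizontal whitespace before '('; wrapping the parameter list onto the next line stays legal. A hypothetical sketch of what the configured check accepts and rejects:

    // Hypothetical example; names are made up.
    class ParamPadExamples
    {
      void spaced (int x)      // violation: space before '('
      {
      }

      void wrapped
          (int x)              // allowed: allowLineBreaks=true permits the line break
      {
      }
    }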
@@ -32,7 +32,7 @@ public class IntegrationTestingCuratorConfig extends CuratorConfig
   private IntegrationTestingConfig config;
 
   @Inject
-  public IntegrationTestingCuratorConfig (IntegrationTestingConfig config)
+  public IntegrationTestingCuratorConfig(IntegrationTestingConfig config)
   {
     this.config = config;
   }
@@ -154,7 +154,7 @@ public class EventReceiverFirehoseTestClient
             ? MediaType.APPLICATION_JSON
             : SmileMediaTypes.APPLICATION_JACKSON_SMILE;
         totalEventsPosted += postEvents(events, mapper, mediaType);
-        ;
+
         expectedEventsPosted += events.size();
         events = new ArrayList<>();
       }
@@ -31,25 +31,25 @@ public class LoggerListener extends TestListenerAdapter
   @Override
   public void onTestFailure(ITestResult tr)
   {
-    LOG.info ("[%s] -- Test method failed", tr.getName());
+    LOG.info("[%s] -- Test method failed", tr.getName());
   }
 
   @Override
   public void onTestSkipped(ITestResult tr)
   {
-    LOG.info ("[%s] -- Test method skipped", tr.getName());
+    LOG.info("[%s] -- Test method skipped", tr.getName());
   }
 
   @Override
   public void onTestSuccess(ITestResult tr)
   {
-    LOG.info ("[%s] -- Test method passed", tr.getName());
+    LOG.info("[%s] -- Test method passed", tr.getName());
   }
 
   @Override
   public void onTestStart(ITestResult tr)
   {
-    LOG.info ("[%s] -- TEST START", tr.getName());
+    LOG.info("[%s] -- TEST START", tr.getName());
   }
 
 }
@@ -87,7 +87,7 @@ public class DruidTestRunnerFactory implements ITestRunnerFactory
     Injector injector = DruidTestModuleFactory.getInjector();
     IntegrationTestingConfig config = injector.getInstance(IntegrationTestingConfig.class);
     HttpClient client = injector.getInstance(Key.get(HttpClient.class, TestClient.class));
-    ;
+
     waitUntilInstanceReady(client, config.getCoordinatorUrl());
     waitUntilInstanceReady(client, config.getIndexerUrl());
     waitUntilInstanceReady(client, config.getBrokerUrl());
@@ -62,10 +62,10 @@ public abstract class AbstractIndexerTest
     // we'll sort the list (ISO dates have lexicographic order)
     // then delete segments from the 1st date in the first string
     // to the 2nd date in the last string
-    Collections.sort (intervals);
+    Collections.sort(intervals);
     String first = intervals.get(0).split("/")[0];
     String last = intervals.get(intervals.size() - 1).split("/")[1];
-    unloadAndKillData (dataSource, first, last);
+    unloadAndKillData(dataSource, first, last);
   }
 
   protected void unloadAndKillData(final String dataSource, String start, String end) throws Exception
@@ -152,7 +152,7 @@ public class TimewarpOperator<T> implements PostProcessingOperator<T>
       if (startOffset < 0) {
         startOffset += periodMillis;
       }
-      ;
+
       start -= startOffset;
 
       // tOffset is the offset time t within the last period
@@ -180,7 +180,7 @@ public class GroupByQueryMergeBufferTest
         new Supplier<ByteBuffer>()
         {
           @Override
-          public ByteBuffer get ()
+          public ByteBuffer get()
           {
             return ByteBuffer.allocateDirect(PROCESSING_CONFIG.intermediateComputeSizeBytes());
           }
@@ -144,7 +144,7 @@ public class GroupByQueryRunnerFailureTest
         new Supplier<ByteBuffer>()
         {
           @Override
-          public ByteBuffer get ()
+          public ByteBuffer get()
           {
             return ByteBuffer.allocateDirect(DEFAULT_PROCESSING_CONFIG.intermediateComputeSizeBytes());
           }
@@ -35,11 +35,12 @@ public class ClientMergeQueryTest
   private static final String DATA_SOURCE = "data_source";
   public static final DateTime START = DateTimes.nowUtc();
   private static final Interval INTERVAL = new Interval(START, START.plus(1));
-  private static final DataSegment DATA_SEGMENT = new DataSegment(DATA_SOURCE, INTERVAL, START.toString(), null,
-                                                                  null, null, null, 0, 0);
+  private static final DataSegment DATA_SEGMENT =
+      new DataSegment(DATA_SOURCE, INTERVAL, START.toString(), null, null, null, null, 0, 0);
   private static final List<DataSegment> SEGMENT_LIST = Lists.newArrayList(DATA_SEGMENT);
   private static final List<AggregatorFactory> AGGREGATOR_LIST = Lists.newArrayList();
-  private static final ClientMergeQuery CLIENT_MERGE_QUERY = new ClientMergeQuery(DATA_SOURCE, SEGMENT_LIST, AGGREGATOR_LIST);;
+  private static final ClientMergeQuery CLIENT_MERGE_QUERY =
+      new ClientMergeQuery(DATA_SOURCE, SEGMENT_LIST, AGGREGATOR_LIST);
 
   @Test
   public void testGetType()
@@ -74,7 +74,7 @@ public class LookupNodeDiscoveryTest
 
     EasyMock.expect(druidNodeDiscovery.getAllNodes())
             .andReturn(ImmutableSet.of(node1, node2, node3))
-            .anyTimes();;
+            .anyTimes();
 
     EasyMock.replay(druidNodeDiscoveryProvider, druidNodeDiscovery);
 
@@ -107,7 +107,7 @@ public class TieredBrokerHostSelectorTest
     };
 
     EasyMock.expect(druidNodeDiscoveryProvider.getForNodeType(DruidNodeDiscoveryProvider.NODE_TYPE_BROKER))
-            .andReturn(druidNodeDiscovery);;
+            .andReturn(druidNodeDiscovery);
 
     EasyMock.replay(druidNodeDiscoveryProvider);
 
@@ -175,7 +175,7 @@ public class ResetCluster extends GuiceRunnable
       log.info("Deleting all TaskLogs.");
       log.info("===========================================================================");
 
-      TaskLogKiller taskLogKiller = injector.getInstance(TaskLogKiller.class);;
+      TaskLogKiller taskLogKiller = injector.getInstance(TaskLogKiller.class);
       taskLogKiller.killAll();
     }
     catch (Exception ex) {