Fix some IntelliJ inspections (#7273)

Prepare TeamCity for IntelliJ 2018.3.1 upgrade. Mostly removed redundant exception declarations in `throws` clauses.
Roman Leventov 2019-03-25 21:11:01 -03:00 committed by GitHub
parent 8ca7cb4886
commit bca40dcdaf
27 changed files with 43 additions and 50 deletions
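
For context, the pattern behind most of the changes below is sketched here. This is an illustrative example only (the `TaskPusher` interface, `NoopTaskPusher` class, and `Example` class are invented, not taken from the Druid codebase): an override may declare fewer checked exceptions than the interface method it implements, so a `throws` clause whose exception the body can never raise is redundant and gets flagged by IntelliJ's redundant-`throws` inspection.

```java
import java.io.IOException;

// Hypothetical types, loosely modeled on the Noop* classes touched by this commit.
interface TaskPusher
{
  // An interface method may declare a checked exception for implementations that do real I/O.
  void push(String taskId) throws IOException;
}

class NoopTaskPusher implements TaskPusher
{
  @Override
  public void push(String taskId) // declaring "throws IOException" here would be redundant
  {
    // no-op: nothing in this body can throw IOException
  }
}

public class Example
{
  public static void main(String[] args)
  {
    // Because the override narrowed its throws clause, no try/catch is needed here.
    new NoopTaskPusher().push("some-task");
  }
}
```

The same reasoning applies to the test methods below that drop `throws Exception`: their bodies only throw unchecked exceptions, so the clause adds nothing.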


@@ -79,6 +79,9 @@
<inspection_tool class="IteratorHasNextCallsIteratorNext" enabled="true" level="ERROR" enabled_by_default="true" />
<inspection_tool class="IteratorNextDoesNotThrowNoSuchElementException" enabled="true" level="WARNING" enabled_by_default="true" />
<inspection_tool class="Java8MapForEach" enabled="true" level="ERROR" enabled_by_default="true" />
<inspection_tool class="JavadocReference" enabled="true" level="ERROR" enabled_by_default="true">
<option name="REPORT_INACCESSIBLE" value="false" />
</inspection_tool>
<inspection_tool class="JsonDuplicatePropertyKeys" enabled="true" level="ERROR" enabled_by_default="true" />
<inspection_tool class="JsonStandardCompliance" enabled="true" level="WARNING" enabled_by_default="true" />
<inspection_tool class="LengthOneStringInIndexOf" enabled="true" level="ERROR" enabled_by_default="true" />


@@ -147,7 +147,6 @@ public abstract class PrefetchSqlFirehoseFactory<T>
@Override
public Firehose connect(InputRowParser<Map<String, Object>> firehoseParser, @Nullable File temporaryDirectory)
-throws IOException
{
if (objects == null) {
objects = ImmutableList.copyOf(Preconditions.checkNotNull(initObjects(), "objects"));


@@ -24,7 +24,6 @@ import com.google.common.io.ByteSource;
import org.apache.druid.java.util.common.logger.Logger;
import java.io.File;
-import java.io.IOException;
public class NoopTaskLogs implements TaskLogs
{
@@ -43,7 +42,7 @@ public class NoopTaskLogs implements TaskLogs
}
@Override
-public void pushTaskReports(String taskid, File reportFile) throws IOException
+public void pushTaskReports(String taskid, File reportFile)
{
log.info("Not pushing reports for task: %s", taskid);
}


@@ -30,14 +30,14 @@ public class NoopDataSegmentArchiver implements DataSegmentArchiver
{
@Nullable
@Override
-public DataSegment archive(DataSegment segment) throws SegmentLoadingException
+public DataSegment archive(DataSegment segment)
{
return segment;
}
@Nullable
@Override
-public DataSegment restore(DataSegment segment) throws SegmentLoadingException
+public DataSegment restore(DataSegment segment)
{
return segment;
}


@@ -21,20 +21,18 @@ package org.apache.druid.segment.loading;
import org.apache.druid.timeline.DataSegment;
-import java.io.IOException;
/**
* Mostly used for test purpose.
*/
public class NoopDataSegmentKiller implements DataSegmentKiller
{
@Override
-public void kill(DataSegment segments) throws SegmentLoadingException
+public void kill(DataSegment segments)
{
}
@Override
-public void killAll() throws IOException
+public void killAll()
{
}
}


@@ -57,7 +57,7 @@ public class AzureTaskLogs implements TaskLogs
}
@Override
-public void pushTaskReports(String taskid, File reportFile) throws IOException
+public void pushTaskReports(String taskid, File reportFile)
{
final String taskKey = getTaskReportsKey(taskid);
log.info("Pushing task reports %s to: %s", reportFile, taskKey);


@@ -34,7 +34,7 @@ public class DerivativeDataSourceMetadataTest
public ExpectedException expectedException = ExpectedException.none();
@Test
-public void testEmptyBaseDataSource() throws Exception
+public void testEmptyBaseDataSource()
{
expectedException.expect(CoreMatchers.instanceOf(IllegalArgumentException.class));
expectedException.expectMessage(
@@ -47,7 +47,7 @@ public class DerivativeDataSourceMetadataTest
}
@Test
-public void testNullBaseDataSource() throws Exception
+public void testNullBaseDataSource()
{
expectedException.expect(CoreMatchers.instanceOf(IllegalArgumentException.class));
expectedException.expectMessage(


@@ -199,7 +199,7 @@ public class MaterializedViewSupervisorSpecTest
}
@Test
-public void testEmptyBaseDataSource() throws Exception
+public void testEmptyBaseDataSource()
{
expectedException.expect(CoreMatchers.instanceOf(IllegalArgumentException.class));
expectedException.expectMessage(
@@ -244,7 +244,7 @@ public class MaterializedViewSupervisorSpecTest
}
@Test
-public void testNullBaseDataSource() throws Exception
+public void testNullBaseDataSource()
{
expectedException.expect(CoreMatchers.instanceOf(IllegalArgumentException.class));
expectedException.expectMessage(


@@ -246,15 +246,14 @@ public class OrcHadoopInputRowParser implements InputRowParser<OrcStruct>
builder.append(parseSpec.getTimestampSpec().getTimestampColumn()).append(":string");
// the typeString seems positionally dependent, so repeated timestamp column causes incorrect mapping
if (parseSpec.getDimensionsSpec().getDimensionNames().size() > 0) {
builder.append(",");
builder.append(String.join(
":string,",
parseSpec.getDimensionsSpec()
.getDimensionNames()
.stream()
.filter(s -> !s.equals(parseSpec.getTimestampSpec().getTimestampColumn()))
.collect(Collectors.toList())));
builder.append(":string");
builder.append(
parseSpec
.getDimensionsSpec()
.getDimensionNames()
.stream()
.filter(s -> !s.equals(parseSpec.getTimestampSpec().getTimestampColumn()))
.collect(Collectors.joining(":string,", ",", ":string"))
);
}
builder.append(">");


@@ -30,7 +30,6 @@ import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;
import redis.clients.jedis.exceptions.JedisException;
-import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -151,7 +150,7 @@ public class RedisCache implements Cache
@Override
@LifecycleStop
-public void close() throws IOException
+public void close()
{
pool.close();
}


@@ -26,7 +26,6 @@ import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.query.aggregation.AggregateCombiner;
import org.apache.druid.query.aggregation.Aggregator;
import org.apache.druid.query.aggregation.AggregatorFactory;
-import org.apache.druid.query.aggregation.AggregatorFactoryNotMergeableException;
import org.apache.druid.query.aggregation.AggregatorUtil;
import org.apache.druid.query.aggregation.BufferAggregator;
import org.apache.druid.query.aggregation.ObjectAggregateCombiner;
@@ -172,7 +171,7 @@ public class FixedBucketsHistogramAggregatorFactory extends AggregatorFactory
}
@Override
-public AggregatorFactory getMergingFactory(AggregatorFactory other) throws AggregatorFactoryNotMergeableException
+public AggregatorFactory getMergingFactory(AggregatorFactory other)
{
return new FixedBucketsHistogramAggregatorFactory(
name,


@@ -50,7 +50,6 @@ import org.junit.Test;
import java.io.DataInput;
import java.io.DataOutput;
-import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -242,13 +241,13 @@ public class HadoopDruidIndexerMapperTest
}
@Override
-public void write(DataOutput out) throws IOException
+public void write(DataOutput out)
{
}
@Override
-public void readFields(DataInput in) throws IOException
+public void readFields(DataInput in)
{
}


@@ -233,7 +233,7 @@ public class AppenderatorDriverRealtimeIndexTask extends AbstractTask implements
}
@Override
-public TaskStatus run(final TaskToolbox toolbox) throws Exception
+public TaskStatus run(final TaskToolbox toolbox)
{
runThread = Thread.currentThread();


@@ -230,7 +230,7 @@ public class HadoopIndexTask extends HadoopTask implements ChatHandler
}
@Override
-public TaskStatus run(TaskToolbox toolbox) throws Exception
+public TaskStatus run(TaskToolbox toolbox)
{
try {
taskConfig = toolbox.getConfig();


@@ -741,7 +741,7 @@ public class SupervisorResourceTest extends EasyMockSupport
}
@Test
-public void testSpecGetHistoryWithAuthFailure() throws Exception
+public void testSpecGetHistoryWithAuthFailure()
{
List<VersionedSupervisorSpec> versions1 = ImmutableList.of(
new VersionedSupervisorSpec(


@@ -468,7 +468,7 @@ public class GenericIndexed<T> implements CloseableIndexed<T>, Serializer
}
@Override
-public void close() throws IOException
+public void close()
{
// nothing to close
}


@@ -45,7 +45,7 @@ public class StringFirstBufferAggregatorTest
}
@Test
-public void testBufferAggregate() throws Exception
+public void testBufferAggregate()
{
final long[] timestamps = {1526724600L, 1526724700L, 1526724800L, 1526725900L, 1526725000L};
@@ -89,7 +89,7 @@ public class StringFirstBufferAggregatorTest
}
@Test
-public void testNullBufferAggregate() throws Exception
+public void testNullBufferAggregate()
{
final long[] timestamps = {2222L, 1111L, 3333L, 4444L, 5555L};


@@ -45,7 +45,7 @@ public class StringLastBufferAggregatorTest
}
@Test
-public void testBufferAggregate() throws Exception
+public void testBufferAggregate()
{
final long[] timestamps = {1526724600L, 1526724700L, 1526724800L, 1526725900L, 1526725000L};
@@ -89,7 +89,7 @@ public class StringLastBufferAggregatorTest
}
@Test
-public void testNullBufferAggregate() throws Exception
+public void testNullBufferAggregate()
{
final long[] timestamps = {1111L, 2222L, 6666L, 4444L, 5555L};


@@ -34,7 +34,6 @@ import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
import org.apache.druid.utils.JvmUtils;
-import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Map;
import java.util.OptionalLong;
@@ -119,7 +118,7 @@ public class CaffeineCache implements org.apache.druid.client.cache.Cache
@Override
@LifecycleStop
-public void close() throws IOException
+public void close()
{
cache.cleanUp();
}


@@ -23,7 +23,6 @@ import com.google.common.primitives.Ints;
import org.apache.druid.java.util.common.lifecycle.LifecycleStop;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
-import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
@@ -148,7 +147,7 @@ public class MapCache implements Cache
@Override
@LifecycleStop
-public void close() throws IOException
+public void close()
{
baseMap.clear();
byteCountingLRUMap.clear();


@@ -598,7 +598,7 @@ public class MemcachedCache implements Cache
@Override
@LifecycleStop
-public void close() throws IOException
+public void close()
{
monitor.stop();
}


@@ -37,13 +37,13 @@ public class PotentiallyGzippedCompressionProvider implements CompressionProvide
}
@Override
-public byte[] compress(String path, byte[] data) throws Exception
+public byte[] compress(String path, byte[] data)
{
return compressOutput ? base.compress(path, data) : data;
}
@Override
-public byte[] decompress(String path, byte[] data) throws Exception
+public byte[] decompress(String path, byte[] data)
{
try {
return base.decompress(path, data);


@@ -42,7 +42,7 @@ public class AllowOptionsResourceFilter implements Filter
}
@Override
-public void init(FilterConfig filterConfig) throws ServletException
+public void init(FilterConfig filterConfig)
{
}


@@ -287,7 +287,7 @@ public class DataSchemaTest
}
@Test
-public void testEmptyDatasource() throws Exception
+public void testEmptyDatasource()
{
Map<String, Object> parser = jsonMapper.convertValue(
new StringInputRowParser(


@@ -100,7 +100,7 @@ public class StreamAppenderatorDriverTest extends EasyMockSupport
private DataSegmentKiller dataSegmentKiller;
@Before
-public void setUp() throws Exception
+public void setUp()
{
appenderatorTester = new AppenderatorTester(MAX_ROWS_IN_MEMORY);
allocator = new TestSegmentAllocator(DATA_SOURCE, Granularities.HOUR);


@@ -55,7 +55,7 @@ public class EventReceiverFirehoseIdleTest
private HttpServletRequest req;
@Before
-public void setUp() throws Exception
+public void setUp()
{
req = EasyMock.createMock(HttpServletRequest.class);
eventReceiverFirehoseFactory = new EventReceiverFirehoseFactory(


@@ -68,7 +68,7 @@ public enum ResultFormat
}
@Override
-public Writer createFormatter(final OutputStream outputStream, final ObjectMapper jsonMapper) throws IOException
+public Writer createFormatter(final OutputStream outputStream, final ObjectMapper jsonMapper)
{
return new CsvWriter(outputStream);
}