Make more packages EverythingIsNonnullByDefault by default (#8198)

* Make more packages EverythingIsNonnullByDefault by default

* Fixed additional violations after pulling in master

* Change iterator to list.addAll

* Fix annotations
Fokko Driesprong 2019-10-01 03:53:18 +03:00 committed by Gian Merlino
parent db65068c42
commit 82bfe86d0c
16 changed files with 116 additions and 44 deletions
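
For context: @EverythingIsNonnullByDefault is a package-level annotation in org.apache.druid.annotations. Once a package's package-info.java carries it (two such files are added at the bottom of this diff), every field, method return value, and parameter in that package is treated as non-null by static analysis unless it is explicitly annotated @Nullable, which is why most of the hunks below simply add @Nullable to lazily initialized Hadoop state and optional JSON properties. The sketch below shows the conventional JSR-305 shape such an annotation takes; treat it as an assumption about the definition, not Druid's actual source.

// Hedged sketch only: Druid's real annotation may be declared differently.
// This is the usual pattern: javax.annotation.Nonnull applied as a type-qualifier default.
package org.apache.druid.annotations;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import javax.annotation.Nonnull;
import javax.annotation.meta.TypeQualifierDefault;

@Nonnull
@TypeQualifierDefault({ElementType.FIELD, ElementType.METHOD, ElementType.PARAMETER})
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.PACKAGE, ElementType.TYPE})
public @interface EverythingIsNonnullByDefault
{
}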


@@ -19,11 +19,14 @@
 package org.apache.druid.indexer;
+import javax.annotation.Nullable;
 import java.util.List;
 import java.util.Map;
 public interface TaskMetricsGetter
 {
   List<String> getKeys();
+  @Nullable
   Map<String, Number> getTotalMetrics();
 }


@@ -22,6 +22,7 @@ package org.apache.druid.java.util.common.collect;
 import javax.annotation.Nullable;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.List;
@@ -64,9 +65,7 @@ public class Utils
       list.add(null);
     } else {
       list = new ArrayList<>(elements.length);
-      for (T element : elements) {
-        list.add(element);
-      }
+      list.addAll(Arrays.asList(elements));
     }
     return list;
   }
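
The second hunk above replaces a hand-written copy loop with list.addAll(Arrays.asList(elements)). A minimal standalone sketch of the same refactor follows; the class and method names are illustrative, not Druid's (the real code is in org.apache.druid.java.util.common.collect.Utils).

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class AddAllSketch
{
  // Copies a varargs array into a mutable list, as the rewritten Utils code does.
  @SafeVarargs
  static <T> List<T> toMutableList(T... elements)
  {
    List<T> list = new ArrayList<>(elements.length);
    list.addAll(Arrays.asList(elements)); // was: for (T element : elements) { list.add(element); }
    return list;
  }

  public static void main(String[] args)
  {
    System.out.println(toMutableList("a", "b", "c")); // prints [a, b, c]
  }
}

Arrays.asList only wraps the array without copying, so addAll performs the single copy into the ArrayList; Collections.addAll(list, elements) would be an equivalent alternative.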


@@ -264,8 +264,11 @@ public class DetermineHashedPartitionsJob implements Jobby
   public static class DetermineCardinalityMapper extends HadoopDruidIndexerMapper<LongWritable, BytesWritable>
   {
     private static HashFunction hashFunction = Hashing.murmur3_128();
+    @Nullable
     private Granularity rollupGranularity = null;
+    @Nullable
     private Map<Interval, HyperLogLogCollector> hyperLogLogs;
+    @Nullable
     private HadoopDruidIndexerConfig config;
     private boolean determineIntervals;
@@ -349,6 +352,7 @@ public class DetermineHashedPartitionsJob implements Jobby
       extends Reducer<LongWritable, BytesWritable, NullWritable, NullWritable>
   {
     private final List<Interval> intervals = new ArrayList<>();
+    @Nullable
     protected HadoopDruidIndexerConfig config = null;
     private boolean determineIntervals;
@@ -429,8 +433,10 @@ public class DetermineHashedPartitionsJob implements Jobby
   public static class DetermineHashedPartitionsPartitioner
       extends Partitioner<LongWritable, BytesWritable> implements Configurable
   {
+    @Nullable
     private Configuration config;
     private boolean determineIntervals;
+    @Nullable
     private Map<LongWritable, Integer> reducerLookup;
     @Override


@@ -325,6 +325,7 @@ public class DeterminePartitionsJob implements Jobby
   public static class DeterminePartitionsGroupByMapper extends HadoopDruidIndexerMapper<BytesWritable, NullWritable>
   {
+    @Nullable
     private Granularity rollupGranularity = null;
     @Override
@@ -374,6 +375,7 @@ public class DeterminePartitionsJob implements Jobby
   public static class DeterminePartitionsDimSelectionPostGroupByMapper
       extends Mapper<BytesWritable, NullWritable, BytesWritable, Text>
   {
+    @Nullable
     private DeterminePartitionsDimSelectionMapperHelper helper;
     @Override
@@ -537,6 +539,7 @@ public class DeterminePartitionsJob implements Jobby
   private abstract static class DeterminePartitionsDimSelectionBaseReducer
       extends Reducer<BytesWritable, Text, BytesWritable, Text>
   {
+    @Nullable
     protected volatile HadoopDruidIndexerConfig config = null;
     @Override
@@ -905,6 +908,7 @@ public class DeterminePartitionsJob implements Jobby
   private static class DimPartition
   {
+    @Nullable
     public ShardSpec shardSpec = null;
     int cardinality = 0;
     public long rows = 0;


@@ -30,7 +30,6 @@ import com.google.common.base.Preconditions;
 import com.google.common.base.Splitter;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
-import com.google.inject.Binder;
 import com.google.inject.Injector;
 import com.google.inject.Key;
 import com.google.inject.Module;
@@ -102,18 +101,13 @@ public class HadoopDruidIndexerConfig
     INJECTOR = Initialization.makeInjectorWithModules(
         GuiceInjectors.makeStartupInjector(),
         ImmutableList.of(
-            new Module()
-            {
-              @Override
-              public void configure(Binder binder)
-              {
+            (Module) binder -> {
               JsonConfigProvider.bindInstance(
                   binder,
                   Key.get(DruidNode.class, Self.class),
                   new DruidNode("hadoop-indexer", null, false, null, null, true, false)
               );
               JsonConfigProvider.bind(binder, "druid.hadoop.security.kerberos", HadoopKerberosConfig.class);
-              }
             },
             new IndexingHadoopModule()
         )
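
Besides the nullability changes, this hunk collapses an anonymous com.google.inject.Module into a lambda. Module has a single abstract method, configure(Binder), so a lambda is accepted, and the (Module) cast gives the compiler an explicit target type where the lambda is passed to a generic factory like ImmutableList.of(...). A small self-contained sketch of the idiom; the constant binding here is illustrative, not the DruidNode binding Druid installs.

import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Module;
import com.google.inject.name.Names;

public class LambdaModuleSketch
{
  public static void main(String[] args)
  {
    // Equivalent to: new Module() { @Override public void configure(Binder binder) { ... } }
    Module module = binder ->
        binder.bindConstant().annotatedWith(Names.named("nodeName")).to("hadoop-indexer");

    Injector injector = Guice.createInjector(module);
    System.out.println(injector.getInstance(Key.get(String.class, Names.named("nodeName")))); // hadoop-indexer
  }
}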


@@ -35,9 +35,13 @@ public class HadoopDruidIndexerJob implements Jobby
 {
   private static final Logger log = new Logger(HadoopDruidIndexerJob.class);
   private final HadoopDruidIndexerConfig config;
+  @Nullable
   private final MetadataStorageUpdaterJob metadataStorageUpdaterJob;
+  @Nullable
   private IndexGeneratorJob indexJob;
+  @Nullable
   private volatile List<DataSegment> publishedSegments = null;
+  @Nullable
   private String hadoopJobIdFile;
   @Inject


@@ -25,6 +25,8 @@ import com.fasterxml.jackson.annotation.JsonTypeName;
 import org.apache.druid.indexer.updater.MetadataStorageUpdaterJobSpec;
 import org.apache.druid.segment.indexing.IOConfig;
+import javax.annotation.Nullable;
 import java.util.Map;
 /**
@@ -33,14 +35,16 @@ import java.util.Map;
 public class HadoopIOConfig implements IOConfig
 {
   private final Map<String, Object> pathSpec;
+  @Nullable
   private final MetadataStorageUpdaterJobSpec metadataUpdateSpec;
+  @Nullable
   private final String segmentOutputPath;
   @JsonCreator
   public HadoopIOConfig(
       final @JsonProperty("inputSpec") Map<String, Object> pathSpec,
-      final @JsonProperty("metadataUpdateSpec") MetadataStorageUpdaterJobSpec metadataUpdateSpec,
-      final @JsonProperty("segmentOutputPath") String segmentOutputPath
+      final @JsonProperty("metadataUpdateSpec") @Nullable MetadataStorageUpdaterJobSpec metadataUpdateSpec,
+      final @JsonProperty("segmentOutputPath") @Nullable String segmentOutputPath
   )
   {
     this.pathSpec = pathSpec;
@@ -54,12 +58,14 @@ public class HadoopIOConfig implements IOConfig
     return pathSpec;
   }
+  @Nullable
   @JsonProperty("metadataUpdateSpec")
   public MetadataStorageUpdaterJobSpec getMetadataUpdateSpec()
   {
     return metadataUpdateSpec;
   }
+  @Nullable
   @JsonProperty
   public String getSegmentOutputPath()
   {


@@ -34,6 +34,8 @@ import org.apache.druid.timeline.VersionedIntervalTimeline;
 import org.apache.druid.timeline.partition.PartitionChunk;
 import org.joda.time.Interval;
+import javax.annotation.Nullable;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashSet;
@@ -56,8 +58,8 @@ public class HadoopIngestionSpec extends IngestionSpec<HadoopIOConfig, HadoopTun
   public HadoopIngestionSpec(
       @JsonProperty("dataSchema") DataSchema dataSchema,
       @JsonProperty("ioConfig") HadoopIOConfig ioConfig,
-      @JsonProperty("tuningConfig") HadoopTuningConfig tuningConfig,
-      @JsonProperty("uniqueId") String uniqueId
+      @JsonProperty("tuningConfig") @Nullable HadoopTuningConfig tuningConfig,
+      @JsonProperty("uniqueId") @Nullable String uniqueId
   )
   {
     super(dataSchema, ioConfig, tuningConfig);


@@ -76,7 +76,7 @@ public class HadoopTuningConfig implements TuningConfig
         null
     );
   }
+  @Nullable
   private final String workingPath;
   private final String version;
   private final DimensionBasedPartitionsSpec partitionsSpec;
@@ -86,9 +86,9 @@ public class HadoopTuningConfig implements TuningConfig
   private final int rowFlushBoundary;
   private final long maxBytesInMemory;
   private final boolean leaveIntermediate;
-  private final Boolean cleanupOnFailure;
+  private final boolean cleanupOnFailure;
   private final boolean overwriteFiles;
-  private final Boolean ignoreInvalidRows;
+  private final boolean ignoreInvalidRows;
   private final Map<String, String> jobProperties;
   private final boolean combineText;
   private final boolean useCombiner;
@@ -102,29 +102,29 @@ public class HadoopTuningConfig implements TuningConfig
   @JsonCreator
   public HadoopTuningConfig(
-      final @JsonProperty("workingPath") String workingPath,
-      final @JsonProperty("version") String version,
-      final @JsonProperty("partitionsSpec") DimensionBasedPartitionsSpec partitionsSpec,
-      final @JsonProperty("shardSpecs") Map<Long, List<HadoopyShardSpec>> shardSpecs,
-      final @JsonProperty("indexSpec") IndexSpec indexSpec,
+      final @JsonProperty("workingPath") @Nullable String workingPath,
+      final @JsonProperty("version") @Nullable String version,
+      final @JsonProperty("partitionsSpec") @Nullable DimensionBasedPartitionsSpec partitionsSpec,
+      final @JsonProperty("shardSpecs") @Nullable Map<Long, List<HadoopyShardSpec>> shardSpecs,
+      final @JsonProperty("indexSpec") @Nullable IndexSpec indexSpec,
       final @JsonProperty("indexSpecForIntermediatePersists") @Nullable IndexSpec indexSpecForIntermediatePersists,
-      final @JsonProperty("maxRowsInMemory") Integer maxRowsInMemory,
-      final @JsonProperty("maxBytesInMemory") Long maxBytesInMemory,
+      final @JsonProperty("maxRowsInMemory") @Nullable Integer maxRowsInMemory,
+      final @JsonProperty("maxBytesInMemory") @Nullable Long maxBytesInMemory,
       final @JsonProperty("leaveIntermediate") boolean leaveIntermediate,
-      final @JsonProperty("cleanupOnFailure") Boolean cleanupOnFailure,
+      final @JsonProperty("cleanupOnFailure") @Nullable Boolean cleanupOnFailure,
       final @JsonProperty("overwriteFiles") boolean overwriteFiles,
-      final @Deprecated @JsonProperty("ignoreInvalidRows") Boolean ignoreInvalidRows,
-      final @JsonProperty("jobProperties") Map<String, String> jobProperties,
+      final @Deprecated @JsonProperty("ignoreInvalidRows") @Nullable Boolean ignoreInvalidRows,
+      final @JsonProperty("jobProperties") @Nullable Map<String, String> jobProperties,
       final @JsonProperty("combineText") boolean combineText,
-      final @JsonProperty("useCombiner") Boolean useCombiner,
+      final @JsonProperty("useCombiner") @Nullable Boolean useCombiner,
       // See https://github.com/apache/incubator-druid/pull/1922
-      final @JsonProperty("rowFlushBoundary") Integer maxRowsInMemoryCOMPAT,
+      final @JsonProperty("rowFlushBoundary") @Nullable Integer maxRowsInMemoryCOMPAT,
       // This parameter is left for compatibility when reading existing configs, to be removed in Druid 0.12.
       final @JsonProperty("buildV9Directly") Boolean buildV9Directly,
-      final @JsonProperty("numBackgroundPersistThreads") Integer numBackgroundPersistThreads,
+      final @JsonProperty("numBackgroundPersistThreads") @Nullable Integer numBackgroundPersistThreads,
       final @JsonProperty("forceExtendableShardSpecs") boolean forceExtendableShardSpecs,
       final @JsonProperty("useExplicitVersion") boolean useExplicitVersion,
-      final @JsonProperty("allowedHadoopPrefix") List<String> allowedHadoopPrefix,
+      final @JsonProperty("allowedHadoopPrefix") @Nullable List<String> allowedHadoopPrefix,
       final @JsonProperty("logParseExceptions") @Nullable Boolean logParseExceptions,
       final @JsonProperty("maxParseExceptions") @Nullable Integer maxParseExceptions,
       final @JsonProperty("useYarnRMJobStatusFallback") @Nullable Boolean useYarnRMJobStatusFallback
@@ -150,7 +150,7 @@ public class HadoopTuningConfig implements TuningConfig
         ? ImmutableMap.of()
         : ImmutableMap.copyOf(jobProperties));
     this.combineText = combineText;
-    this.useCombiner = useCombiner == null ? DEFAULT_USE_COMBINER : useCombiner.booleanValue();
+    this.useCombiner = useCombiner == null ? DEFAULT_USE_COMBINER : useCombiner;
     this.numBackgroundPersistThreads = numBackgroundPersistThreads == null
         ? DEFAULT_NUM_BACKGROUND_PERSIST_THREADS
         : numBackgroundPersistThreads;
@@ -174,6 +174,7 @@ public class HadoopTuningConfig implements TuningConfig
     this.useYarnRMJobStatusFallback = useYarnRMJobStatusFallback == null ? true : useYarnRMJobStatusFallback;
   }
+  @Nullable
   @JsonProperty
   public String getWorkingPath()
   {
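
The constructor above repeats one pattern: optional JSON properties are accepted as boxed, @Nullable wrapper types, then collapsed onto primitive fields with explicit defaults, which is what allows fields such as cleanupOnFailure and ignoreInvalidRows to change from Boolean to boolean. A minimal sketch of that pattern; the class name and default value are illustrative rather than Druid's actual ones.

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import javax.annotation.Nullable;

public class TuningSketch
{
  private static final boolean DEFAULT_CLEANUP_ON_FAILURE = true;

  // The field is a primitive: once the default is applied, null can no longer leak out.
  private final boolean cleanupOnFailure;

  @JsonCreator
  public TuningSketch(@JsonProperty("cleanupOnFailure") @Nullable Boolean cleanupOnFailure)
  {
    this.cleanupOnFailure = cleanupOnFailure == null ? DEFAULT_CLEANUP_ON_FAILURE : cleanupOnFailure;
  }

  public boolean isCleanupOnFailure()
  {
    return cleanupOnFailure;
  }

  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new ObjectMapper();
    System.out.println(mapper.readValue("{}", TuningSketch.class).isCleanupOnFailure());                            // true (default)
    System.out.println(mapper.readValue("{\"cleanupOnFailure\": false}", TuningSketch.class).isCleanupOnFailure()); // false
  }
}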


@@ -32,14 +32,13 @@ public class HadoopWorkingDirCleaner
 {
   private static final Logger log = new Logger(HadoopWorkingDirCleaner.class);
-  public static String runTask(String[] args) throws Exception
+  public static void runTask(String[] args) throws Exception
   {
     String workingPath = args[0];
     log.info("Deleting indexing hadoop working path [%s].", workingPath);
     Path p = new Path(workingPath);
-    FileSystem fs = p.getFileSystem(new Configuration());
+    try (FileSystem fs = p.getFileSystem(new Configuration())) {
       fs.delete(p, true);
-    return null;
+    }
   }
 }


@@ -408,6 +408,7 @@ public class InputRowSerde
     return result;
   }
+  @Nullable
   private static List<String> readStringArray(DataInput in) throws IOException
   {
     int count = WritableUtils.readVInt(in);
@@ -493,6 +494,7 @@ public class InputRowSerde
     }
   }
+  @Nullable
   private static String getType(String metric, AggregatorFactory[] aggs, int i)
   {
     if (aggs[i].getName().equals(metric)) {


@@ -49,6 +49,8 @@ import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Progressable;
+import javax.annotation.Nullable;
 import java.io.BufferedOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
@@ -705,7 +707,7 @@ public class JobHelper
       final Configuration configuration,
       final File outDir,
       final Progressable progressable,
-      final RetryPolicy retryPolicy
+      @Nullable final RetryPolicy retryPolicy
   ) throws IOException
   {
     final RetryPolicy effectiveRetryPolicy;


@@ -39,6 +39,8 @@ import org.apache.hadoop.util.ReflectionUtils;
 import org.eclipse.jetty.client.HttpClient;
 import org.eclipse.jetty.client.api.ContentResponse;
+import javax.annotation.Nullable;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -131,6 +133,7 @@ public class Utils
     JSON_MAPPER.writeValue(makePathAndOutputStream(job, path, true), stats);
   }
+  @Nullable
   public static String getFailureMessage(Job failedJob, ObjectMapper jsonMapper)
   {
     try {


@@ -0,0 +1,23 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
@EverythingIsNonnullByDefault
package org.apache.druid.indexer;
import org.apache.druid.annotations.EverythingIsNonnullByDefault;


@@ -600,6 +600,7 @@ public class HadoopIndexTask extends HadoopTask implements ChatHandler
       return KEYS;
     }
+    @Nullable
     @Override
     public Map<String, Number> getTotalMetrics()
     {


@@ -0,0 +1,23 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
@EverythingIsNonnullByDefault
package org.apache.druid.indexing;
import org.apache.druid.annotations.EverythingIsNonnullByDefault;