Size lists in advance when known

When constructing an ArrayList whose final size is known in advance (for
example, because its elements are derived from another collection of known
size), the list should be created with that capacity so the backing array is
not repeatedly reallocated as elements are added. This commit applies this in
various places.
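
As a rough, standalone illustration of the pattern (the class and method
names below are made up for this sketch and do not appear in the changed
files):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    // Hypothetical example of the change applied throughout this commit:
    // size the destination list from the source collection.
    public class PresizedListExample {

        // Before: starts at the default capacity and copies its backing
        // array each time it fills up while the source elements are added.
        static List<String> copyUnsized(List<Integer> source) {
            List<String> out = new ArrayList<>();
            for (Integer value : source) {
                out.add(String.valueOf(value));
            }
            return out;
        }

        // After: the final size is known from the source collection, so the
        // backing array is allocated once with exactly the needed capacity.
        static List<String> copyPresized(List<Integer> source) {
            List<String> out = new ArrayList<>(source.size());
            for (Integer value : source) {
                out.add(String.valueOf(value));
            }
            return out;
        }

        public static void main(String[] args) {
            List<Integer> source = Arrays.asList(1, 2, 3, 4, 5);
            System.out.println(copyPresized(source)); // prints [1, 2, 3, 4, 5]
        }
    }

An unsized ArrayList starts with a default capacity of 10 and grows by
roughly 1.5x, so presizing mainly pays off for large lists and hot paths such
as the stream-deserialization readFrom methods touched here.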

Relates #24439
Author: Koen De Groote (2017-05-12 16:36:13 +02:00), committed by Jason Tedor
Parent: e8e2ccdcf5
Commit: 878ae8eb3c
28 changed files with 36 additions and 36 deletions


@@ -60,7 +60,7 @@ public class GetRepositoriesResponse extends ActionResponse implements Iterable<
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
         int size = in.readVInt();
-        List<RepositoryMetaData> repositoryListBuilder = new ArrayList<>();
+        List<RepositoryMetaData> repositoryListBuilder = new ArrayList<>(size);
         for (int j = 0; j < size; j++) {
             repositoryListBuilder.add(new RepositoryMetaData(
                 in.readString(),


@@ -59,7 +59,7 @@ public class GetSnapshotsResponse extends ActionResponse implements ToXContentOb
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
         int size = in.readVInt();
-        List<SnapshotInfo> builder = new ArrayList<>();
+        List<SnapshotInfo> builder = new ArrayList<>(size);
         for (int i = 0; i < size; i++) {
             builder.add(new SnapshotInfo(in));
         }


@@ -65,8 +65,8 @@ public class ClusterStatsNodes implements ToXContent {
         this.plugins = new HashSet<>();
         Set<InetAddress> seenAddresses = new HashSet<>(nodeResponses.size());
-        List<NodeInfo> nodeInfos = new ArrayList<>();
-        List<NodeStats> nodeStats = new ArrayList<>();
+        List<NodeInfo> nodeInfos = new ArrayList<>(nodeResponses.size());
+        List<NodeStats> nodeStats = new ArrayList<>(nodeResponses.size());
         for (ClusterStatsNodeResponse nodeResponse : nodeResponses) {
             nodeInfos.add(nodeResponse.nodeInfo());
             nodeStats.add(nodeResponse.nodeStats());


@@ -114,7 +114,7 @@ public class GetIndexResponse extends ActionResponse {
         for (int i = 0; i < aliasesSize; i++) {
             String key = in.readString();
             int valueSize = in.readVInt();
-            List<AliasMetaData> aliasEntryBuilder = new ArrayList<>();
+            List<AliasMetaData> aliasEntryBuilder = new ArrayList<>(valueSize);
             for (int j = 0; j < valueSize; j++) {
                 aliasEntryBuilder.add(new AliasMetaData(in));
             }


@@ -81,13 +81,13 @@ public class BaseTasksResponse extends ActionResponse {
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
         int size = in.readVInt();
-        List<TaskOperationFailure> taskFailures = new ArrayList<>();
+        List<TaskOperationFailure> taskFailures = new ArrayList<>(size);
         for (int i = 0; i < size; i++) {
             taskFailures.add(new TaskOperationFailure(in));
         }
         size = in.readVInt();
         this.taskFailures = Collections.unmodifiableList(taskFailures);
-        List<FailedNodeException> nodeFailures = new ArrayList<>();
+        List<FailedNodeException> nodeFailures = new ArrayList<>(size);
         for (int i = 0; i < size; i++) {
             nodeFailures.add(new FailedNodeException(in));
         }


@@ -125,7 +125,7 @@ public class ClusterBlock implements Streamable, ToXContent {
         id = in.readVInt();
         description = in.readString();
         final int len = in.readVInt();
-        ArrayList<ClusterBlockLevel> levels = new ArrayList<>();
+        ArrayList<ClusterBlockLevel> levels = new ArrayList<>(len);
         for (int i = 0; i < len; i++) {
             levels.add(ClusterBlockLevel.fromId(in.readVInt()));
         }


@@ -49,7 +49,7 @@ class AssistedConstructor<T> {
         Annotation[][] annotations = constructor.getParameterAnnotations();
         List<Type> typeList = new ArrayList<>();
-        allParameters = new ArrayList<>();
+        allParameters = new ArrayList<>(parameterTypes.size());
         // categorize params as @Assisted or @Injected
         for (int i = 0; i < parameterTypes.size(); i++) {


@@ -55,8 +55,8 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider<Custom
             throw new IllegalArgumentException("Custom Analyzer [" + name() + "] failed to find tokenizer under name [" + tokenizerName + "]");
         }
-        List<CharFilterFactory> charFiltersList = new ArrayList<>();
         String[] charFilterNames = analyzerSettings.getAsArray("char_filter");
+        List<CharFilterFactory> charFiltersList = new ArrayList<>(charFilterNames.length);
         for (String charFilterName : charFilterNames) {
             CharFilterFactory charFilter = charFilters.get(charFilterName);
             if (charFilter == null) {
@@ -65,8 +65,8 @@ public class CustomAnalyzerProvider extends AbstractIndexAnalyzerProvider<Custom
             charFiltersList.add(charFilter);
         }
-        List<TokenFilterFactory> tokenFilterList = new ArrayList<>();
         String[] tokenFilterNames = analyzerSettings.getAsArray("filter");
+        List<TokenFilterFactory> tokenFilterList = new ArrayList<>(tokenFilterNames.length);
         for (String tokenFilterName : tokenFilterNames) {
             TokenFilterFactory tokenFilter = tokenFilters.get(tokenFilterName);
             if (tokenFilter == null) {


@@ -50,8 +50,8 @@ public final class CustomNormalizerProvider extends AbstractIndexAnalyzerProvide
             throw new IllegalArgumentException("Custom normalizer [" + name() + "] cannot configure a tokenizer");
         }
-        List<CharFilterFactory> charFiltersList = new ArrayList<>();
         String[] charFilterNames = analyzerSettings.getAsArray("char_filter");
+        List<CharFilterFactory> charFiltersList = new ArrayList<>(charFilterNames.length);
         for (String charFilterName : charFilterNames) {
             CharFilterFactory charFilter = charFilters.get(charFilterName);
             if (charFilter == null) {
@@ -66,8 +66,8 @@ public final class CustomNormalizerProvider extends AbstractIndexAnalyzerProvide
             charFiltersList.add(charFilter);
         }
-        List<TokenFilterFactory> tokenFilterList = new ArrayList<>();
         String[] tokenFilterNames = analyzerSettings.getAsArray("filter");
+        List<TokenFilterFactory> tokenFilterList = new ArrayList<>(tokenFilterNames.length);
         for (String tokenFilterName : tokenFilterNames) {
             TokenFilterFactory tokenFilter = tokenFilters.get(tokenFilterName);
             if (tokenFilter == null) {


@@ -318,7 +318,7 @@ public class DynamicTemplate implements ToXContent {
     }
     private List processList(List list, String name, String dynamicType) {
-        List processedList = new ArrayList();
+        List processedList = new ArrayList(list.size());
         for (Object value : list) {
             if (value instanceof Map) {
                 value = processMap((Map<String, Object>) value, name, dynamicType);


@@ -250,8 +250,8 @@ public abstract class AbstractQueryBuilder<QB extends AbstractQueryBuilder<QB>>
     }
     protected static final List<QueryBuilder> readQueries(StreamInput in) throws IOException {
-        List<QueryBuilder> queries = new ArrayList<>();
         int size = in.readVInt();
+        List<QueryBuilder> queries = new ArrayList<>(size);
         for (int i = 0; i < size; i++) {
             queries.add(in.readNamedWriteable(QueryBuilder.class));
         }


@@ -163,7 +163,7 @@ public class GeoPolygonQueryBuilder extends AbstractQueryBuilder<GeoPolygonQuery
             throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field");
         }
-        List<GeoPoint> shell = new ArrayList<GeoPoint>();
+        List<GeoPoint> shell = new ArrayList<>(this.shell.size());
         for (GeoPoint geoPoint : this.shell) {
             shell.add(new GeoPoint(geoPoint));
         }


@@ -284,7 +284,7 @@ public class BlobStoreIndexShardSnapshots implements Iterable<SnapshotFiles>, To
             }
         }
-        List<SnapshotFiles> snapshots = new ArrayList<>();
+        List<SnapshotFiles> snapshots = new ArrayList<>(snapshotsMap.size());
         for (Map.Entry<String, List<String>> entry : snapshotsMap.entrySet()) {
             List<FileInfo> fileInfosBuilder = new ArrayList<>();
             for (String file : entry.getValue()) {


@@ -177,7 +177,7 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService {
     @Override
     public AllCircuitBreakerStats stats() {
         long parentEstimated = 0;
-        List<CircuitBreakerStats> allStats = new ArrayList<>();
+        List<CircuitBreakerStats> allStats = new ArrayList<>(this.breakers.size());
         // Gather the "estimated" count for the parent breaker by adding the
         // estimations for each individual breaker
         for (CircuitBreaker breaker : this.breakers.values()) {


@@ -183,7 +183,7 @@ public class AggregatorFactories {
     }
     public List<PipelineAggregator> createPipelineAggregators() throws IOException {
-        List<PipelineAggregator> pipelineAggregators = new ArrayList<>();
+        List<PipelineAggregator> pipelineAggregators = new ArrayList<>(this.pipelineAggregatorFactories.size());
         for (PipelineAggregationBuilder factory : this.pipelineAggregatorFactories) {
             pipelineAggregators.add(factory.create());
         }


@@ -70,7 +70,7 @@ public abstract class BucketCollector implements Collector {
     @Override
     public LeafBucketCollector getLeafCollector(LeafReaderContext ctx) throws IOException {
-        List<LeafBucketCollector> leafCollectors = new ArrayList<>();
+        List<LeafBucketCollector> leafCollectors = new ArrayList<>(collectors.length);
         for (BucketCollector c : collectors) {
             leafCollectors.add(c.getLeafCollector(ctx));
         }


@@ -197,7 +197,7 @@ public class AdjacencyMatrixAggregationBuilder extends AbstractAggregationBuilde
                 + "] index level setting.");
         }
-        List<KeyedFilter> rewrittenFilters = new ArrayList<>();
+        List<KeyedFilter> rewrittenFilters = new ArrayList<>(filters.size());
         for (KeyedFilter kf : filters) {
             rewrittenFilters.add(new KeyedFilter(kf.key(), QueryBuilder.rewriteQuery(kf.filter(), context.getQueryShardContext())));
         }


@@ -169,7 +169,7 @@ public class FiltersAggregationBuilder extends AbstractAggregationBuilder<Filter
     @Override
     protected AggregatorFactory<?> doBuild(SearchContext context, AggregatorFactory<?> parent, Builder subFactoriesBuilder)
             throws IOException {
-        List<KeyedFilter> rewrittenFilters = new ArrayList<>();
+        List<KeyedFilter> rewrittenFilters = new ArrayList<>(filters.size());
         for(KeyedFilter kf : filters) {
             rewrittenFilters.add(new KeyedFilter(kf.key(), QueryBuilder.rewriteQuery(kf.filter(),
                 context.getQueryShardContext())));


@@ -164,7 +164,7 @@ public class DoubleTerms extends InternalMappedTerms<DoubleTerms, DoubleTerms.Bu
         if (promoteToDouble == false) {
             return super.doReduce(aggregations, reduceContext);
         }
-        List<InternalAggregation> newAggs = new ArrayList<>();
+        List<InternalAggregation> newAggs = new ArrayList<>(aggregations.size());
         for (InternalAggregation agg : aggregations) {
             if (agg instanceof LongTerms) {
                 DoubleTerms dTerms = LongTerms.convertLongTermsToDouble((LongTerms) agg, format);


@@ -75,7 +75,7 @@ public class CumulativeSumPipelineAggregator extends PipelineAggregator {
             InternalMultiBucketAggregation.InternalBucket>) aggregation;
         List<? extends InternalMultiBucketAggregation.InternalBucket> buckets = histo.getBuckets();
         HistogramFactory factory = (HistogramFactory) histo;
-        List<Bucket> newBuckets = new ArrayList<>();
+        List<Bucket> newBuckets = new ArrayList<>(buckets.size());
         double sum = 0;
         for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) {
             Double thisBucketValue = resolveBucketValue(histo, bucket, bucketsPaths()[0], GapPolicy.INSERT_ZEROS);


@@ -153,7 +153,7 @@ public final class ConvertProcessor extends AbstractProcessor {
         if (oldValue instanceof List) {
             List<?> list = (List<?>) oldValue;
-            List<Object> newList = new ArrayList<>();
+            List<Object> newList = new ArrayList<>(list.size());
             for (Object value : list) {
                 newList.add(convertType.convert(value));
             }


@@ -54,7 +54,7 @@ public final class DateProcessor extends AbstractProcessor {
         this.field = field;
         this.targetField = targetField;
         this.formats = formats;
-        this.dateParsers = new ArrayList<>();
+        this.dateParsers = new ArrayList<>(this.formats.size());
         for (String format : formats) {
             DateFormat dateFormat = DateFormat.fromString(format);
             dateParsers.add(dateFormat.getFunction(format, timezone, locale));


@@ -240,7 +240,7 @@ public final class Def {
         }
         // convert recipe string to a bitset for convenience (the code below should be refactored...)
-        BitSet lambdaArgs = new BitSet();
+        BitSet lambdaArgs = new BitSet(recipeString.length());
         for (int i = 0; i < recipeString.length(); i++) {
             lambdaArgs.set(recipeString.charAt(i));
         }


@@ -113,7 +113,7 @@ public final class ELambda extends AExpression implements ILambda {
             // we don't know anything: treat as def
             returnType = Definition.DEF_TYPE;
             // don't infer any types, replace any null types with def
-            actualParamTypeStrs = new ArrayList<>();
+            actualParamTypeStrs = new ArrayList<>(paramTypeStrs.size());
             for (String type : paramTypeStrs) {
                 if (type == null) {
                     actualParamTypeStrs.add("def");
@@ -139,7 +139,7 @@ public final class ELambda extends AExpression implements ILambda {
                 returnType = interfaceMethod.rtn;
             }
             // replace any null types with the actual type
-            actualParamTypeStrs = new ArrayList<>();
+            actualParamTypeStrs = new ArrayList<>(paramTypeStrs.size());
             for (int i = 0; i < paramTypeStrs.size(); i++) {
                 String paramType = paramTypeStrs.get(i);
                 if (paramType == null) {
@@ -162,8 +162,8 @@ public final class ELambda extends AExpression implements ILambda {
             }
         }
         // prepend capture list to lambda's arguments
-        List<String> paramTypes = new ArrayList<>();
-        List<String> paramNames = new ArrayList<>();
+        List<String> paramTypes = new ArrayList<>(captures.size() + actualParamTypeStrs.size());
+        List<String> paramNames = new ArrayList<>(captures.size() + paramNameStrs.size());
         for (Variable var : captures) {
             paramTypes.add(var.type.name);
             paramNames.add(var.name);


@@ -125,8 +125,8 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos
             // lets see if we can filter based on groups
             if (!groups.isEmpty()) {
                 List<GroupIdentifier> instanceSecurityGroups = instance.getSecurityGroups();
-                ArrayList<String> securityGroupNames = new ArrayList<String>();
-                ArrayList<String> securityGroupIds = new ArrayList<String>();
+                List<String> securityGroupNames = new ArrayList<>(instanceSecurityGroups.size());
+                List<String> securityGroupIds = new ArrayList<>(instanceSecurityGroups.size());
                 for (GroupIdentifier sg : instanceSecurityGroups) {
                     securityGroupNames.add(sg.getGroupName());
                     securityGroupIds.add(sg.getGroupId());


@@ -249,7 +249,7 @@ class GoogleCloudStorageBlobStore extends AbstractComponent implements BlobStore
             deleteBlob(blobNames.iterator().next());
             return;
         }
-        final List<Storage.Objects.Delete> deletions = new ArrayList<>();
+        final List<Storage.Objects.Delete> deletions = new ArrayList<>(Math.min(MAX_BATCHING_REQUESTS, blobNames.size()));
         final Iterator<String> blobs = blobNames.iterator();
         SocketAccess.doPrivilegedVoidIOException(() -> {


@@ -201,7 +201,7 @@ public class BootstrapForTesting {
         codebases.removeAll(excluded);
         // parse each policy file, with codebase substitution from the classpath
-        final List<Policy> policies = new ArrayList<>();
+        final List<Policy> policies = new ArrayList<>(pluginPolicies.size());
         for (URL policyFile : pluginPolicies) {
             policies.add(Security.readPolicy(policyFile, codebases));
         }


@@ -110,7 +110,7 @@ public class ClientYamlTestExecutionContext {
                 ContentType.create(xContentType.mediaTypeWithoutParameters(), StandardCharsets.UTF_8));
         } else {
             XContentType xContentType = getContentType(headers, STREAMING_CONTENT_TYPES);
-            List<BytesRef> bytesRefList = new ArrayList<>();
+            List<BytesRef> bytesRefList = new ArrayList<>(bodies.size());
             int totalBytesLength = 0;
             for (Map<String, Object> body : bodies) {
                 BytesRef bytesRef = bodyAsBytesRef(body, xContentType);