LUCENE-8994: Code Cleanup - Pass values to list constructor instead of empty constructor followed by addAll(). (#919)

Koen De Groote authored on 2019-10-14 18:45:47 +02:00; committed by Adrien Grand
parent 5e286e4195
commit e7e6cfaecf
28 changed files with 40 additions and 69 deletions
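Every hunk in this commit applies the same refactoring: instead of creating an empty collection and then filling it with addAll() or putAll(), the source collection is passed directly to the copy constructor. Besides being one line shorter, ArrayList(Collection) and HashMap(Map) size their backing storage from the source up front, so large copies avoid intermediate resizing. A minimal before/after sketch of the pattern (illustrative only; not code from the changed files):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class CopyConstructorSketch {
        public static void main(String[] args) {
            List<String> source = Arrays.asList("a", "b", "c");

            // Before: two steps; for large sources the backing array
            // may have to grow while addAll() copies.
            List<String> before = new ArrayList<>();
            before.addAll(source);

            // After: one step; the backing array is allocated at source.size().
            List<String> after = new ArrayList<>(source);

            System.out.println(before.equals(after)); // true
        }
    }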


@@ -225,6 +225,9 @@ Other
 * LUCENE-8999: LuceneTestCase.expectThrows now propogates assert/assumption failures up to the test
   w/o wrapping in a new assertion failure unless the caller has explicitly expected them (hossman)
+
+* LUCENE-8994: Code Cleanup - Pass values to list constructor instead of empty constructor followed by addAll(). (Koen De Groote)
+
 ======================= Lucene 8.2.0 =======================
 API Changes


@@ -237,8 +237,7 @@ public final class OrdsBlockTreeTermsReader extends FieldsProducer {
 
   @Override
   public Collection<Accountable> getChildResources() {
-    List<Accountable> resources = new ArrayList<>();
-    resources.addAll(Accountables.namedAccountables("field", fields));
+    List<Accountable> resources = new ArrayList<>(Accountables.namedAccountables("field", fields));
     resources.add(Accountables.namedAccountable("delegate", postingsReader));
     return Collections.unmodifiableList(resources);
   }


@@ -392,8 +392,7 @@ public final class BloomFilteringPostingsFormat extends PostingsFormat {
 
   @Override
   public Collection<Accountable> getChildResources() {
-    List<Accountable> resources = new ArrayList<>();
-    resources.addAll(Accountables.namedAccountables("field", bloomsByFieldName));
+    List<Accountable> resources = new ArrayList<>(Accountables.namedAccountables("field", bloomsByFieldName));
     if (delegateFieldsProducer != null) {
       resources.add(Accountables.namedAccountable("delegate", delegateFieldsProducer));
     }


@@ -867,8 +867,7 @@ public class FSTOrdTermsReader extends FieldsProducer {
 
   @Override
   public Collection<Accountable> getChildResources() {
-    List<Accountable> resources = new ArrayList<>();
-    resources.addAll(Accountables.namedAccountables("field", fields));
+    List<Accountable> resources = new ArrayList<>(Accountables.namedAccountables("field", fields));
     resources.add(Accountables.namedAccountable("delegate", postingsReader));
     return Collections.unmodifiableList(resources);
   }


@@ -768,8 +768,7 @@ public class FSTTermsReader extends FieldsProducer {
 
   @Override
   public Collection<Accountable> getChildResources() {
-    List<Accountable> resources = new ArrayList<>();
-    resources.addAll(Accountables.namedAccountables("field", fields));
+    List<Accountable> resources = new ArrayList<>(Accountables.namedAccountables("field", fields));
     resources.add(Accountables.namedAccountable("delegate", postingsReader));
     return Collections.unmodifiableCollection(resources);
   }


@@ -336,8 +336,7 @@ public final class BlockTreeTermsReader extends FieldsProducer {
 
   @Override
   public Collection<Accountable> getChildResources() {
-    List<Accountable> resources = new ArrayList<>();
-    resources.addAll(Accountables.namedAccountables("field", fields));
+    List<Accountable> resources = new ArrayList<>(Accountables.namedAccountables("field", fields));
     resources.add(Accountables.namedAccountable("delegate", postingsReader));
     return Collections.unmodifiableList(resources);
   }


@@ -144,8 +144,7 @@ public class IndexReplicationHandler implements ReplicationHandler {
       // if there were any IO errors reading the expected commit point (i.e.
       // segments files mismatch), then ignore that commit either.
       if (commit != null && commit.getSegmentsFileName().equals(segmentsFile)) {
-        Set<String> commitFiles = new HashSet<>();
-        commitFiles.addAll(commit.getFileNames());
+        Set<String> commitFiles = new HashSet<>(commit.getFileNames());
         Matcher matcher = IndexFileNames.CODEC_FILE_PATTERN.matcher("");
         for (String file : dir.listAll()) {
           if (!commitFiles.contains(file)


@@ -233,8 +233,7 @@ public final class VersionBlockTreeTermsReader extends FieldsProducer {
 
   @Override
   public Collection<Accountable> getChildResources() {
-    List<Accountable> resources = new ArrayList<>();
-    resources.addAll(Accountables.namedAccountables("field", fields));
+    List<Accountable> resources = new ArrayList<>(Accountables.namedAccountables("field", fields));
     resources.add(Accountables.namedAccountable("delegate", postingsReader));
     return Collections.unmodifiableList(resources);
   }


@@ -443,8 +443,7 @@ public class GetMavenDependenciesTask extends Task {
   private void appendAllInternalDependencies(StringBuilder builder) {
     for (Map.Entry<String, SortedSet<String>> entry : internalCompileScopeDependencies.entrySet()) {
       String artifactId = entry.getKey();
-      List<String> exclusions = new ArrayList<>();
-      exclusions.addAll(entry.getValue());
+      List<String> exclusions = new ArrayList<>(entry.getValue());
       SortedSet<ExternalDependency> extDeps = allExternalDependencies.get(artifactId);
       if (null != extDeps) {
         for (ExternalDependency externalDependency : extDeps) {


@@ -131,8 +131,7 @@ public class RegexTransformer extends Transformer {
 
   @SuppressWarnings("unchecked")
   private List<String> readBySplit(String splitBy, String value) {
     String[] vals = value.split(splitBy);
-    List<String> l = new ArrayList<>();
-    l.addAll(Arrays.asList(vals));
+    List<String> l = new ArrayList<>(Arrays.asList(vals));
     return l;
   }


@@ -2012,8 +2012,7 @@ public class ZkController implements Closeable {
       ZkNodeProps props = null;
       if (data != null) {
         props = ZkNodeProps.load(data);
-        Map<String, Object> newProps = new HashMap<>();
-        newProps.putAll(props.getProperties());
+        Map<String, Object> newProps = new HashMap<>(props.getProperties());
         newProps.put(CONFIGNAME_PROP, confSetName);
         props = new ZkNodeProps(newProps);
       } else {


@@ -521,11 +521,10 @@ public class ReindexCollectionCmd implements OverseerCollectionMessageHandler.Cm
   private Map<String, Object> setReindexingState(String collection, State state, Map<String, Object> props) throws Exception {
     String path = ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection + REINDEXING_STATE_PATH;
     DistribStateManager stateManager = ocmh.cloudManager.getDistribStateManager();
-    Map<String, Object> copyProps = new HashMap<>();
     if (props == null) { // retrieve existing props, if any
       props = Utils.getJson(stateManager, path);
     }
-    copyProps.putAll(props);
+    Map<String, Object> copyProps = new HashMap<>(props);
     copyProps.put("state", state.toLower());
     if (stateManager.hasData(path)) {
       stateManager.setData(path, Utils.toJSON(copyProps), -1);


@@ -313,9 +313,8 @@ public class SimNodeStateProvider implements NodeStateProvider {
     if (tags.isEmpty()) {
       return new HashMap<>();
     }
-    Map<String, Object> result = new HashMap<>();
     Map<String, Object> metrics = getReplicaMetricsValues(node, tags.stream().filter(s -> s.startsWith("metrics:solr.core.")).collect(Collectors.toList()));
-    result.putAll(metrics);
+    Map<String, Object> result = new HashMap<>(metrics);
     Map<String, Object> values = nodeValues.get(node);
     if (values == null) {
       return result;


@@ -182,10 +182,9 @@ class SolrCores {
    */
   List<SolrCore> getCores() {
-    List<SolrCore> lst = new ArrayList<>();
     synchronized (modifyLock) {
-      lst.addAll(cores.values());
+      List<SolrCore> lst = new ArrayList<>(cores.values());
       return lst;
     }
   }
@@ -201,10 +200,10 @@
    * @return List of currently loaded cores.
    */
   Set<String> getLoadedCoreNames() {
-    Set<String> set = new TreeSet<>();
+    Set<String> set;
     synchronized (modifyLock) {
-      set.addAll(cores.keySet());
+      set = new TreeSet<>(cores.keySet());
       if (getTransientCacheHandler() != null) {
         set.addAll(getTransientCacheHandler().getLoadedCoreNames());
       }
@@ -239,9 +238,9 @@
    * @return all cores names, whether loaded or unloaded, transient or permanent.
    */
   public Collection<String> getAllCoreNames() {
-    Set<String> set = new TreeSet<>();
+    Set<String> set;
     synchronized (modifyLock) {
-      set.addAll(cores.keySet());
+      set = new TreeSet<>(cores.keySet());
       if (getTransientCacheHandler() != null) {
         set.addAll(getTransientCacheHandler().getAllCoreNames());
       }
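A note on the SolrCores hunks above: the refactoring keeps the copy inside synchronized (modifyLock), so the snapshot is still taken while the lock is held; only the declaration site moves. A hedged sketch of the idiom (the Registry class below is invented for illustration and is not Solr's SolrCores):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class Registry {
        private final Object modifyLock = new Object();
        private final Map<String, Object> cores = new HashMap<>();

        // The copy constructor runs while the lock is held, so the returned
        // snapshot cannot race with a concurrent put/remove on 'cores'.
        List<String> coreNameSnapshot() {
            synchronized (modifyLock) {
                return new ArrayList<>(cores.keySet());
            }
        }
    }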


@@ -170,8 +170,7 @@ public class SegmentsInfoRequestHandler extends RequestHandlerBase {
       }
     }
     SimpleOrderedMap<Object> segmentInfo = null;
-    List<SegmentCommitInfo> sortable = new ArrayList<>();
-    sortable.addAll(infos.asList());
+    List<SegmentCommitInfo> sortable = new ArrayList<>(infos.asList());
     // Order by the number of live docs. The display is logarithmic so it is a little jumbled visually
     sortable.sort((s1, s2) ->
       (s2.info.maxDoc() - s2.getDelCount()) - (s1.info.maxDoc() - s1.getDelCount())


@@ -116,9 +116,8 @@ public class SearchHandler extends RequestHandlerBase implements SolrCoreAware ,
   public void inform(SolrCore core)
   {
     this.core = core;
-    Set<String> missing = new HashSet<>();
     List<String> c = (List<String>) initArgs.get(INIT_COMPONENTS);
-    missing.addAll(core.getSearchComponents().checkContains(c));
+    Set<String> missing = new HashSet<>(core.getSearchComponents().checkContains(c));
     List<String> first = (List<String>) initArgs.get(INIT_FIRST_COMPONENTS);
     missing.addAll(core.getSearchComponents().checkContains(first));
     List<String> last = (List<String>) initArgs.get(INIT_LAST_COMPONENTS);


@@ -161,8 +161,7 @@ public class ManagedSynonymFilterFactory extends BaseManagedTokenFilterFactory {
               " but got "+mapping.getClass().getName());
         }
-        Set<String> sortedVals = new TreeSet<>();
-        sortedVals.addAll((List<String>) entry.getValue());
+        Set<String> sortedVals = new TreeSet<>((List<String>) entry.getValue());
         cpsm.mappings.put(key, sortedVals);
       }
     }
@@ -195,8 +194,7 @@ public class ManagedSynonymFilterFactory extends BaseManagedTokenFilterFactory {
         if (cpsm == null)
           cpsm = new CasePreservedSynonymMappings();
-        Set<String> treeTerms = new TreeSet<>();
-        treeTerms.addAll(jsonList);
+        Set<String> treeTerms = new TreeSet<>(jsonList);
         cpsm.mappings.put(origTerm, treeTerms);
         madeChanges = true;
         // only add the cpsm to the synonymMappings if it has valid data


@@ -156,8 +156,7 @@ public class ManagedSynonymGraphFilterFactory extends BaseManagedTokenFilterFact
               " but got "+mapping.getClass().getName());
         }
-        Set<String> sortedVals = new TreeSet<>();
-        sortedVals.addAll((List<String>) entry.getValue());
+        Set<String> sortedVals = new TreeSet<>((List<String>) entry.getValue());
         cpsm.mappings.put(key, sortedVals);
       }
     }
@@ -190,8 +189,7 @@ public class ManagedSynonymGraphFilterFactory extends BaseManagedTokenFilterFact
         if (cpsm == null)
          cpsm = new CasePreservedSynonymMappings();
-        Set<String> treeTerms = new TreeSet<>();
-        treeTerms.addAll(jsonList);
+        Set<String> treeTerms = new TreeSet<>(jsonList);
         cpsm.mappings.put(origTerm, treeTerms);
         madeChanges = true;
         // only add the cpsm to the synonymMappings if it has valid data


@@ -1012,8 +1012,7 @@ public class HttpSolrCall {
     Collections.shuffle(slices, random);
 
     for (Slice slice : slices) {
-      List<Replica> randomizedReplicas = new ArrayList<>();
-      randomizedReplicas.addAll(slice.getReplicas());
+      List<Replica> randomizedReplicas = new ArrayList<>(slice.getReplicas());
       Collections.shuffle(randomizedReplicas, random);
       for (Replica replica : randomizedReplicas) {


@@ -990,8 +990,7 @@ public class SolrCLI implements CLIO {
         cloudManager.saveSnapshot(targetDir, true, redact);
         CLIO.err("- saved autoscaling snapshot to " + targetDir.getAbsolutePath());
       }
-      HashSet<String> liveNodes = new HashSet<>();
-      liveNodes.addAll(cloudManager.getClusterStateProvider().getLiveNodes());
+      HashSet<String> liveNodes = new HashSet<>(cloudManager.getClusterStateProvider().getLiveNodes());
       boolean withSuggestions = cli.hasOption("s");
       boolean withDiagnostics = cli.hasOption("d") || cli.hasOption("n");
       boolean withSortedNodes = cli.hasOption("n");


@@ -587,8 +587,7 @@ public abstract class BaseCloudSolrClient extends SolrClient {
     }
     nonRoutableRequest.setParams(nonRoutableParams);
     nonRoutableRequest.setBasicAuthCredentials(request.getBasicAuthUser(), request.getBasicAuthPassword());
-    List<String> urlList = new ArrayList<>();
-    urlList.addAll(routes.keySet());
+    List<String> urlList = new ArrayList<>(routes.keySet());
     Collections.shuffle(urlList, rand);
     LBSolrClient.Req req = new LBSolrClient.Req(nonRoutableRequest, urlList);
     try {


@@ -194,8 +194,7 @@ public class Tuple implements Cloneable, MapWriter {
   }
 
   public Tuple clone() {
-    HashMap m = new HashMap();
-    m.putAll(fields);
+    HashMap m = new HashMap(fields);
     Tuple clone = new Tuple(m);
     return clone;
   }
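One semantic point about clone() above: the raw HashMap(Map) copy constructor, exactly like the putAll() it replaces, makes a shallow copy; the cloned Tuple gets its own map, but the field values themselves are shared. A small sketch (plain maps standing in for the Solr Tuple class):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ShallowCopySketch {
        public static void main(String[] args) {
            Map<String, Object> fields = new HashMap<>();
            fields.put("ids", new ArrayList<>(Arrays.asList(1, 2)));

            // Same shallow-copy semantics as the old putAll() version.
            Map<String, Object> cloned = new HashMap<>(fields);

            @SuppressWarnings("unchecked")
            List<Integer> ids = (List<Integer>) cloned.get("ids");
            ids.add(3);

            // The original map sees the change: the inner list is shared.
            System.out.println(fields.get("ids")); // [1, 2, 3]
        }
    }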


@@ -47,8 +47,7 @@ public class SetValueEvaluator extends RecursiveObjectEvaluator implements ManyV
         value = ((String)value).replace("\"", "");
       }
       key = key.replace("\"", "");
-      Map map = new HashMap();
-      map.putAll(tuple.fields);
+      Map map = new HashMap(tuple.fields);
       map.put(key, value);
       return new Tuple(map);
     } else {


@@ -87,7 +87,6 @@ public class TermVectorsEvaluator extends RecursiveObjectEvaluator implements Ma
     List<Tuple> tuples = (List<Tuple>) objects[0];
     TreeMap<String, Integer> docFreqs = new TreeMap();
-    List<String> features = new ArrayList();
     List<String> rowLabels = new ArrayList();
 
     for (Tuple tuple : tuples) {
@@ -148,7 +147,7 @@
     }
     int totalTerms = docFreqs.size();
     Set<String> keys = docFreqs.keySet();
-    features.addAll(keys);
+    List<String> features = new ArrayList(keys);
     double[][] docVec = new double[tuples.size()][];
     for (int t = 0; t < tuples.size(); t++) {
       Tuple tuple = tuples.get(t);


@@ -391,8 +391,7 @@ public class ShortestPathStream extends TupleStream implements Expressible {
             List<String> parents = v.get(p.peekFirst());
             if (parents != null) {
               for(String parent : parents) {
-                LinkedList newPath = new LinkedList();
-                newPath.addAll(p);
+                LinkedList newPath = new LinkedList(p);
                 newPath.addFirst(parent);
                 newPaths.add(newPath);
               }


@@ -106,17 +106,15 @@ public class GroupOperation implements ReduceOperation {
   }
 
   public Tuple reduce() {
-    Map map = new HashMap();
-    List<Map> list = new ArrayList();
     LinkedList ll = new LinkedList();
     while(priorityQueue.size() > 0) {
       ll.addFirst(priorityQueue.poll().getMap());
       //This will clear priority queue and so it will be ready for the next group.
     }
 
-    list.addAll(ll);
+    List<Map> list = new ArrayList(ll);
     Map groupHead = list.get(0);
-    map.putAll(groupHead);
+    Map map = new HashMap(groupHead);
     map.put("group", list);
     return new Tuple(map);
   }


@@ -169,8 +169,7 @@ public class SchemaResponse extends SolrResponseBase {
     List<Map<String, Object>> fieldsAttributes = new LinkedList<>();
     List<NamedList<Object>> fieldsResponse = (List<NamedList<Object>>) schemaNamedList.get("fields");
     for (NamedList<Object> fieldNamedList : fieldsResponse) {
-      Map<String, Object> fieldAttributes = new LinkedHashMap<>();
-      fieldAttributes.putAll(extractAttributeMap(fieldNamedList));
+      Map<String, Object> fieldAttributes = new LinkedHashMap<>(extractAttributeMap(fieldNamedList));
       fieldsAttributes.add(fieldAttributes);
     }
@@ -182,8 +181,7 @@
     List<Map<String, Object>> dynamicFieldsAttributes = new LinkedList<>();
     List<NamedList<Object>> dynamicFieldsResponse = (List<NamedList<Object>>) schemaNamedList.get("dynamicFields");
     for (NamedList<Object> fieldNamedList : dynamicFieldsResponse) {
-      Map<String, Object> dynamicFieldAttributes = new LinkedHashMap<>();
-      dynamicFieldAttributes.putAll(extractAttributeMap(fieldNamedList));
+      Map<String, Object> dynamicFieldAttributes = new LinkedHashMap<>(extractAttributeMap(fieldNamedList));
       dynamicFieldsAttributes.add(dynamicFieldAttributes);
     }
@@ -195,8 +193,7 @@
     List<Map<String, Object>> copyFieldsAttributes = new LinkedList<>();
     List<NamedList<Object>> copyFieldsResponse = (List<NamedList<Object>>) schemaNamedList.get("copyFields");
     for (NamedList<Object> copyFieldNamedList : copyFieldsResponse) {
-      Map<String, Object> copyFieldAttributes = new LinkedHashMap<>();
-      copyFieldAttributes.putAll(extractAttributeMap(copyFieldNamedList));
+      Map<String, Object> copyFieldAttributes = new LinkedHashMap<>(extractAttributeMap(copyFieldNamedList));
       copyFieldsAttributes.add(copyFieldAttributes);
     }


@@ -2158,8 +2158,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
     log.info("Took {} ms to see all replicas become active.", timer.getTime());
 
-    List<Replica> replicas = new ArrayList<>();
-    replicas.addAll(notLeaders.values());
+    List<Replica> replicas = new ArrayList<>(notLeaders.values());
     return replicas;
   }