Changing keySet() to entrySet() and sometimes values(). (#868)

Koen De Groote 2019-09-13 14:42:38 +02:00 committed by Adrien Grand
parent ec1ef2bce6
commit ec78ef7852
22 changed files with 92 additions and 79 deletions
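
The pattern applied throughout: loops that iterated keySet() and then called get(key) on every element now iterate entrySet(), which carries the key and value together; loops that never used the key now iterate values(). A minimal standalone sketch of the before/after (illustrative only, not taken from the patch):

import java.util.HashMap;
import java.util.Map;

public class EntrySetExample {
  public static void main(String[] args) {
    Map<String, Integer> counts = new HashMap<>();
    counts.put("apple", 3);
    counts.put("pear", 5);

    // Before: each iteration pays for a second lookup via get().
    for (String key : counts.keySet()) {
      System.out.println(key + " -> " + counts.get(key));
    }

    // After: entrySet() yields key and value in one pass, no extra lookup.
    for (Map.Entry<String, Integer> entry : counts.entrySet()) {
      System.out.println(entry.getKey() + " -> " + entry.getValue());
    }

    // When the key is never used, values() is the simplest form.
    for (int count : counts.values()) {
      System.out.println(count);
    }
  }
}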


@@ -166,6 +166,8 @@ Other
 * LUCENE-8758: QuadPrefixTree: removed levelS and levelN fields which weren't used. (Amish Shah)
+* LUCENE-8975: Code Cleanup: Use entryset for map iteration wherever possible.
 ======================= Lucene 8.2.0 =======================
 API Changes


@@ -194,11 +194,11 @@ public final class UserDictionary implements Dictionary {
    */
   private int[][] toIndexArray(Map<Integer, int[]> input) {
     ArrayList<int[]> result = new ArrayList<>();
-    for (int i : input.keySet()) {
-      int[] wordIdAndLength = input.get(i);
+    for (Map.Entry<Integer, int[]> entry : input.entrySet()) {
+      int[] wordIdAndLength = entry.getValue();
       int wordId = wordIdAndLength[0];
       // convert length to index
-      int current = i;
+      int current = entry.getKey();
       for (int j = 1; j < wordIdAndLength.length; j++) { // first entry is wordId offset
         int[] token = { wordId + j - 1, current, wordIdAndLength[j] };
         result.add(token);


@@ -102,8 +102,8 @@ public abstract class SpanWeight extends Weight {
       return null;
     TermStatistics[] termStats = new TermStatistics[termStates.size()];
     int termUpTo = 0;
-    for (Term term : termStates.keySet()) {
-      TermStatistics termStatistics = searcher.termStatistics(term, termStates.get(term));
+    for (Map.Entry<Term, TermStates> entry : termStates.entrySet()) {
+      TermStatistics termStatistics = searcher.termStatistics(entry.getKey(), entry.getValue());
       if (termStatistics != null) {
         termStats[termUpTo++] = termStatistics;
       }


@@ -90,9 +90,9 @@ public abstract class PivotNode<T> extends SortableFacet implements Consumer<Str
    */
   public void exportPivot(DataOutput output, Map<String,T> pivot) throws IOException {
     output.writeInt(pivot.size());
-    for (String pivotValue : pivot.keySet()) {
-      output.writeUTF(pivotValue);
-      exportPivotValue(output, pivot.get(pivotValue));
+    for (Map.Entry<String, T> entry : pivot.entrySet()) {
+      output.writeUTF(entry.getKey());
+      exportPivotValue(output, entry.getValue());
     }
   }
   /**


@@ -160,9 +160,10 @@ public class CreateCollectionCmd implements OverseerCollectionMessageHandler.Cmd
     Map<String,String> collectionParams = new HashMap<>();
     Map<String,Object> collectionProps = message.getProperties();
-    for (String propName : collectionProps.keySet()) {
+    for (Map.Entry<String, Object> entry : collectionProps.entrySet()) {
+      String propName = entry.getKey();
       if (propName.startsWith(ZkController.COLLECTION_PARAM_PREFIX)) {
-        collectionParams.put(propName.substring(ZkController.COLLECTION_PARAM_PREFIX.length()), (String) collectionProps.get(propName));
+        collectionParams.put(propName.substring(ZkController.COLLECTION_PARAM_PREFIX.length()), (String) entry.getValue());
       }
     }


@@ -203,9 +203,9 @@ public class CoreDescriptor {
     coreProperties.putAll(defaultProperties);
     coreProperties.put(CORE_NAME, name);
-    for (String propname : coreProps.keySet()) {
-      String propvalue = coreProps.get(propname);
+    for (Map.Entry<String, String> entry : coreProps.entrySet()) {
+      String propname = entry.getKey();
+      String propvalue = entry.getValue();
       if (isUserDefinedProperty(propname))
         originalExtraProperties.put(propname, propvalue);


@@ -1060,8 +1060,9 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
       copy(req.getParams().required(), m, COLLECTION_PROP);
       addMapObject(m, RULE);
       addMapObject(m, SNITCH);
-      for (String prop : m.keySet()) {
-        if ("".equals(m.get(prop))) {
+      for (Map.Entry<String, Object> entry : m.entrySet()) {
+        String prop = entry.getKey();
+        if ("".equals(entry.getValue())) {
           // set to an empty string is equivalent to removing the property, see SOLR-12507
           m.put(prop, null);
         }
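
(A note on the hunk above, an observation rather than part of the patch: m.put(prop, null) inside the entrySet() loop is safe for HashMap-family maps, since replacing the value of an existing key is not a structural modification; entry.setValue(null) would express the same intent without a second key lookup.)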


@@ -183,12 +183,12 @@ public final class ZookeeperInfoHandler extends RequestHandlerBase {
       boolean replicaInRecovery = false;
       Map<String, Object> shards = (Map<String, Object>) collectionState.get("shards");
-      for (String shardId : shards.keySet()) {
+      for (Object o : shards.values()) {
         boolean hasActive = false;
-        Map<String, Object> shard = (Map<String, Object>) shards.get(shardId);
+        Map<String, Object> shard = (Map<String, Object>) o;
         Map<String, Object> replicas = (Map<String, Object>) shard.get("replicas");
-        for (String replicaId : replicas.keySet()) {
-          Map<String, Object> replicaState = (Map<String, Object>) replicas.get(replicaId);
+        for (Object value : replicas.values()) {
+          Map<String, Object> replicaState = (Map<String, Object>) value;
           Replica.State coreState = Replica.State.getState((String) replicaState.get(ZkStateReader.STATE_PROP));
           String nodeName = (String) replicaState.get("node_name");
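
(This hunk shows the values() variant of the cleanup: the shard and replica IDs were fetched but never used, so iterating values() drops both the unused variable and the per-key lookup. The loop variables are typed Object because the cluster-state maps are untyped at this point and each element still needs a cast.)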


@@ -280,8 +280,8 @@ public class Log4j2Watcher extends LogWatcher<LogEvent> {
     Map<String,String> contextMap = event.getContextMap();
     if (contextMap != null) {
-      for (String key : contextMap.keySet())
-        doc.setField(key, contextMap.get(key));
+      for (Map.Entry<String, String> entry : contextMap.entrySet())
+        doc.setField(entry.getKey(), entry.getValue());
     }
     if (!doc.containsKey("core"))


@@ -179,13 +179,14 @@ public class RequestUtil {
       }
       mergeJSON(json, JSON, jsonS, new ObjectUtil.ConflictHandler());
     }
-    for (String key : newMap.keySet()) {
+    for (Map.Entry<String, String[]> entry : newMap.entrySet()) {
+      String key = entry.getKey();
       // json.nl, json.wrf are existing query parameters
       if (key.startsWith("json.") && !("json.nl".equals(key) || "json.wrf".equals(key))) {
         if (json == null) {
           json = new LinkedHashMap<>();
         }
-        mergeJSON(json, key, newMap.get(key), new ObjectUtil.ConflictHandler());
+        mergeJSON(json, key, entry.getValue(), new ObjectUtil.ConflictHandler());
       }
     }


@@ -137,7 +137,8 @@ public class ManagedSynonymGraphFilterFactory extends BaseManagedTokenFilterFact
       synonymMappings = new TreeMap<>();
       if (managedData != null) {
         Map<String,Object> storedSyns = (Map<String,Object>)managedData;
-        for (String key : storedSyns.keySet()) {
+        for (Map.Entry<String, Object> entry : storedSyns.entrySet()) {
+          String key = entry.getKey();
           String caseKey = applyCaseSetting(ignoreCase, key);
           CasePreservedSynonymMappings cpsm = synonymMappings.get(caseKey);
@@ -148,7 +149,7 @@ public class ManagedSynonymGraphFilterFactory extends BaseManagedTokenFilterFact
           // give the nature of our JSON parsing solution, we really have
           // no guarantees on what is in the file
-          Object mapping = storedSyns.get(key);
+          Object mapping = entry.getValue();
           if (!(mapping instanceof List)) {
             throw new SolrException(ErrorCode.SERVER_ERROR,
                 "Invalid synonym file format! Expected a list of synonyms for "+key+
@@ -156,7 +157,7 @@ public class ManagedSynonymGraphFilterFactory extends BaseManagedTokenFilterFact
           }
           Set<String> sortedVals = new TreeSet<>();
-          sortedVals.addAll((List<String>)storedSyns.get(key));
+          sortedVals.addAll((List<String>) entry.getValue());
           cpsm.mappings.put(key, sortedVals);
         }
       }
@@ -264,8 +265,8 @@ public class ManagedSynonymGraphFilterFactory extends BaseManagedTokenFilterFact
   protected Map<String,Set<String>> getStoredView() {
     Map<String,Set<String>> storedView = new TreeMap<>();
     for (CasePreservedSynonymMappings cpsm : synonymMappings.values()) {
-      for (String key : cpsm.mappings.keySet()) {
-        storedView.put(key, cpsm.mappings.get(key));
+      for (Map.Entry<String, Set<String>> entry : cpsm.mappings.entrySet()) {
+        storedView.put(entry.getKey(), entry.getValue());
      }
    }
    return storedView;
@@ -361,10 +362,10 @@ public class ManagedSynonymGraphFilterFactory extends BaseManagedTokenFilterFact
     public void parse(Reader in) throws IOException, ParseException {
       boolean ignoreCase = synonymManager.getIgnoreCase();
       for (CasePreservedSynonymMappings cpsm : synonymManager.synonymMappings.values()) {
-        for (String term : cpsm.mappings.keySet()) {
-          for (String mapping : cpsm.mappings.get(term)) {
+        for (Map.Entry<String, Set<String>> entry : cpsm.mappings.entrySet()) {
+          for (String mapping : entry.getValue()) {
             // apply the case setting to match the behavior of the SynonymMap builder
-            CharsRef casedTerm = analyze(synonymManager.applyCaseSetting(ignoreCase, term), new CharsRefBuilder());
+            CharsRef casedTerm = analyze(synonymManager.applyCaseSetting(ignoreCase, entry.getKey()), new CharsRefBuilder());
             CharsRef casedMapping = analyze(synonymManager.applyCaseSetting(ignoreCase, mapping), new CharsRefBuilder());
             add(casedTerm, casedMapping, false);
           }


@@ -1004,9 +1004,10 @@ public abstract class FieldType extends FieldProperties {
     if (showDefaults) {
       Map<String,String> fieldTypeArgs = getNonFieldPropertyArgs();
       if (null != fieldTypeArgs) {
-        for (String key : fieldTypeArgs.keySet()) {
+        for (Map.Entry<String, String> entry : fieldTypeArgs.entrySet()) {
+          String key = entry.getKey();
           if ( ! CLASS_NAME.equals(key) && ! TYPE_NAME.equals(key)) {
-            namedPropertyValues.add(key, fieldTypeArgs.get(key));
+            namedPropertyValues.add(key, entry.getValue());
           }
         }
       }
@@ -1048,11 +1049,12 @@ public abstract class FieldType extends FieldProperties {
         fieldProperties.add(propertyName);
       }
-      for (String key : args.keySet()) {
+      for (Map.Entry<String, String> entry : args.entrySet()) {
+        String key = entry.getKey();
         if (fieldProperties.contains(key)) {
-          namedPropertyValues.add(key, StrUtils.parseBool(args.get(key)));
+          namedPropertyValues.add(key, StrUtils.parseBool(entry.getValue()));
         } else if (!CLASS_NAME.equals(key) && !TYPE_NAME.equals(key)) {
-          namedPropertyValues.add(key, args.get(key));
+          namedPropertyValues.add(key, entry.getValue());
        }
      }
    }
@@ -1114,14 +1116,15 @@ public abstract class FieldType extends FieldProperties {
       props.add(CLASS_NAME, charFilterFactory.getClassArg());
     }
     if (null != factoryArgs) {
-      for (String key : factoryArgs.keySet()) {
+      for (Map.Entry<String, String> entry : factoryArgs.entrySet()) {
+        String key = entry.getKey();
         if ( ! CLASS_NAME.equals(key)) {
           if (LUCENE_MATCH_VERSION_PARAM.equals(key)) {
             if (charFilterFactory.isExplicitLuceneMatchVersion()) {
-              props.add(key, factoryArgs.get(key));
+              props.add(key, entry.getValue());
             }
           } else {
-            props.add(key, factoryArgs.get(key));
+            props.add(key, entry.getValue());
           }
         }
       }
@@ -1138,14 +1141,15 @@ public abstract class FieldType extends FieldProperties {
       tokenizerProps.add(CLASS_NAME, tokenizerFactory.getClassArg());
     }
     if (null != factoryArgs) {
-      for (String key : factoryArgs.keySet()) {
+      for (Map.Entry<String, String> entry : factoryArgs.entrySet()) {
+        String key = entry.getKey();
         if ( ! CLASS_NAME.equals(key)) {
           if (LUCENE_MATCH_VERSION_PARAM.equals(key)) {
             if (tokenizerFactory.isExplicitLuceneMatchVersion()) {
-              tokenizerProps.add(key, factoryArgs.get(key));
+              tokenizerProps.add(key, entry.getValue());
             }
           } else {
-            tokenizerProps.add(key, factoryArgs.get(key));
+            tokenizerProps.add(key, entry.getValue());
           }
         }
       }
@@ -1162,14 +1166,15 @@ public abstract class FieldType extends FieldProperties {
       props.add(CLASS_NAME, filterFactory.getClassArg());
     }
     if (null != factoryArgs) {
-      for (String key : factoryArgs.keySet()) {
+      for (Map.Entry<String, String> entry : factoryArgs.entrySet()) {
+        String key = entry.getKey();
         if ( ! CLASS_NAME.equals(key)) {
           if (LUCENE_MATCH_VERSION_PARAM.equals(key)) {
             if (filterFactory.isExplicitLuceneMatchVersion()) {
-              props.add(key, factoryArgs.get(key));
+              props.add(key, entry.getValue());
             }
          } else {
-            props.add(key, factoryArgs.get(key));
+            props.add(key, entry.getValue());
          }
        }
      }


@@ -122,9 +122,9 @@ public class DisMaxQParser extends QParser {
     for (String boostFunc : boostFuncs) {
       if (null == boostFunc || "".equals(boostFunc)) continue;
       Map<String, Float> ff = SolrPluginUtils.parseFieldBoosts(boostFunc);
-      for (String f : ff.keySet()) {
-        Query fq = subQuery(f, FunctionQParserPlugin.NAME).getQuery();
-        Float b = ff.get(f);
+      for (Map.Entry<String, Float> entry : ff.entrySet()) {
+        Query fq = subQuery(entry.getKey(), FunctionQParserPlugin.NAME).getQuery();
+        Float b = entry.getValue();
         if (null != b) {
           fq = new BoostQuery(fq, b);
         }


@@ -541,9 +541,9 @@ public class ExtendedDismaxQParser extends QParser {
     for (String boostFunc : config.boostFuncs) {
       if(null == boostFunc || "".equals(boostFunc)) continue;
       Map<String,Float> ff = SolrPluginUtils.parseFieldBoosts(boostFunc);
-      for (String f : ff.keySet()) {
-        Query fq = subQuery(f, FunctionQParserPlugin.NAME).getQuery();
-        Float b = ff.get(f);
+      for (Map.Entry<String, Float> entry : ff.entrySet()) {
+        Query fq = subQuery(entry.getKey(), FunctionQParserPlugin.NAME).getQuery();
+        Float b = entry.getValue();
         if (null != b && b.floatValue() != 1f) {
           fq = new BoostQuery(fq, b);
         }


@@ -367,12 +367,12 @@ public abstract class FacetRequest {
   @Override
   public String toString() {
     Map<String, Object> descr = getFacetDescription();
-    String s = "facet request: { ";
-    for (String key : descr.keySet()) {
-      s += key + ":" + descr.get(key) + ",";
+    StringBuilder s = new StringBuilder("facet request: { ");
+    for (Map.Entry<String, Object> entry : descr.entrySet()) {
+      s.append(entry.getKey()).append(':').append(entry.getValue()).append(',');
     }
-    s += "}";
-    return s;
+    s.append('}');
+    return s.toString();
   }
   /**
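
(Besides the entrySet() conversion, this hunk replaces += concatenation inside the loop with StringBuilder, avoiding the allocation of a new intermediate String on every iteration.)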


@@ -100,10 +100,10 @@ public class SimpleMLTQParser extends QParser {
     } else {
       Map<String, SchemaField> fieldDefinitions = req.getSearcher().getSchema().getFields();
       ArrayList<String> fields = new ArrayList();
-      for (String fieldName : fieldDefinitions.keySet()) {
-        if (fieldDefinitions.get(fieldName).indexed() && fieldDefinitions.get(fieldName).stored())
-          if (fieldDefinitions.get(fieldName).getType().getNumberType() == null)
-            fields.add(fieldName);
+      for (Map.Entry<String, SchemaField> entry : fieldDefinitions.entrySet()) {
+        if (entry.getValue().indexed() && entry.getValue().stored())
+          if (entry.getValue().getType().getNumberType() == null)
+            fields.add(entry.getKey());
       }
       fieldNames = fields.toArray(new String[0]);
     }
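
(This hunk benefits the most from the change: the old loop called fieldDefinitions.get(fieldName) up to three times per key, each a separate hash lookup, while entry.getValue() is a plain field access.)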


@@ -195,8 +195,9 @@ public class AtomicUpdateDocumentMerger {
     }
     // else it's a atomic update map...
     Map<String, Object> fieldValueMap = (Map<String, Object>)fieldValue;
-    for (String op : fieldValueMap.keySet()) {
-      Object obj = fieldValueMap.get(op);
+    for (Entry<String, Object> entry : fieldValueMap.entrySet()) {
+      String op = entry.getKey();
+      Object obj = entry.getValue();
       if (!op.equals("set") && !op.equals("inc")) {
         // not a supported in-place update op
         return Collections.emptySet();


@@ -3467,8 +3467,9 @@ public class SolrCLI implements CLIO {
     Map<String,String> startEnv = new HashMap<>();
     Map<String,String> procEnv = EnvironmentUtils.getProcEnvironment();
     if (procEnv != null) {
-      for (String envVar : procEnv.keySet()) {
-        String envVarVal = procEnv.get(envVar);
+      for (Map.Entry<String, String> entry : procEnv.entrySet()) {
+        String envVar = entry.getKey();
+        String envVarVal = entry.getValue();
         if (envVarVal != null && !"EXAMPLE".equals(envVar) && !envVar.startsWith("SOLR_")) {
           startEnv.put(envVar, envVarVal);
         }


@@ -227,8 +227,8 @@ public class KnnStream extends TupleStream implements Expressible {
     if(documentIterator.hasNext()) {
       Map map = new HashMap();
       SolrDocument doc = documentIterator.next();
-      for(String key : doc.keySet()) {
-        map.put(key, doc.get(key));
+      for(Entry<String, Object> entry : doc.entrySet()) {
+        map.put(entry.getKey(), entry.getValue());
       }
       return new Tuple(map);
     } else {
@@ -241,9 +241,9 @@ public class KnnStream extends TupleStream implements Expressible {
   private ModifiableSolrParams getParams(Map<String, String> props) {
     ModifiableSolrParams params = new ModifiableSolrParams();
-    for(String key : props.keySet()) {
-      String value = props.get(key);
-      params.add(key, value);
+    for(Entry<String, String> entry : props.entrySet()) {
+      String value = entry.getValue();
+      params.add(entry.getKey(), value);
     }
     return params;
   }
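
(The unqualified Entry in this and the following stream classes presumably relies on an import of java.util.Map.Entry at the top of each file, outside the visible hunks.)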


@@ -101,8 +101,8 @@ public class RandomFacadeStream extends TupleStream implements Expressible {
   private SolrParams toSolrParams(Map<String, String> props) {
     ModifiableSolrParams sp = new ModifiableSolrParams();
-    for(String key : props.keySet()) {
-      sp.add(key, props.get(key));
+    for(Map.Entry<String, String> entry : props.entrySet()) {
+      sp.add(entry.getKey(), entry.getValue());
     }
     return sp;
   }


@@ -216,8 +216,8 @@ public class RandomStream extends TupleStream implements Expressible {
     if(documentIterator.hasNext()) {
       Map map = new HashMap();
       SolrDocument doc = documentIterator.next();
-      for(String key : doc.keySet()) {
-        map.put(key, doc.get(key));
+      for(Entry<String, Object> entry : doc.entrySet()) {
+        map.put(entry.getKey(), entry.getValue());
       }
       return new Tuple(map);
     } else {
@@ -230,9 +230,9 @@ public class RandomStream extends TupleStream implements Expressible {
   private ModifiableSolrParams getParams(Map<String, String> props) {
     ModifiableSolrParams params = new ModifiableSolrParams();
-    for(String key : props.keySet()) {
-      String value = props.get(key);
-      params.add(key, value);
+    for(Entry<String, String> entry : props.entrySet()) {
+      String value = entry.getValue();
+      params.add(entry.getKey(), value);
     }
     return params;
   }


@@ -210,8 +210,8 @@ public class SearchStream extends TupleStream implements Expressible {
     if(documentIterator.hasNext()) {
       Map map = new HashMap();
       SolrDocument doc = documentIterator.next();
-      for(String key : doc.keySet()) {
-        map.put(key, doc.get(key));
+      for(Entry<String, Object> entry : doc.entrySet()) {
+        map.put(entry.getKey(), entry.getValue());
       }
       return new Tuple(map);
     } else {