mirror of https://github.com/apache/lucene.git

Changing keySet() to entrySet() and sometimes values(). (#868)

parent ec1ef2bce6
commit ec78ef7852
@@ -166,6 +166,8 @@ Other

 * LUCENE-8758: QuadPrefixTree: removed levelS and levelN fields which weren't used. (Amish Shah)

+* LUCENE-8975: Code Cleanup: Use entryset for map iteration wherever possible.
+
 ======================= Lucene 8.2.0 =======================

 API Changes

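The hunks below all apply one pattern: a loop over a map's keySet() that also calls get(key) inside the body performs a second hash lookup on every iteration, while iterating entrySet() hands over each key/value pair in a single pass. A minimal standalone sketch of the before/after shapes (hypothetical map and names, not drawn from the files changed here):

import java.util.HashMap;
import java.util.Map;

public class EntrySetSketch {
  public static void main(String[] args) {
    Map<String, Integer> counts = new HashMap<>();
    counts.put("a", 1);
    counts.put("b", 2);

    // Before: one lookup to iterate the key, a second inside get().
    for (String key : counts.keySet()) {
      System.out.println(key + "=" + counts.get(key));
    }

    // After: each Map.Entry already carries both key and value.
    for (Map.Entry<String, Integer> entry : counts.entrySet()) {
      System.out.println(entry.getKey() + "=" + entry.getValue());
    }
  }
}

Both loops print the same pairs; the entrySet() form simply avoids the per-key get() and reads more directly wherever both key and value are needed.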
@@ -194,11 +194,11 @@ public final class UserDictionary implements Dictionary {
   */
  private int[][] toIndexArray(Map<Integer, int[]> input) {
    ArrayList<int[]> result = new ArrayList<>();
-   for (int i : input.keySet()) {
-     int[] wordIdAndLength = input.get(i);
+   for (Map.Entry<Integer, int[]> entry : input.entrySet()) {
+     int[] wordIdAndLength = entry.getValue();
      int wordId = wordIdAndLength[0];
      // convert length to index
-     int current = i;
+     int current = entry.getKey();
      for (int j = 1; j < wordIdAndLength.length; j++) { // first entry is wordId offset
        int[] token = { wordId + j - 1, current, wordIdAndLength[j] };
        result.add(token);

@@ -102,8 +102,8 @@ public abstract class SpanWeight extends Weight {
        return null;
      TermStatistics[] termStats = new TermStatistics[termStates.size()];
      int termUpTo = 0;
-     for (Term term : termStates.keySet()) {
-       TermStatistics termStatistics = searcher.termStatistics(term, termStates.get(term));
+     for (Map.Entry<Term, TermStates> entry : termStates.entrySet()) {
+       TermStatistics termStatistics = searcher.termStatistics(entry.getKey(), entry.getValue());
        if (termStatistics != null) {
          termStats[termUpTo++] = termStatistics;
        }

@@ -90,9 +90,9 @@ public abstract class PivotNode<T> extends SortableFacet implements Consumer<Str
   */
  public void exportPivot(DataOutput output, Map<String,T> pivot) throws IOException {
    output.writeInt(pivot.size());
-   for (String pivotValue : pivot.keySet()) {
-     output.writeUTF(pivotValue);
-     exportPivotValue(output, pivot.get(pivotValue));
+   for (Map.Entry<String, T> entry : pivot.entrySet()) {
+     output.writeUTF(entry.getKey());
+     exportPivotValue(output, entry.getValue());
    }
  }
  /**

@@ -160,9 +160,10 @@ public class CreateCollectionCmd implements OverseerCollectionMessageHandler.Cmd

    Map<String,String> collectionParams = new HashMap<>();
    Map<String,Object> collectionProps = message.getProperties();
-   for (String propName : collectionProps.keySet()) {
+   for (Map.Entry<String, Object> entry : collectionProps.entrySet()) {
+     String propName = entry.getKey();
      if (propName.startsWith(ZkController.COLLECTION_PARAM_PREFIX)) {
-       collectionParams.put(propName.substring(ZkController.COLLECTION_PARAM_PREFIX.length()), (String) collectionProps.get(propName));
+       collectionParams.put(propName.substring(ZkController.COLLECTION_PARAM_PREFIX.length()), (String) entry.getValue());
      }
    }

@@ -203,9 +203,9 @@ public class CoreDescriptor {
    coreProperties.putAll(defaultProperties);
    coreProperties.put(CORE_NAME, name);

-   for (String propname : coreProps.keySet()) {
-     String propvalue = coreProps.get(propname);
+   for (Map.Entry<String, String> entry : coreProps.entrySet()) {
+     String propname = entry.getKey();
+     String propvalue = entry.getValue();

      if (isUserDefinedProperty(propname))
        originalExtraProperties.put(propname, propvalue);

@@ -1060,8 +1060,9 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
    copy(req.getParams().required(), m, COLLECTION_PROP);
    addMapObject(m, RULE);
    addMapObject(m, SNITCH);
-   for (String prop : m.keySet()) {
-     if ("".equals(m.get(prop))) {
+   for (Map.Entry<String, Object> entry : m.entrySet()) {
+     String prop = entry.getKey();
+     if ("".equals(entry.getValue())) {
        // set to an empty string is equivalent to removing the property, see SOLR-12507
        m.put(prop, null);
      }

@@ -183,12 +183,12 @@ public final class ZookeeperInfoHandler extends RequestHandlerBase {
    boolean replicaInRecovery = false;

    Map<String, Object> shards = (Map<String, Object>) collectionState.get("shards");
-   for (String shardId : shards.keySet()) {
+   for (Object o : shards.values()) {
      boolean hasActive = false;
-     Map<String, Object> shard = (Map<String, Object>) shards.get(shardId);
+     Map<String, Object> shard = (Map<String, Object>) o;
      Map<String, Object> replicas = (Map<String, Object>) shard.get("replicas");
-     for (String replicaId : replicas.keySet()) {
-       Map<String, Object> replicaState = (Map<String, Object>) replicas.get(replicaId);
+     for (Object value : replicas.values()) {
+       Map<String, Object> replicaState = (Map<String, Object>) value;
        Replica.State coreState = Replica.State.getState((String) replicaState.get(ZkStateReader.STATE_PROP));
        String nodeName = (String) replicaState.get("node_name");

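In the hunk above the keys (shardId, replicaId) were never used, so the loops switch to values() rather than entrySet(), as the commit title says. A small sketch of that variant, with illustrative names not taken from the handler:

import java.util.HashMap;
import java.util.Map;

public class ValuesSketch {
  public static void main(String[] args) {
    Map<String, Integer> replicaCounts = new HashMap<>();
    replicaCounts.put("shard1", 3);
    replicaCounts.put("shard2", 5);

    // When the key is never consulted, values() states that intent
    // directly and drops the per-key get() entirely.
    int total = 0;
    for (int count : replicaCounts.values()) {
      total += count;
    }
    System.out.println(total); // prints 8
  }
}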
@@ -280,8 +280,8 @@ public class Log4j2Watcher extends LogWatcher<LogEvent> {

    Map<String,String> contextMap = event.getContextMap();
    if (contextMap != null) {
-     for (String key : contextMap.keySet())
-       doc.setField(key, contextMap.get(key));
+     for (Map.Entry<String, String> entry : contextMap.entrySet())
+       doc.setField(entry.getKey(), entry.getValue());
    }

    if (!doc.containsKey("core"))

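For a straight copy like the one above, Java 8's Map.forEach is an equivalent idiom the commit could equally have used, since it also delivers key and value together. A sketch with plain maps standing in for the log event and the document (assumed names):

import java.util.HashMap;
import java.util.Map;

public class ForEachSketch {
  public static void main(String[] args) {
    Map<String, String> contextMap = new HashMap<>();
    contextMap.put("trace", "abc123");

    Map<String, String> doc = new HashMap<>();
    // forEach passes each key/value pair to the BiConsumer, so a
    // method reference to put() copies the whole map in one line.
    contextMap.forEach(doc::put);
    System.out.println(doc); // prints {trace=abc123}
  }
}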
@@ -179,13 +179,14 @@ public class RequestUtil {
      }
      mergeJSON(json, JSON, jsonS, new ObjectUtil.ConflictHandler());
    }
-   for (String key : newMap.keySet()) {
+   for (Map.Entry<String, String[]> entry : newMap.entrySet()) {
+     String key = entry.getKey();
      // json.nl, json.wrf are existing query parameters
      if (key.startsWith("json.") && !("json.nl".equals(key) || "json.wrf".equals(key))) {
        if (json == null) {
          json = new LinkedHashMap<>();
        }
-       mergeJSON(json, key, newMap.get(key), new ObjectUtil.ConflictHandler());
+       mergeJSON(json, key, entry.getValue(), new ObjectUtil.ConflictHandler());
      }
    }

@@ -137,7 +137,8 @@ public class ManagedSynonymGraphFilterFactory extends BaseManagedTokenFilterFact
    synonymMappings = new TreeMap<>();
    if (managedData != null) {
      Map<String,Object> storedSyns = (Map<String,Object>)managedData;
-     for (String key : storedSyns.keySet()) {
+     for (Map.Entry<String, Object> entry : storedSyns.entrySet()) {
+       String key = entry.getKey();

        String caseKey = applyCaseSetting(ignoreCase, key);
        CasePreservedSynonymMappings cpsm = synonymMappings.get(caseKey);
@@ -148,7 +149,7 @@ public class ManagedSynonymGraphFilterFactory extends BaseManagedTokenFilterFact

        // give the nature of our JSON parsing solution, we really have
        // no guarantees on what is in the file
-       Object mapping = storedSyns.get(key);
+       Object mapping = entry.getValue();
        if (!(mapping instanceof List)) {
          throw new SolrException(ErrorCode.SERVER_ERROR,
              "Invalid synonym file format! Expected a list of synonyms for "+key+
@@ -156,7 +157,7 @@ public class ManagedSynonymGraphFilterFactory extends BaseManagedTokenFilterFact
        }

        Set<String> sortedVals = new TreeSet<>();
-       sortedVals.addAll((List<String>)storedSyns.get(key));
+       sortedVals.addAll((List<String>) entry.getValue());
        cpsm.mappings.put(key, sortedVals);
      }
    }
@@ -264,8 +265,8 @@ public class ManagedSynonymGraphFilterFactory extends BaseManagedTokenFilterFact
  protected Map<String,Set<String>> getStoredView() {
    Map<String,Set<String>> storedView = new TreeMap<>();
    for (CasePreservedSynonymMappings cpsm : synonymMappings.values()) {
-     for (String key : cpsm.mappings.keySet()) {
-       storedView.put(key, cpsm.mappings.get(key));
+     for (Map.Entry<String, Set<String>> entry : cpsm.mappings.entrySet()) {
+       storedView.put(entry.getKey(), entry.getValue());
      }
    }
    return storedView;
@@ -361,10 +362,10 @@ public class ManagedSynonymGraphFilterFactory extends BaseManagedTokenFilterFact
    public void parse(Reader in) throws IOException, ParseException {
      boolean ignoreCase = synonymManager.getIgnoreCase();
      for (CasePreservedSynonymMappings cpsm : synonymManager.synonymMappings.values()) {
-       for (String term : cpsm.mappings.keySet()) {
-         for (String mapping : cpsm.mappings.get(term)) {
+       for (Map.Entry<String, Set<String>> entry : cpsm.mappings.entrySet()) {
+         for (String mapping : entry.getValue()) {
            // apply the case setting to match the behavior of the SynonymMap builder
-           CharsRef casedTerm = analyze(synonymManager.applyCaseSetting(ignoreCase, term), new CharsRefBuilder());
+           CharsRef casedTerm = analyze(synonymManager.applyCaseSetting(ignoreCase, entry.getKey()), new CharsRefBuilder());
            CharsRef casedMapping = analyze(synonymManager.applyCaseSetting(ignoreCase, mapping), new CharsRefBuilder());
            add(casedTerm, casedMapping, false);
          }

@@ -1004,9 +1004,10 @@ public abstract class FieldType extends FieldProperties {
    if (showDefaults) {
      Map<String,String> fieldTypeArgs = getNonFieldPropertyArgs();
      if (null != fieldTypeArgs) {
-       for (String key : fieldTypeArgs.keySet()) {
-         if ( ! CLASS_NAME.equals(key) && ! TYPE_NAME.equals(key)) {
-           namedPropertyValues.add(key, fieldTypeArgs.get(key));
+       for (Map.Entry<String, String> entry : fieldTypeArgs.entrySet()) {
+         String key = entry.getKey();
+         if ( ! CLASS_NAME.equals(key) && ! TYPE_NAME.equals(key)) {
+           namedPropertyValues.add(key, entry.getValue());
          }
        }
      }
@@ -1048,11 +1049,12 @@ public abstract class FieldType extends FieldProperties {
      fieldProperties.add(propertyName);
    }

-   for (String key : args.keySet()) {
+   for (Map.Entry<String, String> entry : args.entrySet()) {
+     String key = entry.getKey();
      if (fieldProperties.contains(key)) {
-       namedPropertyValues.add(key, StrUtils.parseBool(args.get(key)));
+       namedPropertyValues.add(key, StrUtils.parseBool(entry.getValue()));
      } else if (!CLASS_NAME.equals(key) && !TYPE_NAME.equals(key)) {
-       namedPropertyValues.add(key, args.get(key));
+       namedPropertyValues.add(key, entry.getValue());
      }
    }
  }
@@ -1114,14 +1116,15 @@ public abstract class FieldType extends FieldProperties {
      props.add(CLASS_NAME, charFilterFactory.getClassArg());
    }
    if (null != factoryArgs) {
-     for (String key : factoryArgs.keySet()) {
+     for (Map.Entry<String, String> entry : factoryArgs.entrySet()) {
+       String key = entry.getKey();
        if ( ! CLASS_NAME.equals(key)) {
          if (LUCENE_MATCH_VERSION_PARAM.equals(key)) {
            if (charFilterFactory.isExplicitLuceneMatchVersion()) {
-             props.add(key, factoryArgs.get(key));
+             props.add(key, entry.getValue());
            }
          } else {
-           props.add(key, factoryArgs.get(key));
+           props.add(key, entry.getValue());
          }
        }
      }
@@ -1138,14 +1141,15 @@ public abstract class FieldType extends FieldProperties {
      tokenizerProps.add(CLASS_NAME, tokenizerFactory.getClassArg());
    }
    if (null != factoryArgs) {
-     for (String key : factoryArgs.keySet()) {
+     for (Map.Entry<String, String> entry : factoryArgs.entrySet()) {
+       String key = entry.getKey();
        if ( ! CLASS_NAME.equals(key)) {
          if (LUCENE_MATCH_VERSION_PARAM.equals(key)) {
            if (tokenizerFactory.isExplicitLuceneMatchVersion()) {
-             tokenizerProps.add(key, factoryArgs.get(key));
+             tokenizerProps.add(key, entry.getValue());
            }
          } else {
-           tokenizerProps.add(key, factoryArgs.get(key));
+           tokenizerProps.add(key, entry.getValue());
          }
        }
      }
@@ -1162,14 +1166,15 @@ public abstract class FieldType extends FieldProperties {
      props.add(CLASS_NAME, filterFactory.getClassArg());
    }
    if (null != factoryArgs) {
-     for (String key : factoryArgs.keySet()) {
+     for (Map.Entry<String, String> entry : factoryArgs.entrySet()) {
+       String key = entry.getKey();
        if ( ! CLASS_NAME.equals(key)) {
          if (LUCENE_MATCH_VERSION_PARAM.equals(key)) {
            if (filterFactory.isExplicitLuceneMatchVersion()) {
-             props.add(key, factoryArgs.get(key));
+             props.add(key, entry.getValue());
            }
          } else {
-           props.add(key, factoryArgs.get(key));
+           props.add(key, entry.getValue());
          }
        }
      }

@@ -122,9 +122,9 @@ public class DisMaxQParser extends QParser {
    for (String boostFunc : boostFuncs) {
      if (null == boostFunc || "".equals(boostFunc)) continue;
      Map<String, Float> ff = SolrPluginUtils.parseFieldBoosts(boostFunc);
-     for (String f : ff.keySet()) {
-       Query fq = subQuery(f, FunctionQParserPlugin.NAME).getQuery();
-       Float b = ff.get(f);
+     for (Map.Entry<String, Float> entry : ff.entrySet()) {
+       Query fq = subQuery(entry.getKey(), FunctionQParserPlugin.NAME).getQuery();
+       Float b = entry.getValue();
        if (null != b) {
          fq = new BoostQuery(fq, b);
        }

@@ -541,9 +541,9 @@ public class ExtendedDismaxQParser extends QParser {
    for (String boostFunc : config.boostFuncs) {
      if(null == boostFunc || "".equals(boostFunc)) continue;
      Map<String,Float> ff = SolrPluginUtils.parseFieldBoosts(boostFunc);
-     for (String f : ff.keySet()) {
-       Query fq = subQuery(f, FunctionQParserPlugin.NAME).getQuery();
-       Float b = ff.get(f);
+     for (Map.Entry<String, Float> entry : ff.entrySet()) {
+       Query fq = subQuery(entry.getKey(), FunctionQParserPlugin.NAME).getQuery();
+       Float b = entry.getValue();
        if (null != b && b.floatValue() != 1f) {
          fq = new BoostQuery(fq, b);
        }

@@ -367,12 +367,12 @@ public abstract class FacetRequest {
  @Override
  public String toString() {
    Map<String, Object> descr = getFacetDescription();
-   String s = "facet request: { ";
-   for (String key : descr.keySet()) {
-     s += key + ":" + descr.get(key) + ",";
+   StringBuilder s = new StringBuilder("facet request: { ");
+   for (Map.Entry<String, Object> entry : descr.entrySet()) {
+     s.append(entry.getKey()).append(':').append(entry.getValue()).append(',');
    }
-   s += "}";
-   return s;
+   s.append('}');
+   return s.toString();
  }

  /**

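Besides the entrySet() switch, this hunk replaces string concatenation in a loop with a StringBuilder: each `s += ...` allocates a fresh String holding a copy of everything accumulated so far, while append() grows a single buffer. A toy demonstration of the rewritten method body (illustrative map contents):

import java.util.LinkedHashMap;
import java.util.Map;

public class ToStringSketch {
  public static void main(String[] args) {
    Map<String, Object> descr = new LinkedHashMap<>();
    descr.put("field", "cat_s");
    descr.put("limit", 10);

    // One growable buffer instead of a new String per +=.
    StringBuilder s = new StringBuilder("facet request: { ");
    for (Map.Entry<String, Object> entry : descr.entrySet()) {
      s.append(entry.getKey()).append(':').append(entry.getValue()).append(',');
    }
    s.append('}');
    System.out.println(s); // facet request: { field:cat_s,limit:10,}
  }
}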
@@ -100,10 +100,10 @@ public class SimpleMLTQParser extends QParser {
    } else {
      Map<String, SchemaField> fieldDefinitions = req.getSearcher().getSchema().getFields();
      ArrayList<String> fields = new ArrayList();
-     for (String fieldName : fieldDefinitions.keySet()) {
-       if (fieldDefinitions.get(fieldName).indexed() && fieldDefinitions.get(fieldName).stored())
-         if (fieldDefinitions.get(fieldName).getType().getNumberType() == null)
-           fields.add(fieldName);
+     for (Map.Entry<String, SchemaField> entry : fieldDefinitions.entrySet()) {
+       if (entry.getValue().indexed() && entry.getValue().stored())
+         if (entry.getValue().getType().getNumberType() == null)
+           fields.add(entry.getKey());
      }
      fieldNames = fields.toArray(new String[0]);
    }

@@ -195,8 +195,9 @@ public class AtomicUpdateDocumentMerger {
    }
    // else it's a atomic update map...
    Map<String, Object> fieldValueMap = (Map<String, Object>)fieldValue;
-   for (String op : fieldValueMap.keySet()) {
-     Object obj = fieldValueMap.get(op);
+   for (Entry<String, Object> entry : fieldValueMap.entrySet()) {
+     String op = entry.getKey();
+     Object obj = entry.getValue();
      if (!op.equals("set") && !op.equals("inc")) {
        // not a supported in-place update op
        return Collections.emptySet();

@@ -3467,8 +3467,9 @@ public class SolrCLI implements CLIO {
    Map<String,String> startEnv = new HashMap<>();
    Map<String,String> procEnv = EnvironmentUtils.getProcEnvironment();
    if (procEnv != null) {
-     for (String envVar : procEnv.keySet()) {
-       String envVarVal = procEnv.get(envVar);
+     for (Map.Entry<String, String> entry : procEnv.entrySet()) {
+       String envVar = entry.getKey();
+       String envVarVal = entry.getValue();
        if (envVarVal != null && !"EXAMPLE".equals(envVar) && !envVar.startsWith("SOLR_")) {
          startEnv.put(envVar, envVarVal);
        }

@@ -227,8 +227,8 @@ public class KnnStream extends TupleStream implements Expressible {
    if(documentIterator.hasNext()) {
      Map map = new HashMap();
      SolrDocument doc = documentIterator.next();
-     for(String key : doc.keySet()) {
-       map.put(key, doc.get(key));
+     for(Entry<String, Object> entry : doc.entrySet()) {
+       map.put(entry.getKey(), entry.getValue());
      }
      return new Tuple(map);
    } else {
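The loop above copies a SolrDocument's fields into the (pre-existing, raw-typed) map one entry at a time; the entrySet() change applies unchanged. The same copy can be sketched with plain typed maps; note that in plain Java `new HashMap<>(doc)` or putAll() would do the copy in one call, the explicit loop just mirrors the stream code's shape (hypothetical names):

import java.util.HashMap;
import java.util.Map;

public class CopyEntriesSketch {
  public static void main(String[] args) {
    Map<String, Object> doc = new HashMap<>();
    doc.put("id", "1");
    doc.put("score", 0.42);

    // Each entry is copied once, with no get() lookup per key.
    Map<String, Object> map = new HashMap<>();
    for (Map.Entry<String, Object> entry : doc.entrySet()) {
      map.put(entry.getKey(), entry.getValue());
    }
    System.out.println(map);
  }
}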
@@ -241,9 +241,9 @@ public class KnnStream extends TupleStream implements Expressible {

  private ModifiableSolrParams getParams(Map<String, String> props) {
    ModifiableSolrParams params = new ModifiableSolrParams();
-   for(String key : props.keySet()) {
-     String value = props.get(key);
-     params.add(key, value);
+   for(Entry<String, String> entry : props.entrySet()) {
+     String value = entry.getValue();
+     params.add(entry.getKey(), value);
    }
    return params;
  }

@@ -101,8 +101,8 @@ public class RandomFacadeStream extends TupleStream implements Expressible {

  private SolrParams toSolrParams(Map<String, String> props) {
    ModifiableSolrParams sp = new ModifiableSolrParams();
-   for(String key : props.keySet()) {
-     sp.add(key, props.get(key));
+   for(Map.Entry<String, String> entry : props.entrySet()) {
+     sp.add(entry.getKey(), entry.getValue());
    }
    return sp;
  }

@@ -216,8 +216,8 @@ public class RandomStream extends TupleStream implements Expressible {
    if(documentIterator.hasNext()) {
      Map map = new HashMap();
      SolrDocument doc = documentIterator.next();
-     for(String key : doc.keySet()) {
-       map.put(key, doc.get(key));
+     for(Entry<String, Object> entry : doc.entrySet()) {
+       map.put(entry.getKey(), entry.getValue());
      }
      return new Tuple(map);
    } else {
@@ -230,9 +230,9 @@ public class RandomStream extends TupleStream implements Expressible {

  private ModifiableSolrParams getParams(Map<String, String> props) {
    ModifiableSolrParams params = new ModifiableSolrParams();
-   for(String key : props.keySet()) {
-     String value = props.get(key);
-     params.add(key, value);
+   for(Entry<String, String> entry : props.entrySet()) {
+     String value = entry.getValue();
+     params.add(entry.getKey(), value);
    }
    return params;
  }

@@ -210,8 +210,8 @@ public class SearchStream extends TupleStream implements Expressible {
    if(documentIterator.hasNext()) {
      Map map = new HashMap();
      SolrDocument doc = documentIterator.next();
-     for(String key : doc.keySet()) {
-       map.put(key, doc.get(key));
+     for(Entry<String, Object> entry : doc.entrySet()) {
+       map.put(entry.getKey(), entry.getValue());
      }
      return new Tuple(map);
    } else {