mirror of https://github.com/apache/lucene.git
LUCENE-8994: Code Cleanup - Pass values to list constructor instead of empty constructor followed by addAll(). (#919)
This commit is contained in:
parent 93f9a093cc
commit 575f8a6ad8
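Every hunk below applies the same cleanup: a collection that was created empty and then filled by a single addAll()/putAll() call is now built by passing the source collection straight to the copy constructor. A minimal, self-contained sketch of the before/after shape (the names here are illustrative, not taken from the patch):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class CopyConstructorSketch {
      public static void main(String[] args) {
        List<String> source = Arrays.asList("a", "b", "c");

        // Before: create empty, then bulk-add.
        List<String> before = new ArrayList<>();
        before.addAll(source);

        // After: pass the values to the constructor; one call instead of two,
        // and the backing array is sized from the source up front.
        List<String> after = new ArrayList<>(source);

        System.out.println(before.equals(after)); // true
      }
    }

The same substitution applies to HashSet, TreeSet, LinkedList, and the map types touched below (via the map copy constructor instead of putAll).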
@@ -35,6 +35,8 @@ Other
 * LUCENE-8746: Refactor EdgeTree - Introduce a Component tree that represents the tree of components (e.g polygons).
   Edge tree is now just a tree of edges. (Ignacio Vera)
 
+* LUCENE-8994: Code Cleanup - Pass values to list constructor instead of empty constructor followed by addAll(). (Koen De Groote)
+
 Build
 
 * Upgrade forbiddenapis to version 2.7; upgrade Groovy to 2.4.17. (Uwe Schindler)

@@ -171,6 +173,7 @@ Other
 * LUCENE-8999: LuceneTestCase.expectThrows now propogates assert/assumption failures up to the test
   w/o wrapping in a new assertion failure unless the caller has explicitly expected them (hossman)
 
 
 ======================= Lucene 8.2.0 =======================
 
 API Changes

@@ -237,8 +237,7 @@ public final class OrdsBlockTreeTermsReader extends FieldsProducer {
 
   @Override
   public Collection<Accountable> getChildResources() {
-    List<Accountable> resources = new ArrayList<>();
-    resources.addAll(Accountables.namedAccountables("field", fields));
+    List<Accountable> resources = new ArrayList<>(Accountables.namedAccountables("field", fields));
     resources.add(Accountables.namedAccountable("delegate", postingsReader));
     return Collections.unmodifiableList(resources);
   }

@@ -392,8 +392,7 @@ public final class BloomFilteringPostingsFormat extends PostingsFormat {
 
     @Override
     public Collection<Accountable> getChildResources() {
-      List<Accountable> resources = new ArrayList<>();
-      resources.addAll(Accountables.namedAccountables("field", bloomsByFieldName));
+      List<Accountable> resources = new ArrayList<>(Accountables.namedAccountables("field", bloomsByFieldName));
       if (delegateFieldsProducer != null) {
         resources.add(Accountables.namedAccountable("delegate", delegateFieldsProducer));
       }

@@ -864,8 +864,7 @@ public class FSTOrdTermsReader extends FieldsProducer {
 
   @Override
   public Collection<Accountable> getChildResources() {
-    List<Accountable> resources = new ArrayList<>();
-    resources.addAll(Accountables.namedAccountables("field", fields));
+    List<Accountable> resources = new ArrayList<>(Accountables.namedAccountables("field", fields));
     resources.add(Accountables.namedAccountable("delegate", postingsReader));
     return Collections.unmodifiableList(resources);
   }

@@ -765,8 +765,7 @@ public class FSTTermsReader extends FieldsProducer {
 
   @Override
   public Collection<Accountable> getChildResources() {
-    List<Accountable> resources = new ArrayList<>();
-    resources.addAll(Accountables.namedAccountables("field", fields));
+    List<Accountable> resources = new ArrayList<>(Accountables.namedAccountables("field", fields));
     resources.add(Accountables.namedAccountable("delegate", postingsReader));
     return Collections.unmodifiableCollection(resources);
   }

@@ -336,8 +336,7 @@ public final class BlockTreeTermsReader extends FieldsProducer {
 
   @Override
   public Collection<Accountable> getChildResources() {
-    List<Accountable> resources = new ArrayList<>();
-    resources.addAll(Accountables.namedAccountables("field", fields));
+    List<Accountable> resources = new ArrayList<>(Accountables.namedAccountables("field", fields));
     resources.add(Accountables.namedAccountable("delegate", postingsReader));
     return Collections.unmodifiableList(resources);
  }

@@ -144,8 +144,7 @@ public class IndexReplicationHandler implements ReplicationHandler {
     // if there were any IO errors reading the expected commit point (i.e.
     // segments files mismatch), then ignore that commit either.
     if (commit != null && commit.getSegmentsFileName().equals(segmentsFile)) {
-      Set<String> commitFiles = new HashSet<>();
-      commitFiles.addAll(commit.getFileNames());
+      Set<String> commitFiles = new HashSet<>(commit.getFileNames());
       Matcher matcher = IndexFileNames.CODEC_FILE_PATTERN.matcher("");
       for (String file : dir.listAll()) {
         if (!commitFiles.contains(file)

@@ -233,8 +233,7 @@ public final class VersionBlockTreeTermsReader extends FieldsProducer {
 
   @Override
   public Collection<Accountable> getChildResources() {
-    List<Accountable> resources = new ArrayList<>();
-    resources.addAll(Accountables.namedAccountables("field", fields));
+    List<Accountable> resources = new ArrayList<>(Accountables.namedAccountables("field", fields));
     resources.add(Accountables.namedAccountable("delegate", postingsReader));
     return Collections.unmodifiableList(resources);
   }

@@ -443,8 +443,7 @@ public class GetMavenDependenciesTask extends Task {
   private void appendAllInternalDependencies(StringBuilder builder) {
     for (Map.Entry<String, SortedSet<String>> entry : internalCompileScopeDependencies.entrySet()) {
       String artifactId = entry.getKey();
-      List<String> exclusions = new ArrayList<>();
-      exclusions.addAll(entry.getValue());
+      List<String> exclusions = new ArrayList<>(entry.getValue());
       SortedSet<ExternalDependency> extDeps = allExternalDependencies.get(artifactId);
       if (null != extDeps) {
         for (ExternalDependency externalDependency : extDeps) {

@@ -131,8 +131,7 @@ public class RegexTransformer extends Transformer {
   @SuppressWarnings("unchecked")
   private List<String> readBySplit(String splitBy, String value) {
     String[] vals = value.split(splitBy);
-    List<String> l = new ArrayList<>();
-    l.addAll(Arrays.asList(vals));
+    List<String> l = new ArrayList<>(Arrays.asList(vals));
     return l;
   }
 

@@ -2017,8 +2017,7 @@ public class ZkController implements Closeable {
       ZkNodeProps props = null;
       if (data != null) {
         props = ZkNodeProps.load(data);
-        Map<String, Object> newProps = new HashMap<>();
-        newProps.putAll(props.getProperties());
+        Map<String, Object> newProps = new HashMap<>(props.getProperties());
         newProps.put(CONFIGNAME_PROP, confSetName);
         props = new ZkNodeProps(newProps);
       } else {

@@ -521,11 +521,10 @@ public class ReindexCollectionCmd implements OverseerCollectionMessageHandler.Cm
   private Map<String, Object> setReindexingState(String collection, State state, Map<String, Object> props) throws Exception {
     String path = ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection + REINDEXING_STATE_PATH;
     DistribStateManager stateManager = ocmh.cloudManager.getDistribStateManager();
-    Map<String, Object> copyProps = new HashMap<>();
     if (props == null) { // retrieve existing props, if any
       props = Utils.getJson(stateManager, path);
     }
-    copyProps.putAll(props);
+    Map<String, Object> copyProps = new HashMap<>(props);
     copyProps.put("state", state.toLower());
     if (stateManager.hasData(path)) {
       stateManager.setData(path, Utils.toJSON(copyProps), -1);

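ReindexCollectionCmd is the one place where a statement has to move rather than just collapse: the copy can only be made once props is known to be non-null, so the HashMap copy construction now sits below the null check instead of above it. A hedged sketch of that ordering, with simplified names and a hypothetical stand-in for the Utils.getJson lookup:

    import java.util.HashMap;
    import java.util.Map;

    public class DeferredCopySketch {
      // Hypothetical stand-in for reading existing props (the real code calls Utils.getJson).
      static Map<String, Object> loadExisting() {
        Map<String, Object> existing = new HashMap<>();
        existing.put("state", "running");
        return existing;
      }

      static Map<String, Object> setReindexingState(Map<String, Object> props, String state) {
        if (props == null) { // retrieve existing props, if any
          props = loadExisting();
        }
        // Copy only after props is guaranteed non-null, then overlay the new state.
        Map<String, Object> copyProps = new HashMap<>(props);
        copyProps.put("state", state);
        return copyProps;
      }

      public static void main(String[] args) {
        System.out.println(setReindexingState(null, "idle")); // {state=idle}
      }
    }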
@@ -313,9 +313,8 @@ public class SimNodeStateProvider implements NodeStateProvider {
     if (tags.isEmpty()) {
       return new HashMap<>();
     }
-    Map<String, Object> result = new HashMap<>();
     Map<String, Object> metrics = getReplicaMetricsValues(node, tags.stream().filter(s -> s.startsWith("metrics:solr.core.")).collect(Collectors.toList()));
-    result.putAll(metrics);
+    Map<String, Object> result = new HashMap<>(metrics);
     Map<String, Object> values = nodeValues.get(node);
     if (values == null) {
       return result;

@@ -182,10 +182,9 @@ class SolrCores {
    */
 
   List<SolrCore> getCores() {
-    List<SolrCore> lst = new ArrayList<>();
 
     synchronized (modifyLock) {
-      lst.addAll(cores.values());
+      List<SolrCore> lst = new ArrayList<>(cores.values());
       return lst;
     }
   }

@@ -201,10 +200,10 @@
    * @return List of currently loaded cores.
    */
   Set<String> getLoadedCoreNames() {
-    Set<String> set = new TreeSet<>();
+    Set<String> set;
 
     synchronized (modifyLock) {
-      set.addAll(cores.keySet());
+      set = new TreeSet<>(cores.keySet());
       if (getTransientCacheHandler() != null) {
         set.addAll(getTransientCacheHandler().getLoadedCoreNames());
       }

@@ -239,9 +238,9 @@
    * @return all cores names, whether loaded or unloaded, transient or permanent.
    */
   public Collection<String> getAllCoreNames() {
-    Set<String> set = new TreeSet<>();
+    Set<String> set;
     synchronized (modifyLock) {
-      set.addAll(cores.keySet());
+      set = new TreeSet<>(cores.keySet());
       if (getTransientCacheHandler() != null) {
         set.addAll(getTransientCacheHandler().getAllCoreNames());
       }

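In SolrCores the notable detail is that the copy has to be taken while modifyLock is held, so the constructor call moves inside the synchronized block (getCores) or the variable is split into a declaration plus a locked assignment (getLoadedCoreNames, getAllCoreNames). A small sketch of that thread-safety point, using a made-up RegistrySketch class rather than SolrCores itself:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;
    import java.util.TreeSet;

    public class RegistrySketch {
      private final Object modifyLock = new Object();
      private final Map<String, String> cores = new HashMap<>();

      List<String> getCores() {
        synchronized (modifyLock) {
          // The snapshot is taken under the lock, so no concurrent modification can race the copy.
          return new ArrayList<>(cores.values());
        }
      }

      Set<String> getLoadedCoreNames() {
        Set<String> set;
        synchronized (modifyLock) {
          set = new TreeSet<>(cores.keySet());
          // Any further set.addAll(...) calls (e.g. transient core names) still happen under the lock.
        }
        return set;
      }
    }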
@@ -170,8 +170,7 @@ public class SegmentsInfoRequestHandler extends RequestHandlerBase {
       }
     }
     SimpleOrderedMap<Object> segmentInfo = null;
-    List<SegmentCommitInfo> sortable = new ArrayList<>();
-    sortable.addAll(infos.asList());
+    List<SegmentCommitInfo> sortable = new ArrayList<>(infos.asList());
     // Order by the number of live docs. The display is logarithmic so it is a little jumbled visually
     sortable.sort((s1, s2) ->
       (s2.info.maxDoc() - s2.getDelCount()) - (s1.info.maxDoc() - s1.getDelCount())

@@ -116,9 +116,8 @@ public class SearchHandler extends RequestHandlerBase implements SolrCoreAware ,
   public void inform(SolrCore core)
   {
     this.core = core;
-    Set<String> missing = new HashSet<>();
     List<String> c = (List<String>) initArgs.get(INIT_COMPONENTS);
-    missing.addAll(core.getSearchComponents().checkContains(c));
+    Set<String> missing = new HashSet<>(core.getSearchComponents().checkContains(c));
     List<String> first = (List<String>) initArgs.get(INIT_FIRST_COMPONENTS);
     missing.addAll(core.getSearchComponents().checkContains(first));
     List<String> last = (List<String>) initArgs.get(INIT_LAST_COMPONENTS);

@@ -160,9 +160,8 @@ public class ManagedSynonymFilterFactory extends BaseManagedTokenFilterFactory {
               "Invalid synonym file format! Expected a list of synonyms for "+key+
               " but got "+mapping.getClass().getName());
         }
 
-        Set<String> sortedVals = new TreeSet<>();
-        sortedVals.addAll((List<String>) entry.getValue());
+        Set<String> sortedVals = new TreeSet<>((List<String>) entry.getValue());
         cpsm.mappings.put(key, sortedVals);
       }
     }

@@ -195,8 +194,7 @@
           if (cpsm == null)
             cpsm = new CasePreservedSynonymMappings();
 
-          Set<String> treeTerms = new TreeSet<>();
-          treeTerms.addAll(jsonList);
+          Set<String> treeTerms = new TreeSet<>(jsonList);
           cpsm.mappings.put(origTerm, treeTerms);
           madeChanges = true;
           // only add the cpsm to the synonymMappings if it has valid data

@@ -156,8 +156,7 @@ public class ManagedSynonymGraphFilterFactory extends BaseManagedTokenFilterFact
               " but got "+mapping.getClass().getName());
         }
 
-        Set<String> sortedVals = new TreeSet<>();
-        sortedVals.addAll((List<String>) entry.getValue());
+        Set<String> sortedVals = new TreeSet<>((List<String>) entry.getValue());
         cpsm.mappings.put(key, sortedVals);
       }
     }

@@ -190,8 +189,7 @@
           if (cpsm == null)
             cpsm = new CasePreservedSynonymMappings();
 
-          Set<String> treeTerms = new TreeSet<>();
-          treeTerms.addAll(jsonList);
+          Set<String> treeTerms = new TreeSet<>(jsonList);
           cpsm.mappings.put(origTerm, treeTerms);
           madeChanges = true;
           // only add the cpsm to the synonymMappings if it has valid data

@@ -1012,8 +1012,7 @@ public class HttpSolrCall {
     Collections.shuffle(slices, random);
 
     for (Slice slice : slices) {
-      List<Replica> randomizedReplicas = new ArrayList<>();
-      randomizedReplicas.addAll(slice.getReplicas());
+      List<Replica> randomizedReplicas = new ArrayList<>(slice.getReplicas());
       Collections.shuffle(randomizedReplicas, random);
 
       for (Replica replica : randomizedReplicas) {

@@ -987,8 +987,7 @@ public class SolrCLI {
         cloudManager.saveSnapshot(targetDir, true, redact);
         System.err.println("- saved autoscaling snapshot to " + targetDir.getAbsolutePath());
       }
-      HashSet<String> liveNodes = new HashSet<>();
-      liveNodes.addAll(cloudManager.getClusterStateProvider().getLiveNodes());
+      HashSet<String> liveNodes = new HashSet<>(cloudManager.getClusterStateProvider().getLiveNodes());
       boolean withSuggestions = cli.hasOption("s");
       boolean withDiagnostics = cli.hasOption("d") || cli.hasOption("n");
       boolean withSortedNodes = cli.hasOption("n");

@@ -587,8 +587,7 @@ public abstract class BaseCloudSolrClient extends SolrClient {
       }
       nonRoutableRequest.setParams(nonRoutableParams);
       nonRoutableRequest.setBasicAuthCredentials(request.getBasicAuthUser(), request.getBasicAuthPassword());
-      List<String> urlList = new ArrayList<>();
-      urlList.addAll(routes.keySet());
+      List<String> urlList = new ArrayList<>(routes.keySet());
       Collections.shuffle(urlList, rand);
       LBSolrClient.Req req = new LBSolrClient.Req(nonRoutableRequest, urlList);
       try {

@@ -194,8 +194,7 @@ public class Tuple implements Cloneable, MapWriter {
   }
 
   public Tuple clone() {
-    HashMap m = new HashMap();
-    m.putAll(fields);
+    HashMap m = new HashMap(fields);
     Tuple clone = new Tuple(m);
     return clone;
   }

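The streaming-expression classes (Tuple, SetValueEvaluator, GroupOperation, ShortestPathStream) use raw collection types, and the copy-constructor substitution works the same way there. A sketch of the clone-by-copy idea with a toy RecordSketch class standing in for Solr's Tuple:

    import java.util.HashMap;
    import java.util.Map;

    public class RecordSketch implements Cloneable {
      // Field map, analogous to the fields map in the streaming Tuple class.
      public Map<String, Object> fields = new HashMap<>();

      @Override
      public RecordSketch clone() {
        RecordSketch copy = new RecordSketch();
        // One copy-constructor call replaces new HashMap() followed by putAll(fields).
        copy.fields = new HashMap<>(fields);
        return copy;
      }
    }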
@@ -47,8 +47,7 @@ public class SetValueEvaluator extends RecursiveObjectEvaluator implements ManyV
         value = ((String)value).replace("\"", "");
       }
       key = key.replace("\"", "");
-      Map map = new HashMap();
-      map.putAll(tuple.fields);
+      Map map = new HashMap(tuple.fields);
       map.put(key, value);
       return new Tuple(map);
     } else {

@@ -87,7 +87,6 @@ public class TermVectorsEvaluator extends RecursiveObjectEvaluator implements Ma
 
       List<Tuple> tuples = (List<Tuple>) objects[0];
       TreeMap<String, Integer> docFreqs = new TreeMap();
-      List<String> features = new ArrayList();
       List<String> rowLabels = new ArrayList();
 
       for (Tuple tuple : tuples) {

@@ -148,7 +147,7 @@
       }
       int totalTerms = docFreqs.size();
       Set<String> keys = docFreqs.keySet();
-      features.addAll(keys);
+      List<String> features = new ArrayList(keys);
       double[][] docVec = new double[tuples.size()][];
       for (int t = 0; t < tuples.size(); t++) {
         Tuple tuple = tuples.get(t);

@@ -391,8 +391,7 @@ public class ShortestPathStream extends TupleStream implements Expressible {
       List<String> parents = v.get(p.peekFirst());
       if (parents != null) {
         for(String parent : parents) {
-          LinkedList newPath = new LinkedList();
-          newPath.addAll(p);
+          LinkedList newPath = new LinkedList(p);
           newPath.addFirst(parent);
           newPaths.add(newPath);
         }

@@ -106,17 +106,15 @@ public class GroupOperation implements ReduceOperation {
   }
 
   public Tuple reduce() {
-    Map map = new HashMap();
-    List<Map> list = new ArrayList();
     LinkedList ll = new LinkedList();
     while(priorityQueue.size() > 0) {
       ll.addFirst(priorityQueue.poll().getMap());
       //This will clear priority queue and so it will be ready for the next group.
     }
 
-    list.addAll(ll);
+    List<Map> list = new ArrayList(ll);
     Map groupHead = list.get(0);
-    map.putAll(groupHead);
+    Map map = new HashMap(groupHead);
     map.put("group", list);
     return new Tuple(map);
   }

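GroupOperation.reduce() is the largest single rewrite: rather than declaring the result map and list at the top and filling them later, each collection is now constructed at the point where its contents already exist. A hedged sketch of that shape, using generic types and a plain ArrayDeque in place of the class's raw LinkedList and priority queue:

    import java.util.ArrayDeque;
    import java.util.ArrayList;
    import java.util.Deque;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ReduceSketch {
      static Map<String, Object> reduce(Deque<Map<String, Object>> queue) {
        Deque<Map<String, Object>> reversed = new ArrayDeque<>();
        while (!queue.isEmpty()) {
          reversed.addFirst(queue.poll()); // drain the queue, reversing its order
        }

        // Build the list and the result map directly from the data they wrap.
        List<Map<String, Object>> list = new ArrayList<>(reversed);
        Map<String, Object> map = new HashMap<>(list.get(0));
        map.put("group", list);
        return map;
      }

      public static void main(String[] args) {
        Deque<Map<String, Object>> queue = new ArrayDeque<>();
        Map<String, Object> head = new HashMap<>();
        head.put("id", 1);
        queue.add(head);
        System.out.println(reduce(queue)); // e.g. {id=1, group=[{id=1}]}
      }
    }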
@@ -169,8 +169,7 @@ public class SchemaResponse extends SolrResponseBase {
     List<Map<String, Object>> fieldsAttributes = new LinkedList<>();
     List<NamedList<Object>> fieldsResponse = (List<NamedList<Object>>) schemaNamedList.get("fields");
     for (NamedList<Object> fieldNamedList : fieldsResponse) {
-      Map<String, Object> fieldAttributes = new LinkedHashMap<>();
-      fieldAttributes.putAll(extractAttributeMap(fieldNamedList));
+      Map<String, Object> fieldAttributes = new LinkedHashMap<>(extractAttributeMap(fieldNamedList));
       fieldsAttributes.add(fieldAttributes);
     }
 

@@ -182,8 +181,7 @@
     List<Map<String, Object>> dynamicFieldsAttributes = new LinkedList<>();
     List<NamedList<Object>> dynamicFieldsResponse = (List<NamedList<Object>>) schemaNamedList.get("dynamicFields");
     for (NamedList<Object> fieldNamedList : dynamicFieldsResponse) {
-      Map<String, Object> dynamicFieldAttributes = new LinkedHashMap<>();
-      dynamicFieldAttributes.putAll(extractAttributeMap(fieldNamedList));
+      Map<String, Object> dynamicFieldAttributes = new LinkedHashMap<>(extractAttributeMap(fieldNamedList));
       dynamicFieldsAttributes.add(dynamicFieldAttributes);
     }
 

@@ -195,8 +193,7 @@
     List<Map<String, Object>> copyFieldsAttributes = new LinkedList<>();
     List<NamedList<Object>> copyFieldsResponse = (List<NamedList<Object>>) schemaNamedList.get("copyFields");
     for (NamedList<Object> copyFieldNamedList : copyFieldsResponse) {
-      Map<String, Object> copyFieldAttributes = new LinkedHashMap<>();
-      copyFieldAttributes.putAll(extractAttributeMap(copyFieldNamedList));
+      Map<String, Object> copyFieldAttributes = new LinkedHashMap<>(extractAttributeMap(copyFieldNamedList));
       copyFieldsAttributes.add(copyFieldAttributes);
     }
 

@@ -2158,8 +2158,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
 
     log.info("Took {} ms to see all replicas become active.", timer.getTime());
 
-    List<Replica> replicas = new ArrayList<>();
-    replicas.addAll(notLeaders.values());
+    List<Replica> replicas = new ArrayList<>(notLeaders.values());
     return replicas;
   }
 