SOLR-6615: use constants for 'sort', 'distrib'

Noble Paul, 2017-03-23 18:12:20 +10:30, committed by Shalin Shekhar Mangar
parent 0214722db5
commit 09739b66cb
37 changed files with 151 additions and 93 deletions
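
Every hunk below follows the same pattern: add a static import of the relevant CommonParams constant and swap it in for the bare "sort" or "distrib" string. A minimal sketch of the resulting call sites, assuming solr-solrj is on the classpath (the class name and parameter values are illustrative, not part of the commit):

import org.apache.solr.common.params.ModifiableSolrParams;

import static org.apache.solr.common.params.CommonParams.DISTRIB;
import static org.apache.solr.common.params.CommonParams.SORT;

public class CommonParamsUsageSketch {
  public static void main(String[] args) {
    ModifiableSolrParams params = new ModifiableSolrParams();
    // Before this commit the call sites used bare strings:
    //   params.set("sort", "score desc");
    //   params.set("distrib", false);
    // After it they reference the shared constants instead:
    params.set(SORT, "score desc");   // CommonParams.SORT == "sort"
    params.set(DISTRIB, false);       // CommonParams.DISTRIB == "distrib"
    System.out.println(params);
  }
}

Using the constants keeps the parameter names in one place, so a typo in a literal becomes a compile error instead of a silently ignored request parameter.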

View File

@ -58,6 +58,8 @@ import org.apache.velocity.tools.generic.SortTool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.solr.common.params.CommonParams.SORT;
public class VelocityResponseWriter implements QueryResponseWriter, SolrCoreAware {
// init param names, these are _only_ loaded at init time (no per-request control of these)
// - multiple different named writers could be created with different init params
@ -204,7 +206,7 @@ public class VelocityResponseWriter implements QueryResponseWriter, SolrCoreAwar
context.put("esc", new EscapeTool());
context.put("date", new ComparisonDateTool());
context.put("list", new ListTool());
context.put("sort", new SortTool());
context.put(SORT, new SortTool());
MathTool mathTool = new MathTool();
mathTool.configure(toolConfig);

View File

@ -44,6 +44,8 @@ import org.apache.solr.update.UpdateShardHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.solr.common.params.CommonParams.DISTRIB;
public class SyncStrategy {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@ -258,7 +260,7 @@ public class SyncStrategy {
sreq.actualShards = sreq.shards;
sreq.params = new ModifiableSolrParams();
sreq.params.set("qt","/get");
sreq.params.set("distrib",false);
sreq.params.set(DISTRIB,false);
sreq.params.set("getVersions",Integer.toString(nUpdates));
sreq.params.set("sync",leaderUrl);

View File

@ -16,21 +16,23 @@
*/
package org.apache.solr.core;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrRequestInfo;
import org.apache.solr.response.ResultContext;
import org.apache.solr.search.SolrIndexSearcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.solr.search.DocList;
import org.apache.solr.search.DocIterator;
import java.lang.invoke.MethodHandles;
import java.util.List;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrRequestInfo;
import org.apache.solr.response.ResultContext;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.search.DocIterator;
import org.apache.solr.search.DocList;
import org.apache.solr.search.SolrIndexSearcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.invoke.MethodHandles;
import java.util.List;
import static org.apache.solr.common.params.CommonParams.DISTRIB;
/**
*
@ -55,8 +57,8 @@ public class QuerySenderListener extends AbstractSolrEventListener {
// bind the request to a particular searcher (the newSearcher)
NamedList params = addEventParms(currentSearcher, nlst);
// for this, we default to distrib = false
if (params.get("distrib") == null) {
params.add("distrib", false);
if (params.get(DISTRIB) == null) {
params.add(DISTRIB, false);
}
req = new LocalSolrQueryRequest(getCore(),params) {
@Override public SolrIndexSearcher getSearcher() { return searcher; }

View File

@ -66,6 +66,7 @@ import org.slf4j.LoggerFactory;
import static java.util.Collections.singletonMap;
import static org.apache.solr.common.params.CommonParams.ID;
import static org.apache.solr.common.params.CommonParams.JSON;
import static org.apache.solr.common.params.CommonParams.SORT;
import static org.apache.solr.common.params.CommonParams.VERSION;
import static org.apache.solr.common.util.Utils.makeMap;
@ -204,7 +205,7 @@ public class BlobHandler extends RequestHandlerBase implements PluginInfoInitial
new MapSolrParams((Map) makeMap(
"q", StrUtils.formatString(q, blobName, version),
"fl", "id,size,version,timestamp,blobName,md5",
"sort", "version desc"))
SORT, "version desc"))
, rsp);
}
}

View File

@ -57,6 +57,8 @@ import org.apache.solr.util.plugin.SolrCoreAware;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.solr.common.params.CommonParams.SORT;
public class GraphHandler extends RequestHandlerBase implements SolrCoreAware, PermissionNameProvider {
private StreamFactory streamFactory = new StreamFactory();
@ -118,7 +120,7 @@ public class GraphHandler extends RequestHandlerBase implements SolrCoreAware, P
.withFunctionName("shortestPath", ShortestPathStream.class)
.withFunctionName("gatherNodes", GatherNodesStream.class)
.withFunctionName("nodes", GatherNodesStream.class)
.withFunctionName("sort", SortStream.class)
.withFunctionName(SORT, SortStream.class)
.withFunctionName("scoreNodes", ScoreNodesStream.class)
.withFunctionName("random", RandomStream.class)

View File

@ -38,6 +38,8 @@ import org.apache.solr.util.plugin.SolrCoreAware;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.solr.common.params.CommonParams.DISTRIB;
/**
* Ping Request Handler for reporting SolrCore health to a Load Balancer.
*
@ -180,10 +182,10 @@ public class PingRequestHandler extends RequestHandlerBase implements SolrCoreAw
// in this case, we want to default distrib to false so
// we only ping the single node
Boolean distrib = params.getBool("distrib");
Boolean distrib = params.getBool(DISTRIB);
if (distrib == null) {
ModifiableSolrParams mparams = new ModifiableSolrParams(params);
mparams.set("distrib", false);
mparams.set(DISTRIB, false);
req.setParams(mparams);
}

View File

@ -81,6 +81,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.solr.common.params.CommonParams.ID;
import static org.apache.solr.common.params.CommonParams.SORT;
public class StreamHandler extends RequestHandlerBase implements SolrCoreAware, PermissionNameProvider {
@ -148,7 +149,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
.withFunctionName("outerHashJoin", OuterHashJoinStream.class)
.withFunctionName("intersect", IntersectStream.class)
.withFunctionName("complement", ComplementStream.class)
.withFunctionName("sort", SortStream.class)
.withFunctionName(SORT, SortStream.class)
.withFunctionName("train", TextLogitStream.class)
.withFunctionName("features", FeaturesSelectionStream.class)
.withFunctionName("daemon", DaemonStream.class)

View File

@ -38,6 +38,8 @@ import org.apache.http.client.HttpClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.solr.common.params.CommonParams.DISTRIB;
public abstract class IterativeMergeStrategy implements MergeStrategy {
protected ExecutorService executorService;
@ -89,7 +91,7 @@ public abstract class IterativeMergeStrategy implements MergeStrategy {
this.originalShardResponse = originalShardResponse;
req.setMethod(SolrRequest.METHOD.POST);
ModifiableSolrParams params = (ModifiableSolrParams)req.getParams();
params.add("distrib", "false");
params.add(DISTRIB, "false");
}
public QueryResponse getResponse() {

View File

@ -51,6 +51,8 @@ import org.apache.solr.search.SolrReturnFields;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.solr.common.params.CommonParams.SORT;
/**
* TODO!
*
@ -339,7 +341,7 @@ public class MoreLikeThisComponent extends SearchComponent {
String id = rb.req.getSchema().getUniqueKeyField()
.getName();
s.params.set(CommonParams.FL, "score," + id);
s.params.set("sort", "score desc");
s.params.set(SORT, "score desc");
// MLT Query is submitted as normal query to shards.
s.params.set(CommonParams.Q, q);

View File

@ -82,6 +82,7 @@ import org.apache.solr.util.RefCounted;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.solr.common.params.CommonParams.DISTRIB;
import static org.apache.solr.common.params.CommonParams.ID;
import static org.apache.solr.common.params.CommonParams.VERSION_FIELD;
@ -840,7 +841,7 @@ public class RealTimeGetComponent extends SearchComponent
// TODO: how to avoid hardcoding this and hit the same handler?
sreq.params.set(ShardParams.SHARDS_QT,"/get");
sreq.params.set("distrib",false);
sreq.params.set(DISTRIB,false);
sreq.params.remove(ShardParams.SHARDS);
sreq.params.remove(ID);

View File

@ -53,6 +53,7 @@ import org.apache.solr.util.plugin.SolrCoreAware;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.solr.common.params.CommonParams.DISTRIB;
import static org.apache.solr.common.params.CommonParams.PATH;
@ -212,7 +213,7 @@ public class SearchHandler extends RequestHandlerBase implements SolrCoreAware ,
CoreContainer cc = req.getCore().getCoreDescriptor().getCoreContainer();
boolean isZkAware = cc.isZooKeeperAware();
rb.isDistrib = req.getParams().getBool("distrib", isZkAware);
rb.isDistrib = req.getParams().getBool(DISTRIB, isZkAware);
if (!rb.isDistrib) {
// for back compat, a shards param with URLs like localhost:8983/solr will mean that this
// search is distributed.
@ -361,7 +362,7 @@ public class SearchHandler extends RequestHandlerBase implements SolrCoreAware ,
for (String shard : sreq.actualShards) {
ModifiableSolrParams params = new ModifiableSolrParams(sreq.params);
params.remove(ShardParams.SHARDS); // not a top-level request
params.set(CommonParams.DISTRIB, "false"); // not a top-level request
params.set(DISTRIB, "false"); // not a top-level request
params.remove("indent");
params.remove(CommonParams.HEADER_ECHO_PARAMS);
params.set(ShardParams.IS_SHARD, true); // a sub (shard) request

View File

@ -55,6 +55,7 @@ import org.apache.solr.common.params.ModifiableSolrParams;
import java.io.IOException;
import java.util.*;
import java.util.stream.Collectors;
import static org.apache.solr.common.params.CommonParams.SORT;
/**
* Table based on a Solr collection
@ -271,13 +272,13 @@ class SolrTable extends AbstractQueryableTable implements TranslatableTable {
String fl = getFields(fields);
if(orders.size() > 0) {
params.add(CommonParams.SORT, getSort(orders));
params.add(SORT, getSort(orders));
} else {
if(limit == null) {
params.add(CommonParams.SORT, "_version_ desc");
params.add(SORT, "_version_ desc");
fl = fl+",_version_";
} else {
params.add(CommonParams.SORT, "score desc");
params.add(SORT, "score desc");
if(fl.indexOf("score") == -1) {
fl = fl + ",score";
}
@ -460,7 +461,7 @@ class SolrTable extends AbstractQueryableTable implements TranslatableTable {
params.set("partitionKeys", getPartitionKeys(buckets));
}
params.set("sort", sort);
params.set(SORT, sort);
TupleStream tupleStream = null;
@ -699,7 +700,7 @@ class SolrTable extends AbstractQueryableTable implements TranslatableTable {
params.set("partitionKeys", getPartitionKeys(buckets));
}
params.set("sort", sort);
params.set(SORT, sort);
TupleStream tupleStream = null;

View File

@ -18,25 +18,24 @@ package org.apache.solr.index;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.search.Sort;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.search.SortSpecParsing;
import static org.apache.solr.common.params.CommonParams.SORT;
/**
* A {@link MergePolicyFactory} for {@code SortingMergePolicy} objects.
*/
public class SortingMergePolicyFactory extends WrapperMergePolicyFactory {
static final String SORT = "sort"; // not private so that test(s) can use it
protected final Sort mergeSort;
public SortingMergePolicyFactory(SolrResourceLoader resourceLoader, MergePolicyFactoryArgs args, IndexSchema schema) {
super(resourceLoader, args, schema);
final String sortArg = (String) args.remove(SORT);
if (sortArg == null) {
throw new IllegalArgumentException(SortingMergePolicyFactory.class.getSimpleName()+" requires a '"+SORT+ "' argument.");
throw new IllegalArgumentException(SortingMergePolicyFactory.class.getSimpleName()+" requires a '"+ SORT + "' argument.");
}
this.mergeSort = SortSpecParsing.parseSortSpec(sortArg, schema).getSort();
}

View File

@ -96,6 +96,8 @@ import org.apache.solr.util.BoundedTreeSet;
import org.apache.solr.util.DefaultSolrThreadFactory;
import org.apache.solr.util.RTimer;
import static org.apache.solr.common.params.CommonParams.SORT;
/**
* A class that generates simple Facet information for a request.
*
@ -531,7 +533,7 @@ public class SimpleFacets {
default:
sortVal = sort;
}
jsonFacet.put("sort", sortVal );
jsonFacet.put(SORT, sortVal );
Map<String, Object> topLevel = new HashMap<>();
topLevel.put(field, jsonFacet);

View File

@ -36,6 +36,7 @@ import org.noggit.JSONParser;
import org.noggit.ObjectBuilder;
import static org.apache.solr.common.params.CommonParams.JSON;
import static org.apache.solr.common.params.CommonParams.SORT;
public class RequestUtil {
/**
@ -206,8 +207,8 @@ public class RequestUtil {
out = "start";
} else if ("limit".equals(key)) {
out = "rows";
} else if ("sort".equals(key)) {
out = "sort";
} else if (SORT.equals(key)) {
out = SORT;
} else if ("params".equals(key) || "facet".equals(key) ) {
// handled elsewhere
continue;

View File

@ -25,6 +25,12 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import com.carrotsearch.hppc.FloatArrayList;
import com.carrotsearch.hppc.IntArrayList;
import com.carrotsearch.hppc.IntIntHashMap;
import com.carrotsearch.hppc.IntLongHashMap;
import com.carrotsearch.hppc.cursors.IntIntCursor;
import com.carrotsearch.hppc.cursors.IntLongCursor;
import org.apache.commons.lang.StringUtils;
import org.apache.lucene.codecs.DocValuesProducer;
import org.apache.lucene.index.DocValues;
@ -72,12 +78,7 @@ import org.apache.solr.schema.TrieIntField;
import org.apache.solr.schema.TrieLongField;
import org.apache.solr.uninverting.UninvertingReader;
import com.carrotsearch.hppc.FloatArrayList;
import com.carrotsearch.hppc.IntArrayList;
import com.carrotsearch.hppc.IntIntHashMap;
import com.carrotsearch.hppc.IntLongHashMap;
import com.carrotsearch.hppc.cursors.IntIntCursor;
import com.carrotsearch.hppc.cursors.IntLongCursor;
import static org.apache.solr.common.params.CommonParams.SORT;
/**
@ -187,7 +188,7 @@ public class CollapsingQParserPlugin extends QParserPlugin {
* returns a new GroupHeadSelector based on the specified local params
*/
public static GroupHeadSelector build(final SolrParams localParams) {
final String sortString = StringUtils.defaultIfBlank(localParams.get("sort"), null);
final String sortString = StringUtils.defaultIfBlank(localParams.get(SORT), null);
final String max = StringUtils.defaultIfBlank(localParams.get("max"), null);
final String min = StringUtils.defaultIfBlank(localParams.get("min"), null);

View File

@ -36,6 +36,7 @@ import org.apache.solr.search.QueryContext;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.SyntaxError;
import static org.apache.solr.common.params.CommonParams.SORT;
import static org.apache.solr.search.facet.FacetRequest.RefineMethod.NONE;
@ -645,7 +646,7 @@ class FacetFieldParser extends FacetParser<FacetField> {
Object o = m.get("facet");
parseSubs(o);
parseSort( m.get("sort") );
parseSort( m.get(SORT) );
}
return facet;

View File

@ -35,6 +35,8 @@ import org.apache.solr.search.SolrReturnFields;
import org.apache.solr.search.StrParser;
import org.apache.solr.search.SyntaxError;
import static org.apache.solr.common.params.CommonParams.SORT;
public class LegacyFacet {
private SolrParams params;
private Map<String,Object> json;
@ -172,7 +174,7 @@ public class LegacyFacet {
String sort = params.getFieldParam(f, FacetParams.FACET_SORT, limit>0 ? FacetParams.FACET_SORT_COUNT : FacetParams.FACET_SORT_INDEX);
String prefix = params.getFieldParam(f, FacetParams.FACET_PREFIX);
Map<String,Object> cmd = new HashMap<String,Object>();
Map<String,Object> cmd = new HashMap<>();
cmd.put("field", facetValue);
if (offset != 0) cmd.put("offset", offset);
if (limit != 10) cmd.put("limit", limit);
@ -182,9 +184,9 @@ public class LegacyFacet {
if (sort.equals("count")) {
// our default
} else if (sort.equals("index")) {
cmd.put("sort", "index asc");
cmd.put(SORT, "index asc");
} else {
cmd.put("sort", sort); // can be sort by one of our stats
cmd.put(SORT, sort); // can be sort by one of our stats
}
Map<String,Object> type = new HashMap<>(1);

View File

@ -60,6 +60,7 @@ import org.apache.solr.update.processor.UpdateRequestProcessorChain;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.solr.common.params.CommonParams.DISTRIB;
import static org.apache.solr.common.params.CommonParams.ID;
import static org.apache.solr.update.processor.DistributedUpdateProcessor.DistribPhase.FROMLEADER;
import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM;
@ -405,7 +406,7 @@ public class PeerSync implements SolrMetricProducer {
sreq.params = new ModifiableSolrParams();
sreq.params = new ModifiableSolrParams();
sreq.params.set("qt","/get");
sreq.params.set("distrib",false);
sreq.params.set(DISTRIB,false);
sreq.params.set("getFingerprint", String.valueOf(Long.MAX_VALUE));
shardHandler.submit(sreq, replica, sreq.params);
@ -421,7 +422,7 @@ public class PeerSync implements SolrMetricProducer {
sreq.actualShards = sreq.shards;
sreq.params = new ModifiableSolrParams();
sreq.params.set("qt","/get");
sreq.params.set("distrib",false);
sreq.params.set(DISTRIB,false);
sreq.params.set("getVersions",nUpdates);
sreq.params.set("fingerprint",doFingerprint);
shardHandler.submit(sreq, replica, sreq.params);
@ -506,7 +507,7 @@ public class PeerSync implements SolrMetricProducer {
sreq.actualShards = sreq.shards;
sreq.params = new ModifiableSolrParams();
sreq.params.set("qt", "/get");
sreq.params.set("distrib", false);
sreq.params.set(DISTRIB, false);
sreq.params.set("checkCanHandleVersionRanges", false);
ShardHandler sh = shardHandlerFactory.getShardHandler(client);
@ -725,7 +726,7 @@ public class PeerSync implements SolrMetricProducer {
sreq.purpose = 0;
sreq.params = new ModifiableSolrParams();
sreq.params.set("qt", "/get");
sreq.params.set("distrib", false);
sreq.params.set(DISTRIB, false);
sreq.params.set("getUpdates", versionsAndRanges);
sreq.params.set("onlyIfActive", onlyIfActive);
@ -890,7 +891,7 @@ public class PeerSync implements SolrMetricProducer {
sreq.shards = new String[]{replica};
sreq.params = new ModifiableSolrParams();
sreq.params.set("qt","/get");
sreq.params.set("distrib", false);
sreq.params.set(DISTRIB, false);
sreq.params.set("getVersions",nUpdates);
shardHandler.submit(sreq, replica, sreq.params);
}

View File

@ -16,6 +16,7 @@
*/
package org.apache.solr.update.processor;
import static org.apache.solr.common.params.CommonParams.DISTRIB;
import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM;
import java.io.IOException;
@ -1303,7 +1304,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
String id = inplaceAdd.getPrintableId();
UpdateShardHandler updateShardHandler = inplaceAdd.getReq().getCore().getCoreDescriptor().getCoreContainer().getUpdateShardHandler();
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("distrib", false);
params.set(DISTRIB, false);
params.set("getInputDocument", id);
params.set("onlyIfActive", true);
SolrRequest<SimpleSolrResponse> ur = new GenericSolrRequest(METHOD.GET, "/get", params);

View File

@ -113,6 +113,7 @@ import org.slf4j.LoggerFactory;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.solr.common.SolrException.ErrorCode.FORBIDDEN;
import static org.apache.solr.common.SolrException.ErrorCode.UNAUTHORIZED;
import static org.apache.solr.common.params.CommonParams.DISTRIB;
import static org.apache.solr.common.params.CommonParams.NAME;
/**
@ -1193,7 +1194,7 @@ public class SolrCLI {
// query this replica directly to get doc count and assess health
q = new SolrQuery("*:*");
q.setRows(0);
q.set("distrib", "false");
q.set(DISTRIB, "false");
try (HttpSolrClient solr = new HttpSolrClient.Builder(coreUrl).build()) {
String solrUrl = solr.getBaseURL();

View File

@ -25,6 +25,7 @@ import org.apache.solr.client.solrj.io.stream.CloudSolrStream;
import org.apache.solr.client.solrj.io.stream.StreamContext;
import org.apache.solr.common.params.ModifiableSolrParams;
import static org.apache.solr.common.params.CommonParams.SORT;
import static org.apache.solr.common.params.CommonParams.VERSION_FIELD;
@ -74,7 +75,7 @@ public class ModelCache {
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("q","name_s:"+modelID);
params.set("fl", "terms_ss, idfs_ds, weights_ds, iteration_i, _version_");
params.set("sort", "iteration_i desc");
params.set(SORT, "iteration_i desc");
StreamContext streamContext = new StreamContext();
streamContext.setSolrClientCache(solrClientCache);
CloudSolrStream stream = new CloudSolrStream(zkHost, collection, params);

View File

@ -51,6 +51,8 @@ import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SolrjNamedThreadFactory;
import static org.apache.solr.common.params.CommonParams.SORT;
public class GatherNodesStream extends TupleStream implements Expressible {
private String zkHost;
@ -449,7 +451,7 @@ public class GatherNodesStream extends TupleStream implements Expressible {
ModifiableSolrParams joinSParams = new ModifiableSolrParams(SolrParams.toMultiMap(new NamedList(queryParams)));
joinSParams.set("fl", buf.toString());
joinSParams.set("qt", "/export");
joinSParams.set("sort", gather + " asc,"+traverseTo +" asc");
joinSParams.set(SORT, gather + " asc,"+traverseTo +" asc");
StringBuffer nodeQuery = new StringBuffer();

View File

@ -52,6 +52,8 @@ import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.SolrjNamedThreadFactory;
import static org.apache.solr.common.params.CommonParams.SORT;
public class ShortestPathStream extends TupleStream implements Expressible {
private static final long serialVersionUID = 1;
@ -450,7 +452,7 @@ public class ShortestPathStream extends TupleStream implements Expressible {
joinParams.set("fl", fl);
joinParams.set("qt", "/export");
joinParams.set("sort", toField + " asc,"+fromField +" asc");
joinParams.set(SORT, toField + " asc,"+fromField +" asc");
StringBuffer nodeQuery = new StringBuffer();

View File

@ -39,6 +39,8 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import static org.apache.solr.common.params.CommonParams.SORT;
public class GroupOperation implements ReduceOperation {
private static final long serialVersionUID = 1L;
@ -52,7 +54,7 @@ public class GroupOperation implements ReduceOperation {
public GroupOperation(StreamExpression expression, StreamFactory factory) throws IOException {
StreamExpressionNamedParameter nParam = factory.getNamedOperand(expression, "n");
StreamExpressionNamedParameter sortExpression = factory.getNamedOperand(expression, "sort");
StreamExpressionNamedParameter sortExpression = factory.getNamedOperand(expression, SORT);
StreamComparator streamComparator = factory.constructComparator(((StreamExpressionValue) sortExpression.getParameter()).getValue(), FieldComparator.class);
String nStr = ((StreamExpressionValue)nParam.getParameter()).getValue();
@ -87,7 +89,7 @@ public class GroupOperation implements ReduceOperation {
expression.addParameter(new StreamExpressionNamedParameter("n", Integer.toString(size)));
// sort
expression.addParameter(new StreamExpressionNamedParameter("sort", streamComparator.toExpression(factory)));
expression.addParameter(new StreamExpressionNamedParameter(SORT, streamComparator.toExpression(factory)));
return expression;
}

View File

@ -63,6 +63,9 @@ import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.SolrjNamedThreadFactory;
import org.apache.solr.common.util.StrUtils;
import static org.apache.solr.common.params.CommonParams.DISTRIB;
import static org.apache.solr.common.params.CommonParams.SORT;
/**
* Connects to Zookeeper to pick replicas from a specific collection to send the query to.
* Under the covers the SolrStream instances send the query to the replicas.
@ -269,10 +272,10 @@ public class CloudSolrStream extends TupleStream implements Expressible {
}
String fls = String.join(",", params.getParams("fl"));
if (params.getParams("sort") == null) {
if (params.getParams(SORT) == null) {
throw new IOException("sort param expected for search function");
}
String sorts = String.join(",", params.getParams("sort"));
String sorts = String.join(",", params.getParams(SORT));
this.comp = parseComp(sorts, fls);
}
@ -403,7 +406,7 @@ public class CloudSolrStream extends TupleStream implements Expressible {
Collection<Slice> slices = CloudSolrStream.getSlices(this.collection, zkStateReader, true);
ModifiableSolrParams mParams = new ModifiableSolrParams(params);
mParams.set("distrib", "false"); // We are the aggregator.
mParams.set(DISTRIB, "false"); // We are the aggregator.
Set<String> liveNodes = clusterState.getLiveNodes();
for(Slice slice : slices) {

View File

@ -59,6 +59,7 @@ import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SolrjNamedThreadFactory;
import static org.apache.solr.common.params.CommonParams.DISTRIB;
import static org.apache.solr.common.params.CommonParams.ID;
public class FeaturesSelectionStream extends TupleStream implements Expressible{
@ -415,7 +416,7 @@ public class FeaturesSelectionStream extends TupleStream implements Expressible{
ModifiableSolrParams params = new ModifiableSolrParams();
HttpSolrClient solrClient = cache.getHttpSolrClient(baseUrl);
params.add("distrib", "false");
params.add(DISTRIB, "false");
params.add("fq","{!igain}");
for(String key : paramsMap.keySet()) {

View File

@ -37,6 +37,7 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import org.apache.solr.common.params.ModifiableSolrParams;
import static org.apache.solr.common.params.CommonParams.SORT;
import static org.apache.solr.common.params.CommonParams.VERSION_FIELD;
/**
@ -238,7 +239,7 @@ public class FetchStream extends TupleStream implements Expressible {
params.add("q", buf.toString());
params.add("fl", fieldList+appendFields());
params.add("rows", Integer.toString(batchSize));
params.add("sort", "_version_ desc");
params.add(SORT, "_version_ desc");
CloudSolrStream cloudSolrStream = new CloudSolrStream(zkHost, collection, params);
StreamContext newContext = new StreamContext();

View File

@ -45,6 +45,8 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import static org.apache.solr.common.params.CommonParams.SORT;
/**
* Connects to a datasource using a registered JDBC driver and execute a query. The results of
* that query will be returned as tuples. An EOF tuple will indicate that all have been read.
@ -104,7 +106,7 @@ public class JDBCStream extends TupleStream implements Expressible {
List<StreamExpressionNamedParameter> namedParams = factory.getNamedOperands(expression);
StreamExpressionNamedParameter connectionUrlExpression = factory.getNamedOperand(expression, "connection");
StreamExpressionNamedParameter sqlQueryExpression = factory.getNamedOperand(expression, "sql");
StreamExpressionNamedParameter definedSortExpression = factory.getNamedOperand(expression, "sort");
StreamExpressionNamedParameter definedSortExpression = factory.getNamedOperand(expression, SORT);
StreamExpressionNamedParameter driverClassNameExpression = factory.getNamedOperand(expression, "driver");
// Validate there are no unknown parameters - zkHost and alias are namedParameter so we don't need to count it twice
@ -115,7 +117,7 @@ public class JDBCStream extends TupleStream implements Expressible {
// All named params we don't care about will be passed to the driver on connection
Properties connectionProperties = new Properties();
for(StreamExpressionNamedParameter namedParam : namedParams){
if(!namedParam.getName().equals("driver") && !namedParam.getName().equals("connection") && !namedParam.getName().equals("sql") && !namedParam.getName().equals("sort")){
if(!namedParam.getName().equals("driver") && !namedParam.getName().equals("connection") && !namedParam.getName().equals("sql") && !namedParam.getName().equals(SORT)){
connectionProperties.put(namedParam.getName(), namedParam.getParameter().toString().trim());
}
}
@ -367,7 +369,7 @@ public class JDBCStream extends TupleStream implements Expressible {
expression.addParameter(new StreamExpressionNamedParameter("sql", sqlQuery));
// sort
expression.addParameter(new StreamExpressionNamedParameter("sort", definedSort.toExpression(factory)));
expression.addParameter(new StreamExpressionNamedParameter(SORT, definedSort.toExpression(factory)));
// driver class
if(null != driverClassName){

View File

@ -21,23 +21,23 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Set;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.comp.FieldComparator;
import org.apache.solr.client.solrj.io.comp.StreamComparator;
import org.apache.solr.client.solrj.io.stream.expr.Explanation;
import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
import org.apache.solr.client.solrj.io.stream.expr.Expressible;
import org.apache.solr.client.solrj.io.stream.expr.StreamExplanation;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParameter;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.Slice;
@ -45,6 +45,9 @@ import org.apache.solr.common.cloud.ZkCoreNodeProps;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.ModifiableSolrParams;
import static org.apache.solr.common.params.CommonParams.DISTRIB;
import static org.apache.solr.common.params.CommonParams.SORT;
/**
* The ParallelStream decorates a TupleStream implementation and pushes it to N workers for parallel execution.
* Workers are chosen from a SolrCloud collection.
@ -85,7 +88,7 @@ public class ParallelStream extends CloudSolrStream implements Expressible {
String collectionName = factory.getValueOperand(expression, 0);
StreamExpressionNamedParameter workersParam = factory.getNamedOperand(expression, "workers");
List<StreamExpression> streamExpressions = factory.getExpressionOperandsRepresentingTypes(expression, Expressible.class, TupleStream.class);
StreamExpressionNamedParameter sortExpression = factory.getNamedOperand(expression, "sort");
StreamExpressionNamedParameter sortExpression = factory.getNamedOperand(expression, SORT);
StreamExpressionNamedParameter zkHostExpression = factory.getNamedOperand(expression, "zkHost");
// validate expression contains only what we want.
@ -188,7 +191,7 @@ public class ParallelStream extends CloudSolrStream implements Expressible {
}
// sort
expression.addParameter(new StreamExpressionNamedParameter("sort",comp.toExpression(factory)));
expression.addParameter(new StreamExpressionNamedParameter(SORT,comp.toExpression(factory)));
// zkHost
expression.addParameter(new StreamExpressionNamedParameter("zkHost", zkHost));
@ -284,7 +287,7 @@ public class ParallelStream extends CloudSolrStream implements Expressible {
for(int w=0; w<workers; w++) {
ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
paramsLoc.set("distrib","false"); // We are the aggregator.
paramsLoc.set(DISTRIB,"false"); // We are the aggregator.
paramsLoc.set("numWorkers", workers);
paramsLoc.set("workerID", w);

View File

@ -18,21 +18,22 @@
package org.apache.solr.client.solrj.io.stream;
import java.io.IOException;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.stream.Collectors;
import java.util.Random;
import java.util.Iterator;
import java.util.stream.Collectors;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.io.SolrClientCache;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.comp.StreamComparator;
import org.apache.solr.client.solrj.io.stream.expr.Explanation;
import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
import org.apache.solr.client.solrj.io.stream.expr.Expressible;
import org.apache.solr.client.solrj.io.stream.expr.StreamExplanation;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
@ -40,13 +41,14 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParamete
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.params.ModifiableSolrParams;
import static org.apache.solr.common.params.CommonParams.SORT;
/**
* The RandomStream emits a stream of psuedo random Tuples that match the query parameters. Sample expression syntax:
* random(collection, q="Hello word", rows="50", fl="title, body")
@ -177,13 +179,13 @@ public class RandomStream extends TupleStream implements Expressible {
ModifiableSolrParams params = getParams(this.props);
params.remove("sort"); //Override any sort.
params.remove(SORT); //Override any sort.
Random rand = new Random();
int seed = rand.nextInt();
String sortField = "random_"+seed;
params.add("sort", sortField+" asc");
params.add(SORT, sortField+" asc");
QueryRequest request = new QueryRequest(params);
try {

View File

@ -37,6 +37,8 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParamete
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import static org.apache.solr.common.params.CommonParams.SORT;
/**
* Iterates over a TupleStream and Ranks the topN tuples based on a Comparator.
@ -61,7 +63,7 @@ public class RankStream extends TupleStream implements Expressible {
// grab all parameters out
List<StreamExpression> streamExpressions = factory.getExpressionOperandsRepresentingTypes(expression, Expressible.class, TupleStream.class);
StreamExpressionNamedParameter nParam = factory.getNamedOperand(expression, "n");
StreamExpressionNamedParameter sortExpression = factory.getNamedOperand(expression, "sort");
StreamExpressionNamedParameter sortExpression = factory.getNamedOperand(expression, SORT);
// validate expression contains only what we want.
if(expression.getParameters().size() != streamExpressions.size() + 2){
@ -129,7 +131,7 @@ public class RankStream extends TupleStream implements Expressible {
}
// sort
expression.addParameter(new StreamExpressionNamedParameter("sort",comp.toExpression(factory)));
expression.addParameter(new StreamExpressionNamedParameter(SORT, comp.toExpression(factory)));
return expression;
}

View File

@ -19,11 +19,11 @@ package org.apache.solr.client.solrj.io.stream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.HashMap;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.io.SolrClientCache;
@ -42,6 +42,8 @@ import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.TermsParams;
import org.apache.solr.common.util.NamedList;
import static org.apache.solr.common.params.CommonParams.DISTRIB;
/**
* Iterates over a gatherNodes() expression and scores the Tuples based on tf-idf.
*
@ -211,7 +213,7 @@ public class ScoreNodesStream extends TupleStream implements Expressible
params.add(TermsParams.TERMS_STATS, "true");
params.add(TermsParams.TERMS_LIST, builder.toString());
params.add(TermsParams.TERMS_LIMIT, Integer.toString(nodes.size()));
params.add("distrib", "true");
params.add(DISTRIB, "true");
QueryRequest request = new QueryRequest(params);

View File

@ -49,6 +49,8 @@ import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SolrjNamedThreadFactory;
import static org.apache.solr.common.params.CommonParams.DISTRIB;
public class SignificantTermsStream extends TupleStream implements Expressible{
private static final long serialVersionUID = 1;
@ -376,7 +378,7 @@ public class SignificantTermsStream extends TupleStream implements Expressible{
ModifiableSolrParams params = new ModifiableSolrParams();
HttpSolrClient solrClient = cache.getHttpSolrClient(baseUrl);
params.add("distrib", "false");
params.add(DISTRIB, "false");
params.add("fq","{!sigificantTerms}");
for(String key : paramsMap.keySet()) {

View File

@ -60,6 +60,7 @@ import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SolrjNamedThreadFactory;
import static org.apache.solr.common.params.CommonParams.DISTRIB;
import static org.apache.solr.common.params.CommonParams.ID;
public class TextLogitStream extends TupleStream implements Expressible {
@ -615,7 +616,7 @@ public class TextLogitStream extends TupleStream implements Expressible {
ModifiableSolrParams params = new ModifiableSolrParams();
HttpSolrClient solrClient = cache.getHttpSolrClient(baseUrl);
params.add("distrib", "false");
params.add(DISTRIB, "false");
params.add("fq","{!tlogit}");
params.add("feature", feature);
params.add("terms", TextLogitStream.toString(terms));

View File

@ -63,7 +63,9 @@ import org.apache.solr.common.util.SolrjNamedThreadFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.solr.common.params.CommonParams.DISTRIB;
import static org.apache.solr.common.params.CommonParams.ID;
import static org.apache.solr.common.params.CommonParams.SORT;
import static org.apache.solr.common.params.CommonParams.VERSION_FIELD;
public class TopicStream extends CloudSolrStream implements Expressible {
@ -436,8 +438,8 @@ public class TopicStream extends CloudSolrStream implements Expressible {
long checkpoint = -1;
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("q","*:*");
params.set("sort", "_version_ desc");
params.set("distrib", "false");
params.set(SORT, "_version_ desc");
params.set(DISTRIB, "false");
params.set("rows", 1);
for(Replica replica : replicas) {
if(replica.getState() == Replica.State.ACTIVE && liveNodes.contains(replica.getNodeName())) {
@ -523,9 +525,9 @@ public class TopicStream extends CloudSolrStream implements Expressible {
Collection<Slice> slices = CloudSolrStream.getSlices(this.collection, zkStateReader, false);
ModifiableSolrParams mParams = new ModifiableSolrParams(params);
mParams.set("distrib", "false"); // We are the aggregator.
mParams.set(DISTRIB, "false"); // We are the aggregator.
String fl = mParams.get("fl");
mParams.set("sort", "_version_ asc");
mParams.set(SORT, "_version_ asc");
if(!fl.contains(VERSION_FIELD)) {
fl += ",_version_";
}

View File

@ -18,6 +18,8 @@ package org.apache.solr.common.params;
import java.util.regex.Pattern;
import static org.apache.solr.common.params.CommonParams.SORT;
/**
*
*
@ -112,20 +114,20 @@ public interface TermsParams {
/**
* Optional. The maximum value of docFreq to be returned. -1 by default means no boundary
*/
public static final String TERMS_MAXCOUNT = TERMS_PREFIX + "maxcount";
String TERMS_MAXCOUNT = TERMS_PREFIX + "maxcount";
/**
* Optional. If true, return the raw characters of the indexed term, regardless of if it is readable.
* For instance, the index form of numeric numbers is not human readable. The default is false.
*/
public static final String TERMS_RAW = TERMS_PREFIX + "raw";
String TERMS_RAW = TERMS_PREFIX + "raw";
/**
* Optional. If sorting by frequency is enabled. Defaults to sorting by count.
*/
public static final String TERMS_SORT = TERMS_PREFIX + "sort";
String TERMS_SORT = TERMS_PREFIX + SORT;
public static final String TERMS_SORT_COUNT = "count";
public static final String TERMS_SORT_INDEX = "index";
String TERMS_SORT_COUNT = "count";
String TERMS_SORT_INDEX = "index";
}
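
The TermsParams hunk above also drops the explicit public static final modifiers. Fields declared in a Java interface are implicitly public, static, and final, so the shorter declarations are equivalent; a toy interface (not from the Solr codebase) showing both forms:

interface ModifierSketch {
  // Both constants compile to the same public static final field;
  // the modifiers on the first declaration are redundant.
  public static final String VERBOSE_STYLE = "terms.sort";
  String CONCISE_STYLE = "terms.sort";
}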