From f4359ff8ffd96253ba610865c5e29172307c3c7a Mon Sep 17 00:00:00 2001 From: Erick Erickson Date: Mon, 9 May 2016 12:37:32 -0700 Subject: [PATCH] SOLR-8467: CloudSolrStream and FacetStream should take a SolrParams object rather than a Map to allow more complex Solr queries to be specified --- solr/CHANGES.txt | 3 + .../org/apache/solr/handler/SQLHandler.java | 53 +- .../apache/solr/handler/TestSQLHandler.java | 551 ++++++++---------- .../solrj/io/graph/GatherNodesStream.java | 21 +- .../solrj/io/graph/ShortestPathStream.java | 57 +- .../client/solrj/io/sql/StatementImpl.java | 11 +- .../solrj/io/stream/CloudSolrStream.java | 89 ++- .../client/solrj/io/stream/FacetStream.java | 64 +- .../solrj/io/stream/ParallelStream.java | 23 +- .../client/solrj/io/stream/SolrStream.java | 33 +- .../client/solrj/io/stream/StatsStream.java | 58 +- .../client/solrj/io/stream/TopicStream.java | 80 ++- .../solrj/io/graph/GraphExpressionTest.java | 1 - .../solr/client/solrj/io/graph/GraphTest.java | 23 +- .../solrj/io/stream/StreamExpressionTest.java | 70 ++- .../client/solrj/io/stream/StreamingTest.java | 250 ++++---- 16 files changed, 717 insertions(+), 670 deletions(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 97cf672e361..39b029ae1e0 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -273,6 +273,9 @@ Other Changes * SOLR-8458: Add Streaming Expressions tests for parameter substitution (Joel Bernstein, Cao Manh Dat, Dennis Gove, Kevin Risden) +* SOLR-8467: CloudSolrStream and FacetStream should take a SolrParams object rather than a + Map to allow more complex Solr queries to be specified. 
(Erick Erickson) + ================== 6.0.0 ================== Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release diff --git a/solr/core/src/java/org/apache/solr/handler/SQLHandler.java b/solr/core/src/java/org/apache/solr/handler/SQLHandler.java index b8df7f2dc90..bc46a7870d9 100644 --- a/solr/core/src/java/org/apache/solr/handler/SQLHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/SQLHandler.java @@ -144,9 +144,8 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware , Pe } private SolrParams adjustParams(SolrParams params) { - ModifiableSolrParams adjustedParams = new ModifiableSolrParams(); - adjustedParams.add(params); - adjustedParams.add(CommonParams.OMIT_HEADER, "true"); + ModifiableSolrParams adjustedParams = new ModifiableSolrParams(params); + adjustedParams.set(CommonParams.OMIT_HEADER, "true"); return adjustedParams; } @@ -230,18 +229,18 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware , Pe String zkHost = tableSpec.zkHost; String collection = tableSpec.collection; - Map params = new HashMap(); + ModifiableSolrParams params = new ModifiableSolrParams(); - params.put(CommonParams.FL, fl); - params.put(CommonParams.Q, sqlVisitor.query); + params.set(CommonParams.FL, fl); + params.set(CommonParams.Q, sqlVisitor.query); //Always use the /export handler for Group By Queries because it requires exporting full result sets. 
- params.put(CommonParams.QT, "/export"); + params.set(CommonParams.QT, "/export"); if(numWorkers > 1) { - params.put("partitionKeys", getPartitionKeys(buckets)); + params.set("partitionKeys", getPartitionKeys(buckets)); } - params.put("sort", sort); + params.set("sort", sort); TupleStream tupleStream = null; @@ -370,18 +369,18 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware , Pe String zkHost = tableSpec.zkHost; String collection = tableSpec.collection; - Map params = new HashMap(); + ModifiableSolrParams params = new ModifiableSolrParams(); - params.put(CommonParams.FL, fl); - params.put(CommonParams.Q, sqlVisitor.query); + params.set(CommonParams.FL, fl); + params.set(CommonParams.Q, sqlVisitor.query); //Always use the /export handler for Distinct Queries because it requires exporting full result sets. - params.put(CommonParams.QT, "/export"); + params.set(CommonParams.QT, "/export"); if(numWorkers > 1) { - params.put("partitionKeys", getPartitionKeys(buckets)); + params.set("partitionKeys", getPartitionKeys(buckets)); } - params.put("sort", sort); + params.set("sort", sort); TupleStream tupleStream = null; @@ -463,9 +462,9 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware , Pe String zkHost = tableSpec.zkHost; String collection = tableSpec.collection; - Map params = new HashMap(); + ModifiableSolrParams params = new ModifiableSolrParams(); - params.put(CommonParams.Q, sqlVisitor.query); + params.set(CommonParams.Q, sqlVisitor.query); int limit = sqlVisitor.limit > 0 ? sqlVisitor.limit : 100; @@ -512,9 +511,9 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware , Pe String zkHost = tableSpec.zkHost; String collection = tableSpec.collection; - Map params = new HashMap(); + ModifiableSolrParams params = new ModifiableSolrParams(); - params.put(CommonParams.Q, sqlVisitor.query); + params.set(CommonParams.Q, sqlVisitor.query); int limit = sqlVisitor.limit > 0 ? 
sqlVisitor.limit : 100; @@ -628,22 +627,22 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware , Pe } } - Map params = new HashMap(); - params.put("fl", fl.toString()); - params.put("q", sqlVisitor.query); + ModifiableSolrParams params = new ModifiableSolrParams(); + params.set("fl", fl.toString()); + params.set("q", sqlVisitor.query); if(siBuf.length() > 0) { - params.put("sort", siBuf.toString()); + params.set("sort", siBuf.toString()); } TupleStream tupleStream; if(sqlVisitor.limit > -1) { - params.put("rows", Integer.toString(sqlVisitor.limit)); + params.set("rows", Integer.toString(sqlVisitor.limit)); tupleStream = new LimitStream(new CloudSolrStream(zkHost, collection, params), sqlVisitor.limit); } else { //Only use the export handler when no limit is specified. - params.put(CommonParams.QT, "/export"); + params.set(CommonParams.QT, "/export"); tupleStream = new CloudSolrStream(zkHost, collection, params); } @@ -681,9 +680,9 @@ public class SQLHandler extends RequestHandlerBase implements SolrCoreAware , Pe String zkHost = tableSpec.zkHost; String collection = tableSpec.collection; - Map params = new HashMap(); + ModifiableSolrParams params = new ModifiableSolrParams(); - params.put(CommonParams.Q, sqlVisitor.query); + params.set(CommonParams.Q, sqlVisitor.query); TupleStream tupleStream = new StatsStream(zkHost, collection, diff --git a/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java b/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java index 1a7d2873738..6876b26bf08 100644 --- a/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java +++ b/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java @@ -19,9 +19,7 @@ package org.apache.solr.handler; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; import java.util.List; -import java.util.Map; import com.facebook.presto.sql.parser.SqlParser; import 
com.facebook.presto.sql.tree.Statement; @@ -30,9 +28,11 @@ import org.apache.solr.client.solrj.io.stream.ExceptionStream; import org.apache.solr.client.solrj.io.stream.SolrStream; import org.apache.solr.client.solrj.io.stream.TupleStream; import org.apache.solr.cloud.AbstractFullDistribZkTestBase; -import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.params.CommonParams; +import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.solr.common.params.SolrParams; + import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -235,11 +235,11 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { indexDoc(sdoc("id", "7", "text", "XXXX XXXX", "str_s", "c", "field_i", "50")); indexDoc(sdoc("id", "8", "text", "XXXX XXXX", "str_s", "c", "field_i", "60")); commit(); - Map params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select 'id', field_i, str_s from collection1 where 'text'='XXXX' order by field_i desc"); + + SolrParams sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select 'id', field_i, str_s from collection1 where 'text'='XXXX' order by field_i desc"); - SolrStream solrStream = new SolrStream(jetty.url, params); + SolrStream solrStream = new SolrStream(jetty.url, sParams); List tuples = getTuples(solrStream); assert(tuples.size() == 8); @@ -286,14 +286,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getLong("field_i") == 7); assert(tuple.get("str_s").equals("a")); - - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - //Test unlimited unsorted result. 
Should sort on _version_ desc - params.put("stmt", "select 'id', field_i, str_s from collection1 where 'text'='XXXX'"); + sParams = mapParams(CommonParams.QT, "/sql", "stmt", "select 'id', field_i, str_s from collection1 where 'text'='XXXX'"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 8); @@ -339,11 +335,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.get("str_s").equals("a")); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select id, field_i, str_s from collection1 where text='XXXX' order by field_i desc limit 1"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select id, field_i, str_s from collection1 where text='XXXX' order by field_i desc limit 1"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 1); @@ -353,11 +348,9 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getLong("field_i") == 60); assert(tuple.get("str_s").equals("c")); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select id, field_i, str_s from collection1 where text='XXXX' AND id='(1 2 3)' order by field_i desc"); + sParams = mapParams(CommonParams.QT, "/sql", "stmt", "select id, field_i, str_s from collection1 where text='XXXX' AND id='(1 2 3)' order by field_i desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 3); @@ -377,11 +370,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getLong("field_i") == 7); assert(tuple.get("str_s").equals("a")); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select id as myId, field_i as 
myInt, str_s as myString from collection1 where text='XXXX' AND id='(1 2 3)' order by myInt desc"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select id as myId, field_i as myInt, str_s as myString from collection1 where text='XXXX' AND id='(1 2 3)' order by myInt desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 3); @@ -402,11 +394,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.get("myString").equals("a")); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select id as myId, field_i as myInt, str_s as myString from collection1 where text='XXXX' AND id='(1 2 3)' order by field_i desc"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select id as myId, field_i as myInt, str_s as myString from collection1 where text='XXXX' AND id='(1 2 3)' order by field_i desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 3); @@ -433,11 +424,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { boolean wasReloaded = reloadCollection(leader, "collection1"); assertTrue(wasReloaded); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select id as myId, field_i as myInt, str_s as myString from collection1 where text='XXXX' AND id='(1 2 3)' order by field_i desc"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select id as myId, field_i as myInt, str_s as myString from collection1 where text='XXXX' AND id='(1 2 3)' order by field_i desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 3); @@ -481,11 +471,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { 
indexDoc(sdoc("id", "7", "Text_t", "XXXX XXXX", "Str_s", "c", "Field_i", "50")); indexDoc(sdoc("id", "8", "Text_t", "XXXX XXXX", "Str_s", "c", "Field_i", "60")); commit(); - Map params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select id, Field_i, Str_s from Collection1 where Text_t='XXXX' order by Field_i desc"); + SolrParams sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select id, Field_i, Str_s from Collection1 where Text_t='XXXX' order by Field_i desc"); - SolrStream solrStream = new SolrStream(jetty.url, params); + SolrStream solrStream = new SolrStream(jetty.url, sParams); List tuples = getTuples(solrStream); assert(tuples.size() == 8); @@ -532,11 +521,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getLong("Field_i") == 7); assert(tuple.get("Str_s").equals("a")); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select Str_s, sum(Field_i) from Collection1 where 'id'='(1 8)' group by Str_s having (sum(Field_i) = 7 OR 'sum(Field_i)' = 60) order by 'sum(Field_i)' desc"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select Str_s, sum(Field_i) from Collection1 where 'id'='(1 8)' group by Str_s having (sum(Field_i) = 7 OR 'sum(Field_i)' = 60) order by 'sum(Field_i)' desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 2); @@ -549,11 +537,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.get("Str_s").equals("a")); assert(tuple.getDouble("sum(Field_i)") == 7); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select Str_s, sum(Field_i) from Collection1 where 'id'='(1 8)' group by 'Str_s' having (sum(Field_i) = 7 OR 'sum(Field_i)' = 60) order by 'sum(Field_i)' desc"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select Str_s, sum(Field_i) 
from Collection1 where 'id'='(1 8)' group by 'Str_s' having (sum(Field_i) = 7 OR 'sum(Field_i)' = 60) order by 'sum(Field_i)' desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 2); @@ -591,55 +578,50 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { indexDoc(sdoc("id", "8", "text", "XXXX XXXX", "str_s", "c", "field_i", "60")); commit(); - Map params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select id, field_i, str_s from collection1 where text='XXXX' order by field_iff desc"); + SolrParams sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select id, field_i, str_s from collection1 where text='XXXX' order by field_iff desc"); - SolrStream solrStream = new SolrStream(jetty.url, params); + SolrStream solrStream = new SolrStream(jetty.url, sParams); Tuple tuple = getTuple(new ExceptionStream(solrStream)); assert(tuple.EOF); assert(tuple.EXCEPTION); //A parse exception detected before being sent to the search engine assert(tuple.getException().contains("Fields in the sort spec must be included in the field list")); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select id, field_iff, str_s from collection1 where text='XXXX' order by field_iff desc"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select id, field_iff, str_s from collection1 where text='XXXX' order by field_iff desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuple = getTuple(new ExceptionStream(solrStream)); assert(tuple.EOF); assert(tuple.EXCEPTION); //An exception not detected by the parser thrown from the /select handler assert(tuple.getException().contains("sort param field can't be found:")); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select str_s, count(*), sum(field_iff), 
min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having ((sum(field_iff) = 19) AND (min(field_i) = 8))"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select str_s, count(*), sum(field_iff), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having ((sum(field_iff) = 19) AND (min(field_i) = 8))"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuple = getTuple(new ExceptionStream(solrStream)); assert(tuple.EOF); assert(tuple.EXCEPTION); //An exception not detected by the parser thrown from the /export handler assert(tuple.getException().contains("undefined field:")); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select str_s, count(*), blah(field_iff), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having ((sum(field_iff) = 19) AND (min(field_i) = 8))"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select str_s, count(*), blah(field_iff), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having ((sum(field_iff) = 19) AND (min(field_i) = 8))"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuple = getTuple(new ExceptionStream(solrStream)); assert(tuple.EOF); assert(tuple.EXCEPTION); //An exception not detected by the parser thrown from the /export handler assert(tuple.getException().contains("Invalid function: blah")); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select str_s from collection1 where text='XXXX' group by str_s"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select str_s from collection1 where text='XXXX' group by str_s"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuple = getTuple(new 
ExceptionStream(solrStream)); assert(tuple.EOF); assert(tuple.EXCEPTION); @@ -668,11 +650,11 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { indexr("id", "7", "text", "XXXX XXXX", "str_s", "c", "field_i", "50"); indexr("id", "8", "text", "XXXX XXXX", "str_s", "c", "field_i", "60"); commit(); - Map params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select str_s, 'count(*)', sum('field_i'), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by 'str_s' order by 'sum(field_i)' asc limit 2"); + + SolrParams sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select str_s, 'count(*)', sum('field_i'), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by 'str_s' order by 'sum(field_i)' asc limit 2"); - SolrStream solrStream = new SolrStream(jetty.url, params); + SolrStream solrStream = new SolrStream(jetty.url, sParams); List tuples = getTuples(solrStream); //Only two results because of the limit. @@ -697,10 +679,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getDouble("avg(field_i)") == 13.5D); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select str_s as myString, 'count(*)', sum('field_i') as sum, min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by 'str_s' order by sum asc limit 2"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select str_s as myString, 'count(*)', sum('field_i') as sum, min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by 'str_s' order by sum asc limit 2"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); //Only two results because of the limit. 
@@ -723,11 +705,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getDouble("avg(field_i)") == 13.5D); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where (text='XXXX' AND NOT text='XXXX XXX') group by str_s order by str_s desc"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where (text='XXXX' AND NOT text='XXXX XXX') group by str_s order by str_s desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); //The sort by and order by match and no limit is applied. All the Tuples should be returned in @@ -760,11 +741,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getDouble("avg(field_i)") == 13.5D); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select str_s as myString, count(*) as count, sum(field_i) as sum, min(field_i) as min, max(field_i) as max, avg(field_i) as avg from collection1 where (text='XXXX' AND NOT text='XXXX XXX') group by str_s order by str_s desc"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select str_s as myString, count(*) as count, sum(field_i) as sum, min(field_i) as min, max(field_i) as max, avg(field_i) as avg from collection1 where (text='XXXX' AND NOT text='XXXX XXX') group by str_s order by str_s desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); //The sort by and order by match and no limit is applied. 
All the Tuples should be returned in @@ -798,11 +778,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having sum(field_i) = 19"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having sum(field_i) = 19"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 1); @@ -815,11 +794,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getDouble("max(field_i)") == 11); assert(tuple.getDouble("avg(field_i)") == 9.5D); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having ((sum(field_i) = 19) AND (min(field_i) = 8))"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having ((sum(field_i) = 19) AND (min(field_i) = 8))"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); //Only two results because of the limit. 
@@ -833,11 +811,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getDouble("max(field_i)") == 11); assert(tuple.getDouble("avg(field_i)") == 9.5D); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select str_s as myString, count(*), sum(field_i) as sum, min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by myString having ((sum = 19) AND (min(field_i) = 8))"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select str_s as myString, count(*), sum(field_i) as sum, min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by myString having ((sum = 19) AND (min(field_i) = 8))"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); //Only two results because of the limit. @@ -852,11 +829,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getDouble("avg(field_i)") == 9.5D); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having ((sum(field_i) = 19) AND (min(field_i) = 100))"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having ((sum(field_i) = 19) AND (min(field_i) = 100))"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 0); @@ -886,12 +862,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { indexr("id", "7", "text", "XXXX XXXX", "str_s", "c", "field_i", "50"); indexr("id", "8", "text", "XXXX XXXX", "str_s", "c", "field_i", "60"); commit(); - Map params = new HashMap(); - 
params.put(CommonParams.QT, "/sql"); - params.put("aggregationMode", "facet"); - params.put("stmt", "select distinct 'str_s', 'field_i' from collection1 order by 'str_s' asc, 'field_i' asc"); + SolrParams sParams = mapParams(CommonParams.QT, "/sql", "aggregationMode", "facet", + "stmt", "select distinct 'str_s', 'field_i' from collection1 order by 'str_s' asc, 'field_i' asc"); - SolrStream solrStream = new SolrStream(jetty.url, params); + SolrStream solrStream = new SolrStream(jetty.url, sParams); List tuples = getTuples(solrStream); assert(tuples.size() == 6); @@ -924,12 +898,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { //reverse the sort - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("aggregationMode", "facet"); - params.put("stmt", "select distinct str_s, field_i from collection1 order by str_s desc, field_i desc"); + sParams = mapParams(CommonParams.QT, "/sql", "aggregationMode", "facet", + "stmt", "select distinct str_s, field_i from collection1 order by str_s desc, field_i desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 6); @@ -962,12 +934,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { //reverse the sort - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("aggregationMode", "facet"); - params.put("stmt", "select distinct str_s as myString, field_i as myInt from collection1 order by str_s desc, myInt desc"); + sParams = mapParams(CommonParams.QT, "/sql", "aggregationMode", "facet", + "stmt", "select distinct str_s as myString, field_i as myInt from collection1 order by str_s desc, myInt desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 6); @@ -999,12 +969,10 @@ public class TestSQLHandler extends 
AbstractFullDistribZkTestBase { assert(tuple.getLong("myInt") == 1); //test with limit - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("aggregationMode", "facet"); - params.put("stmt", "select distinct str_s, field_i from collection1 order by str_s desc, field_i desc limit 2"); + sParams = mapParams(CommonParams.QT, "/sql", "aggregationMode", "facet", + "stmt", "select distinct str_s, field_i from collection1 order by str_s desc, field_i desc limit 2"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 2); @@ -1020,12 +988,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { // Test without a sort. Sort should be asc by default. - new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("aggregationMode", "facet"); - params.put("stmt", "select distinct str_s, field_i from collection1"); + sParams = mapParams(CommonParams.QT, "/sql", "aggregationMode", "facet", + "stmt", "select distinct str_s, field_i from collection1"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 6); @@ -1057,12 +1023,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { // Test with a predicate. 
- new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("aggregationMode", "facet"); - params.put("stmt", "select distinct str_s, field_i from collection1 where str_s = 'a'"); + sParams = mapParams(CommonParams.QT, "/sql", "aggregationMode", "facet", + "stmt", "select distinct str_s, field_i from collection1 where str_s = 'a'"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 2); @@ -1100,11 +1064,11 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { indexr("id", "7", "text", "XXXX XXXX", "str_s", "c", "field_i", "50"); indexr("id", "8", "text", "XXXX XXXX", "str_s", "c", "field_i", "60"); commit(); - Map params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select distinct 'str_s', 'field_i' from collection1 order by 'str_s' asc, 'field_i' asc"); + + SolrParams sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select distinct 'str_s', 'field_i' from collection1 order by 'str_s' asc, 'field_i' asc"); - SolrStream solrStream = new SolrStream(jetty.url, params); + SolrStream solrStream = new SolrStream(jetty.url, sParams); List tuples = getTuples(solrStream); assert(tuples.size() == 6); @@ -1137,11 +1101,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { //reverse the sort - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select distinct str_s, field_i from collection1 order by str_s desc, field_i desc"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select distinct str_s, field_i from collection1 order by str_s desc, field_i desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 6); @@ -1173,11 +1136,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getLong("field_i") == 1); 
- params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select distinct str_s as myString, field_i from collection1 order by myString desc, field_i desc"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select distinct str_s as myString, field_i from collection1 order by myString desc, field_i desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 6); @@ -1210,11 +1172,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { //test with limit - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select distinct str_s, field_i from collection1 order by str_s desc, field_i desc limit 2"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select distinct str_s, field_i from collection1 order by str_s desc, field_i desc limit 2"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 2); @@ -1230,11 +1191,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { // Test without a sort. Sort should be asc by default. - new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select distinct str_s, field_i from collection1"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select distinct str_s, field_i from collection1"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 6); @@ -1265,11 +1225,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { // Test with a predicate. 
- new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select distinct str_s, field_i from collection1 where str_s = 'a'"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select distinct str_s, field_i from collection1 where str_s = 'a'"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 2); @@ -1306,12 +1265,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { indexr("id", "7", "text", "XXXX XXXX", "str_s", "c", "field_i", "50"); indexr("id", "8", "text", "XXXX XXXX", "str_s", "c", "field_i", "60"); commit(); - Map params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("numWorkers", "2"); - params.put("stmt", "select distinct str_s, field_i from collection1 order by str_s asc, field_i asc"); + SolrParams sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", + "stmt", "select distinct str_s, field_i from collection1 order by str_s asc, field_i asc"); - SolrStream solrStream = new SolrStream(jetty.url, params); + SolrStream solrStream = new SolrStream(jetty.url, sParams); List tuples = getTuples(solrStream); assert(tuples.size() == 6); @@ -1344,12 +1301,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { //reverse the sort - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("numWorkers", "2"); - params.put("stmt", "select distinct str_s, field_i from collection1 order by str_s desc, field_i desc"); + sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", + "stmt", "select distinct str_s, field_i from collection1 order by str_s desc, field_i desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 6); @@ -1382,12 +1337,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { //reverse the sort - 
params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("numWorkers", "2"); - params.put("stmt", "select distinct str_s as myString, field_i from collection1 order by myString desc, field_i desc"); + sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", + "stmt", "select distinct str_s as myString, field_i from collection1 order by myString desc, field_i desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 6); @@ -1420,12 +1373,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { //test with limit - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("numWorkers", "2"); - params.put("stmt", "select distinct str_s, field_i from collection1 order by str_s desc, field_i desc limit 2"); + sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", + "stmt", "select distinct str_s, field_i from collection1 order by str_s desc, field_i desc limit 2"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 2); @@ -1441,12 +1392,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { // Test without a sort. Sort should be asc by default. - new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("numWorkers", "2"); - params.put("stmt", "select distinct str_s, field_i from collection1"); + sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", + "stmt", "select distinct str_s, field_i from collection1"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 6); @@ -1477,12 +1426,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { // Test with a predicate. 
- new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("numWorkers", "2"); - params.put("stmt", "select distinct str_s, field_i from collection1 where str_s = 'a'"); + sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", + "stmt", "select distinct str_s, field_i from collection1 where str_s = 'a'"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 2); @@ -1520,12 +1467,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { indexr("id", "7", "text", "XXXX XXXX", "str_s", "c", "field_i", "50"); indexr("id", "8", "text", "XXXX XXXX", "str_s", "c", "field_i", "60"); commit(); - Map params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("aggregationMode", "facet"); - params.put("stmt", "select 'str_s', 'count(*)', sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by 'str_s' order by 'sum(field_i)' asc limit 2"); + SolrParams sParams = mapParams(CommonParams.QT, "/sql", "aggregationMode", "facet", + "stmt", "select 'str_s', 'count(*)', sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by 'str_s' order by 'sum(field_i)' asc limit 2"); - SolrStream solrStream = new SolrStream(jetty.url, params); + SolrStream solrStream = new SolrStream(jetty.url, sParams); List tuples = getTuples(solrStream); //Only two results because of the limit. 
@@ -1549,12 +1494,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getDouble("max(field_i)") == 20); assert(tuple.getDouble("avg(field_i)") == 13.5D); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("aggregationMode", "facet"); - params.put("stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where (text='XXXX' AND NOT text='XXXX XXX') group by str_s order by str_s desc"); + sParams = mapParams(CommonParams.QT, "/sql", "aggregationMode", "facet", + "stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where (text='XXXX' AND NOT text='XXXX XXX') group by str_s order by str_s desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); //The sort by and order by match and no limit is applied. All the Tuples should be returned in @@ -1586,12 +1529,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getDouble("max(field_i)") == 20); assert(tuple.getDouble("avg(field_i)") == 13.5D); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("aggregationMode", "facet"); - params.put("stmt", "select str_s as myString, count(*), sum(field_i) as sum, min(field_i), max(field_i), avg(field_i) from collection1 where (text='XXXX' AND NOT text='XXXX XXX') group by myString order by myString desc"); + sParams = mapParams(CommonParams.QT, "/sql", "aggregationMode", "facet", + "stmt", "select str_s as myString, count(*), sum(field_i) as sum, min(field_i), max(field_i), avg(field_i) from collection1 where (text='XXXX' AND NOT text='XXXX XXX') group by myString order by myString desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); //The sort by and order by match and no limit is applied. 
All the Tuples should be returned in @@ -1626,12 +1567,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("aggregationMode", "facet"); - params.put("stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having 'sum(field_i)' = 19"); + sParams = mapParams(CommonParams.QT, "/sql", "aggregationMode", "facet", + "stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having 'sum(field_i)' = 19"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 1); @@ -1644,12 +1583,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getDouble("max(field_i)") == 11); assert(tuple.getDouble("avg(field_i)") == 9.5D); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("aggregationMode", "facet"); - params.put("stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having (('sum(field_i)' = 19) AND (min(field_i) = 8))"); + sParams = mapParams(CommonParams.QT, "/sql", "aggregationMode", "facet", + "stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having (('sum(field_i)' = 19) AND (min(field_i) = 8))"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); //Only two results because of the limit. 
@@ -1664,12 +1601,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getDouble("avg(field_i)") == 9.5D); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("aggregationMode", "facet"); - params.put("stmt", "select str_s myString, count(*), sum(field_i) as sum, min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by myString having ((sum = 19) AND (min(field_i) = 8))"); + sParams = mapParams(CommonParams.QT, "/sql", "aggregationMode", "facet", + "stmt", "select str_s myString, count(*), sum(field_i) as sum, min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by myString having ((sum = 19) AND (min(field_i) = 8))"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); //Only two results because of the limit. @@ -1683,12 +1618,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getDouble("max(field_i)") == 11); assert(tuple.getDouble("avg(field_i)") == 9.5D); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("aggregationMode", "facet"); - params.put("stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having ((sum(field_i) = 19) AND (min(field_i) = 100))"); + sParams = mapParams(CommonParams.QT, "/sql", "aggregationMode", "facet", + "stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having ((sum(field_i) = 19) AND (min(field_i) = 100))"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 0); @@ -1721,12 +1654,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { indexr("id", "7", "text", "XXXX XXXX", "str_s", "c", 
"field_i", "50"); indexr("id", "8", "text", "XXXX XXXX", "str_s", "c", "field_i", "60"); commit(); - Map params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("numWorkers", "2"); - params.put("stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s order by sum(field_i) asc limit 2"); + SolrParams sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", + "stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s order by sum(field_i) asc limit 2"); - SolrStream solrStream = new SolrStream(jetty.url, params); + SolrStream solrStream = new SolrStream(jetty.url, sParams); List tuples = getTuples(solrStream); //Only two results because of the limit. @@ -1750,14 +1681,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getDouble("max(field_i)") == 20); assert(tuple.getDouble("avg(field_i)") == 13.5D); + sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", + "stmt", "select str_s, count(*), sum(field_i) as sum, min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s order by sum asc limit 2"); - - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("numWorkers", "2"); - params.put("stmt", "select str_s, count(*), sum(field_i) as sum, min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s order by sum asc limit 2"); - - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); //Only two results because of the limit. 
@@ -1780,12 +1707,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getDouble("avg(field_i)") == 13.5D); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("numWorkers", "2"); - params.put("stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s order by str_s desc"); + sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", + "stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s order by str_s desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); //The sort by and order by match and no limit is applied. All the Tuples should be returned in @@ -1818,12 +1743,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getDouble("avg(field_i)") == 13.5D); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("numWorkers", "2"); - params.put("stmt", "select str_s as myString, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by myString order by myString desc"); + sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", + "stmt", "select str_s as myString, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by myString order by myString desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); //The sort by and order by match and no limit is applied. 
All the Tuples should be returned in @@ -1856,12 +1779,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getDouble("avg(field_i)") == 13.5D); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("numWorkers", "2"); - params.put("stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having sum(field_i) = 19"); + sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", + "stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having sum(field_i) = 19"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); //Only two results because of the limit. @@ -1883,12 +1804,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getDouble("max(field_i)") == 11); assert(tuple.getDouble("avg(field_i)") == 9.5D); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("numWorkers", "2"); - params.put("stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having ((sum(field_i) = 19) AND (min(field_i) = 8))"); + sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", + "stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having ((sum(field_i) = 19) AND (min(field_i) = 8))"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); //Only two results because of the limit. 
@@ -1902,12 +1821,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getDouble("max(field_i)") == 11); assert(tuple.getDouble("avg(field_i)") == 9.5D); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("numWorkers", "2"); - params.put("stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having ((sum(field_i) = 19) AND (min(field_i) = 100))"); + sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", + "stmt", "select str_s, count(*), sum(field_i), min(field_i), max(field_i), avg(field_i) from collection1 where text='XXXX' group by str_s having ((sum(field_i) = 19) AND (min(field_i) = 100))"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); assert(tuples.size() == 0); @@ -1939,11 +1856,9 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { commit(); - Map params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select count(*), sum(a_i), min(a_i), max(a_i), avg(a_i), sum(a_f), min(a_f), max(a_f), avg(a_f) from collection1"); + SolrParams sParams = mapParams(CommonParams.QT, "/sql", "stmt", "select count(*), sum(a_i), min(a_i), max(a_i), avg(a_i), sum(a_f), min(a_f), max(a_f), avg(a_f) from collection1"); - SolrStream solrStream = new SolrStream(jetty.url, params); + SolrStream solrStream = new SolrStream(jetty.url, sParams); List tuples = getTuples(solrStream); @@ -1976,12 +1891,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select count(*) as count, sum(a_i) as sum, min(a_i) as min, max(a_i) as max, avg(a_i) as avg, sum(a_f), min(a_f), max(a_f), avg(a_f) from collection1"); - - solrStream = new SolrStream(jetty.url, params); + sParams = mapParams(CommonParams.QT, "/sql", 
+ "stmt", "select count(*) as count, sum(a_i) as sum, min(a_i) as min, max(a_i) as max, avg(a_i) as avg, sum(a_f), min(a_f), max(a_f), avg(a_f) from collection1"); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); @@ -2016,11 +1929,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { // Test where clause hits - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select count(*), sum(a_i), min(a_i), max(a_i), avg(a_i), sum(a_f), min(a_f), max(a_f), avg(a_f) from collection1 where id = 2"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select count(*), sum(a_i), min(a_i), max(a_i), avg(a_i), sum(a_f), min(a_f), max(a_f), avg(a_f) from collection1 where id = 2"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); @@ -2051,11 +1963,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { // Test zero hits - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select count(*), sum(a_i), min(a_i), max(a_i), avg(a_i), sum(a_f), min(a_f), max(a_f), avg(a_f) from collection1 where a_s = 'blah'"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select count(*), sum(a_i), min(a_i), max(a_i), avg(a_i), sum(a_f), min(a_f), max(a_f), avg(a_f) from collection1 where a_s = 'blah'"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); @@ -2109,11 +2020,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { indexr("id", "8", "year_i", "2014", "month_i", "4", "day_i", "2", "item_i", "1"); commit(); - Map params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select year_i, sum(item_i) from collection1 group by year_i order by year_i desc"); + SolrParams sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select 
year_i, sum(item_i) from collection1 group by year_i order by year_i desc"); - SolrStream solrStream = new SolrStream(jetty.url, params); + SolrStream solrStream = new SolrStream(jetty.url, sParams); List tuples = getTuples(solrStream); //Only two results because of the limit. @@ -2129,9 +2039,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getLong("year_i") == 2014); assert(tuple.getDouble("sum(item_i)") == 7); - params.put("stmt", "select year_i, month_i, sum(item_i) from collection1 group by year_i, month_i order by year_i desc, month_i desc"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select year_i, month_i, sum(item_i) from collection1 group by year_i, month_i order by year_i desc, month_i desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); //Only two results because of the limit. @@ -2154,11 +2065,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getLong("month_i") == 4); assert(tuple.getDouble("sum(item_i)") == 7); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("stmt", "select year_i, month_i, day_i, sum(item_i) from collection1 group by year_i, month_i, day_i order by year_i desc, month_i desc, day_i desc"); + sParams = mapParams(CommonParams.QT, "/sql", + "stmt", "select year_i, month_i, day_i, sum(item_i) from collection1 group by year_i, month_i, day_i order by year_i desc, month_i desc, day_i desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); //Only two results because of the limit. 
@@ -2227,12 +2137,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { indexr("id", "8", "year_i", "2014", "month_i", "4", "day_i", "2", "item_i", "1"); commit(); - Map params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("aggregationMode", "facet"); - params.put("stmt", "select year_i, sum(item_i) from collection1 group by year_i order by year_i desc"); + SolrParams sParams = mapParams(CommonParams.QT, "/sql", "aggregationMode", "facet", + "stmt", "select year_i, sum(item_i) from collection1 group by year_i order by year_i desc"); - SolrStream solrStream = new SolrStream(jetty.url, params); + SolrStream solrStream = new SolrStream(jetty.url, sParams); List tuples = getTuples(solrStream); //Only two results because of the limit. @@ -2249,12 +2157,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getDouble("sum(item_i)") == 7); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("aggregationMode", "facet"); - params.put("stmt", "select year_i, month_i, sum(item_i) from collection1 group by year_i, month_i order by year_i desc, month_i desc"); + sParams = mapParams(CommonParams.QT, "/sql", "aggregationMode", "facet", + "stmt", "select year_i, month_i, sum(item_i) from collection1 group by year_i, month_i order by year_i desc, month_i desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); //Only two results because of the limit. 
@@ -2276,12 +2182,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getDouble("sum(item_i)") == 7); - params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("aggregationMode", "facet"); - params.put("stmt", "select year_i, month_i, day_i, sum(item_i) from collection1 group by year_i, month_i, day_i order by year_i desc, month_i desc, day_i desc"); + sParams = mapParams(CommonParams.QT, "/sql", "aggregationMode", "facet", + "stmt", "select year_i, month_i, day_i, sum(item_i) from collection1 group by year_i, month_i, day_i order by year_i desc, month_i desc, day_i desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); //Only two results because of the limit. @@ -2346,12 +2250,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { indexr("id", "8", "year_i", "2014", "month_i", "4", "day_i", "2", "item_i", "1"); commit(); - Map params = new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("numWorkers", 2); - params.put("stmt", "select year_i, sum(item_i) from collection1 group by year_i order by year_i desc"); + SolrParams sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", + "stmt", "select year_i, sum(item_i) from collection1 group by year_i order by year_i desc"); - SolrStream solrStream = new SolrStream(jetty.url, params); + SolrStream solrStream = new SolrStream(jetty.url, sParams); List tuples = getTuples(solrStream); //Only two results because of the limit. 
@@ -2368,12 +2270,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getLong("year_i") == 2014); assert(tuple.getDouble("sum(item_i)") == 7); - new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("numWorkers", 2); - params.put("stmt", "select year_i, month_i, sum(item_i) from collection1 group by year_i, month_i order by year_i desc, month_i desc"); + sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", + "stmt", "select year_i, month_i, sum(item_i) from collection1 group by year_i, month_i order by year_i desc, month_i desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); //Only two results because of the limit. @@ -2399,12 +2299,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { assert(tuple.getDouble("sum(item_i)") == 7); - new HashMap(); - params.put(CommonParams.QT, "/sql"); - params.put("numWorkers", 2); - params.put("stmt", "select year_i, month_i, day_i, sum(item_i) from collection1 group by year_i, month_i, day_i order by year_i desc, month_i desc, day_i desc"); + sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", + "stmt", "select year_i, month_i, day_i, sum(item_i) from collection1 group by year_i, month_i, day_i order by year_i desc, month_i desc, day_i desc"); - solrStream = new SolrStream(jetty.url, params); + solrStream = new SolrStream(jetty.url, sParams); tuples = getTuples(solrStream); //Only two results because of the limit. 
@@ -2456,12 +2354,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { private void testCatalogStream() throws Exception { CloudJettyRunner jetty = this.cloudJettys.get(0); - Map params = new HashMap<>(); - params.put(CommonParams.QT, "/sql"); - params.put("numWorkers", 2); - params.put("stmt", "select TABLE_CAT from _CATALOGS_"); + SolrParams sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", + "stmt", "select TABLE_CAT from _CATALOGS_"); - SolrStream solrStream = new SolrStream(jetty.url, params); + SolrStream solrStream = new SolrStream(jetty.url, sParams); List tuples = getTuples(solrStream); assertEquals(tuples.size(), 1); @@ -2471,12 +2367,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { private void testSchemasStream() throws Exception { CloudJettyRunner jetty = this.cloudJettys.get(0); - Map params = new HashMap<>(); - params.put(CommonParams.QT, "/sql"); - params.put("numWorkers", 2); - params.put("stmt", "select TABLE_SCHEM, TABLE_CATALOG from _SCHEMAS_"); + SolrParams sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", + "stmt", "select TABLE_SCHEM, TABLE_CATALOG from _SCHEMAS_"); - SolrStream solrStream = new SolrStream(jetty.url, params); + SolrStream solrStream = new SolrStream(jetty.url, sParams); List tuples = getTuples(solrStream); assertEquals(tuples.size(), 0); @@ -2485,12 +2379,10 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { private void testTablesStream() throws Exception { CloudJettyRunner jetty = this.cloudJettys.get(0); - Map params = new HashMap<>(); - params.put(CommonParams.QT, "/sql"); - params.put("numWorkers", 2); - params.put("stmt", "select TABLE_CAT, TABLE_SCHEM, TABLE_NAME, TABLE_TYPE, REMARKS from _TABLES_"); + SolrParams sParams = mapParams(CommonParams.QT, "/sql", "numWorkers", "2", + "stmt", "select TABLE_CAT, TABLE_SCHEM, TABLE_NAME, TABLE_TYPE, REMARKS from _TABLES_"); - SolrStream solrStream = new SolrStream(jetty.url, params); + 
SolrStream solrStream = new SolrStream(jetty.url, sParams); List tuples = getTuples(solrStream); assertEquals(2, tuples.size()); @@ -2542,4 +2434,15 @@ public class TestSQLHandler extends AbstractFullDistribZkTestBase { tupleStream.close(); return t; } + + public static SolrParams mapParams(String... vals) { + ModifiableSolrParams params = new ModifiableSolrParams(); + assertEquals("Parameters passed in here must be in pairs!", 0, (vals.length % 2)); + for (int idx = 0; idx < vals.length; idx += 2) { + params.add(vals[idx], vals[idx + 1]); + } + + return params; + } + } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/GatherNodesStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/GatherNodesStream.java index 90ac1cb5d4a..10bd6340378 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/GatherNodesStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/GatherNodesStream.java @@ -45,7 +45,10 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParamete import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue; import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType; +import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.ExecutorUtil; +import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.SolrjNamedThreadFactory; public class GatherNodesStream extends TupleStream implements Expressible { @@ -404,7 +407,7 @@ public class GatherNodesStream extends TupleStream implements Expressible { public List call() { - Map joinParams = new HashMap(); + Set flSet = new HashSet(); flSet.add(gather); flSet.add(traverseTo); @@ -435,11 +438,11 @@ public class GatherNodesStream extends TupleStream implements Expressible { buf.append(","); } } - - 
joinParams.putAll(queryParams); - joinParams.put("fl", buf.toString()); - joinParams.put("qt", "/export"); - joinParams.put("sort", gather + " asc,"+traverseTo +" asc"); + + ModifiableSolrParams joinSParams = new ModifiableSolrParams(SolrParams.toMultiMap(new NamedList(queryParams))); + joinSParams.set("fl", buf.toString()); + joinSParams.set("qt", "/export"); + joinSParams.set("sort", gather + " asc,"+traverseTo +" asc"); StringBuffer nodeQuery = new StringBuffer(); @@ -454,14 +457,14 @@ public class GatherNodesStream extends TupleStream implements Expressible { if(maxDocFreq > -1) { String docFreqParam = " maxDocFreq="+maxDocFreq; - joinParams.put("q", "{!graphTerms f=" + traverseTo + docFreqParam + "}" + nodeQuery.toString()); + joinSParams.set("q", "{!graphTerms f=" + traverseTo + docFreqParam + "}" + nodeQuery.toString()); } else { - joinParams.put("q", "{!terms f=" + traverseTo+"}" + nodeQuery.toString()); + joinSParams.set("q", "{!terms f=" + traverseTo+"}" + nodeQuery.toString()); } TupleStream stream = null; try { - stream = new UniqueStream(new CloudSolrStream(zkHost, collection, joinParams), new MultipleFieldEqualitor(new FieldEqualitor(gather), new FieldEqualitor(traverseTo))); + stream = new UniqueStream(new CloudSolrStream(zkHost, collection, joinSParams), new MultipleFieldEqualitor(new FieldEqualitor(gather), new FieldEqualitor(traverseTo))); stream.setStreamContext(streamContext); stream.open(); BATCH: diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java index 7418e0f2523..6d7b32af9a0 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/graph/ShortestPathStream.java @@ -46,6 +46,9 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter; import 
org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue; import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType; +import org.apache.solr.common.params.MapSolrParams; +import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.SolrjNamedThreadFactory; @@ -65,8 +68,9 @@ public class ShortestPathStream extends TupleStream implements Expressible { private boolean found; private StreamContext streamContext; private int threads; - private Map queryParams; + private SolrParams queryParams; + @Deprecated public ShortestPathStream(String zkHost, String collection, String fromNode, @@ -78,6 +82,29 @@ public class ShortestPathStream extends TupleStream implements Expressible { int threads, int maxDepth) { + init(zkHost, + collection, + fromNode, + toNode, + fromField, + toField, + new MapSolrParams(queryParams), + joinBatchSize, + threads, + maxDepth); + } + + public ShortestPathStream(String zkHost, + String collection, + String fromNode, + String toNode, + String fromField, + String toField, + SolrParams queryParams, + int joinBatchSize, + int threads, + int maxDepth) { + init(zkHost, collection, fromNode, @@ -162,7 +189,7 @@ public class ShortestPathStream extends TupleStream implements Expressible { maxDepth = Integer.parseInt(((StreamExpressionValue) depthExpression.getParameter()).getValue()); } - Map params = new HashMap(); + ModifiableSolrParams params = new ModifiableSolrParams(); for(StreamExpressionNamedParameter namedParam : namedParams){ if(!namedParam.getName().equals("zkHost") && !namedParam.getName().equals("to") && @@ -172,7 +199,7 @@ public class ShortestPathStream extends TupleStream implements Expressible { !namedParam.getName().equals("threads") && !namedParam.getName().equals("partitionSize")) { - 
params.put(namedParam.getName(), namedParam.getParameter().toString().trim()); + params.set(namedParam.getName(), namedParam.getParameter().toString().trim()); } } @@ -201,7 +228,7 @@ public class ShortestPathStream extends TupleStream implements Expressible { String toNode, String fromField, String toField, - Map queryParams, + SolrParams queryParams, int joinBatchSize, int threads, int maxDepth) { @@ -225,10 +252,10 @@ public class ShortestPathStream extends TupleStream implements Expressible { // collection expression.addParameter(collection); - Set> entries = queryParams.entrySet(); // parameters - for(Map.Entry param : entries){ - String value = param.getValue().toString(); + ModifiableSolrParams mParams = new ModifiableSolrParams(queryParams); + for(Map.Entry param : mParams.getMap().entrySet()){ + String value = String.join(",", param.getValue()); // SOLR-8409: This is a special case where the params contain a " character // Do note that in any other BASE streams with parameters where a " might come into play @@ -262,8 +289,9 @@ public class ShortestPathStream extends TupleStream implements Expressible { StreamExplanation child = new StreamExplanation(getStreamNodeId() + "-datastore"); child.setFunctionName("solr (graph)"); child.setImplementingClass("Solr/Lucene"); - child.setExpressionType(ExpressionType.DATASTORE); - child.setExpression(queryParams.entrySet().stream().map(e -> String.format(Locale.ROOT, "%s=%s", e.getKey(), e.getValue())).collect(Collectors.joining(","))); + child.setExpressionType(ExpressionType.DATASTORE); + ModifiableSolrParams mParams = new ModifiableSolrParams(queryParams); + child.setExpression(mParams.getMap().entrySet().stream().map(e -> String.format(Locale.ROOT, "%s=%s", e.getKey(), e.getValue())).collect(Collectors.joining(","))); explanation.addChild(child); return explanation; @@ -417,13 +445,12 @@ public class ShortestPathStream extends TupleStream implements Expressible { public List call() { - Map joinParams = new 
HashMap(); + ModifiableSolrParams joinParams = new ModifiableSolrParams(queryParams); String fl = fromField + "," + toField; - joinParams.putAll(queryParams); - joinParams.put("fl", fl); - joinParams.put("qt", "/export"); - joinParams.put("sort", toField + " asc,"+fromField +" asc"); + joinParams.set("fl", fl); + joinParams.set("qt", "/export"); + joinParams.set("sort", toField + " asc,"+fromField +" asc"); StringBuffer nodeQuery = new StringBuffer(); @@ -433,7 +460,7 @@ public class ShortestPathStream extends TupleStream implements Expressible { String q = fromField + ":(" + nodeQuery.toString().trim() + ")"; - joinParams.put("q", q); + joinParams.set("q", q); TupleStream stream = null; try { stream = new UniqueStream(new CloudSolrStream(zkHost, collection, joinParams), new MultipleFieldEqualitor(new FieldEqualitor(toField), new FieldEqualitor(fromField))); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/StatementImpl.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/StatementImpl.java index 1b1200d2842..c05028deb58 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/StatementImpl.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/sql/StatementImpl.java @@ -26,8 +26,6 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; -import java.util.Map; -import java.util.HashMap; import java.util.Random; import org.apache.solr.client.solrj.io.stream.SolrStream; @@ -37,6 +35,7 @@ import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.ZkCoreNodeProps; import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.params.CommonParams; +import org.apache.solr.common.params.ModifiableSolrParams; class StatementImpl implements Statement { @@ -96,11 +95,11 @@ class StatementImpl implements Statement { Collections.shuffle(shuffler, new Random()); - Map params = new HashMap<>(); - params.put(CommonParams.QT, "/sql"); - 
params.put("stmt", sql); + ModifiableSolrParams params = new ModifiableSolrParams(); + params.set(CommonParams.QT, "/sql"); + params.set("stmt", sql); for(String propertyName : this.connection.getProperties().stringPropertyNames()) { - params.put(propertyName, this.connection.getProperties().getProperty(propertyName)); + params.set(propertyName, this.connection.getProperties().getProperty(propertyName)); } Replica rep = shuffler.get(0); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java index b1659c91b1f..dd02175e963 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java @@ -36,7 +36,6 @@ import java.util.stream.Collectors; import org.apache.solr.client.solrj.impl.CloudSolrClient; import org.apache.solr.client.solrj.impl.CloudSolrClient.Builder; -import org.apache.solr.client.solrj.io.SolrClientCache; import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.comp.ComparatorOrder; import org.apache.solr.client.solrj.io.comp.FieldComparator; @@ -56,6 +55,9 @@ import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.ZkCoreNodeProps; import org.apache.solr.common.cloud.ZkStateReader; +import org.apache.solr.common.params.MapSolrParams; +import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.SolrjNamedThreadFactory; @@ -72,16 +74,11 @@ public class CloudSolrStream extends TupleStream implements Expressible { protected String zkHost; protected String collection; - protected Map params; + protected SolrParams params; private Map fieldMappings; protected StreamComparator comp; - private int 
zkConnectTimeout = 10000; - private int zkClientTimeout = 10000; - private int numWorkers; - private int workerID; private boolean trace; protected transient Map eofTuples; - protected transient SolrClientCache cache; protected transient CloudSolrClient cloudSolrClient; protected transient List solrStreams; protected transient TreeSet tuples; @@ -91,7 +88,34 @@ public class CloudSolrStream extends TupleStream implements Expressible { protected CloudSolrStream(){ } + + + /** + * @param zkHost Zookeeper ensemble connection string + * @param collectionName Name of the collection to operate on + * @param params Map<String, String> of parameter/value pairs + * @throws IOException Something went wrong + *
+ * This form does not allow specifying multiple clauses, say "fq" clauses, use the form that + * takes a SolrParams. Transition code can call the preferred method that takes SolrParams + * by calling CloudSolrStream(zkHost, collectionName, + * new ModifiableSolrParams(SolrParams.toMultiMap(new NamedList(Map<String, String>))); + * @deprecated Use the constructor that has a SolrParams obj rather than a Map + */ + + @Deprecated public CloudSolrStream(String zkHost, String collectionName, Map params) throws IOException { + init(collectionName, zkHost, new MapSolrParams(params)); + } + + /** + * @param zkHost Zookeeper ensemble connection string + * @param collectionName Name of the collection to operate on + * @param params Map<String, String[]> of parameter/value pairs + * @throws IOException Something went wrong + */ + + public CloudSolrStream(String zkHost, String collectionName, SolrParams params) throws IOException { init(collectionName, zkHost, params); } @@ -117,16 +141,16 @@ public class CloudSolrStream extends TupleStream implements Expressible { throw new IOException(String.format(Locale.ROOT,"invalid expression %s - at least one named parameter expected. eg. 
'q=*:*'",expression)); } - Map params = new HashMap(); + ModifiableSolrParams mParams = new ModifiableSolrParams(); for(StreamExpressionNamedParameter namedParam : namedParams){ if(!namedParam.getName().equals("zkHost") && !namedParam.getName().equals("aliases")){ - params.put(namedParam.getName(), namedParam.getParameter().toString().trim()); + mParams.add(namedParam.getName(), namedParam.getParameter().toString().trim()); } } // Aliases, optional, if provided then need to split if(null != aliasExpression && aliasExpression.getParameter() instanceof StreamExpressionValue){ - fieldMappings = new HashMap(); + fieldMappings = new HashMap<>(); for(String mapping : ((StreamExpressionValue)aliasExpression.getParameter()).getValue().split(",")){ String[] parts = mapping.trim().split("="); if(2 == parts.length){ @@ -154,7 +178,7 @@ public class CloudSolrStream extends TupleStream implements Expressible { } // We've got all the required items - init(collectionName, zkHost, params); + init(collectionName, zkHost, mParams); } @Override @@ -168,14 +192,16 @@ public class CloudSolrStream extends TupleStream implements Expressible { expression.addParameter(collection); // parameters - for(Entry param : params.entrySet()){ - String value = param.getValue(); + + ModifiableSolrParams mParams = new ModifiableSolrParams(SolrParams.toMultiMap(params.toNamedList())); + for (Entry param : mParams.getMap().entrySet()) { + String value = String.join(",", param.getValue()); // SOLR-8409: This is a special case where the params contain a " character // Do note that in any other BASE streams with parameters where a " might come into play // that this same replacement needs to take place. 
value = value.replace("\"", "\\\""); - + expression.addParameter(new StreamExpressionNamedParameter(param.getKey(), value)); } @@ -213,29 +239,34 @@ public class CloudSolrStream extends TupleStream implements Expressible { child.setFunctionName(String.format(Locale.ROOT, "solr (%s)", collection)); child.setImplementingClass("Solr/Lucene"); child.setExpressionType(ExpressionType.DATASTORE); + if(null != params){ - child.setExpression(params.entrySet().stream().map(e -> String.format(Locale.ROOT, "%s=%s", e.getKey(), e.getValue())).collect(Collectors.joining(","))); + ModifiableSolrParams mParams = new ModifiableSolrParams(params); + child.setExpression(mParams.getMap().entrySet().stream().map(e -> String.format(Locale.ROOT, "%s=%s", e.getKey(), e.getValue())).collect(Collectors.joining(","))); } explanation.addChild(child); return explanation; } - - private void init(String collectionName, String zkHost, Map params) throws IOException { + + private void init(String collectionName, String zkHost, SolrParams params) throws IOException { this.zkHost = zkHost; this.collection = collectionName; - this.params = params; + this.params = new ModifiableSolrParams(params); // If the comparator is null then it was not explicitly set so we will create one using the sort parameter // of the query. While doing this we will also take into account any aliases such that if we are sorting on // fieldA but fieldA is aliased to alias.fieldA then the comparater will be against alias.fieldA. 
- if(!params.containsKey("fl")){ + String fls = String.join(",", params.getParams("fl")); + if (fls == null) { throw new IOException("fl param expected for a stream"); } - if(!params.containsKey("sort")){ + + String sorts = String.join(",", params.getParams("sort")); + if (sorts == null) { throw new IOException("sort param expected for a stream"); } - this.comp = parseComp((String)params.get("sort"), (String)params.get("fl")); + this.comp = parseComp(sorts, fls); } public void setFieldMappings(Map fieldMappings) { @@ -247,9 +278,6 @@ public class CloudSolrStream extends TupleStream implements Expressible { } public void setStreamContext(StreamContext context) { - this.numWorkers = context.numWorkers; - this.workerID = context.workerID; - this.cache = context.getSolrClientCache(); this.streamContext = context; } @@ -261,8 +289,8 @@ public class CloudSolrStream extends TupleStream implements Expressible { this.tuples = new TreeSet(); this.solrStreams = new ArrayList(); this.eofTuples = Collections.synchronizedMap(new HashMap()); - if(this.cache != null) { - this.cloudSolrClient = this.cache.getCloudSolrClient(zkHost); + if (this.streamContext != null && this.streamContext.getSolrClientCache() != null) { + this.cloudSolrClient = this.streamContext.getSolrClientCache().getCloudSolrClient(zkHost); } else { this.cloudSolrClient = new Builder() .withZkHost(zkHost) @@ -345,7 +373,8 @@ public class CloudSolrStream extends TupleStream implements Expressible { } } - params.put("distrib","false"); // We are the aggregator. + ModifiableSolrParams mParams = new ModifiableSolrParams(params); + mParams.set("distrib", "false"); // We are the aggregator. 
for(Slice slice : slices) { Collection replicas = slice.getReplicas(); @@ -359,7 +388,7 @@ public class CloudSolrStream extends TupleStream implements Expressible { Replica rep = shuffler.get(0); ZkCoreNodeProps zkProps = new ZkCoreNodeProps(rep); String url = zkProps.getCoreUrl(); - SolrStream solrStream = new SolrStream(url, params); + SolrStream solrStream = new SolrStream(url, mParams); if(streamContext != null) { solrStream.setStreamContext(streamContext); } @@ -406,7 +435,9 @@ public class CloudSolrStream extends TupleStream implements Expressible { } } - if(cache == null && cloudSolrClient != null) { + if ((this.streamContext == null || this.streamContext.getSolrClientCache() == null) && + cloudSolrClient != null) { + cloudSolrClient.close(); } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FacetStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FacetStream.java index ceaf13ccf61..ae04a85970c 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FacetStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FacetStream.java @@ -46,7 +46,9 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; import org.apache.solr.client.solrj.io.stream.metrics.Bucket; import org.apache.solr.client.solrj.io.stream.metrics.Metric; import org.apache.solr.client.solrj.request.QueryRequest; +import org.apache.solr.common.params.MapSolrParams; import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; /** @@ -65,11 +67,16 @@ public class FacetStream extends TupleStream implements Expressible { private List tuples = new ArrayList(); private int index; private String zkHost; - private Map props; + private SolrParams params; private String collection; protected transient SolrClientCache cache; protected transient CloudSolrClient cloudSolrClient; + /* + * + * @deprecated. 
Use the form that takes a SolrParams rather than Map<String, String> + */ + @Deprecated public FacetStream(String zkHost, String collection, Map props, @@ -77,7 +84,17 @@ public class FacetStream extends TupleStream implements Expressible { Metric[] metrics, FieldComparator[] bucketSorts, int bucketSizeLimit) throws IOException { - init(collection, props, buckets, bucketSorts, metrics, bucketSizeLimit, zkHost); + init(collection, new MapSolrParams(props), buckets, bucketSorts, metrics, bucketSizeLimit, zkHost); + } + + public FacetStream(String zkHost, + String collection, + SolrParams params, + Bucket[] buckets, + Metric[] metrics, + FieldComparator[] bucketSorts, + int bucketSizeLimit) throws IOException { + init(collection, params, buckets, bucketSorts, metrics, bucketSizeLimit, zkHost); } public FacetStream(StreamExpression expression, StreamFactory factory) throws IOException{ @@ -106,10 +123,10 @@ public class FacetStream extends TupleStream implements Expressible { } // pull out known named params - Map params = new HashMap(); + ModifiableSolrParams params = new ModifiableSolrParams(); for(StreamExpressionNamedParameter namedParam : namedParams){ if(!namedParam.getName().equals("zkHost") && !namedParam.getName().equals("buckets") && !namedParam.getName().equals("bucketSorts") && !namedParam.getName().equals("limit")){ - params.put(namedParam.getName(), namedParam.getParameter().toString().trim()); + params.add(namedParam.getName(), namedParam.getParameter().toString().trim()); } } @@ -204,10 +221,10 @@ public class FacetStream extends TupleStream implements Expressible { return comps; } - - private void init(String collection, Map props, Bucket[] buckets, FieldComparator[] bucketSorts, Metric[] metrics, int bucketSizeLimit, String zkHost) throws IOException { + + private void init(String collection, SolrParams params, Bucket[] buckets, FieldComparator[] bucketSorts, Metric[] metrics, int bucketSizeLimit, String zkHost) throws IOException { this.zkHost = 
zkHost; - this.props = props; + this.params = params; this.buckets = buckets; this.metrics = metrics; this.bucketSizeLimit = bucketSizeLimit; @@ -233,8 +250,11 @@ public class FacetStream extends TupleStream implements Expressible { expression.addParameter(collection); // parameters - for(Entry param : props.entrySet()){ - expression.addParameter(new StreamExpressionNamedParameter(param.getKey(), param.getValue())); + ModifiableSolrParams tmpParams = new ModifiableSolrParams(params); + + for (Entry param : tmpParams.getMap().entrySet()) { + expression.addParameter(new StreamExpressionNamedParameter(param.getKey(), + String.join(",", param.getValue()))); } // buckets @@ -288,8 +308,10 @@ public class FacetStream extends TupleStream implements Expressible { // parallel stream. child.setImplementingClass("Solr/Lucene"); - child.setExpressionType(ExpressionType.DATASTORE); - child.setExpression(props.entrySet().stream().map(e -> String.format(Locale.ROOT, "%s=%s", e.getKey(), e.getValue())).collect(Collectors.joining(","))); + child.setExpressionType(ExpressionType.DATASTORE); + ModifiableSolrParams tmpParams = new ModifiableSolrParams(SolrParams.toMultiMap(params.toNamedList())); + + child.setExpression(tmpParams.getMap().entrySet().stream().map(e -> String.format(Locale.ROOT, "%s=%s", e.getKey(), e.getValue())).collect(Collectors.joining(","))); explanation.addChild(child); @@ -301,8 +323,7 @@ public class FacetStream extends TupleStream implements Expressible { } public List children() { - List l = new ArrayList(); - return l; + return new ArrayList(); } public void open() throws IOException { @@ -317,11 +338,11 @@ public class FacetStream extends TupleStream implements Expressible { FieldComparator[] adjustedSorts = adjustSorts(buckets, bucketSorts); String json = getJsonFacetString(buckets, metrics, adjustedSorts, bucketSizeLimit); - ModifiableSolrParams params = getParams(this.props); - params.add("json.facet", json); - params.add("rows", "0"); + 
ModifiableSolrParams paramsLoc = new ModifiableSolrParams(params); + paramsLoc.set("json.facet", json); + paramsLoc.set("rows", "0"); - QueryRequest request = new QueryRequest(params); + QueryRequest request = new QueryRequest(paramsLoc); try { NamedList response = cloudSolrClient.request(request, collection); getTuples(response, buckets, metrics); @@ -350,15 +371,6 @@ public class FacetStream extends TupleStream implements Expressible { } } - private ModifiableSolrParams getParams(Map props) { - ModifiableSolrParams params = new ModifiableSolrParams(); - for(String key : props.keySet()) { - String value = props.get(key); - params.add(key, value); - } - return params; - } - private String getJsonFacetString(Bucket[] _buckets, Metric[] _metrics, FieldComparator[] _sorts, int _limit) { StringBuilder buf = new StringBuilder(); appendJson(buf, _buckets, _metrics, _sorts, _limit, 0); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java index 957064300d2..779cc31d3aa 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ParallelStream.java @@ -16,21 +16,15 @@ */ package org.apache.solr.client.solrj.io.stream; -import java.io.ByteArrayOutputStream; import java.io.IOException; -import java.io.ObjectOutputStream; -import java.net.URLEncoder; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Set; -import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; -import java.util.Map.Entry; -import java.util.stream.Collectors; import java.util.Random; import org.apache.solr.client.solrj.io.Tuple; @@ -49,7 +43,7 @@ import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import 
org.apache.solr.common.cloud.ZkCoreNodeProps; import org.apache.solr.common.cloud.ZkStateReader; -import org.apache.solr.common.util.Base64; +import org.apache.solr.common.params.ModifiableSolrParams; /** * The ParallelStream decorates a TupleStream implementation and pushes it to N workers for parallel execution. @@ -287,16 +281,17 @@ public class ParallelStream extends CloudSolrStream implements Expressible { Collections.shuffle(shuffler, new Random()); for(int w=0; w it = params.entrySet().iterator(); - while(it.hasNext()) { - Map.Entry entry = it.next(); - solrParams.add((String)entry.getKey(), entry.getValue().toString()); - } - return solrParams; } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StatsStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StatsStream.java index c128a027473..c0f4b438ee0 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StatsStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StatsStream.java @@ -42,7 +42,9 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; import org.apache.solr.client.solrj.io.stream.metrics.Metric; import org.apache.solr.client.solrj.request.QueryRequest; import org.apache.solr.common.SolrDocumentList; +import org.apache.solr.common.params.MapSolrParams; import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; public class StatsStream extends TupleStream implements Expressible { @@ -52,7 +54,7 @@ public class StatsStream extends TupleStream implements Expressible { private Metric[] metrics; private String zkHost; private Tuple tuple; - private Map props; + private SolrParams params; private String collection; private boolean done; private long count; @@ -60,20 +62,29 @@ public class StatsStream extends TupleStream implements Expressible { protected transient SolrClientCache cache; protected transient 
CloudSolrClient cloudSolrClient; + // Use StatsStream(String, String, SolrParams, Metric[] + @Deprecated public StatsStream(String zkHost, String collection, Map props, Metric[] metrics) { - init(zkHost, collection, props, metrics); + init(zkHost, collection, new MapSolrParams(props), metrics); } - - private void init(String zkHost, String collection, Map props, Metric[] metrics) { + + public StatsStream(String zkHost, + String collection, + SolrParams params, + Metric[] metrics) { + init(zkHost, collection, params, metrics); + } + + private void init(String zkHost, String collection, SolrParams params, Metric[] metrics) { this.zkHost = zkHost; - this.props = props; + this.params = params; this.metrics = metrics; this.collection = collection; } - + public StatsStream(StreamExpression expression, StreamFactory factory) throws IOException{ // grab all parameters out String collectionName = factory.getValueOperand(expression, 0); @@ -95,11 +106,11 @@ public class StatsStream extends TupleStream implements Expressible { if(0 == namedParams.size()){ throw new IOException(String.format(Locale.ROOT,"invalid expression %s - at least one named parameter expected. eg. 
'q=*:*'",expression)); } - - Map params = new HashMap(); + + ModifiableSolrParams params = new ModifiableSolrParams(); for(StreamExpressionNamedParameter namedParam : namedParams){ if(!namedParam.getName().equals("zkHost")){ - params.put(namedParam.getName(), namedParam.getParameter().toString().trim()); + params.set(namedParam.getName(), namedParam.getParameter().toString().trim()); } } @@ -139,8 +150,9 @@ public class StatsStream extends TupleStream implements Expressible { expression.addParameter(collection); // parameters - for(Entry param : props.entrySet()){ - expression.addParameter(new StreamExpressionNamedParameter(param.getKey(), param.getValue())); + ModifiableSolrParams mParams = new ModifiableSolrParams(params); + for (Entry param : mParams.getMap().entrySet()) { + expression.addParameter(new StreamExpressionNamedParameter(param.getKey(), String.join(",", param.getValue()))); } // zkHost @@ -170,8 +182,9 @@ public class StatsStream extends TupleStream implements Expressible { // parallel stream. 
child.setImplementingClass("Solr/Lucene"); - child.setExpressionType(ExpressionType.DATASTORE); - child.setExpression(props.entrySet().stream().map(e -> String.format(Locale.ROOT, "%s=%s", e.getKey(), e.getValue())).collect(Collectors.joining(","))); + child.setExpressionType(ExpressionType.DATASTORE); + ModifiableSolrParams mParams = new ModifiableSolrParams(params); + child.setExpression(mParams.getMap().entrySet().stream().map(e -> String.format(Locale.ROOT, "%s=%s", e.getKey(), e.getValue())).collect(Collectors.joining(","))); explanation.addChild(child); return explanation; @@ -195,12 +208,12 @@ public class StatsStream extends TupleStream implements Expressible { .build(); } - ModifiableSolrParams params = getParams(this.props); - addStats(params, metrics); - params.add("stats", "true"); - params.add("rows", "0"); + ModifiableSolrParams paramsLoc = new ModifiableSolrParams(this.params); + addStats(paramsLoc, metrics); + paramsLoc.set("stats", "true"); + paramsLoc.set("rows", "0"); - QueryRequest request = new QueryRequest(params); + QueryRequest request = new QueryRequest(paramsLoc); try { NamedList response = cloudSolrClient.request(request, collection); this.tuple = getTuple(response); @@ -275,15 +288,6 @@ public class StatsStream extends TupleStream implements Expressible { } } - private ModifiableSolrParams getParams(Map props) { - ModifiableSolrParams params = new ModifiableSolrParams(); - for(String key : props.keySet()) { - String value = props.get(key); - params.add(key, value); - } - return params; - } - private Tuple getTuple(NamedList response) { Map map = new HashMap(); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java index ff441093187..8d3279ab55c 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java @@ -56,6 +56,9 
@@ import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.ZkCoreNodeProps; import org.apache.solr.common.cloud.ZkStateReader; +import org.apache.solr.common.params.MapSolrParams; +import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.SolrjNamedThreadFactory; import org.slf4j.Logger; @@ -75,6 +78,8 @@ public class TopicStream extends CloudSolrStream implements Expressible { private Map checkpoints = new HashMap(); private String checkpointCollection; + // Use TopicStream that takes a SolrParams + @Deprecated public TopicStream(String zkHost, String checkpointCollection, String collection, @@ -86,25 +91,42 @@ public class TopicStream extends CloudSolrStream implements Expressible { collection, id, checkpointEvery, - params); + new MapSolrParams(params)); } + public TopicStream(String zkHost, + String checkpointCollection, + String collection, + String id, + long checkpointEvery, + SolrParams params) { + init(zkHost, + checkpointCollection, + collection, + id, + checkpointEvery, + params); + } + + private void init(String zkHost, String checkpointCollection, String collection, String id, long checkpointEvery, - Map params) { + SolrParams params) { this.zkHost = zkHost; - this.params = params; + ModifiableSolrParams mParams = new ModifiableSolrParams(params); + + if(mParams.getParams("rows") == null) { + mParams.set("rows", "500"); + } + this.params = mParams; this.collection = collection; this.checkpointCollection = checkpointCollection; this.checkpointEvery = checkpointEvery; this.id = id; this.comp = new FieldComparator("_version_", ComparatorOrder.ASCENDING); - if(!params.containsKey("rows")) { - params.put("rows", "500"); - } } public TopicStream(StreamExpression expression, StreamFactory factory) throws IOException{ @@ -147,12 +169,12 @@ public class 
TopicStream extends CloudSolrStream implements Expressible { throw new IOException(String.format(Locale.ROOT,"invalid expression %s - at least one named parameter expected. eg. 'q=*:*'",expression)); } - Map params = new HashMap(); + ModifiableSolrParams params = new ModifiableSolrParams(); for(StreamExpressionNamedParameter namedParam : namedParams){ if(!namedParam.getName().equals("zkHost") && !namedParam.getName().equals("id") && !namedParam.getName().equals("checkpointEvery")) { - params.put(namedParam.getName(), namedParam.getParameter().toString().trim()); + params.set(namedParam.getName(), namedParam.getParameter().toString().trim()); } } @@ -189,8 +211,9 @@ public class TopicStream extends CloudSolrStream implements Expressible { // collection expression.addParameter(collection); - for(Entry param : params.entrySet()) { - String value = param.getValue(); + ModifiableSolrParams mParams = new ModifiableSolrParams(params); + for(Entry param : mParams.getMap().entrySet()) { + String value = String.join(",", param.getValue()); // SOLR-8409: This is a special case where the params contain a " character // Do note that in any other BASE streams with parameters where a " might come into play @@ -226,8 +249,10 @@ public class TopicStream extends CloudSolrStream implements Expressible { // parallel stream. 
child.setImplementingClass("Solr/Lucene"); - child.setExpressionType(ExpressionType.DATASTORE); - child.setExpression(params.entrySet().stream().map(e -> String.format(Locale.ROOT, "%s=%s", e.getKey(), e.getValue())).collect(Collectors.joining(","))); + child.setExpressionType(ExpressionType.DATASTORE); + + ModifiableSolrParams mParams = new ModifiableSolrParams(params); + child.setExpression(mParams.getMap().entrySet().stream().map(e -> String.format(Locale.ROOT, "%s=%s", e.getKey(), e.getValue())).collect(Collectors.joining(","))); explanation.addChild(child); } @@ -254,8 +279,8 @@ public class TopicStream extends CloudSolrStream implements Expressible { this.solrStreams = new ArrayList(); this.eofTuples = Collections.synchronizedMap(new HashMap()); - if(cache != null) { - cloudSolrClient = cache.getCloudSolrClient(zkHost); + if(streamContext.getSolrClientCache() != null) { + cloudSolrClient = streamContext.getSolrClientCache().getCloudSolrClient(zkHost); } else { cloudSolrClient = new Builder() .withZkHost(zkHost) @@ -313,7 +338,7 @@ public class TopicStream extends CloudSolrStream implements Expressible { } } - if (cache == null) { + if (streamContext.getSolrClientCache() == null) { cloudSolrClient.close(); } } @@ -369,11 +394,11 @@ public class TopicStream extends CloudSolrStream implements Expressible { private long getCheckpoint(Slice slice, Set liveNodes) throws IOException { Collection replicas = slice.getReplicas(); long checkpoint = -1; - Map params = new HashMap(); - params.put("q","*:*"); - params.put("sort", "_version_ desc"); - params.put("distrib", "false"); - params.put("rows", 1); + ModifiableSolrParams params = new ModifiableSolrParams(); + params.set("q","*:*"); + params.set("sort", "_version_ desc"); + params.set("distrib", "false"); + params.set("rows", 1); for(Replica replica : replicas) { if(replica.getState() == Replica.State.ACTIVE && liveNodes.contains(replica.getNodeName())) { String coreUrl = replica.getCoreUrl(); @@ -432,7 +457,7 @@ 
public class TopicStream extends CloudSolrStream implements Expressible { if(replica.getState() == Replica.State.ACTIVE && liveNodes.contains(replica.getNodeName())){ - HttpSolrClient httpClient = cache.getHttpSolrClient(replica.getCoreUrl()); + HttpSolrClient httpClient = streamContext.getSolrClientCache().getHttpSolrClient(replica.getCoreUrl()); try { SolrDocument doc = httpClient.getById(id); @@ -477,20 +502,19 @@ public class TopicStream extends CloudSolrStream implements Expressible { throw new Exception("Collection not found:" + this.collection); } } - - params.put("distrib", "false"); // We are the aggregator. - String fl = params.get("fl"); - params.put("sort", "_version_ asc"); + ModifiableSolrParams mParams = new ModifiableSolrParams(params); + mParams.set("distrib", "false"); // We are the aggregator. + String fl = mParams.get("fl"); + mParams.set("sort", "_version_ asc"); if(!fl.contains("_version_")) { fl += ",_version_"; } - params.put("fl", fl); + mParams.set("fl", fl); Random random = new Random(); for(Slice slice : slices) { - Map localParams = new HashMap(); - localParams.putAll(params); + ModifiableSolrParams localParams = new ModifiableSolrParams(mParams); long checkpoint = checkpoints.get(slice.getName()); Collection replicas = slice.getReplicas(); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphExpressionTest.java index c429fe806ab..79579d16732 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphExpressionTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphExpressionTest.java @@ -397,7 +397,6 @@ public class GraphExpressionTest extends SolrCloudTestCase { .commit(cluster.getSolrClient(), COLLECTION); List tuples = null; - Set paths = null; GatherNodesStream stream = null; StreamContext context = new StreamContext(); SolrClientCache cache = new SolrClientCache(); diff --git 
a/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphTest.java index 27c9dca9477..b9b6ed5adac 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/graph/GraphTest.java @@ -29,11 +29,13 @@ import org.apache.lucene.util.LuceneTestCase; import org.apache.solr.client.solrj.io.SolrClientCache; import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.stream.StreamContext; +import org.apache.solr.client.solrj.io.stream.StreamingTest; import org.apache.solr.client.solrj.io.stream.TupleStream; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.cloud.AbstractDistribZkTestBase; import org.apache.solr.cloud.SolrCloudTestCase; +import org.apache.solr.common.params.SolrParams; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @@ -100,8 +102,7 @@ public class GraphTest extends SolrCloudTestCase { SolrClientCache cache = new SolrClientCache(); context.setSolrClientCache(cache); - Map params = new HashMap(); - params.put("fq", "predicate_s:knows"); + SolrParams sParams = StreamingTest.mapParams("fq", "predicate_s:knows"); stream = new ShortestPathStream(zkHost, "collection1", @@ -109,7 +110,7 @@ public class GraphTest extends SolrCloudTestCase { "steve", "from_s", "to_s", - params, + sParams, 20, 3, 6); @@ -131,7 +132,7 @@ public class GraphTest extends SolrCloudTestCase { //Test with batch size of 1 - params.put("fq", "predicate_s:knows"); + sParams = StreamingTest.mapParams("fq", "predicate_s:knows"); stream = new ShortestPathStream(zkHost, "collection1", @@ -139,7 +140,7 @@ public class GraphTest extends SolrCloudTestCase { "steve", "from_s", "to_s", - params, + sParams, 1, 3, 6); @@ -159,7 +160,7 @@ public class GraphTest extends SolrCloudTestCase { 
//Test with bad predicate - params.put("fq", "predicate_s:crap"); + sParams = StreamingTest.mapParams("fq", "predicate_s:crap"); stream = new ShortestPathStream(zkHost, "collection1", @@ -167,7 +168,7 @@ public class GraphTest extends SolrCloudTestCase { "steve", "from_s", "to_s", - params, + sParams, 1, 3, 6); @@ -180,7 +181,7 @@ public class GraphTest extends SolrCloudTestCase { //Test with depth 2 - params.put("fq", "predicate_s:knows"); + sParams = StreamingTest.mapParams("fq", "predicate_s:knows"); stream = new ShortestPathStream(zkHost, "collection1", @@ -188,7 +189,7 @@ public class GraphTest extends SolrCloudTestCase { "steve", "from_s", "to_s", - params, + sParams, 1, 3, 2); @@ -202,7 +203,7 @@ public class GraphTest extends SolrCloudTestCase { //Take out alex - params.put("fq", "predicate_s:knows NOT to_s:alex"); + sParams = StreamingTest.mapParams("fq", "predicate_s:knows NOT to_s:alex"); stream = new ShortestPathStream(zkHost, "collection1", @@ -210,7 +211,7 @@ public class GraphTest extends SolrCloudTestCase { "steve", "from_s", "to_s", - params, + sParams, 10, 3, 6); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java index 1f1a5bfc118..c853e39de98 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java @@ -19,7 +19,6 @@ package org.apache.solr.client.solrj.io.stream; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; @@ -47,6 +46,7 @@ import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.cloud.AbstractDistribZkTestBase; import org.apache.solr.cloud.SolrCloudTestCase; import org.apache.solr.common.params.CommonParams; +import 
org.apache.solr.common.params.ModifiableSolrParams; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @@ -175,6 +175,26 @@ public class StreamExpressionTest extends SolrCloudTestCase { assert(tuples.size() == 3); assertOrder(tuples, 0, 3, 4); assertLong(tuples.get(1), "a_i", 3); + + + // Test a couple of multiple field lists. + expression = StreamExpressionParser.parse("search(collection1, fq=\"a_s:hello0\", fq=\"a_s:hello1\", q=\"id:(*)\", " + + "zkHost=" + cluster.getZkServer().getZkAddress()+ ", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\")"); + stream = new CloudSolrStream(expression, factory); + tuples = getTuples(stream); + + assertEquals("fq clauses should have prevented any docs from coming back", tuples.size(), 0); + + + expression = StreamExpressionParser.parse("search(collection1, fq=\"a_s:(hello0 OR hello1)\", q=\"id:(*)\", " + + "zkHost=" + cluster.getZkServer().getZkAddress() + ", fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\")"); + stream = new CloudSolrStream(expression, factory); + tuples = getTuples(stream); + + assertEquals("Combining an f1 clause should show us 2 docs", tuples.size(), 2); + + + } @Test @@ -193,33 +213,33 @@ public class StreamExpressionTest extends SolrCloudTestCase { TupleStream stream; // Basic test - Map params = new HashMap<>(); - params.put("expr","merge(" + ModifiableSolrParams sParams = new ModifiableSolrParams(); + sParams.set("expr", "merge(" + "${q1}," + "${q2}," + "on=${mySort})"); - params.put(CommonParams.QT, "/stream"); - params.put("q1", "search(" + COLLECTION + ", q=\"id:(0 3 4)\", fl=\"id,a_s,a_i,a_f\", sort=${mySort})"); - params.put("q2", "search(" + COLLECTION + ", q=\"id:(1)\", fl=\"id,a_s,a_i,a_f\", sort=${mySort})"); - params.put("mySort", "a_f asc"); - stream = new SolrStream(url, params); + sParams.set(CommonParams.QT, "/stream"); + sParams.set("q1", "search(" + COLLECTION + ", q=\"id:(0 3 4)\", fl=\"id,a_s,a_i,a_f\", sort=${mySort})"); + sParams.set("q2", "search(" +
COLLECTION + ", q=\"id:(1)\", fl=\"id,a_s,a_i,a_f\", sort=${mySort})"); + sParams.set("mySort", "a_f asc"); + stream = new SolrStream(url, sParams); tuples = getTuples(stream); assertEquals(4, tuples.size()); assertOrder(tuples, 0,1,3,4); // Basic test desc - params.put("mySort", "a_f desc"); - stream = new SolrStream(url, params); + sParams.set("mySort", "a_f desc"); + stream = new SolrStream(url, sParams); tuples = getTuples(stream); assertEquals(4, tuples.size()); assertOrder(tuples, 4,3,1,0); // Basic w/ multi comp - params.put("q2", "search(" + COLLECTION + ", q=\"id:(1 2)\", fl=\"id,a_s,a_i,a_f\", sort=${mySort})"); - params.put("mySort", "\"a_f asc, a_s asc\""); - stream = new SolrStream(url, params); + sParams.set("q2", "search(" + COLLECTION + ", q=\"id:(1 2)\", fl=\"id,a_s,a_i,a_f\", sort=${mySort})"); + sParams.set("mySort", "\"a_f asc, a_s asc\""); + stream = new SolrStream(url, sParams); tuples = getTuples(stream); assertEquals(5, tuples.size()); @@ -2677,16 +2697,14 @@ public class StreamExpressionTest extends SolrCloudTestCase { //Lets sleep long enough for daemon updates to run. 
//Lets stop the daemons - Map params = new HashMap(); - params.put(CommonParams.QT,"/stream"); - params.put("action","list"); + ModifiableSolrParams sParams = new ModifiableSolrParams(StreamingTest.mapParams(CommonParams.QT, "/stream", "action", "list")); int workersComplete = 0; for(JettySolrRunner jetty : cluster.getJettySolrRunners()) { int iterations = 0; INNER: while(iterations == 0) { - SolrStream solrStream = new SolrStream(jetty.getBaseUrl().toString() + "/collection1", params); + SolrStream solrStream = new SolrStream(jetty.getBaseUrl().toString() + "/collection1", sParams); solrStream.open(); Tuple tupleResponse = solrStream.read(); if (tupleResponse.EOF) { @@ -2714,27 +2732,27 @@ public class StreamExpressionTest extends SolrCloudTestCase { cluster.getSolrClient().commit("parallelDestinationCollection1"); //Lets stop the daemons - params = new HashMap(); - params.put(CommonParams.QT,"/stream"); - params.put("action", "stop"); - params.put("id", "test"); + sParams = new ModifiableSolrParams(); + sParams.set(CommonParams.QT, "/stream"); + sParams.set("action", "stop"); + sParams.set("id", "test"); for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { - SolrStream solrStream = new SolrStream(jetty.getBaseUrl() + "/collection1", params); + SolrStream solrStream = new SolrStream(jetty.getBaseUrl() + "/collection1", sParams); solrStream.open(); Tuple tupleResponse = solrStream.read(); solrStream.close(); } - params = new HashMap(); - params.put(CommonParams.QT,"/stream"); - params.put("action","list"); + sParams = new ModifiableSolrParams(); + sParams.set(CommonParams.QT, "/stream"); + sParams.set("action", "list"); workersComplete = 0; for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { long stopTime = 0; INNER: while(stopTime == 0) { - SolrStream solrStream = new SolrStream(jetty.getBaseUrl() + "/collection1", params); + SolrStream solrStream = new SolrStream(jetty.getBaseUrl() + "/collection1", sParams); solrStream.open(); Tuple 
tupleResponse = solrStream.read(); if (tupleResponse.EOF) { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java index 9db02ebadf4..17897598b96 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java @@ -18,7 +18,6 @@ package org.apache.solr.client.solrj.io.stream; import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.Map; @@ -43,6 +42,8 @@ import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.cloud.AbstractDistribZkTestBase; import org.apache.solr.cloud.SolrCloudTestCase; +import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.solr.common.params.SolrParams; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Ignore; @@ -107,8 +108,8 @@ public class StreamingTest extends SolrCloudTestCase { .add(id, "1", "a_s", "hello1", "a_i", "1", "a_f", "1") .commit(cluster.getSolrClient(), COLLECTION); - Map params = mapParams("q","*:*","fl","id,a_s,a_i,a_f","sort", "a_f asc,a_i asc"); - CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, params); + SolrParams sParams = StreamingTest.mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f", "sort", "a_f asc,a_i asc"); + CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, sParams); UniqueStream ustream = new UniqueStream(stream, new FieldEqualitor("a_f")); List tuples = getTuples(ustream); assertEquals(4, tuples.size()); @@ -119,13 +120,13 @@ public class StreamingTest extends SolrCloudTestCase { @Test public void testSpacesInParams() throws Exception { - Map params = mapParams("q", "*:*", "fl", "id , a_s , a_i , a_f", "sort", "a_f asc , a_i asc"); + SolrParams sParams = 
StreamingTest.mapParams("q", "*:*", "fl", "id , a_s , a_i , a_f", "sort", "a_f asc , a_i asc"); //CloudSolrStream compares the values of the sort with the fl field. //The constructor will throw an exception if the sort fields do not the //a value in the field list. - CloudSolrStream stream = new CloudSolrStream("", "collection1", params); + CloudSolrStream stream = new CloudSolrStream("", "collection1", sParams); } @Test @@ -144,8 +145,8 @@ public class StreamingTest extends SolrCloudTestCase { .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10") .commit(cluster.getSolrClient(), COLLECTION); - Map paramsA = mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f", "sort", "a_s asc,a_f asc", "partitionKeys", "none"); - CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, paramsA); + SolrParams sParamsA = StreamingTest.mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f", "sort", "a_s asc,a_f asc", "partitionKeys", "none"); + CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, sParamsA); ParallelStream pstream = new ParallelStream(zkHost, COLLECTION, stream, 2, new FieldComparator("a_s",ComparatorOrder.ASCENDING)); attachStreamFactory(pstream); @@ -170,8 +171,8 @@ public class StreamingTest extends SolrCloudTestCase { .add(id, "8", "a_s", "hello1", "a_i", "13", "a_f", "4") .commit(cluster.getSolrClient(), COLLECTION); - Map params = mapParams("q","*:*","fl","id,a_s,a_i,a_f","sort", "a_f asc,a_i asc", "partitionKeys", "a_f"); - CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, params); + SolrParams sParams = mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f", "sort", "a_f asc,a_i asc", "partitionKeys", "a_f"); + CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, sParams); UniqueStream ustream = new UniqueStream(stream, new FieldEqualitor("a_f")); ParallelStream pstream = new ParallelStream(zkHost, COLLECTION, ustream, 2, new FieldComparator("a_f",ComparatorOrder.ASCENDING)); attachStreamFactory(pstream); @@ -186,6 +187,31 @@ public 
class StreamingTest extends SolrCloudTestCase { } + @Test + public void testMultipleFqClauses() throws Exception { + + new UpdateRequest() + .add(id, "0", "a_ss", "hello0", "a_ss", "hello1", "a_i", "0", "a_f", "0") + .add(id, "2", "a_ss", "hello2", "a_i", "2", "a_f", "0") + .add(id, "3", "a_ss", "hello3", "a_i", "3", "a_f", "3") + .add(id, "4", "a_ss", "hello4", "a_i", "4", "a_f", "4") + .add(id, "1", "a_ss", "hello1", "a_i", "1", "a_f", "1") + .add(id, "5", "a_ss", "hello1", "a_i", "10", "a_f", "1") + .add(id, "6", "a_ss", "hello1", "a_i", "11", "a_f", "5") + .add(id, "7", "a_ss", "hello1", "a_i", "12", "a_f", "5") + .add(id, "8", "a_ss", "hello1", "a_i", "13", "a_f", "4") + .commit(cluster.getSolrClient(), COLLECTION); + + streamFactory.withCollectionZkHost(COLLECTION, zkHost); + + ModifiableSolrParams params = new ModifiableSolrParams(mapParams("q", "*:*", "fl", "id,a_i", + "sort", "a_i asc", "fq", "a_ss:hello0", "fq", "a_ss:hello1")); + CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, params); + List tuples = getTuples(stream); + assertEquals("Multiple fq clauses should have been honored", tuples.size(), 1); + assertEquals("should only have gotten back document 0", tuples.get(0).getString("id"), "0"); + } + @Test public void testRankStream() throws Exception { @@ -198,8 +224,8 @@ public class StreamingTest extends SolrCloudTestCase { .commit(cluster.getSolrClient(), COLLECTION); - Map params = mapParams("q", "*:*", "fl", "id,a_s,a_i", "sort", "a_i asc"); - CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, params); + SolrParams sParams = mapParams("q", "*:*", "fl", "id,a_s,a_i", "sort", "a_i asc"); + CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, sParams); RankStream rstream = new RankStream(stream, 3, new FieldComparator("a_i",ComparatorOrder.DESCENDING)); List tuples = getTuples(rstream); @@ -224,8 +250,8 @@ public class StreamingTest extends SolrCloudTestCase { .add(id, "10", "a_s", "hello1", "a_i", "10", 
"a_f", "1") .commit(cluster.getSolrClient(), COLLECTION); - Map params = mapParams("q", "*:*", "fl", "id,a_s,a_i", "sort", "a_i asc", "partitionKeys", "a_i"); - CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, params); + SolrParams sParams = mapParams("q", "*:*", "fl", "id,a_s,a_i", "sort", "a_i asc", "partitionKeys", "a_i"); + CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, sParams); RankStream rstream = new RankStream(stream, 11, new FieldComparator("a_i",ComparatorOrder.DESCENDING)); ParallelStream pstream = new ParallelStream(zkHost, COLLECTION, rstream, 2, new FieldComparator("a_i",ComparatorOrder.DESCENDING)); attachStreamFactory(pstream); @@ -253,8 +279,8 @@ public class StreamingTest extends SolrCloudTestCase { .commit(cluster.getSolrClient(), COLLECTION); //Test with spaces in the parameter lists. - Map paramsA = mapParams("q","*:*","fl","id,a_s, a_i, a_f","sort", "a_s asc , a_f asc"); - CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, paramsA); + SolrParams sParamsA = mapParams("q", "*:*", "fl", "id,a_s, a_i,a_f", "sort", "a_s asc,a_f asc"); + CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, sParamsA); stream.setTrace(true); List tuples = getTuples(stream); assert(tuples.get(0).get("_COLLECTION_").equals(COLLECTION)); @@ -280,8 +306,8 @@ public class StreamingTest extends SolrCloudTestCase { .commit(cluster.getSolrClient(), COLLECTION); //Test with spaces in the parameter lists. 
- Map paramsA = mapParams("q","*:*","fl","id,a_s, a_i, a_f","sort", "a_s asc , a_f asc"); - CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, paramsA); + SolrParams sParamsA = mapParams("q", "*:*", "fl", "id,a_s, a_i, a_f", "sort", "a_s asc , a_f asc"); + CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, sParamsA); ReducerStream rstream = new ReducerStream(stream, new FieldEqualitor("a_s"), new GroupOperation(new FieldComparator("a_f", ComparatorOrder.ASCENDING), 5)); @@ -303,8 +329,8 @@ public class StreamingTest extends SolrCloudTestCase { assertMaps(maps2, 4, 6); //Test with spaces in the parameter lists using a comparator - paramsA = mapParams("q","*:*","fl","id,a_s, a_i, a_f","sort", "a_s asc , a_f asc"); - stream = new CloudSolrStream(zkHost, COLLECTION, paramsA); + sParamsA = mapParams("q", "*:*", "fl", "id,a_s, a_i, a_f", "sort", "a_s asc , a_f asc"); + stream = new CloudSolrStream(zkHost, COLLECTION, sParamsA); rstream = new ReducerStream(stream, new FieldComparator("a_s", ComparatorOrder.ASCENDING), new GroupOperation(new FieldComparator("a_f", ComparatorOrder.DESCENDING), 5)); @@ -345,8 +371,8 @@ public class StreamingTest extends SolrCloudTestCase { .commit(cluster.getSolrClient(), COLLECTION); //Test with spaces in the parameter lists. 
- Map paramsA = mapParams("q", "blah", "fl", "id,a_s, a_i, a_f", "sort", "a_s asc , a_f asc"); - CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, paramsA); + SolrParams sParamsA = mapParams("q", "blah", "fl", "id,a_s, a_i, a_f", "sort", "a_s asc , a_f asc"); + CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, sParamsA); ReducerStream rstream = new ReducerStream(stream, new FieldEqualitor("a_s"), new GroupOperation(new FieldComparator("a_f", ComparatorOrder.ASCENDING), 5)); @@ -373,8 +399,8 @@ public class StreamingTest extends SolrCloudTestCase { .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10") .commit(cluster.getSolrClient(), COLLECTION); - Map paramsA = mapParams("q","*:*","fl","id,a_s,a_i,a_f","sort", "a_s asc,a_f asc", "partitionKeys", "a_s"); - CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, paramsA); + SolrParams sParamsA = mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f", "sort", "a_s asc,a_f asc", "partitionKeys", "a_s"); + CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, sParamsA); ReducerStream rstream = new ReducerStream(stream, new FieldEqualitor("a_s"), @@ -401,8 +427,8 @@ public class StreamingTest extends SolrCloudTestCase { //Test Descending with Ascending subsort - paramsA = mapParams("q","*:*","fl","id,a_s,a_i,a_f","sort", "a_s desc,a_f asc", "partitionKeys", "a_s"); - stream = new CloudSolrStream(zkHost, COLLECTION, paramsA); + sParamsA = mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f", "sort", "a_s desc,a_f asc", "partitionKeys", "a_s"); + stream = new CloudSolrStream(zkHost, COLLECTION, sParamsA); rstream = new ReducerStream(stream, new FieldEqualitor("a_s"), @@ -447,8 +473,8 @@ public class StreamingTest extends SolrCloudTestCase { .commit(cluster.getSolrClient(), COLLECTION); //Test an error that comes originates from the /select handler - Map paramsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f,blah", "sort", "blah asc"); - CloudSolrStream stream = new CloudSolrStream(zkHost, 
COLLECTION, paramsA); + SolrParams sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f,blah", "sort", "blah asc"); + CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, sParamsA); ExceptionStream estream = new ExceptionStream(stream); Tuple t = getTuple(estream); assert(t.EOF); @@ -456,8 +482,8 @@ public class StreamingTest extends SolrCloudTestCase { assert(t.getException().contains("sort param field can't be found: blah")); //Test an error that comes originates from the /export handler - paramsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f,score", "sort", "a_s asc", "qt","/export"); - stream = new CloudSolrStream(zkHost, COLLECTION, paramsA); + sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f,score", "sort", "a_s asc", "qt", "/export"); + stream = new CloudSolrStream(zkHost, COLLECTION, sParamsA); estream = new ExceptionStream(stream); t = getTuple(estream); assert(t.EOF); @@ -483,8 +509,8 @@ public class StreamingTest extends SolrCloudTestCase { .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10") .commit(cluster.getSolrClient(), COLLECTION); - Map paramsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f,blah", "sort", "blah asc"); - CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, paramsA); + SolrParams sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f,blah", "sort", "blah asc"); + CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, sParamsA); ParallelStream pstream = new ParallelStream(zkHost, COLLECTION, stream, 2, new FieldComparator("blah", ComparatorOrder.ASCENDING)); ExceptionStream estream = new ExceptionStream(pstream); Tuple t = getTuple(estream); @@ -495,8 +521,8 @@ public class StreamingTest extends SolrCloudTestCase { //Test an error that originates from the /select handler - paramsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f,blah", "sort", "blah asc", "partitionKeys","a_s"); - stream = new CloudSolrStream(zkHost, COLLECTION, paramsA); + sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f,blah", "sort", 
"blah asc", "partitionKeys", "a_s"); + stream = new CloudSolrStream(zkHost, COLLECTION, sParamsA); pstream = new ParallelStream(zkHost, COLLECTION, stream, 2, new FieldComparator("blah", ComparatorOrder.ASCENDING)); estream = new ExceptionStream(pstream); t = getTuple(estream); @@ -506,8 +532,8 @@ public class StreamingTest extends SolrCloudTestCase { //Test an error that originates from the /export handler - paramsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f,score", "sort", "a_s asc", "qt","/export", "partitionKeys","a_s"); - stream = new CloudSolrStream(zkHost, COLLECTION, paramsA); + sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f,score", "sort", "a_s asc", "qt", "/export", "partitionKeys", "a_s"); + stream = new CloudSolrStream(zkHost, COLLECTION, sParamsA); pstream = new ParallelStream(zkHost, COLLECTION, stream, 2, new FieldComparator("a_s", ComparatorOrder.ASCENDING)); estream = new ExceptionStream(pstream); t = getTuple(estream); @@ -533,7 +559,7 @@ public class StreamingTest extends SolrCloudTestCase { .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10") .commit(cluster.getSolrClient(), COLLECTION); - Map paramsA = mapParams("q", "*:*"); + SolrParams sParamsA = mapParams("q", "*:*"); Metric[] metrics = {new SumMetric("a_i"), new SumMetric("a_f"), @@ -545,7 +571,7 @@ public class StreamingTest extends SolrCloudTestCase { new MeanMetric("a_f"), new CountMetric()}; - StatsStream statsStream = new StatsStream(zkHost, COLLECTION, paramsA, metrics); + StatsStream statsStream = new StatsStream(zkHost, COLLECTION, sParamsA, metrics); List tuples = getTuples(statsStream); @@ -593,7 +619,7 @@ public class StreamingTest extends SolrCloudTestCase { .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10") .commit(cluster.getSolrClient(), COLLECTION); - Map paramsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f", "sort", "a_s asc"); + SolrParams sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f", "sort", "a_s asc"); Bucket[] buckets = {new Bucket("a_s")}; @@ 
-610,7 +636,7 @@ public class StreamingTest extends SolrCloudTestCase { FieldComparator[] sorts = {new FieldComparator("sum(a_i)", ComparatorOrder.ASCENDING)}; - FacetStream facetStream = new FacetStream(zkHost, COLLECTION, paramsA, buckets, metrics, sorts, 100); + FacetStream facetStream = new FacetStream(zkHost, COLLECTION, sParamsA, buckets, metrics, sorts, 100); List tuples = getTuples(facetStream); @@ -692,7 +718,7 @@ public class StreamingTest extends SolrCloudTestCase { sorts[0] = new FieldComparator("sum(a_i)", ComparatorOrder.DESCENDING); - facetStream = new FacetStream(zkHost, COLLECTION, paramsA, buckets, metrics, sorts, 100); + facetStream = new FacetStream(zkHost, COLLECTION, sParamsA, buckets, metrics, sorts, 100); tuples = getTuples(facetStream); @@ -775,7 +801,7 @@ public class StreamingTest extends SolrCloudTestCase { sorts[0] = new FieldComparator("a_s", ComparatorOrder.DESCENDING); - facetStream = new FacetStream(zkHost, COLLECTION, paramsA, buckets, metrics, sorts, 100); + facetStream = new FacetStream(zkHost, COLLECTION, sParamsA, buckets, metrics, sorts, 100); tuples = getTuples(facetStream); @@ -856,7 +882,7 @@ public class StreamingTest extends SolrCloudTestCase { sorts[0] = new FieldComparator("a_s", ComparatorOrder.ASCENDING); - facetStream = new FacetStream(zkHost, COLLECTION, paramsA, buckets, metrics, sorts, 100); + facetStream = new FacetStream(zkHost, COLLECTION, sParamsA, buckets, metrics, sorts, 100); tuples = getTuples(facetStream); @@ -949,7 +975,7 @@ public class StreamingTest extends SolrCloudTestCase { .add(id, "9", "level1_s", "hello0", "level2_s", "b", "a_i", "14", "a_f", "10") .commit(cluster.getSolrClient(), COLLECTION); - Map paramsA = mapParams("q","*:*","fl","a_i,a_f"); + SolrParams sParamsA = mapParams("q", "*:*", "fl", "a_i,a_f"); Bucket[] buckets = {new Bucket("level1_s"), new Bucket("level2_s")}; @@ -961,7 +987,7 @@ public class StreamingTest extends SolrCloudTestCase { FacetStream facetStream = new FacetStream( 
zkHost, COLLECTION, - paramsA, + sParamsA, buckets, metrics, sorts, @@ -1041,7 +1067,7 @@ public class StreamingTest extends SolrCloudTestCase { facetStream = new FacetStream( zkHost, COLLECTION, - paramsA, + sParamsA, buckets, metrics, sorts, @@ -1134,8 +1160,8 @@ public class StreamingTest extends SolrCloudTestCase { .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10") .commit(cluster.getSolrClient(), COLLECTION); - Map paramsA = mapParams("q","*:*","fl","a_s,a_i,a_f","sort", "a_s asc"); - CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, paramsA); + SolrParams sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f", "sort", "a_s asc"); + CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, sParamsA); Bucket[] buckets = {new Bucket("a_s")}; @@ -1234,8 +1260,8 @@ public class StreamingTest extends SolrCloudTestCase { .add(id, "12", "a_s", null, "a_i", "14", "a_f", "10") .commit(cluster.getSolrClient(), COLLECTION); - paramsA = mapParams("q","*:*","fl","a_s,a_i,a_f","sort", "a_s asc", "qt", "/export"); - stream = new CloudSolrStream(zkHost, COLLECTION, paramsA); + sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f", "sort", "a_s asc", "qt", "/export"); + stream = new CloudSolrStream(zkHost, COLLECTION, sParamsA); Bucket[] buckets1 = {new Bucket("a_s")}; @@ -1285,12 +1311,9 @@ public class StreamingTest extends SolrCloudTestCase { SolrClientCache cache = new SolrClientCache(); context.setSolrClientCache(cache); - Map params = new HashMap(); - params.put("q","a_s:hello0"); - params.put("rows", "500"); - params.put("fl", "id"); + SolrParams sParams = mapParams("q", "a_s:hello0", "rows", "500", "fl", "id"); - TopicStream topicStream = new TopicStream(zkHost, COLLECTION, COLLECTION, "50000000", 1000000, params); + TopicStream topicStream = new TopicStream(zkHost, COLLECTION, COLLECTION, "50000000", 1000000, sParams); DaemonStream daemonStream = new DaemonStream(topicStream, "daemon1", 1000, 500); daemonStream.setStreamContext(context); @@ 
-1300,13 +1323,11 @@ public class StreamingTest extends SolrCloudTestCase { // Wait for the checkpoint JettySolrRunner jetty = cluster.getJettySolrRunners().get(0); - Map params1 = new HashMap(); - params1.put("qt","/get"); - params1.put("ids","50000000"); - params1.put("fl","id"); + + SolrParams sParams1 = mapParams("qt", "/get", "ids", "50000000", "fl", "id"); int count = 0; while(count == 0) { - SolrStream solrStream = new SolrStream(jetty.getBaseUrl().toString() + "/" + COLLECTION, params1); + SolrStream solrStream = new SolrStream(jetty.getBaseUrl().toString() + "/" + COLLECTION, sParams1); List tuples = getTuples(solrStream); count = tuples.size(); if(count > 0) { @@ -1364,8 +1385,8 @@ public class StreamingTest extends SolrCloudTestCase { .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10") .commit(cluster.getSolrClient(), COLLECTION); - Map paramsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f", "sort", "a_s asc", "partitionKeys", "a_s"); - CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, paramsA); + SolrParams sParamsA = mapParams("q", "*:*", "fl", "a_s,a_i,a_f", "sort", "a_s asc", "partitionKeys", "a_s"); + CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, sParamsA); Bucket[] buckets = {new Bucket("a_s")}; @@ -1475,8 +1496,8 @@ public class StreamingTest extends SolrCloudTestCase { .add(id, "9", "a_s", "hello0", "a_i", "14", "a_f", "10") .commit(cluster.getSolrClient(), COLLECTION); - Map paramsA = mapParams("q", "blah", "fl", "id,a_s,a_i,a_f","sort", "a_s asc,a_f asc", "partitionKeys", "a_s"); - CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, paramsA); + SolrParams sParamsA = mapParams("q", "blah", "fl", "id,a_s,a_i,a_f", "sort", "a_s asc,a_f asc", "partitionKeys", "a_s"); + CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, sParamsA); ReducerStream rstream = new ReducerStream(stream, new FieldEqualitor("a_s"), new GroupOperation(new FieldComparator("a_s", ComparatorOrder.ASCENDING), 2)); @@ 
-1497,8 +1518,8 @@ public class StreamingTest extends SolrCloudTestCase { "1", "i_multi", "2", "f_multi", "1.2", "f_multi", "1.3") .commit(cluster.getSolrClient(), COLLECTION); - Map params = mapParams("q","*:*","fl","id,a_s,a_i,a_f,s_multi,i_multi,f_multi","sort", "a_s asc"); - CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, params); + SolrParams sParams = mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f,s_multi,i_multi,f_multi", "sort", "a_s asc"); + CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, sParams); List tuples = getTuples(stream); Tuple tuple = tuples.get(0); @@ -1538,11 +1559,11 @@ public class StreamingTest extends SolrCloudTestCase { .commit(cluster.getSolrClient(), COLLECTION); //Test ascending - Map paramsA = mapParams("q","id:(4 1)","fl","id,a_s,a_i","sort", "a_i asc"); - CloudSolrStream streamA = new CloudSolrStream(zkHost, COLLECTION, paramsA); + SolrParams sParamsA = mapParams("q", "id:(4 1)", "fl", "id,a_s,a_i", "sort", "a_i asc"); + CloudSolrStream streamA = new CloudSolrStream(zkHost, COLLECTION, sParamsA); - Map paramsB = mapParams("q","id:(0 2 3)","fl","id,a_s,a_i","sort", "a_i asc"); - CloudSolrStream streamB = new CloudSolrStream(zkHost, COLLECTION, paramsB); + SolrParams sParamsB = mapParams("q", "id:(0 2 3)", "fl", "id,a_s,a_i", "sort", "a_i asc"); + CloudSolrStream streamB = new CloudSolrStream(zkHost, COLLECTION, sParamsB); MergeStream mstream = new MergeStream(streamA, streamB, new FieldComparator("a_i",ComparatorOrder.ASCENDING)); List tuples = getTuples(mstream); @@ -1551,11 +1572,11 @@ public class StreamingTest extends SolrCloudTestCase { assertOrder(tuples, 0,1,2,3,4); //Test descending - paramsA = mapParams("q","id:(4 1)","fl","id,a_s,a_i","sort", "a_i desc"); - streamA = new CloudSolrStream(zkHost, COLLECTION, paramsA); + sParamsA = mapParams("q", "id:(4 1)", "fl", "id,a_s,a_i", "sort", "a_i desc"); + streamA = new CloudSolrStream(zkHost, COLLECTION, sParamsA); - paramsB = mapParams("q","id:(0 2 
3)","fl","id,a_s,a_i","sort", "a_i desc"); - streamB = new CloudSolrStream(zkHost, COLLECTION, paramsB); + sParamsB = mapParams("q", "id:(0 2 3)", "fl", "id,a_s,a_i", "sort", "a_i desc"); + streamB = new CloudSolrStream(zkHost, COLLECTION, sParamsB); mstream = new MergeStream(streamA, streamB, new FieldComparator("a_i",ComparatorOrder.DESCENDING)); tuples = getTuples(mstream); @@ -1565,11 +1586,11 @@ public class StreamingTest extends SolrCloudTestCase { //Test compound sort - paramsA = mapParams("q","id:(2 4 1)","fl","id,a_s,a_i,a_f","sort", "a_f asc,a_i asc"); - streamA = new CloudSolrStream(zkHost, COLLECTION, paramsA); + sParamsA = mapParams("q", "id:(2 4 1)", "fl", "id,a_s,a_i,a_f", "sort", "a_f asc,a_i asc"); + streamA = new CloudSolrStream(zkHost, COLLECTION, sParamsA); - paramsB = mapParams("q","id:(0 3)","fl","id,a_s,a_i,a_f","sort", "a_f asc,a_i asc"); - streamB = new CloudSolrStream(zkHost, COLLECTION, paramsB); + sParamsB = mapParams("q", "id:(0 3)", "fl", "id,a_s,a_i,a_f", "sort", "a_f asc,a_i asc"); + streamB = new CloudSolrStream(zkHost, COLLECTION, sParamsB); mstream = new MergeStream(streamA, streamB, new MultipleFieldComparator(new FieldComparator("a_f",ComparatorOrder.ASCENDING),new FieldComparator("a_i",ComparatorOrder.ASCENDING))); tuples = getTuples(mstream); @@ -1577,11 +1598,11 @@ public class StreamingTest extends SolrCloudTestCase { assert(tuples.size() == 5); assertOrder(tuples, 0,2,1,3,4); - paramsA = mapParams("q","id:(2 4 1)","fl","id,a_s,a_i,a_f","sort", "a_f asc,a_i desc"); - streamA = new CloudSolrStream(zkHost, COLLECTION, paramsA); + sParamsA = mapParams("q", "id:(2 4 1)", "fl", "id,a_s,a_i,a_f", "sort", "a_f asc,a_i desc"); + streamA = new CloudSolrStream(zkHost, COLLECTION, sParamsA); - paramsB = mapParams("q","id:(0 3)","fl","id,a_s,a_i,a_f","sort", "a_f asc,a_i desc"); - streamB = new CloudSolrStream(zkHost, COLLECTION, paramsB); + sParamsB = mapParams("q", "id:(0 3)", "fl", "id,a_s,a_i,a_f", "sort", "a_f asc,a_i desc"); + 
streamB = new CloudSolrStream(zkHost, COLLECTION, sParamsB); mstream = new MergeStream(streamA, streamB, new MultipleFieldComparator(new FieldComparator("a_f",ComparatorOrder.ASCENDING),new FieldComparator("a_i",ComparatorOrder.DESCENDING))); tuples = getTuples(mstream); @@ -1608,11 +1629,11 @@ public class StreamingTest extends SolrCloudTestCase { .commit(cluster.getSolrClient(), COLLECTION); //Test ascending - Map paramsA = mapParams("q","id:(4 1 8 7 9)","fl","id,a_s,a_i","sort", "a_i asc", "partitionKeys", "a_i"); - CloudSolrStream streamA = new CloudSolrStream(zkHost, COLLECTION, paramsA); + SolrParams sParamsA = mapParams("q", "id:(4 1 8 7 9)", "fl", "id,a_s,a_i", "sort", "a_i asc", "partitionKeys", "a_i"); + CloudSolrStream streamA = new CloudSolrStream(zkHost, COLLECTION, sParamsA); - Map paramsB = mapParams("q","id:(0 2 3 6)","fl","id,a_s,a_i","sort", "a_i asc", "partitionKeys", "a_i"); - CloudSolrStream streamB = new CloudSolrStream(zkHost, COLLECTION, paramsB); + SolrParams sParamsB = mapParams("q", "id:(0 2 3 6)", "fl", "id,a_s,a_i", "sort", "a_i asc", "partitionKeys", "a_i"); + CloudSolrStream streamB = new CloudSolrStream(zkHost, COLLECTION, sParamsB); MergeStream mstream = new MergeStream(streamA, streamB, new FieldComparator("a_i",ComparatorOrder.ASCENDING)); ParallelStream pstream = new ParallelStream(zkHost, COLLECTION, mstream, 2, new FieldComparator("a_i",ComparatorOrder.ASCENDING)); @@ -1623,11 +1644,11 @@ public class StreamingTest extends SolrCloudTestCase { assertOrder(tuples, 0,1,2,3,4,7,6,8,9); //Test descending - paramsA = mapParams("q", "id:(4 1 8 9)", "fl", "id,a_s,a_i", "sort", "a_i desc", "partitionKeys", "a_i"); - streamA = new CloudSolrStream(zkHost, COLLECTION, paramsA); + sParamsA = mapParams("q", "id:(4 1 8 9)", "fl", "id,a_s,a_i", "sort", "a_i desc", "partitionKeys", "a_i"); + streamA = new CloudSolrStream(zkHost, COLLECTION, sParamsA); - paramsB = mapParams("q","id:(0 2 3 6)","fl","id,a_s,a_i","sort", "a_i desc", 
"partitionKeys", "a_i"); - streamB = new CloudSolrStream(zkHost, COLLECTION, paramsB); + sParamsB = mapParams("q", "id:(0 2 3 6)", "fl", "id,a_s,a_i", "sort", "a_i desc", "partitionKeys", "a_i"); + streamB = new CloudSolrStream(zkHost, COLLECTION, sParamsB); mstream = new MergeStream(streamA, streamB, new FieldComparator("a_i",ComparatorOrder.DESCENDING)); pstream = new ParallelStream(zkHost, COLLECTION, mstream, 2, new FieldComparator("a_i",ComparatorOrder.DESCENDING)); @@ -1656,11 +1677,11 @@ public class StreamingTest extends SolrCloudTestCase { .commit(cluster.getSolrClient(), COLLECTION); //Test ascending - Map paramsA = mapParams("q","id:(4 1 8 7 9)","fl","id,a_s,a_i","sort", "a_i asc", "partitionKeys", "a_i"); - CloudSolrStream streamA = new CloudSolrStream(zkHost, COLLECTION, paramsA); + SolrParams sParamsA = mapParams("q", "id:(4 1 8 7 9)", "fl", "id,a_s,a_i", "sort", "a_i asc", "partitionKeys", "a_i"); + CloudSolrStream streamA = new CloudSolrStream(zkHost, COLLECTION, sParamsA); - Map paramsB = mapParams("q","id:(0 2 3 6)","fl","id,a_s,a_i","sort", "a_i asc", "partitionKeys", "a_i"); - CloudSolrStream streamB = new CloudSolrStream(zkHost, COLLECTION, paramsB); + SolrParams sParamsB = mapParams("q", "id:(0 2 3 6)", "fl", "id,a_s,a_i", "sort", "a_i asc", "partitionKeys", "a_i"); + CloudSolrStream streamB = new CloudSolrStream(zkHost, COLLECTION, sParamsB); MergeStream mstream = new MergeStream(streamA, streamB, new FieldComparator("a_i",ComparatorOrder.ASCENDING)); ParallelStream pstream = new ParallelStream(zkHost, COLLECTION, mstream, 2, new FieldComparator("a_i",ComparatorOrder.ASCENDING)); @@ -1685,20 +1706,19 @@ public class StreamingTest extends SolrCloudTestCase { .add(id, "1", "a_s", "hello1", "a_i", "1", "a_f", "1") .commit(cluster.getSolrClient(), COLLECTION); - Map params = null; //Basic CloudSolrStream Test with Descending Sort - params = mapParams("q","*:*","fl","id,a_s,a_i","sort", "a_i desc"); - CloudSolrStream stream = new 
CloudSolrStream(zkHost, COLLECTION, params); + SolrParams sParams = mapParams("q", "*:*", "fl", "id,a_s,a_i", "sort", "a_i desc"); + CloudSolrStream stream = new CloudSolrStream(zkHost, COLLECTION, sParams); List tuples = getTuples(stream); assert(tuples.size() == 5); assertOrder(tuples, 4, 3, 2, 1, 0); //With Ascending Sort - params = mapParams("q","*:*","fl","id,a_s,a_i","sort", "a_i asc"); - stream = new CloudSolrStream(zkHost, COLLECTION, params); + sParams = mapParams("q", "*:*", "fl", "id,a_s,a_i", "sort", "a_i asc"); + stream = new CloudSolrStream(zkHost, COLLECTION, sParams); tuples = getTuples(stream); assert(tuples.size() == 5); @@ -1706,16 +1726,16 @@ public class StreamingTest extends SolrCloudTestCase { //Test compound sort - params = mapParams("q","*:*","fl","id,a_s,a_i,a_f","sort", "a_f asc,a_i desc"); - stream = new CloudSolrStream(zkHost, COLLECTION, params); + sParams = mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f", "sort", "a_f asc,a_i desc"); + stream = new CloudSolrStream(zkHost, COLLECTION, sParams); tuples = getTuples(stream); assert(tuples.size() == 5); assertOrder(tuples, 2,0,1,3,4); - params = mapParams("q","*:*","fl","id,a_s,a_i,a_f","sort", "a_f asc,a_i asc"); - stream = new CloudSolrStream(zkHost, COLLECTION, params); + sParams = mapParams("q", "*:*", "fl", "id,a_s,a_i,a_f", "sort", "a_f asc,a_i asc"); + stream = new CloudSolrStream(zkHost, COLLECTION, sParams); tuples = getTuples(stream); assert (tuples.size() == 5); @@ -1723,21 +1743,6 @@ public class StreamingTest extends SolrCloudTestCase { } - protected Map mapParams(String... 
vals) { - Map params = new HashMap(); - String k = null; - for(String val : vals) { - if(k == null) { - k = val; - } else { - params.put(k, val); - k = null; - } - } - - return params; - } - protected List getTuples(TupleStream tupleStream) throws IOException { tupleStream.open(); List tuples = new ArrayList(); @@ -1819,4 +1824,15 @@ public class StreamingTest extends SolrCloudTestCase { streamContext.setStreamFactory(streamFactory); tupleStream.setStreamContext(streamContext); } + + public static SolrParams mapParams(String... vals) { + ModifiableSolrParams params = new ModifiableSolrParams(); + assertEquals("Parameters passed in here must be in pairs!", 0, (vals.length % 2)); + for (int idx = 0; idx < vals.length; idx += 2) { + params.add(vals[idx], vals[idx + 1]); + } + + return params; + } + }