SOLR-9110: moving JoinFromCollection- SubQueryTransformer- BlockJoinFacet- Distrib Tests to SolrCloudTestCase

Mikhail Khludnev 2016-05-21 14:36:37 +03:00
parent f1f85e560f
commit aec3654fb8
4 changed files with 197 additions and 106 deletions

solr/CHANGES.txt

@@ -301,6 +301,8 @@ Other Changes
* SOLR-9119: several static methods in ValueSourceParser have been made private (hossman)
* SOLR-9110: Move JoinFromCollection- SubQueryTransformer- BlockJoinFacet- Distrib Tests to SolrCloudTestCase (Mikhail Khludnev)
================== 6.0.1 ==================
(No Changes)
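All three test classes in this commit follow the same migration pattern: per-method cluster setup inherited from AbstractFullDistribZkTestBase is replaced by a suite-level MiniSolrCloudCluster managed by SolrCloudTestCase. A minimal sketch of that pattern, assuming an illustrative class name and configset name (neither appears in this commit):

import java.nio.file.Paths;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.junit.BeforeClass;

public class MyDistribTest extends SolrCloudTestCase {
  @BeforeClass
  public static void setupCluster() throws Exception {
    // start an embedded SolrCloud cluster once per suite and upload a
    // configset that collections created later can reference by name
    configureCluster(5)
        .addConfig("myConf", Paths.get(TEST_HOME(), "collection1", "conf"))
        .configure();
  }
}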

solr/core/src/test/org/apache/solr/cloud/DistribJoinFromCollectionTest.java

@@ -16,102 +16,128 @@
*/
package org.apache.solr.cloud;
import static org.hamcrest.CoreMatchers.not;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.Slice;
import org.apache.solr.common.cloud.ZkStateReader;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Tests using fromIndex that points to a collection in SolrCloud mode.
*/
public class DistribJoinFromCollectionTest extends SolrCloudTestCase {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
final private static String[] scoreModes = {"avg","max","min","total"};
private static String toColl = "to_2x2";
private static String fromColl = "from_1x4";
private static Integer toDocId;
private static CloudSolrClient cloudClient;
@BeforeClass
public static void setupCluster() throws Exception {
final Path configDir = Paths.get(TEST_HOME(), "collection1", "conf");
String configName = "solrCloudCollectionConfig";
int nodeCount = 5;
configureCluster(nodeCount)
.addConfig(configName, configDir)
.configure();
Map<String, String> collectionProperties = new HashMap<>();
collectionProperties.put("config", "solrconfig-tlog.xml");
collectionProperties.put("schema", "schema.xml");
// create a collection holding data for the "to" side of the JOIN
int shards = 2;
int replicas = 2;
assertNotNull(cluster.createCollection(toColl, shards, replicas,
configName,
collectionProperties));
// get the set of nodes where replicas for the "to" collection exist
Set<String> nodeSet = new HashSet<>();
cloudClient = cluster.getSolrClient();
ZkStateReader zkStateReader = cloudClient.getZkStateReader();
ClusterState cs = zkStateReader.getClusterState();
for (Slice slice : cs.getCollection(toColl).getActiveSlices())
for (Replica replica : slice.getReplicas())
nodeSet.add(replica.getNodeName());
assertTrue(nodeSet.size() > 0);
// deploy the "from" collection to all nodes where the "to" collection exists
assertNotNull(cluster.createCollection(fromColl, 1, 4,
configName, StringUtils.join(nodeSet, ","), null,
collectionProperties));
AbstractDistribZkTestBase.waitForRecoveriesToFinish(toColl, zkStateReader, false, true, 30);
AbstractDistribZkTestBase.waitForRecoveriesToFinish(fromColl, zkStateReader, false, true, 30);
// both "to" and "from" collections are up and active; index some docs ...
toDocId = indexDoc(toColl, 1001, "a", null, "b");
indexDoc(fromColl, 2001, "a", "c", null);
Thread.sleep(1000); // so the commits fire
}
@Test
public void testScore() throws Exception {
// with score
testJoins(toColl, fromColl, toDocId, true);
}
@Test
public void testNoScore() throws Exception {
// without score
testJoins(toColl, fromColl, toDocId, false);
}
@AfterClass
public static void shutdown() {
log.info("DistribJoinFromCollectionTest logic complete ... deleting the " + toColl + " and " + fromColl + " collections");
// try to clean up
for (String c : new String[]{ toColl, fromColl }) {
try {
CollectionAdminRequest.Delete req = CollectionAdminRequest.deleteCollection(c);
req.process(cloudClient);
} catch (Exception e) {
// don't fail the test
@@ -145,9 +171,7 @@ public class DistribJoinFromCollectionTest extends AbstractFullDistribZkTestBase
// create an alias for the fromIndex and then query through the alias
String alias = fromColl + "Alias";
CollectionAdminRequest.CreateAlias request = CollectionAdminRequest.createAlias(alias, fromColl);
request.process(cloudClient);
{
@@ -195,14 +219,14 @@ public class DistribJoinFromCollectionTest extends AbstractFullDistribZkTestBase
+ "from=join_s fromIndex=" + wrongName + " to=join_s}match_s:c";
final QueryRequest qr = new QueryRequest(params("collection", toColl, "q", joinQ, "fl", "id,get_s,score"));
try {
cluster.getSolrClient().request(qr);
} catch (HttpSolrClient.RemoteSolrException ex) {
assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code());
assertTrue(ex.getMessage().contains(wrongName));
}
}
protected static Integer indexDoc(String collection, int id, String joinField, String matchField, String getField) throws Exception {
UpdateRequest up = new UpdateRequest();
up.setCommitWithin(50);
up.setParam("collection", collection);
@@ -215,7 +239,7 @@ public class DistribJoinFromCollectionTest extends AbstractFullDistribZkTestBase
if (getField != null)
doc.addField("get_s", getField);
up.add(doc);
cluster.getSolrClient().request(up);
return docId;
}
}
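For reference, the join queries that testJoins(...) issues (its body lies outside this hunk) have roughly the shape sketched below. This is a hedged sketch: score=avg stands in for any of the scoreModes declared above, and the assertion simply reflects the single matching "to" doc indexed in setupCluster.

// field names join_s / match_s / get_s match the docs created by indexDoc(...)
String joinQ = "{!join from=join_s fromIndex=" + fromColl + " to=join_s score=avg}match_s:c";
QueryRequest qr = new QueryRequest(params("collection", toColl, "q", joinQ, "fl", "id,get_s,score"));
QueryResponse rsp = new QueryResponse();
rsp.setResponse(cluster.getSolrClient().request(qr, toColl));
assertEquals(1, rsp.getResults().getNumFound()); // the one "to" doc with join_s:a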

solr/core/src/test/org/apache/solr/response/transform/TestSubQueryTransformerDistrib.java

@@ -17,6 +17,8 @@
package org.apache.solr.response.transform;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
@@ -26,27 +28,59 @@ import java.util.Random;
import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.cloud.AbstractDistribZkTestBase;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.util.ContentStreamBase;
import org.junit.BeforeClass;
import org.junit.Test;
@SuppressSSL
public class TestSubQueryTransformerDistrib extends SolrCloudTestCase {
final static String people = "people";
final static String depts = "departments";
private static CloudSolrClient client;
@BeforeClass
public static void setupCluster() throws Exception {
final Path configDir = Paths.get(TEST_HOME(), "collection1", "conf");
String configName = "solrCloudCollectionConfig";
int nodeCount = 5;
configureCluster(nodeCount)
.addConfig(configName, configDir)
.configure();
Map<String, String> collectionProperties = new HashMap<>();
collectionProperties.put("config", "solrconfig-doctransformers.xml");
collectionProperties.put("schema", "schema-docValuesJoin.xml");
int shards = 2;
int replicas = 2;
assertNotNull(cluster.createCollection(people, shards, replicas,
configName,
collectionProperties));
assertNotNull(cluster.createCollection(depts, shards, replicas,
configName, collectionProperties));
client = cluster.getSolrClient();
client.setDefaultCollection(people);
ZkStateReader zkStateReader = client.getZkStateReader();
AbstractDistribZkTestBase.waitForRecoveriesToFinish(people, zkStateReader, true, true, 30);
AbstractDistribZkTestBase.waitForRecoveriesToFinish(depts, zkStateReader, false, true, 30);
}
@SuppressWarnings("serial")
@Test
@@ -54,14 +88,6 @@ public class TestSubQueryTransformerDistrib extends AbstractFullDistribZkTestBase
int peopleMultiplier = atLeast(1);
int deptMultiplier = atLeast(1);
final String people = "people";
createCollection(people, 2, 1, 10);
final String depts = "departments";
createCollection(depts, 2, 1, 10);
createIndex(people, peopleMultiplier, depts, deptMultiplier);
Random random1 = random();
@@ -79,7 +105,7 @@ public class TestSubQueryTransformerDistrib extends AbstractFullDistribZkTestBase
"depts.rows",""+(deptMultiplier*2),
"depts.logParamsList","q,fl,rows,row.dept_ss_dv"}));
final QueryResponse rsp = new QueryResponse();
rsp.setResponse(client.request(qr, people));
final SolrDocumentList hits = rsp.getResults();
assertEquals(peopleMultiplier, hits.getNumFound());
@@ -116,6 +142,7 @@ public class TestSubQueryTransformerDistrib extends AbstractFullDistribZkTestBase
private void createIndex(String people, int peopleMultiplier, String depts, int deptMultiplier)
throws SolrServerException, IOException {
int id=0;
List<String> peopleDocs = new ArrayList<>();
for (int p=0; p < peopleMultiplier; p++){
@@ -161,6 +188,9 @@ public class TestSubQueryTransformerDistrib extends AbstractFullDistribZkTestBase
private void addDocs(String collection, List<String> docs) throws SolrServerException, IOException {
StringBuilder upd = new StringBuilder("<update>");
upd.append("<delete><query>*:*</query></delete>");
for (Iterator<String> iterator = docs.iterator(); iterator.hasNext();) {
String add = iterator.next();
upd.append(add);
@@ -176,7 +206,7 @@ public class TestSubQueryTransformerDistrib extends AbstractFullDistribZkTestBase
ContentStreamUpdateRequest req = new ContentStreamUpdateRequest("/update");
req.addContentStream(new ContentStreamBase.StringStream(upd.toString(),"text/xml"));
client.request(req, collection);
upd.setLength("<update>".length());
}
}
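For context, the [subquery] document transformer that this test exercises attaches the results of a per-row subquery to each parent document. A hedged sketch of such a request, assuming illustrative field and query values (only the depts.* parameter names above come from the test itself):

QueryRequest qr = new QueryRequest(params(
    "q", "*:*",
    "fl", "*,depts:[subquery]",
    // $row.dept_ss_dv resolves against the dept_ss_dv value of each enclosing "people" row
    "depts.q", "{!terms f=dept_id_s v=$row.dept_ss_dv}",
    "depts.rows", "10"));
QueryResponse rsp = new QueryResponse();
rsp.setResponse(client.request(qr, people));
// each hit carries its subquery results as a nested document list
SolrDocumentList deptsOfFirstHit = (SolrDocumentList) rsp.getResults().get(0).getFieldValue("depts");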

solr/core/src/test/org/apache/solr/search/join/BlockJoinFacetDistribTest.java

@@ -16,7 +16,12 @@
*/
package org.apache.solr.search.join;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
@@ -24,34 +29,56 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.FacetField;
import org.apache.solr.client.solrj.response.FacetField.Count;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.cloud.AbstractDistribZkTestBase;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.junit.BeforeClass;
import org.junit.Test;
@Slow
public class BlockJoinFacetDistribTest extends SolrCloudTestCase {
private static final String collection = "facetcollection";
@BeforeClass
public static void setupCluster() throws Exception {
final Path configDir = Paths.get(TEST_HOME(), "collection1", "conf");
String configName = "solrCloudCollectionConfig";
int nodeCount = 6;
configureCluster(nodeCount)
.addConfig(configName, configDir)
.configure();
Map<String, String> collectionProperties = new HashMap<>();
collectionProperties.put("config", "solrconfig-blockjoinfacetcomponent.xml");
collectionProperties.put("schema", "schema-blockjoinfacetcomponent.xml");
// create the collection this test queries
int shards = 3;
int replicas = 2;
assertNotNull(cluster.createCollection(collection, shards, replicas,
configName,
collectionProperties));
AbstractDistribZkTestBase.waitForRecoveriesToFinish(collection,
cluster.getSolrClient().getZkStateReader(), false, true, 30);
}
final static List<String> colors = Arrays.asList("red","blue","brown","white","black","yellow","cyan","magenta","blur",
"fuchsia", "light","dark","green","grey","don't","know","any","more" );
final static List<String> sizes = Arrays.asList("s","m","l","xl","xxl","xml","xxxl","3","4","5","6","petite","maxi");
@Test
public void testBJQFacetComponent() throws Exception {
assert ! colors.removeAll(sizes): "there is no colors in sizes";
Collections.shuffle(colors,random());
@@ -64,8 +91,11 @@ public class BlockJoinFacetDistribTest extends BaseDistributedSearchTestCase {
}
};
cluster.getSolrClient().deleteByQuery(collection, "*:*");
final int parents = atLeast(10);
boolean aggregationOccurs = false;
List<SolrInputDocument> parentDocs = new ArrayList<>();
for(int parent=0; parent<parents || !aggregationOccurs;parent++){
assert parent < 2000000 : "parent num "+parent+
" aggregationOccurs:"+aggregationOccurs+". Sorry! too tricky loop condition.";
@@ -89,22 +119,18 @@ public class BlockJoinFacetDistribTest extends BaseDistributedSearchTestCase {
}
pdoc.addChildDocument(childDoc);
}
parentDocs.add(pdoc);
if (!parentDocs.isEmpty() && rarely()) {
indexDocs(parentDocs);
parentDocs.clear();
cluster.getSolrClient().commit(collection, false, false, true);
}
}
if (!parentDocs.isEmpty()) {
indexDocs(parentDocs);
}
cluster.getSolrClient().commit(collection);
// to parent query
final String childQueryClause = "COLOR_s:("+(matchingColors.toString().replaceAll("[,\\[\\]]", " "))+")";
QueryResponse results = query("q", "{!parent which=\"type_s:parent\"}"+childQueryClause,
@@ -122,15 +148,24 @@ public class BlockJoinFacetDistribTest extends BaseDistributedSearchTestCase {
String msg = ""+parentIdsByAttrValue+" "+color_s+" "+size_s;
for (FacetField facet: new FacetField[]{color_s, size_s}) {
for (Count c : facet.getValues()) {
assertEquals(c.getName()+"("+msg+")",
parentIdsByAttrValue.get(c.getName()).size(), c.getCount());
}
}
assertEquals(msg , parentIdsByAttrValue.size(),color_s.getValueCount() + size_s.getValueCount());
}
private QueryResponse query(String ... arg) throws SolrServerException, IOException {
ModifiableSolrParams solrParams = new ModifiableSolrParams();
for(int i=0; i<arg.length; i+=2) {
solrParams.add(arg[i], arg[i+1]);
}
return cluster.getSolrClient().query(collection, solrParams);
}
private void indexDocs(Collection<SolrInputDocument> pdocs) throws SolrServerException, IOException {
cluster.getSolrClient().add(collection, pdocs);
}
}
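For context, BlockJoinFacetComponent (wired in by solrconfig-blockjoinfacetcomponent.xml) computes child-document facets for parents matched by a block-join query, counting each parent at most once per facet value. A hedged sketch of the kind of request the test sends through its query(...) helper above; child.facet.field is the component's documented parameter, while the color values here are illustrative:

QueryResponse results = query(
    "q", "{!parent which=\"type_s:parent\"}COLOR_s:(red blue)",
    "facet", "true",
    "child.facet.field", "COLOR_s",
    "child.facet.field", "SIZE_s");
// facet counts arrive as ordinary facet fields on the response
FacetField colorFacet = results.getFacetField("COLOR_s");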