mirror of https://github.com/apache/lucene.git
SOLR-5017 support for routeField in CompositeId router also
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1513356 13f79535-47bb-0310-9956-ffa450edef68
parent a0ab76a95c
commit 2baecc2713
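In short, this patch lets the compositeId router shard documents by the value of a collection-level route field (DocRouter.ROUTE_FIELD) instead of only by a prefix embedded in the document id. A minimal indexing sketch, assuming the SolrJ 4.x CloudSolrServer API that the tests in this commit exercise; the ZooKeeper address, collection name, and "shard_s" field are illustrative, and the collection is assumed to already exist with its route field configured:

```java
import org.apache.solr.client.solrj.impl.CloudSolrServer;
import org.apache.solr.common.SolrInputDocument;

public class RouteFieldIndexingSketch {
  public static void main(String[] args) throws Exception {
    // Sketch only: "routeFieldColl" is assumed to have been created with
    // DocRouter.ROUTE_FIELD pointing at the "shard_s" field (see the test below).
    CloudSolrServer client = new CloudSolrServer("localhost:9983");
    client.setDefaultCollection("routeFieldColl");

    SolrInputDocument doc = new SolrInputDocument();
    doc.addField("id", "6");
    doc.addField("shard_s", "a"); // the router hashes this value, not an id prefix
    client.add(doc);
    client.commit();
    client.shutdown();
  }
}
```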
@@ -172,7 +172,7 @@ public class SolrIndexSplitter {
       int hash = 0;
       if (hashRouter != null) {
-        hash = hashRouter.sliceHash(idString, null, null);
+        hash = hashRouter.sliceHash(idString, null, null, null);
       }
       // int hash = Hash.murmurhash3_x86_32(ref, ref.offset, ref.length, 0);
@@ -22,6 +22,7 @@ import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.lucene.util._TestUtil;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrServer;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.CloudSolrServer;
 import org.apache.solr.client.solrj.impl.HttpSolrServer;
@@ -62,6 +63,7 @@ import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;

 import static org.apache.solr.cloud.OverseerCollectionProcessor.MAX_SHARDS_PER_NODE;
 import static org.apache.solr.cloud.OverseerCollectionProcessor.NUM_SLICES;
 import static org.apache.solr.cloud.OverseerCollectionProcessor.REPLICATION_FACTOR;
 import static org.apache.solr.cloud.OverseerCollectionProcessor.ROUTER;
 import static org.apache.solr.cloud.OverseerCollectionProcessor.SHARDS_PROP;
@@ -132,6 +134,7 @@ public class CustomCollectionTest extends AbstractFullDistribZkTestBase {
   @Override
   public void doTest() throws Exception {
     testCustomCollectionsAPI();
+    testRouteFieldForHashRouter();
     if (DEBUG) {
       super.printLayout();
     }
@@ -242,8 +245,8 @@ public class CustomCollectionTest extends AbstractFullDistribZkTestBase {
     collectionClient.commit();

     assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
-    assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam("shard.keys","b")).getResults().getNumFound());
-    assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam("shard.keys","a")).getResults().getNumFound());
+    assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"b")).getResults().getNumFound());
+    assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"a")).getResults().getNumFound());

     collectionClient.deleteByQuery("*:*");
     collectionClient.commit(true,true);
@@ -263,8 +266,8 @@ public class CustomCollectionTest extends AbstractFullDistribZkTestBase {
     collectionClient.request(up);

     assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
-    assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam("shard.keys","a")).getResults().getNumFound());
-    assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam("shard.keys","c")).getResults().getNumFound());
+    assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"a")).getResults().getNumFound());
+    assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"c")).getResults().getNumFound());

     //Testing CREATESHARD
     ModifiableSolrParams params = new ModifiableSolrParams();
@@ -292,7 +295,7 @@ public class CustomCollectionTest extends AbstractFullDistribZkTestBase {
     collectionClient.add(getDoc(id, 66, i1, -600, tlong, 600, t1,
         "humpty dumpy sat on a wall", _ROUTE_,"x"));
     collectionClient.commit();
-    assertEquals(1, collectionClient.query(new SolrQuery("*:*").setParam("shard.keys","x")).getResults().getNumFound());
+    assertEquals(1, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"x")).getResults().getNumFound());


     int numShards = 4;
@@ -349,9 +352,67 @@ public class CustomCollectionTest extends AbstractFullDistribZkTestBase {
     collectionClient.commit();

     assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
     assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam("shard.keys","b")).getResults().getNumFound());
+    assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"b")).getResults().getNumFound());
+    //TODO debug the following case
     assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam("shard.keys", "a")).getResults().getNumFound());
+    assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound());


   }

+  private void testRouteFieldForHashRouter()throws Exception{
+    String collectionName = "routeFieldColl";
+    int numShards = 4;
+    int replicationFactor = 2;
+    int maxShardsPerNode = (((numShards * replicationFactor) / getCommonCloudSolrServer()
+        .getZkStateReader().getClusterState().getLiveNodes().size())) + 1;
+
+    HashMap<String, List<Integer>> collectionInfos = new HashMap<String, List<Integer>>();
+    CloudSolrServer client = null;
+    String shard_fld = "shard_s";
+    try {
+      client = createCloudClient(null);
+      Map<String, Object> props = OverseerCollectionProcessor.asMap(
+          REPLICATION_FACTOR, replicationFactor,
+          MAX_SHARDS_PER_NODE, maxShardsPerNode,
+          NUM_SLICES,numShards,
+          DocRouter.ROUTE_FIELD, shard_fld);
+
+      createCollection(collectionInfos, collectionName,props,client);
+    } finally {
+      if (client != null) client.shutdown();
+    }
+
+    List<Integer> list = collectionInfos.get(collectionName);
+    checkForCollection(collectionName, list, null);
+
+    String url = getUrlFromZk(collectionName);
+
+    HttpSolrServer collectionClient = new HttpSolrServer(url);
+
+    // poll for a second - it can take a moment before we are ready to serve
+    waitForNon403or404or503(collectionClient);
+
+    collectionClient = new HttpSolrServer(url);
+
+    // lets try and use the solrj client to index a couple documents
+
+    collectionClient.add(getDoc(id, 6, i1, -600, tlong, 600, t1,
+        "humpty dumpy sat on a wall", shard_fld,"a"));
+
+    collectionClient.add(getDoc(id, 7, i1, -600, tlong, 600, t1,
+        "humpty dumpy3 sat on a walls", shard_fld,"a"));
+
+    collectionClient.add(getDoc(id, 8, i1, -600, tlong, 600, t1,
+        "humpty dumpy2 sat on a walled", shard_fld,"a"));
+
+    collectionClient.commit();
+
+    assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
+    //TODO debug the following case
+    assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound());
+
+  }
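The assertions above also move from the older "shard.keys" request parameter to the _ROUTE_ constant. A small query sketch, assuming _ROUTE_ resolves to the usual "_route_" parameter name; the helper method and client handle are illustrative, not part of the patch:

```java
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer;

public class RouteQuerySketch {
  // Count documents on the shard(s) that the given route value maps to.
  static long countForRoute(HttpSolrServer client, String routeValue) throws SolrServerException {
    SolrQuery q = new SolrQuery("*:*");
    q.setParam("_route_", routeValue); // older equivalent: q.setParam("shard.keys", routeValue)
    return client.query(q).getResults().getNumFound();
  }
}
```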
@@ -17,15 +17,6 @@ package org.apache.solr.cloud;
  * limitations under the License.
  */

-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-import java.util.Set;
-
 import org.apache.http.params.CoreConnectionPNames;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrRequest;
@@ -50,6 +41,15 @@ import org.apache.solr.update.DirectUpdateHandler2;
 import org.junit.After;
 import org.junit.Before;

+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+
 public class ShardSplitTest extends BasicDistributedZkTest {

   public static final String SHARD1_0 = SHARD1 + "_0";
@@ -289,7 +289,7 @@ public class ShardSplitTest extends BasicDistributedZkTest {
       int hash = 0;
       if (router instanceof HashBasedRouter) {
         HashBasedRouter hashBasedRouter = (HashBasedRouter) router;
-        hash = hashBasedRouter.sliceHash(id, null, null);
+        hash = hashBasedRouter.sliceHash(id, null, null,null);
       }
       for (int i = 0; i < ranges.size(); i++) {
         DocRouter.Range range = ranges.get(i);
@@ -1480,6 +1480,7 @@
   <searchComponent name="terms" class="solr.TermsComponent"/>

   <!-- A request handler for demonstrating the terms component -->
+  <requestHandler name="/js" class="org.apache.solr.handler.js.JavaScriptRequestHandler" startup="lazy"/>
   <requestHandler name="/terms" class="solr.SearchHandler" startup="lazy">
     <lst name="defaults">
       <bool name="terms">true</bool>
@@ -17,6 +17,7 @@ package org.apache.solr.common.cloud;
  * limitations under the License.
  */

+import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.Hash;
@@ -59,17 +60,33 @@ public class CompositeIdRouter extends HashBasedRouter {
   }

   @Override
-  public int sliceHash(String id, SolrInputDocument doc, SolrParams params) {
-    int idx = id.indexOf(separator);
-    if (idx < 0) {
-      return Hash.murmurhash3_x86_32(id, 0, id.length(), 0);
-    }
+  public int sliceHash(String id, SolrInputDocument doc, SolrParams params, DocCollection collection) {
+    String shardFieldName = collection ==null? null: collection.getStr(DocRouter.ROUTE_FIELD);
+    String part1 = null;
+    int idx = 0;
+    int commaIdx = 0;
+
+    if(shardFieldName == null || doc == null) {
+      idx = id.indexOf(separator);
+      if (idx < 0) {
+        return Hash.murmurhash3_x86_32(id, 0, id.length(), 0);
+      }
+      part1 = id.substring(0, idx);
+      commaIdx = part1.indexOf(bitsSeparator);
+
+    } else {
+      Object o = doc.getFieldValue(shardFieldName);
+      if (o != null) {
+        part1 = o.toString();
+        return Hash.murmurhash3_x86_32(part1, 0, part1.length(), 0);
+      } else {
+        throw new SolrException (SolrException.ErrorCode.BAD_REQUEST, "No value for :"+shardFieldName + ". Unable to identify shard");
+      }
+    }

     int m1 = mask1;
     int m2 = mask2;

-    String part1 = id.substring(0,idx);
-    int commaIdx = part1.indexOf(bitsSeparator);
     if (commaIdx > 0) {
       int firstBits = getBits(part1, commaIdx);
       if (firstBits >= 0) {
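The new sliceHash above prefers the configured route field when the collection defines one and the document carries a value for it; otherwise it falls back to parsing the compositeId prefix, and a missing route-field value is rejected as a bad request. A standalone distillation of that decision, not the patch itself: the method name is made up, java.lang.String.hashCode stands in for Hash.murmurhash3_x86_32, and the bits/mask handling of real compositeId hashing is omitted:

```java
import java.util.Map;

public class RouteDecisionSketch {
  // Simplified routing-hash decision mirroring the patched CompositeIdRouter.sliceHash.
  static int routeHash(String id, Map<String, Object> doc, String routeFieldName) {
    if (routeFieldName != null && doc != null) {
      Object value = doc.get(routeFieldName);
      if (value == null) {
        // the real code throws SolrException(ErrorCode.BAD_REQUEST)
        throw new IllegalArgumentException("No value for: " + routeFieldName);
      }
      return value.toString().hashCode(); // stand-in for murmurhash3 over the field value
    }
    int idx = id.indexOf('!'); // compositeId separator: "<prefix>!<rest>"
    if (idx < 0) {
      return id.hashCode(); // plain id, no composite prefix
    }
    return id.substring(0, idx).hashCode(); // hash only the routing prefix (masks omitted)
  }
}
```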
@@ -30,19 +30,19 @@ public abstract class HashBasedRouter extends DocRouter {
   @Override
   public Slice getTargetSlice(String id, SolrInputDocument sdoc, SolrParams params, DocCollection collection) {
     if (id == null) id = getId(sdoc, params);
-    int hash = sliceHash(id, sdoc, params);
+    int hash = sliceHash(id, sdoc, params,collection);
     return hashToSlice(hash, collection);
   }

   @Override
   public boolean isTargetSlice(String id, SolrInputDocument sdoc, SolrParams params, String shardId, DocCollection collection) {
     if (id == null) id = getId(sdoc, params);
-    int hash = sliceHash(id, sdoc, params);
+    int hash = sliceHash(id, sdoc, params, collection);
     Range range = collection.getSlice(shardId).getRange();
     return range != null && range.includes(hash);
   }

-  public int sliceHash(String id, SolrInputDocument sdoc, SolrParams params) {
+  public int sliceHash(String id, SolrInputDocument sdoc, SolrParams params, DocCollection collection) {
     return Hash.murmurhash3_x86_32(id, 0, id.length(), 0);
   }