mirror of https://github.com/apache/lucene.git

SOLR-8995: Use lambda to simplify CollectionsHandler

This commit is contained in:
parent 9aa639d45e
commit fb4de6adb1
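The change is easiest to see in isolation before reading the diff. The sketch below is not Solr code; the names (EnumLambdaSketch, Op, OldStyleOp, NewStyleOp) are illustrative only. It shows the pattern this commit applies to CollectionsHandler.CollectionOperation: an enum whose constants each override an abstract method in an anonymous body is rewritten so that each constant passes a lambda implementing a small functional interface, which a shared execute method then delegates to.

// Illustrative sketch only: the names below are not Solr classes; they demonstrate the
// enum-with-lambda pattern that this commit applies to CollectionOperation.
import java.util.Map;

public class EnumLambdaSketch {

  // Before: each enum constant carries an anonymous body that overrides an abstract method.
  enum OldStyleOp {
    CREATE {
      @Override
      Map<String, Object> call(Map<String, String> params) {
        return Map.of("name", params.get("name"));
      }
    },
    DELETE {
      @Override
      Map<String, Object> call(Map<String, String> params) {
        return Map.of("name", params.get("name"));
      }
    };

    abstract Map<String, Object> call(Map<String, String> params);
  }

  // After: the behaviour is passed to the constructor as a lambda implementing a small
  // functional interface, so simple operations collapse to a single line per constant.
  interface Op {
    Map<String, Object> execute(Map<String, String> params);
  }

  enum NewStyleOp {
    CREATE(params -> Map.of("name", params.get("name"))),
    DELETE(params -> Map.of("name", params.get("name")));

    final Op fun;

    NewStyleOp(Op fun) {
      this.fun = fun;
    }

    Map<String, Object> execute(Map<String, String> params) {
      return fun.execute(params);
    }
  }

  public static void main(String[] args) {
    // Dispatch through the enum, analogous to operation.execute(req, rsp, this) in the diff below.
    System.out.println(NewStyleOp.CREATE.execute(Map.of("name", "test")));
  }
}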
@@ -37,7 +37,6 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.solr.client.solrj.SolrResponse;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.impl.HttpSolrClient.Builder;
-import org.apache.solr.client.solrj.request.CoreAdminRequest;
 import org.apache.solr.client.solrj.request.CoreAdminRequest.RequestSyncShard;
 import org.apache.solr.client.solrj.response.RequestStatusState;
 import org.apache.solr.client.solrj.util.SolrIdentifierValidator;
@@ -204,7 +203,7 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
     log.info("Invoked Collection Action :{} with params {} and sendToOCPQueue={}", action.toLower(), req.getParamString(), operation.sendToOCPQueue);

     SolrResponse response = null;
-    Map<String, Object> props = operation.call(req, rsp, this);
+    Map<String, Object> props = operation.execute(req, rsp, this);
     String asyncId = req.getParams().get(ASYNC);
     if (props != null) {
       if (asyncId != null) {
@@ -335,7 +334,34 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
   public static final String SYSTEM_COLL = ".system";

-  enum CollectionOperation {
+  private static void createSysConfigSet(CoreContainer coreContainer) throws KeeperException, InterruptedException {
+    SolrZkClient zk = coreContainer.getZkController().getZkStateReader().getZkClient();
+    ZkCmdExecutor cmdExecutor = new ZkCmdExecutor(zk.getZkClientTimeout());
+    cmdExecutor.ensureExists(ZkStateReader.CONFIGS_ZKNODE, zk);
+    cmdExecutor.ensureExists(ZkStateReader.CONFIGS_ZKNODE + "/" + SYSTEM_COLL, zk);
+
+    try {
+      String path = ZkStateReader.CONFIGS_ZKNODE + "/" + SYSTEM_COLL + "/schema.xml";
+      byte[] data = IOUtils.toByteArray(Thread.currentThread().getContextClassLoader().getResourceAsStream("SystemCollectionSchema.xml"));
+      cmdExecutor.ensureExists(path, data, CreateMode.PERSISTENT, zk);
+      path = ZkStateReader.CONFIGS_ZKNODE + "/" + SYSTEM_COLL + "/solrconfig.xml";
+      data = IOUtils.toByteArray(Thread.currentThread().getContextClassLoader().getResourceAsStream("SystemCollectionSolrConfig.xml"));
+      cmdExecutor.ensureExists(path, data, CreateMode.PERSISTENT, zk);
+    } catch (IOException e) {
+      throw new SolrException(ErrorCode.SERVER_ERROR, e);
+    }
+
+  }
+
+  private static void addStatusToResponse(NamedList<Object> results, RequestStatusState state, String msg) {
+    SimpleOrderedMap<String> status = new SimpleOrderedMap<>();
+    status.add("state", state.getKey());
+    status.add("msg", msg);
+    results.add("status", status);
+  }
+
+  enum CollectionOperation implements CollectionOp {
     /**
      * very simple currently, you can pass a template collection, and the new collection is created on
      * every node the template collection is on
@@ -343,284 +369,194 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
      * we might also want to think about error handling (add the request to a zk queue and involve overseer?)
      * as well as specific replicas= options
      */
-    CREATE_OP(CREATE) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h)
-          throws KeeperException, InterruptedException {
-        Map<String, Object> props = req.getParams().required().getAll(null, NAME);
-        props.put("fromApi", "true");
-        req.getParams().getAll(props,
-            REPLICATION_FACTOR,
-            COLL_CONF,
-            NUM_SLICES,
-            MAX_SHARDS_PER_NODE,
-            CREATE_NODE_SET, CREATE_NODE_SET_SHUFFLE,
-            SHARDS_PROP,
-            STATE_FORMAT,
-            AUTO_ADD_REPLICAS,
-            RULE,
-            SNITCH);
-
-        if (props.get(STATE_FORMAT) == null) {
-          props.put(STATE_FORMAT, "2");
-        }
-        addMapObject(props, RULE);
-        addMapObject(props, SNITCH);
-        verifyRuleParams(h.coreContainer, props);
-        final String collectionName = SolrIdentifierValidator.validateCollectionName((String)props.get(NAME));
-        final String shardsParam = (String) props.get(SHARDS_PROP);
-        if (StringUtils.isNotEmpty(shardsParam)) {
-          verifyShardsParam(shardsParam);
-        }
-        if (SYSTEM_COLL.equals(collectionName)) {
-          //We must always create a .system collection with only a single shard
-          props.put(NUM_SLICES, 1);
-          props.remove(SHARDS_PROP);
-          createSysConfigSet(h.coreContainer);
-
-        }
-        copyPropertiesWithPrefix(req.getParams(), props, COLL_PROP_PREFIX);
-        return copyPropertiesWithPrefix(req.getParams(), props, "router.");
-
-      }
-
-      private void createSysConfigSet(CoreContainer coreContainer) throws KeeperException, InterruptedException {
-        SolrZkClient zk = coreContainer.getZkController().getZkStateReader().getZkClient();
-        ZkCmdExecutor cmdExecutor = new ZkCmdExecutor(zk.getZkClientTimeout());
-        cmdExecutor.ensureExists(ZkStateReader.CONFIGS_ZKNODE, zk);
-        cmdExecutor.ensureExists(ZkStateReader.CONFIGS_ZKNODE + "/" + SYSTEM_COLL, zk);
-
-        try {
-          String path = ZkStateReader.CONFIGS_ZKNODE + "/" + SYSTEM_COLL + "/schema.xml";
-          byte[] data = IOUtils.toByteArray(Thread.currentThread().getContextClassLoader().getResourceAsStream("SystemCollectionSchema.xml"));
-          cmdExecutor.ensureExists(path, data, CreateMode.PERSISTENT, zk);
-          path = ZkStateReader.CONFIGS_ZKNODE + "/" + SYSTEM_COLL + "/solrconfig.xml";
-          data = IOUtils.toByteArray(Thread.currentThread().getContextClassLoader().getResourceAsStream("SystemCollectionSolrConfig.xml"));
-          cmdExecutor.ensureExists(path, data, CreateMode.PERSISTENT, zk);
-        } catch (IOException e) {
-          throw new SolrException(ErrorCode.SERVER_ERROR, e);
-
-        }
-      }
-    },
-    DELETE_OP(DELETE) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler)
-          throws Exception {
-        return req.getParams().required().getAll(null, NAME);
-      }
-    },
-    RELOAD_OP(RELOAD) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler)
-          throws Exception {
-        return req.getParams().required().getAll(null, NAME);
-      }
-    },
-    SYNCSHARD_OP(SYNCSHARD) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h)
-          throws Exception {
-        String collection = req.getParams().required().get("collection");
-        String shard = req.getParams().required().get("shard");
-
-        ClusterState clusterState = h.coreContainer.getZkController().getClusterState();
-
-        DocCollection docCollection = clusterState.getCollection(collection);
-        ZkNodeProps leaderProps = docCollection.getLeader(shard);
-        ZkCoreNodeProps nodeProps = new ZkCoreNodeProps(leaderProps);
-
-        try (HttpSolrClient client = new Builder(nodeProps.getBaseUrl()).build()) {
-          client.setConnectionTimeout(15000);
-          client.setSoTimeout(60000);
-          RequestSyncShard reqSyncShard = new CoreAdminRequest.RequestSyncShard();
-          reqSyncShard.setCollection(collection);
-          reqSyncShard.setShard(shard);
-          reqSyncShard.setCoreName(nodeProps.getCoreName());
-          client.request(reqSyncShard);
-        }
-        return null;
-      }
-    },
-    CREATEALIAS_OP(CREATEALIAS) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler)
-          throws Exception {
-        final String aliasName = SolrIdentifierValidator.validateAliasName(req.getParams().get(NAME));
-        return req.getParams().required().getAll(null, NAME, "collections");
-      }
-    },
-    DELETEALIAS_OP(DELETEALIAS) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler)
-          throws Exception {
-        return req.getParams().required().getAll(null, NAME);
-      }
-    },
-    SPLITSHARD_OP(SPLITSHARD, DEFAULT_COLLECTION_OP_TIMEOUT * 5, true) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h)
-          throws Exception {
-        String name = req.getParams().required().get(COLLECTION_PROP);
-        // TODO : add support for multiple shards
-        String shard = req.getParams().get(SHARD_ID_PROP);
-        String rangesStr = req.getParams().get(CoreAdminParams.RANGES);
-        String splitKey = req.getParams().get("split.key");
-
-        if (splitKey == null && shard == null) {
-          throw new SolrException(ErrorCode.BAD_REQUEST, "At least one of shard, or split.key should be specified.");
-        }
-        if (splitKey != null && shard != null) {
-          throw new SolrException(ErrorCode.BAD_REQUEST,
-              "Only one of 'shard' or 'split.key' should be specified");
-        }
-        if (splitKey != null && rangesStr != null) {
-          throw new SolrException(ErrorCode.BAD_REQUEST,
-              "Only one of 'ranges' or 'split.key' should be specified");
-        }
-
-        Map<String, Object> map = req.getParams().getAll(null,
-            COLLECTION_PROP,
-            SHARD_ID_PROP,
-            "split.key",
-            CoreAdminParams.RANGES);
-        return copyPropertiesWithPrefix(req.getParams(), map, COLL_PROP_PREFIX);
-      }
-    },
-    DELETESHARD_OP(DELETESHARD) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler) throws Exception {
-        Map<String, Object> map = req.getParams().required().getAll(null,
-            COLLECTION_PROP,
-            SHARD_ID_PROP);
-        req.getParams().getAll(map,
-            DELETE_INDEX,
-            DELETE_DATA_DIR,
-            DELETE_INSTANCE_DIR);
-        return map;
-      }
-    },
-    FORCELEADER_OP(FORCELEADER) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler) throws Exception {
-        forceLeaderElection(req, handler);
-        return null;
-      }
-    },
-    CREATESHARD_OP(CREATESHARD) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler) throws Exception {
-        Map<String, Object> map = req.getParams().required().getAll(null,
-            COLLECTION_PROP,
-            SHARD_ID_PROP);
-        ClusterState clusterState = handler.coreContainer.getZkController().getClusterState();
-        final String newShardName = SolrIdentifierValidator.validateShardName(req.getParams().get(SHARD_ID_PROP));
-        if (!ImplicitDocRouter.NAME.equals(((Map) clusterState.getCollection(req.getParams().get(COLLECTION_PROP)).get(DOC_ROUTER)).get(NAME)))
-          throw new SolrException(ErrorCode.BAD_REQUEST, "shards can be added only to 'implicit' collections");
-        req.getParams().getAll(map,
-            REPLICATION_FACTOR,
-            CREATE_NODE_SET);
-        return copyPropertiesWithPrefix(req.getParams(), map, COLL_PROP_PREFIX);
-      }
-    },
-    DELETEREPLICA_OP(DELETEREPLICA) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler) throws Exception {
-        Map<String, Object> map = req.getParams().required().getAll(null,
-            COLLECTION_PROP,
-            SHARD_ID_PROP,
-            REPLICA_PROP);
-
-        req.getParams().getAll(map,
-            DELETE_INDEX,
-            DELETE_DATA_DIR,
-            DELETE_INSTANCE_DIR);
-
-        return req.getParams().getAll(map, ONLY_IF_DOWN);
-      }
-    },
-    MIGRATE_OP(MIGRATE) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        Map<String, Object> map = req.getParams().required().getAll(null, COLLECTION_PROP, "split.key", "target.collection");
-        return req.getParams().getAll(map, "forward.timeout");
-      }
-    },
-    ADDROLE_OP(ADDROLE) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler) throws Exception {
-        Map<String, Object> map = req.getParams().required().getAll(null, "role", "node");
-        if (!KNOWN_ROLES.contains(map.get("role")))
-          throw new SolrException(ErrorCode.BAD_REQUEST, "Unknown role. Supported roles are ," + KNOWN_ROLES);
-        return map;
-      }
-    },
-    REMOVEROLE_OP(REMOVEROLE) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        Map<String, Object> map = req.getParams().required().getAll(null, "role", "node");
-        if (!KNOWN_ROLES.contains(map.get("role")))
-          throw new SolrException(ErrorCode.BAD_REQUEST, "Unknown role. Supported roles are ," + KNOWN_ROLES);
-        return map;
-      }
-    },
-    CLUSTERPROP_OP(CLUSTERPROP) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        String name = req.getParams().required().get(NAME);
-        String val = req.getParams().get(VALUE_LONG);
-        ClusterProperties cp = new ClusterProperties(h.coreContainer.getZkController().getZkClient());
-        cp.setClusterProperty(name, val);
-        return null;
-      }
-    },
-    REQUESTSTATUS_OP(REQUESTSTATUS) {
-      @SuppressWarnings("unchecked")
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        req.getParams().required().check(REQUESTID);
-
-        final CoreContainer coreContainer = h.coreContainer;
-        final String requestId = req.getParams().get(REQUESTID);
-        final ZkController zkController = coreContainer.getZkController();
-
-        final NamedList<Object> results = new NamedList<>();
-        if (zkController.getOverseerCompletedMap().contains(requestId)) {
-          final byte[] mapEntry = zkController.getOverseerCompletedMap().get(requestId);
-          rsp.getValues().addAll(SolrResponse.deserialize(mapEntry).getResponse());
-          addStatusToResponse(results, COMPLETED, "found [" + requestId + "] in completed tasks");
-        } else if (zkController.getOverseerFailureMap().contains(requestId)) {
-          final byte[] mapEntry = zkController.getOverseerFailureMap().get(requestId);
-          rsp.getValues().addAll(SolrResponse.deserialize(mapEntry).getResponse());
-          addStatusToResponse(results, FAILED, "found [" + requestId + "] in failed tasks");
-        } else if (zkController.getOverseerRunningMap().contains(requestId)) {
-          addStatusToResponse(results, RUNNING, "found [" + requestId + "] in running tasks");
-        } else if (h.overseerCollectionQueueContains(requestId)) {
-          addStatusToResponse(results, SUBMITTED, "found [" + requestId + "] in submitted tasks");
-        } else {
-          addStatusToResponse(results, NOT_FOUND, "Did not find [" + requestId + "] in any tasks queue");
-        }
-
-        final SolrResponse response = new OverseerSolrResponse(results);
-        rsp.getValues().addAll(response.getResponse());
-        return null;
-      }
-
-      private void addStatusToResponse(NamedList<Object> results, RequestStatusState state, String msg) {
-        SimpleOrderedMap<String> status = new SimpleOrderedMap<>();
-        status.add("state", state.getKey());
-        status.add("msg", msg);
-        results.add("status", status);
-      }
-    },
-    DELETESTATUS_OP(DELETESTATUS) {
-      @SuppressWarnings("unchecked")
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
+    CREATE_OP(CREATE, (req, rsp, h) -> {
+      Map<String, Object> props = req.getParams().required().getAll(null, NAME);
+      props.put("fromApi", "true");
+      req.getParams().getAll(props,
+          REPLICATION_FACTOR,
+          COLL_CONF,
+          NUM_SLICES,
+          MAX_SHARDS_PER_NODE,
+          CREATE_NODE_SET, CREATE_NODE_SET_SHUFFLE,
+          SHARDS_PROP,
+          STATE_FORMAT,
+          AUTO_ADD_REPLICAS,
+          RULE,
+          SNITCH);
+
+      if (props.get(STATE_FORMAT) == null) {
+        props.put(STATE_FORMAT, "2");
+      }
+      addMapObject(props, RULE);
+      addMapObject(props, SNITCH);
+      verifyRuleParams(h.coreContainer, props);
+      final String collectionName = SolrIdentifierValidator.validateCollectionName((String) props.get(NAME));
+      final String shardsParam = (String) props.get(SHARDS_PROP);
+      if (StringUtils.isNotEmpty(shardsParam)) {
+        verifyShardsParam(shardsParam);
+      }
+      if (SYSTEM_COLL.equals(collectionName)) {
+        //We must always create a .system collection with only a single shard
+        props.put(NUM_SLICES, 1);
+        props.remove(SHARDS_PROP);
+        createSysConfigSet(h.coreContainer);
+
+      }
+      copyPropertiesWithPrefix(req.getParams(), props, COLL_PROP_PREFIX);
+      return copyPropertiesWithPrefix(req.getParams(), props, "router.");
+    }),
+    DELETE_OP(DELETE, (req, rsp, h) -> req.getParams().required().getAll(null, NAME)),
+    RELOAD_OP(RELOAD, (req, rsp, h) -> req.getParams().required().getAll(null, NAME)),
+    SYNCSHARD_OP(SYNCSHARD, (req, rsp, h) -> {
+      String collection = req.getParams().required().get("collection");
+      String shard = req.getParams().required().get("shard");
+
+      ClusterState clusterState = h.coreContainer.getZkController().getClusterState();
+
+      DocCollection docCollection = clusterState.getCollection(collection);
+      ZkNodeProps leaderProps = docCollection.getLeader(shard);
+      ZkCoreNodeProps nodeProps = new ZkCoreNodeProps(leaderProps);
+
+      try (HttpSolrClient client = new Builder(nodeProps.getBaseUrl()).build()) {
+        client.setConnectionTimeout(15000);
+        client.setSoTimeout(60000);
+        RequestSyncShard reqSyncShard = new RequestSyncShard();
+        reqSyncShard.setCollection(collection);
+        reqSyncShard.setShard(shard);
+        reqSyncShard.setCoreName(nodeProps.getCoreName());
+        client.request(reqSyncShard);
+      }
+      return null;
+    }),
+    CREATEALIAS_OP(CREATEALIAS, (req, rsp, h) -> {
+      final String aliasName = SolrIdentifierValidator.validateAliasName(req.getParams().get(NAME));
+      return req.getParams().required().getAll(null, NAME, "collections");
+    }),
+    DELETEALIAS_OP(DELETEALIAS, (req, rsp, h) -> req.getParams().required().getAll(null, NAME)),
+    SPLITSHARD_OP(SPLITSHARD, DEFAULT_COLLECTION_OP_TIMEOUT * 5, true, (req, rsp, h) -> {
+      String name = req.getParams().required().get(COLLECTION_PROP);
+      // TODO : add support for multiple shards
+      String shard = req.getParams().get(SHARD_ID_PROP);
+      String rangesStr = req.getParams().get(CoreAdminParams.RANGES);
+      String splitKey = req.getParams().get("split.key");
+
+      if (splitKey == null && shard == null) {
+        throw new SolrException(ErrorCode.BAD_REQUEST, "At least one of shard, or split.key should be specified.");
+      }
+      if (splitKey != null && shard != null) {
+        throw new SolrException(ErrorCode.BAD_REQUEST,
+            "Only one of 'shard' or 'split.key' should be specified");
+      }
+      if (splitKey != null && rangesStr != null) {
+        throw new SolrException(ErrorCode.BAD_REQUEST,
+            "Only one of 'ranges' or 'split.key' should be specified");
+      }
+
+      Map<String, Object> map = req.getParams().getAll(null,
+          COLLECTION_PROP,
+          SHARD_ID_PROP,
+          "split.key",
+          CoreAdminParams.RANGES);
+      return copyPropertiesWithPrefix(req.getParams(), map, COLL_PROP_PREFIX);
+    }),
+    DELETESHARD_OP(DELETESHARD, (req, rsp, h) -> {
+      Map<String, Object> map = req.getParams().required().getAll(null,
+          COLLECTION_PROP,
+          SHARD_ID_PROP);
+      req.getParams().getAll(map,
+          DELETE_INDEX,
+          DELETE_DATA_DIR,
+          DELETE_INSTANCE_DIR);
+      return map;
+    }),
+    FORCELEADER_OP(FORCELEADER, (req, rsp, h) -> {
+      forceLeaderElection(req, h);
+      return null;
+    }),
+    CREATESHARD_OP(CREATESHARD, (req, rsp, h) -> {
+      Map<String, Object> map = req.getParams().required().getAll(null,
+          COLLECTION_PROP,
+          SHARD_ID_PROP);
+      ClusterState clusterState = h.coreContainer.getZkController().getClusterState();
+      final String newShardName = SolrIdentifierValidator.validateShardName(req.getParams().get(SHARD_ID_PROP));
+      if (!ImplicitDocRouter.NAME.equals(((Map) clusterState.getCollection(req.getParams().get(COLLECTION_PROP)).get(DOC_ROUTER)).get(NAME)))
+        throw new SolrException(ErrorCode.BAD_REQUEST, "shards can be added only to 'implicit' collections");
+      req.getParams().getAll(map,
+          REPLICATION_FACTOR,
+          CREATE_NODE_SET);
+      return copyPropertiesWithPrefix(req.getParams(), map, COLL_PROP_PREFIX);
+    }),
+    DELETEREPLICA_OP(DELETEREPLICA, (req, rsp, h) -> {
+      Map<String, Object> map = req.getParams().required().getAll(null,
+          COLLECTION_PROP,
+          SHARD_ID_PROP,
+          REPLICA_PROP);
+
+      req.getParams().getAll(map,
+          DELETE_INDEX,
+          DELETE_DATA_DIR,
+          DELETE_INSTANCE_DIR);
+
+      return req.getParams().getAll(map, ONLY_IF_DOWN);
+    }),
+    MIGRATE_OP(MIGRATE, (req, rsp, h) -> {
+      Map<String, Object> map = req.getParams().required().getAll(null, COLLECTION_PROP, "split.key", "target.collection");
+      return req.getParams().getAll(map, "forward.timeout");
+    }),
+    ADDROLE_OP(ADDROLE, (req, rsp, h) -> {
+      Map<String, Object> map = req.getParams().required().getAll(null, "role", "node");
+      if (!KNOWN_ROLES.contains(map.get("role")))
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Unknown role. Supported roles are ," + KNOWN_ROLES);
+      return map;
+    }),
+    REMOVEROLE_OP(REMOVEROLE, (req, rsp, h) -> {
+      Map<String, Object> map = req.getParams().required().getAll(null, "role", "node");
+      if (!KNOWN_ROLES.contains(map.get("role")))
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Unknown role. Supported roles are ," + KNOWN_ROLES);
+      return map;
+    }),
+    CLUSTERPROP_OP(CLUSTERPROP, (req, rsp, h) -> {
+      String name = req.getParams().required().get(NAME);
+      String val = req.getParams().get(VALUE_LONG);
+      ClusterProperties cp = new ClusterProperties(h.coreContainer.getZkController().getZkClient());
+      cp.setClusterProperty(name, val);
+      return null;
+    }),
+    REQUESTSTATUS_OP(REQUESTSTATUS, (req, rsp, h) -> {
+      req.getParams().required().check(REQUESTID);
+
+      final CoreContainer coreContainer1 = h.coreContainer;
+      final String requestId = req.getParams().get(REQUESTID);
+      final ZkController zkController = coreContainer1.getZkController();
+
+      final NamedList<Object> results = new NamedList<>();
+      if (zkController.getOverseerCompletedMap().contains(requestId)) {
+        final byte[] mapEntry = zkController.getOverseerCompletedMap().get(requestId);
+        rsp.getValues().addAll(SolrResponse.deserialize(mapEntry).getResponse());
+        addStatusToResponse(results, COMPLETED, "found [" + requestId + "] in completed tasks");
+      } else if (zkController.getOverseerFailureMap().contains(requestId)) {
+        final byte[] mapEntry = zkController.getOverseerFailureMap().get(requestId);
+        rsp.getValues().addAll(SolrResponse.deserialize(mapEntry).getResponse());
+        addStatusToResponse(results, FAILED, "found [" + requestId + "] in failed tasks");
+      } else if (zkController.getOverseerRunningMap().contains(requestId)) {
+        addStatusToResponse(results, RUNNING, "found [" + requestId + "] in running tasks");
+      } else if (h.overseerCollectionQueueContains(requestId)) {
+        addStatusToResponse(results, SUBMITTED, "found [" + requestId + "] in submitted tasks");
+      } else {
+        addStatusToResponse(results, NOT_FOUND, "Did not find [" + requestId + "] in any tasks queue");
+      }
+
+      final SolrResponse response = new OverseerSolrResponse(results);
+      rsp.getValues().addAll(response.getResponse());
+      return null;
+    }),
+    DELETESTATUS_OP(DELETESTATUS, new CollectionOp() {
+      @SuppressWarnings("unchecked")
+      @Override
+      public Map<String, Object> execute(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
         final CoreContainer coreContainer = h.coreContainer;
         final String requestId = req.getParams().get(REQUESTID);
         final ZkController zkController = coreContainer.getZkController();
@@ -652,263 +588,217 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
         }
         return null;
       }
-    },
-    ADDREPLICA_OP(ADDREPLICA) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h)
-          throws Exception {
-        Map<String, Object> props = req.getParams().getAll(null,
-            COLLECTION_PROP,
-            "node",
-            SHARD_ID_PROP,
-            _ROUTE_,
-            CoreAdminParams.NAME,
-            INSTANCE_DIR,
-            DATA_DIR);
-        return copyPropertiesWithPrefix(req.getParams(), props, COLL_PROP_PREFIX);
-      }
-    },
-    OVERSEERSTATUS_OP(OVERSEERSTATUS) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        return new LinkedHashMap<>();
-      }
-    },
+    }),
+    ADDREPLICA_OP(ADDREPLICA, (req, rsp, h) -> {
+      Map<String, Object> props = req.getParams().getAll(null,
+          COLLECTION_PROP,
+          "node",
+          SHARD_ID_PROP,
+          _ROUTE_,
+          CoreAdminParams.NAME,
+          INSTANCE_DIR,
+          DATA_DIR);
+      return copyPropertiesWithPrefix(req.getParams(), props, COLL_PROP_PREFIX);
+    }),
+    OVERSEERSTATUS_OP(OVERSEERSTATUS, (req, rsp, h) -> (Map) new LinkedHashMap<>()),

     /**
      * Handle list collection request.
      * Do list collection request to zk host
      */
-    LIST_OP(LIST) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler) throws Exception {
-        NamedList<Object> results = new NamedList<>();
-        Map<String, DocCollection> collections = handler.coreContainer.getZkController().getZkStateReader().getClusterState().getCollectionsMap();
-        List<String> collectionList = new ArrayList<>(collections.keySet());
-        results.add("collections", collectionList);
-        SolrResponse response = new OverseerSolrResponse(results);
-        rsp.getValues().addAll(response.getResponse());
-        return null;
-      }
-    },
+    LIST_OP(LIST, (req, rsp, h) -> {
+      NamedList<Object> results = new NamedList<>();
+      Map<String, DocCollection> collections = h.coreContainer.getZkController().getZkStateReader().getClusterState().getCollectionsMap();
+      List<String> collectionList = new ArrayList<>(collections.keySet());
+      results.add("collections", collectionList);
+      SolrResponse response = new OverseerSolrResponse(results);
+      rsp.getValues().addAll(response.getResponse());
+      return null;
+    }),
     /**
      * Handle cluster status request.
      * Can return status per specific collection/shard or per all collections.
      */
-    CLUSTERSTATUS_OP(CLUSTERSTATUS) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler)
-          throws KeeperException, InterruptedException {
-        Map<String, Object> all = req.getParams().getAll(null,
-            COLLECTION_PROP,
-            SHARD_ID_PROP,
-            _ROUTE_);
-        new ClusterStatus(handler.coreContainer.getZkController().getZkStateReader(),
-            new ZkNodeProps(all)).getClusterStatus(rsp.getValues());
-        return null;
-      }
-    },
-    ADDREPLICAPROP_OP(ADDREPLICAPROP) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        Map<String, Object> map = req.getParams().required().getAll(null,
-            COLLECTION_PROP,
-            PROPERTY_PROP,
-            SHARD_ID_PROP,
-            REPLICA_PROP,
-            PROPERTY_VALUE_PROP);
-        req.getParams().getAll(map, SHARD_UNIQUE);
-        String property = (String) map.get(PROPERTY_PROP);
-        if (!property.startsWith(COLL_PROP_PREFIX)) {
-          property = COLL_PROP_PREFIX + property;
-        }
-
-        boolean uniquePerSlice = Boolean.parseBoolean((String) map.get(SHARD_UNIQUE));
-
-        // Check if we're trying to set a property with parameters that allow us to set the property on multiple replicas
-        // in a slice on properties that are known to only be one-per-slice and error out if so.
-        if (StringUtils.isNotBlank((String) map.get(SHARD_UNIQUE)) &&
-            SliceMutator.SLICE_UNIQUE_BOOLEAN_PROPERTIES.contains(property.toLowerCase(Locale.ROOT)) &&
-            uniquePerSlice == false) {
-          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-              "Overseer replica property command received for property " + property +
-                  " with the " + SHARD_UNIQUE +
-                  " parameter set to something other than 'true'. No action taken.");
-        }
-        return map;
-      }
-    },
-    DELETEREPLICAPROP_OP(DELETEREPLICAPROP) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        Map<String, Object> map = req.getParams().required().getAll(null,
-            COLLECTION_PROP,
-            PROPERTY_PROP,
-            SHARD_ID_PROP,
-            REPLICA_PROP);
-        return req.getParams().getAll(map, PROPERTY_PROP);
-      }
-    },
-    BALANCESHARDUNIQUE_OP(BALANCESHARDUNIQUE) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        Map<String, Object> map = req.getParams().required().getAll(null,
-            COLLECTION_PROP,
-            PROPERTY_PROP);
-        Boolean shardUnique = Boolean.parseBoolean(req.getParams().get(SHARD_UNIQUE));
-        String prop = req.getParams().get(PROPERTY_PROP).toLowerCase(Locale.ROOT);
-        if (!StringUtils.startsWith(prop, COLL_PROP_PREFIX)) {
-          prop = COLL_PROP_PREFIX + prop;
-        }
-
-        if (!shardUnique && !SliceMutator.SLICE_UNIQUE_BOOLEAN_PROPERTIES.contains(prop)) {
-          throw new SolrException(ErrorCode.BAD_REQUEST, "Balancing properties amongst replicas in a slice requires that"
-              + " the property be pre-defined as a unique property (e.g. 'preferredLeader') or that 'shardUnique' be set to 'true'. " +
-              " Property: " + prop + " shardUnique: " + Boolean.toString(shardUnique));
-        }
-
-        return req.getParams().getAll(map, ONLY_ACTIVE_NODES, SHARD_UNIQUE);
-      }
-    },
-    REBALANCELEADERS_OP(REBALANCELEADERS) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        new RebalanceLeaders(req,rsp,h).execute();
-        return null;
-      }
-    },
-    MODIFYCOLLECTION_OP(MODIFYCOLLECTION) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-
-        Map<String, Object> m = req.getParams().getAll(null, MODIFIABLE_COLL_PROPS);
-        if (m.isEmpty()) throw new SolrException(ErrorCode.BAD_REQUEST,
-            formatString("no supported values provided rule, snitch, maxShardsPerNode, replicationFactor, collection.configName"));
-        req.getParams().required().getAll(m, COLLECTION_PROP);
-        addMapObject(m, RULE);
-        addMapObject(m, SNITCH);
-        for (String prop : MODIFIABLE_COLL_PROPS) DocCollection.verifyProp(m, prop);
-        verifyRuleParams(h.coreContainer, m);
-        return m;
-      }
-    },
-    MIGRATESTATEFORMAT_OP(MIGRATESTATEFORMAT) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler handler)
-          throws Exception {
-        return req.getParams().required().getAll(null, COLLECTION_PROP);
-      }
-    },
-    BACKUP_OP(BACKUP) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        req.getParams().required().check(NAME, COLLECTION_PROP);
-
-        String collectionName = req.getParams().get(COLLECTION_PROP);
-        ClusterState clusterState = h.coreContainer.getZkController().getClusterState();
-        if (!clusterState.hasCollection(collectionName)) {
-          throw new SolrException(ErrorCode.BAD_REQUEST, "Collection '" + collectionName + "' does not exist, no action taken.");
-        }
-
-        CoreContainer cc = h.coreContainer;
-        String repo = req.getParams().get(CoreAdminParams.BACKUP_REPOSITORY);
-        BackupRepository repository = cc.newBackupRepository(Optional.ofNullable(repo));
-
-        String location = repository.getBackupLocation(req.getParams().get(CoreAdminParams.BACKUP_LOCATION));
-        if (location == null) {
-          //Refresh the cluster property file to make sure the value set for location is the latest
-          h.coreContainer.getZkController().getZkStateReader().forceUpdateClusterProperties();
-
-          // Check if the location is specified in the cluster property.
-          location = h.coreContainer.getZkController().getZkStateReader().getClusterProperty(CoreAdminParams.BACKUP_LOCATION, null);
-          if (location == null) {
-            throw new SolrException(ErrorCode.BAD_REQUEST, "'location' is not specified as a query"
-                + " parameter or as a default repository property or as a cluster property.");
-          }
-        }
-
-        // Check if the specified location is valid for this repository.
-        URI uri = repository.createURI(location);
-        try {
-          if (!repository.exists(uri)) {
-            throw new SolrException(ErrorCode.SERVER_ERROR, "specified location " + uri + " does not exist.");
-          }
-        } catch (IOException ex) {
-          throw new SolrException(ErrorCode.SERVER_ERROR, "Failed to check the existance of " + uri + ". Is it valid?", ex);
-        }
-
-        Map<String, Object> params = req.getParams().getAll(null, NAME, COLLECTION_PROP);
-        params.put(CoreAdminParams.BACKUP_LOCATION, location);
-        return params;
-      }
-    },
-    RESTORE_OP(RESTORE) {
-      @Override
-      Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception {
-        req.getParams().required().check(NAME, COLLECTION_PROP);
-
-        String collectionName = SolrIdentifierValidator.validateCollectionName(req.getParams().get(COLLECTION_PROP));
-        ClusterState clusterState = h.coreContainer.getZkController().getClusterState();
-        //We always want to restore into an collection name which doesn't exist yet.
-        if (clusterState.hasCollection(collectionName)) {
-          throw new SolrException(ErrorCode.BAD_REQUEST, "Collection '" + collectionName + "' exists, no action taken.");
-        }
-
-        CoreContainer cc = h.coreContainer;
-        String repo = req.getParams().get(CoreAdminParams.BACKUP_REPOSITORY);
-        BackupRepository repository = cc.newBackupRepository(Optional.ofNullable(repo));
-
-        String location = repository.getBackupLocation(req.getParams().get(CoreAdminParams.BACKUP_LOCATION));
-        if (location == null) {
-          //Refresh the cluster property file to make sure the value set for location is the latest
-          h.coreContainer.getZkController().getZkStateReader().forceUpdateClusterProperties();
-
-          // Check if the location is specified in the cluster property.
-          location = h.coreContainer.getZkController().getZkStateReader().getClusterProperty("location", null);
-          if (location == null) {
-            throw new SolrException(ErrorCode.BAD_REQUEST, "'location' is not specified as a query"
-                + " parameter or as a default repository property or as a cluster property.");
-          }
-        }
-
-        // Check if the specified location is valid for this repository.
-        URI uri = repository.createURI(location);
-        try {
-          if (!repository.exists(uri)) {
-            throw new SolrException(ErrorCode.SERVER_ERROR, "specified location " + uri + " does not exist.");
-          }
-        } catch (IOException ex) {
-          throw new SolrException(ErrorCode.SERVER_ERROR, "Failed to check the existance of " + uri + ". Is it valid?", ex);
-        }
-
-        Map<String, Object> params = req.getParams().getAll(null, NAME, COLLECTION_PROP);
-        params.put(CoreAdminParams.BACKUP_LOCATION, location);
-        // from CREATE_OP:
-        req.getParams().getAll(params, COLL_CONF, REPLICATION_FACTOR, MAX_SHARDS_PER_NODE, STATE_FORMAT, AUTO_ADD_REPLICAS);
-        copyPropertiesWithPrefix(req.getParams(), params, COLL_PROP_PREFIX);
-        return params;
-      }
-    };
+    CLUSTERSTATUS_OP(CLUSTERSTATUS, (req, rsp, h) -> {
+      Map<String, Object> all = req.getParams().getAll(null,
+          COLLECTION_PROP,
+          SHARD_ID_PROP,
+          _ROUTE_);
+      new ClusterStatus(h.coreContainer.getZkController().getZkStateReader(),
+          new ZkNodeProps(all)).getClusterStatus(rsp.getValues());
+      return null;
+    }),
+    ADDREPLICAPROP_OP(ADDREPLICAPROP, (req, rsp, h) -> {
+      Map<String, Object> map = req.getParams().required().getAll(null,
+          COLLECTION_PROP,
+          PROPERTY_PROP,
+          SHARD_ID_PROP,
+          REPLICA_PROP,
+          PROPERTY_VALUE_PROP);
+      req.getParams().getAll(map, SHARD_UNIQUE);
+      String property = (String) map.get(PROPERTY_PROP);
+      if (!property.startsWith(COLL_PROP_PREFIX)) {
+        property = COLL_PROP_PREFIX + property;
+      }
+
+      boolean uniquePerSlice = Boolean.parseBoolean((String) map.get(SHARD_UNIQUE));
+
+      // Check if we're trying to set a property with parameters that allow us to set the property on multiple replicas
+      // in a slice on properties that are known to only be one-per-slice and error out if so.
+      if (StringUtils.isNotBlank((String) map.get(SHARD_UNIQUE)) &&
+          SliceMutator.SLICE_UNIQUE_BOOLEAN_PROPERTIES.contains(property.toLowerCase(Locale.ROOT)) &&
+          uniquePerSlice == false) {
+        throw new SolrException(ErrorCode.BAD_REQUEST,
+            "Overseer replica property command received for property " + property +
+                " with the " + SHARD_UNIQUE +
+                " parameter set to something other than 'true'. No action taken.");
+      }
+      return map;
+    }),
+    DELETEREPLICAPROP_OP(DELETEREPLICAPROP, (req, rsp, h) -> {
+      Map<String, Object> map = req.getParams().required().getAll(null,
+          COLLECTION_PROP,
+          PROPERTY_PROP,
+          SHARD_ID_PROP,
+          REPLICA_PROP);
+      return req.getParams().getAll(map, PROPERTY_PROP);
+    }),
+    BALANCESHARDUNIQUE_OP(BALANCESHARDUNIQUE, (req, rsp, h) -> {
+      Map<String, Object> map = req.getParams().required().getAll(null,
+          COLLECTION_PROP,
+          PROPERTY_PROP);
+      Boolean shardUnique = Boolean.parseBoolean(req.getParams().get(SHARD_UNIQUE));
+      String prop = req.getParams().get(PROPERTY_PROP).toLowerCase(Locale.ROOT);
+      if (!StringUtils.startsWith(prop, COLL_PROP_PREFIX)) {
+        prop = COLL_PROP_PREFIX + prop;
+      }
+
+      if (!shardUnique && !SliceMutator.SLICE_UNIQUE_BOOLEAN_PROPERTIES.contains(prop)) {
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Balancing properties amongst replicas in a slice requires that"
+            + " the property be pre-defined as a unique property (e.g. 'preferredLeader') or that 'shardUnique' be set to 'true'. " +
+            " Property: " + prop + " shardUnique: " + Boolean.toString(shardUnique));
+      }
+
+      return req.getParams().getAll(map, ONLY_ACTIVE_NODES, SHARD_UNIQUE);
+    }),
+    REBALANCELEADERS_OP(REBALANCELEADERS, (req, rsp, h) -> {
+      new RebalanceLeaders(req, rsp, h).execute();
+      return null;
+    }),
+    MODIFYCOLLECTION_OP(MODIFYCOLLECTION, (req, rsp, h) -> {
+      Map<String, Object> m = req.getParams().getAll(null, MODIFIABLE_COLL_PROPS);
+      if (m.isEmpty()) throw new SolrException(ErrorCode.BAD_REQUEST,
+          formatString("no supported values provided rule, snitch, maxShardsPerNode, replicationFactor, collection.configName"));
+      req.getParams().required().getAll(m, COLLECTION_PROP);
+      addMapObject(m, RULE);
+      addMapObject(m, SNITCH);
+      for (String prop : MODIFIABLE_COLL_PROPS) DocCollection.verifyProp(m, prop);
+      verifyRuleParams(h.coreContainer, m);
+      return m;
+    }),
+    MIGRATESTATEFORMAT_OP(MIGRATESTATEFORMAT, (req, rsp, h) -> req.getParams().required().getAll(null, COLLECTION_PROP)),

+    BACKUP_OP(BACKUP, (req, rsp, h) -> {
+      req.getParams().required().check(NAME, COLLECTION_PROP);
+
+      String collectionName = req.getParams().get(COLLECTION_PROP);
+      ClusterState clusterState = h.coreContainer.getZkController().getClusterState();
+      if (!clusterState.hasCollection(collectionName)) {
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Collection '" + collectionName + "' does not exist, no action taken.");
+      }
+
+      CoreContainer cc = h.coreContainer;
+      String repo = req.getParams().get(CoreAdminParams.BACKUP_REPOSITORY);
+      BackupRepository repository = cc.newBackupRepository(Optional.ofNullable(repo));
+
+      String location = repository.getBackupLocation(req.getParams().get(CoreAdminParams.BACKUP_LOCATION));
+      if (location == null) {
+        //Refresh the cluster property file to make sure the value set for location is the latest
+        h.coreContainer.getZkController().getZkStateReader().forceUpdateClusterProperties();
+
+        // Check if the location is specified in the cluster property.
+        location = h.coreContainer.getZkController().getZkStateReader().getClusterProperty(CoreAdminParams.BACKUP_LOCATION, null);
+        if (location == null) {
+          throw new SolrException(ErrorCode.BAD_REQUEST, "'location' is not specified as a query"
+              + " parameter or as a default repository property or as a cluster property.");
+        }
+      }
+
+      // Check if the specified location is valid for this repository.
+      URI uri = repository.createURI(location);
+      try {
+        if (!repository.exists(uri)) {
+          throw new SolrException(ErrorCode.SERVER_ERROR, "specified location " + uri + " does not exist.");
+        }
+      } catch (IOException ex) {
+        throw new SolrException(ErrorCode.SERVER_ERROR, "Failed to check the existance of " + uri + ". Is it valid?", ex);
+      }
+
+      Map<String, Object> params = req.getParams().getAll(null, NAME, COLLECTION_PROP);
+      params.put(CoreAdminParams.BACKUP_LOCATION, location);
+      return params;
+    }),
+    RESTORE_OP(RESTORE, (req, rsp, h) -> {
+      req.getParams().required().check(NAME, COLLECTION_PROP);
+
+      String collectionName = SolrIdentifierValidator.validateCollectionName(req.getParams().get(COLLECTION_PROP));
+      ClusterState clusterState = h.coreContainer.getZkController().getClusterState();
+      //We always want to restore into an collection name which doesn't exist yet.
+      if (clusterState.hasCollection(collectionName)) {
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Collection '" + collectionName + "' exists, no action taken.");
+      }
+
+      CoreContainer cc = h.coreContainer;
+      String repo = req.getParams().get(CoreAdminParams.BACKUP_REPOSITORY);
+      BackupRepository repository = cc.newBackupRepository(Optional.ofNullable(repo));
+
+      String location = repository.getBackupLocation(req.getParams().get(CoreAdminParams.BACKUP_LOCATION));
+      if (location == null) {
+        //Refresh the cluster property file to make sure the value set for location is the latest
+        h.coreContainer.getZkController().getZkStateReader().forceUpdateClusterProperties();
+
+        // Check if the location is specified in the cluster property.
+        location = h.coreContainer.getZkController().getZkStateReader().getClusterProperty("location", null);
+        if (location == null) {
+          throw new SolrException(ErrorCode.BAD_REQUEST, "'location' is not specified as a query"
+              + " parameter or as a default repository property or as a cluster property.");
+        }
+      }
+
+      // Check if the specified location is valid for this repository.
+      URI uri = repository.createURI(location);
+      try {
+        if (!repository.exists(uri)) {
+          throw new SolrException(ErrorCode.SERVER_ERROR, "specified location " + uri + " does not exist.");
+        }
+      } catch (IOException ex) {
+        throw new SolrException(ErrorCode.SERVER_ERROR, "Failed to check the existance of " + uri + ". Is it valid?", ex);
+      }
+
+      Map<String, Object> params = req.getParams().getAll(null, NAME, COLLECTION_PROP);
+      params.put(CoreAdminParams.BACKUP_LOCATION, location);
+      // from CREATE_OP:
+      req.getParams().getAll(params, COLL_CONF, REPLICATION_FACTOR, MAX_SHARDS_PER_NODE, STATE_FORMAT, AUTO_ADD_REPLICAS);
+      copyPropertiesWithPrefix(req.getParams(), params, COLL_PROP_PREFIX);
+      return params;
+    });
+
+    public final CollectionOp fun;

     CollectionAction action;
     long timeOut;
     boolean sendToOCPQueue;

-    CollectionOperation(CollectionAction action) {
-      this(action, DEFAULT_COLLECTION_OP_TIMEOUT, true);
+    CollectionOperation(CollectionAction action, CollectionOp fun) {
+      this(action, DEFAULT_COLLECTION_OP_TIMEOUT, true, fun);
     }

-    CollectionOperation(CollectionAction action, long timeOut, boolean sendToOCPQueue) {
+    CollectionOperation(CollectionAction action, long timeOut, boolean sendToOCPQueue, CollectionOp fun) {
       this.action = action;
       this.timeOut = timeOut;
       this.sendToOCPQueue = sendToOCPQueue;
+      this.fun = fun;
     }

-    /**
-     * All actions must implement this method. If a non null map is returned , the action name is added to
-     * the map and sent to overseer for processing. If it returns a null, the call returns immediately
-     */
-    abstract Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception;
-
     public static CollectionOperation get(CollectionAction action) {
       for (CollectionOperation op : values()) {
@@ -916,6 +806,12 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
       }
       throw new SolrException(ErrorCode.SERVER_ERROR, "No such action" + action);
     }
+
+    @Override
+    public Map<String, Object> execute(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h)
+        throws Exception {
+      return fun.execute(req, rsp, h);
+    }
   }

   private static void forceLeaderElection(SolrQueryRequest req, CollectionsHandler handler) {
@@ -1096,6 +992,11 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
     }
   }

+  interface CollectionOp {
+    Map<String, Object> execute(SolrQueryRequest req, SolrQueryResponse rsp, CollectionsHandler h) throws Exception;
+
+  }
+
   public static final List<String> MODIFIABLE_COLL_PROPS = Arrays.asList(
       RULE,
       SNITCH,