SOLR-13565, SOLR-13553 (#774)

* SOLR-13565: initial commit

* SOLR-13565: updated with testcase

* SOLR-13565: removed unused methods

* SOLR-13565: better logging

* SOLR-13565: disable SSL

* SOLR-13565: more tests

* SOLR-13565: syncing with master

* SOLR-13565: fixing tests

* SOLR-13565: fixing tests

* SOLR-13534: Fix test

Remove buggy 'port roulette' code that can easily fail if OS gives the selected port to a different process just before creating the server

Use jetty's built-in support for listening on an OS-selected port instead

Also increase timeouts to better account for slow/heavily loaded (i.e. jenkins) VMs where SolrCore reloading may take longer than 10 seconds

* SOLR-13565: set proper permission name

* SOLR-13565: syncing with master

* SOLR-13565: syncing with master

* SOLR-13565: removed accidental change

* SOLR-13565: removed accidental change

* SOLR-13565: removed accidental change

* SOLR-13565: more tests

* SOLR-13565: Tests with key signing tests

* SOLR-13565: fixing concurrency issues in tests

* SOLR-13565: add tests with 512 bit RSA

* SOLR-13565: fixing concurrency issues

* SOLR-13565: remove unused code
This commit is contained in:
Noble Paul 2019-07-20 14:20:32 +10:00 committed by GitHub
parent c5cabf1e03
commit bd30c3701e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
31 changed files with 1903 additions and 681 deletions

View File

@ -60,7 +60,7 @@ import static org.apache.solr.common.cloud.ZkStateReader.BASE_URL_PROP;
* The purpose of this class is to store the Jars loaded in memory and to keep only one copy of the Jar in a single node.
*/
public class BlobRepository {
private static final long MAX_JAR_SIZE = Long.parseLong(System.getProperty("runtme.lib.size", String.valueOf(5 * 1024 * 1024)));
private static final long MAX_JAR_SIZE = Long.parseLong(System.getProperty("runtime.lib.size", String.valueOf(5 * 1024 * 1024)));
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
static final Random RANDOM;
static final Pattern BLOB_KEY_PATTERN_CHECKER = Pattern.compile(".*/\\d+");
@ -186,7 +186,9 @@ public class BlobRepository {
//unlikely
throw new SolrException(SERVER_ERROR, e);
}
byteBuffer = ByteBuffer.wrap(byteBuffer.array(), byteBuffer.arrayOffset(), byteBuffer.limit());
digest.update(byteBuffer);
return String.format(
Locale.ROOT,
"%0128x",
@ -214,13 +216,14 @@ public class BlobRepository {
entity = response.getEntity();
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode != 200) {
throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "no such blob or version available: " + key);
throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "no such resource available: " + key + ", url : "+ url);
}
try (InputStream is = entity.getContent()) {
b = SimplePostTool.inputStreamToByteArray(is, MAX_JAR_SIZE);
}
} catch (Exception e) {
log.error("Error loading resource "+ url, e);
if (e instanceof SolrException) {
throw (SolrException) e;
} else {

View File

@ -219,6 +219,8 @@ public class CoreContainer {
protected volatile AutoscalingHistoryHandler autoscalingHistoryHandler;
private final LibListener clusterPropertiesListener = new LibListener(this);
// Bits for the state variable.
public final static long LOAD_COMPLETE = 0x1L;
@ -623,6 +625,7 @@ public class CoreContainer {
zkSys.initZooKeeper(this, solrHome, cfg.getCloudConfig());
if (isZooKeeperAware()) {
getZkController().getZkStateReader().registerClusterPropertiesListener(clusterPropertiesListener);
pkiAuthenticationPlugin = new PKIAuthenticationPlugin(this, zkSys.getZkController().getNodeName(),
(PublicKeyHandler) containerHandlers.get(PublicKeyHandler.PATH));
TracerConfigurator.loadTracer(loader, cfg.getTracerConfiguratorPluginInfo(), getZkController().getZkStateReader());
@ -634,6 +637,7 @@ public class CoreContainer {
reloadSecurityProperties();
this.backupRepoFactory = new BackupRepositoryFactory(cfg.getBackupRepositoryPlugins());
containerHandlers.put("/ext", clusterPropertiesListener.extHandler);
createHandler(ZK_PATH, ZookeeperInfoHandler.class.getName(), ZookeeperInfoHandler.class);
createHandler(ZK_STATUS_PATH, ZookeeperStatusHandler.class.getName(), ZookeeperStatusHandler.class);
collectionsHandler = createHandler(COLLECTIONS_HANDLER_PATH, cfg.getCollectionsHandlerClass(), CollectionsHandler.class);
@ -1788,6 +1792,14 @@ public class CoreContainer {
return handler;
}
public PluginBag<SolrRequestHandler> getContainerHandlers() {
return containerHandlers;
}
public LibListener getClusterPropertiesListener(){
return clusterPropertiesListener;
}
public CoreAdminHandler getMultiCoreHandler() {
return coreAdminHandler;
}

View File

@ -0,0 +1,267 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.core;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import com.google.common.collect.ImmutableMap;
import org.apache.solr.api.Api;
import org.apache.solr.api.V2HttpCall;
import org.apache.solr.common.IteratorWriter;
import org.apache.solr.common.MapWriter;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.cloud.ClusterPropertiesListener;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.CoreAdminParams;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.common.util.Utils;
import org.apache.solr.handler.RequestHandlerBase;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.schema.FieldType;
import org.apache.solr.security.AuthorizationContext;
import org.apache.solr.security.PermissionNameProvider;
import org.apache.solr.util.plugin.PluginInfoInitialized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.solr.common.params.CommonParams.VERSION;
import static org.apache.solr.core.RuntimeLib.SHA512;
/**
 * Listens for changes to cluster-level properties (clusterprops.json) and keeps this
 * node's runtime libraries ("runtimeLib") and custom request handlers in sync with
 * what is published in ZooKeeper.
 * <p>
 * On a change it rebuilds the {@link MemClassLoader} over the configured jars (when
 * the set of libs or any sha512 changed) and re-registers the custom request handlers
 * exposed through the nested {@link ExtHandler}.
 */
public class LibListener implements ClusterPropertiesListener {
  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  private final CoreContainer coreContainer;
  // current set of runtime libraries, keyed by lib name; replaced wholesale on reload
  Map<String, RuntimeLib> runtimeLibs = new HashMap<>();
  // classloader over the jars in runtimeLibs; recreated (never mutated) on change
  MemClassLoader memClassLoader;
  final ExtHandler extHandler;
  // last clusterprops.json version seen by onChange(); -1 until the first change arrives
  private int myversion = -1;

  LibListener(CoreContainer coreContainer) {
    this.coreContainer = coreContainer;
    extHandler = new ExtHandler(this);
  }

  /**
   * Creates an instance of {@code cName}: first via the core container's resource
   * loader and, if that fails, via the in-memory classloader built from the runtime
   * libraries.
   *
   * @param cName        fully qualified class name to instantiate
   * @param expectedType type the loaded class must be assignable to
   * @throws SolrException if the class cannot be loaded from either source
   */
  public <T> T newInstance(String cName, Class<T> expectedType) {
    try {
      // classes are expected to provide a single-arg CoreContainer constructor
      return coreContainer.getResourceLoader().newInstance(cName, expectedType,
          null, new Class[]{CoreContainer.class}, new Object[]{coreContainer});
    } catch (SolrException e) {
      if (memClassLoader != null) {
        try {
          Class<? extends T> klas = memClassLoader.findClass(cName, expectedType);
          try {
            return klas.getConstructor(CoreContainer.class).newInstance(coreContainer);
          } catch (NoSuchMethodException ex) {
            // fall back to the no-arg constructor
            return klas.getConstructor(null).newInstance();
          }
        } catch (Exception ex) {
          if (!memClassLoader.getErrors().isEmpty()) {
            // some libraries were not loaded due to errors; maybe the class was in one of those libraries
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                "There were errors loading some libraries: " + StrUtils.join(memClassLoader.getErrors(), ','), ex);
          }
          // there were no errors in loading any libraries. The class was probably not supposed to be there in those libraries
          // so throw the original exception
          throw e;
        }
      } else {
        throw e;
      }
    }
  }

  @Override
  public boolean onChange(Map<String, Object> properties) {
    log.info("clusterprops.json changed , version {}", coreContainer.getZkController().getZkStateReader().getClusterPropsVersion());
    // a rebuilt classloader forces handler re-creation even when handler config is unchanged
    boolean forceReload = updateRuntimeLibs(properties);
    extHandler.updateReqHandlers(properties, forceReload);
    myversion = coreContainer.getZkController().getZkStateReader().getClusterPropsVersion();
    // NOTE(review): returning false presumably keeps this listener registered -- confirm
    // against the ClusterPropertiesListener contract
    return false;
  }

  /**
   * Recreates the classloader when the published runtimeLib entries differ from the
   * current ones (by key set, size, or sha512).
   *
   * @return true if a reload was needed (used by the caller to force handler reloads)
   */
  private boolean updateRuntimeLibs(Map<String, Object> properties) {
    Map m = (Map) properties.getOrDefault(RuntimeLib.TYPE, Collections.emptyMap());
    if (runtimeLibs.isEmpty() && m.isEmpty()) return false;
    boolean needsReload[] = new boolean[1];
    if (m.size() == runtimeLibs.size()) {
      m.forEach((k, v) -> {
        if (v instanceof Map) {
          if (!runtimeLibs.containsKey(k)) needsReload[0] = true;
          RuntimeLib rtl = runtimeLibs.get(k);
          // a changed sha512 means the jar content changed even though the name is the same
          if (rtl == null || !Objects.equals(rtl.getSha512(), ((Map) v).get(SHA512))) {
            needsReload[0] = true;
          }
        }
      });
    } else {
      needsReload[0] = true;
    }
    if (needsReload[0]) {
      createNewClassLoader(m);
    }
    return needsReload[0];
  }

  /**
   * Builds a RuntimeLib for each entry of {@code m}, loads the jars, and -- only if
   * every one of them loaded successfully -- swaps in a fresh {@link MemClassLoader}
   * and the new lib map. On any failure the previous state is left untouched.
   */
  void createNewClassLoader(Map m) {
    boolean[] loadedAll = new boolean[1];
    loadedAll[0] = true;
    Map<String, RuntimeLib> libMap = new LinkedHashMap<>();
    m.forEach((k, v) -> {
      if (v instanceof Map) {
        Map map = new HashMap((Map) v);
        map.put(CoreAdminParams.NAME, String.valueOf(k));
        RuntimeLib lib = new RuntimeLib(coreContainer);
        try {
          lib.init(new PluginInfo(null, map));
          if (lib.getUrl() == null) {
            log.error("Unable to initialize runtimeLib : " + Utils.toJSONString(v));
            loadedAll[0] = false;
          }
          lib.loadJar();
          libMap.put(lib.getName(), lib);
        } catch (Exception e) {
          log.error("error loading a runtimeLib " + Utils.toJSONString(v), e);
          loadedAll[0] = false;
        }
      }
    });
    if (loadedAll[0]) {
      log.info("Libraries changed. New memclassloader created with jars {}", libMap.values().stream().map(runtimeLib -> runtimeLib.getUrl()).collect(Collectors.toList()));
      // swap both references together so readers never see a half-updated state
      this.memClassLoader = new MemClassLoader(new ArrayList<>(libMap.values()), coreContainer.getResourceLoader());
      this.runtimeLibs = libMap;
    }
  }

  /**
   * v2-only request handler that reports the current runtime libs / custom handlers
   * and dispatches requests to a custom handler when a handler name is in the path.
   */
  static class ExtHandler extends RequestHandlerBase implements PermissionNameProvider {
    final LibListener libListener;
    // custom handlers published via cluster properties, keyed by handler name
    private Map<String, SolrRequestHandler> customHandlers = new HashMap<>();

    ExtHandler(LibListener libListener) {
      this.libListener = libListener;
    }

    @Override
    public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) {
      // a client may pass the clusterprops version it expects; force a refresh if we are behind
      int v = req.getParams().getInt(ConfigOverlay.ZNODEVER, -1);
      if (v >= 0) {
        log.debug("expected version : {} , my version {}", v, libListener.myversion );
        ZkStateReader zkStateReader = libListener.coreContainer.getZkController().getZkStateReader();
        zkStateReader.forceRefreshClusterProps(v);
      }
      rsp.add("metadata", (MapWriter) ew -> ew.putIfNotNull(VERSION,
          libListener.coreContainer.getZkController().zkStateReader.getClusterPropsVersion()));
      rsp.add(RuntimeLib.TYPE, libListener.runtimeLibs.values());
      rsp.add(SolrRequestHandler.TYPE,
          (IteratorWriter) iw -> customHandlers.forEach((s, h) -> iw.addNoEx(ImmutableMap.of(s, h.getClass().getName()))));
    }

    @Override
    public Collection<Api> getApis() {
      return Collections.singleton(new Api(Utils.getSpec("node.ext")) {
        @Override
        public void call(SolrQueryRequest req, SolrQueryResponse rsp) {
          String name = ((V2HttpCall) req.getHttpSolrCall()).getUrlParts().get("handlerName");
          if (name == null) {
            // no handler name in the path: report status instead of dispatching
            handleRequestBody(req, rsp);
            return;
          }
          SolrRequestHandler handler = customHandlers.get(name);
          if (handler == null) {
            String err = StrUtils.formatString(" No such handler: {0}, available handlers : {1}" , name, customHandlers.keySet());
            log.error(err);
            throw new SolrException(SolrException.ErrorCode.NOT_FOUND, err);
          }
          handler.handleRequest(req, rsp);
        }
      });
    }

    /**
     * Re-creates the custom request handlers when the published set changed (or when
     * {@code forceReload} is set because the classloader was rebuilt), then closes
     * the replaced handler instances.
     */
    private void updateReqHandlers(Map<String, Object> properties, boolean forceReload) {
      Map m = (Map) properties.getOrDefault(SolrRequestHandler.TYPE, Collections.emptyMap());
      if (m.isEmpty() && customHandlers.isEmpty()) return;
      boolean hasChanged = true;
      // same handler names as before -> treated as unchanged (configs themselves are not compared)
      if (customHandlers.size() == m.size() && customHandlers.keySet().containsAll(m.keySet())) hasChanged = false;
      if (forceReload || hasChanged) {
        log.debug("RequestHandlers being reloaded : {}", m.keySet());
        Map<String, SolrRequestHandler> newCustomHandlers = new HashMap<>();
        m.forEach((k, v) -> {
          if (v instanceof Map) {
            String klas = (String) ((Map) v).get(FieldType.CLASS_NAME);
            if (klas != null) {
              SolrRequestHandler inst = libListener.newInstance(klas, SolrRequestHandler.class);
              if (inst instanceof PluginInfoInitialized) {
                ((PluginInfoInitialized) inst).init(new PluginInfo(SolrRequestHandler.TYPE, (Map) v));
              }
              newCustomHandlers.put((String) k, inst);
            }
          } else {
            log.error("Invalid data for requestHandler : {} , {}", k, v);
          }
        });
        log.debug("Registering request handlers {} ", newCustomHandlers.keySet());
        Map<String, SolrRequestHandler> old = customHandlers;
        customHandlers = newCustomHandlers;
        // close the handler instances we just replaced
        old.forEach((s, h) -> PluginBag.closeQuietly(h));
      }
    }

    @Override
    public String getDescription() {
      return "Custom Handlers";
    }

    @Override
    public Boolean registerV1() {
      return Boolean.FALSE;
    }

    @Override
    public Boolean registerV2() {
      return Boolean.TRUE;
    }

    @Override
    public Name getPermissionName(AuthorizationContext request) {
      // reads of /node/ext itself need only collection-read permission;
      // dispatch to custom handlers requires the custom permission
      if(request.getResource().endsWith("/node/ext")) return Name.COLL_READ_PERM;
      return Name.CUSTOM_PERM;
    }
  }
}

View File

@ -26,6 +26,7 @@ import java.security.CodeSource;
import java.security.ProtectionDomain;
import java.security.cert.Certificate;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -43,12 +44,12 @@ public class MemClassLoader extends ClassLoader implements AutoCloseable, Resour
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private boolean allJarsLoaded = false;
private final SolrResourceLoader parentLoader;
private List<PluginBag.RuntimeLib> libs = new ArrayList<>();
private List<RuntimeLib> libs = new ArrayList<>();
private Map<String, Class> classCache = new HashMap<>();
private List<String> errors = new ArrayList<>();
public MemClassLoader(List<PluginBag.RuntimeLib> libs, SolrResourceLoader resourceLoader) {
public MemClassLoader(List<RuntimeLib> libs, SolrResourceLoader resourceLoader) {
this.parentLoader = resourceLoader;
this.libs = libs;
}
@ -56,7 +57,7 @@ public class MemClassLoader extends ClassLoader implements AutoCloseable, Resour
synchronized void loadRemoteJars() {
if (allJarsLoaded) return;
int count = 0;
for (PluginBag.RuntimeLib lib : libs) {
for (RuntimeLib lib : libs) {
if (lib.getUrl() != null) {
try {
lib.loadJar();
@ -70,10 +71,13 @@ public class MemClassLoader extends ClassLoader implements AutoCloseable, Resour
if (count == libs.size()) allJarsLoaded = true;
}
public Collection<String> getErrors(){
return errors;
}
public synchronized void loadJars() {
if (allJarsLoaded) return;
for (PluginBag.RuntimeLib lib : libs) {
for (RuntimeLib lib : libs) {
try {
lib.loadJar();
lib.verify();
@ -133,7 +137,7 @@ public class MemClassLoader extends ClassLoader implements AutoCloseable, Resour
String path = name.replace('.', '/').concat(".class");
ByteBuffer buf = null;
for (PluginBag.RuntimeLib lib : libs) {
for (RuntimeLib lib : libs) {
try {
buf = lib.getFileContent(path);
if (buf != null) {
@ -150,7 +154,7 @@ public class MemClassLoader extends ClassLoader implements AutoCloseable, Resour
@Override
public void close() throws Exception {
for (PluginBag.RuntimeLib lib : libs) {
for (RuntimeLib lib : libs) {
try {
lib.close();
} catch (Exception e) {

View File

@ -16,11 +16,8 @@
*/
package org.apache.solr.core;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
@ -30,15 +27,12 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import org.apache.lucene.analysis.util.ResourceLoader;
import org.apache.lucene.analysis.util.ResourceLoaderAware;
import org.apache.solr.api.Api;
import org.apache.solr.api.ApiBag;
import org.apache.solr.api.ApiSupport;
import org.apache.solr.cloud.CloudUtil;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.handler.RequestHandlerBase;
@ -46,8 +40,6 @@ import org.apache.solr.handler.component.SearchComponent;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.update.processor.UpdateRequestProcessorChain;
import org.apache.solr.update.processor.UpdateRequestProcessorFactory;
import org.apache.solr.util.CryptoKeys;
import org.apache.solr.util.SimplePostTool;
import org.apache.solr.util.plugin.NamedListInitializedPlugin;
import org.apache.solr.util.plugin.PluginInfoInitialized;
import org.apache.solr.util.plugin.SolrCoreAware;
@ -56,7 +48,6 @@ import org.slf4j.LoggerFactory;
import static java.util.Collections.singletonMap;
import static org.apache.solr.api.ApiBag.HANDLER_NAME;
import static org.apache.solr.common.params.CommonParams.NAME;
/**
* This manages the lifecycle of a set of plugin of the same type .
@ -125,7 +116,7 @@ public class PluginBag<T> implements AutoCloseable {
}
public PluginHolder<T> createPlugin(PluginInfo info) {
if ("true".equals(String.valueOf(info.attributes.get("runtimeLib")))) {
if ("true".equals(String.valueOf(info.attributes.get(RuntimeLib.TYPE)))) {
log.debug(" {} : '{}' created with runtimeLib=true ", meta.getCleanTag(), info.name);
LazyPluginHolder<T> holder = new LazyPluginHolder<>(meta, info, core, RuntimeLib.isEnabled() ?
core.getMemClassLoader() :
@ -324,6 +315,14 @@ public class PluginBag<T> implements AutoCloseable {
}
}
/**
 * Closes {@code inst} if it is {@link AutoCloseable}, logging (never propagating)
 * any failure.
 *
 * @param inst object to close; {@code null} and non-closeable objects are ignored
 */
public static void closeQuietly(Object inst) {
  try {
    // instanceof is null-safe, so no separate null check is needed
    if (inst instanceof AutoCloseable) ((AutoCloseable) inst).close();
  } catch (Exception e) {
    log.error("Error closing {}", inst, e);
  }
}
/**
* An indirect reference to a plugin. It just wraps a plugin instance.
* subclasses may choose to lazily load the plugin
@ -358,7 +357,7 @@ public class PluginBag<T> implements AutoCloseable {
// can close() be called concurrently with other methods?
if (isLoaded()) {
T myInst = get();
if (myInst != null && myInst instanceof AutoCloseable) ((AutoCloseable) myInst).close();
closeQuietly(myInst);
}
}
@ -460,166 +459,6 @@ public class PluginBag<T> implements AutoCloseable {
}
/**
* This represents a Runtime Jar. A jar requires two details , name and version
*/
public static class RuntimeLib implements PluginInfoInitialized, AutoCloseable {
private String name, version, sig, sha512, url;
private BlobRepository.BlobContentRef<ByteBuffer> jarContent;
private final CoreContainer coreContainer;
private boolean verified = false;
@Override
public void init(PluginInfo info) {
name = info.attributes.get(NAME);
url = info.attributes.get("url");
sig = info.attributes.get("sig");
if(url == null) {
Object v = info.attributes.get("version");
if (name == null || v == null) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "runtimeLib must have name and version");
}
version = String.valueOf(v);
} else {
sha512 = info.attributes.get("sha512");
if(sha512 == null){
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "runtimeLib with url must have a 'sha512' attribute");
}
ByteBuffer buf = null;
buf = coreContainer.getBlobRepository().fetchFromUrl(name, url);
String digest = BlobRepository.sha512Digest(buf);
if(!sha512.equals(digest)) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, StrUtils.formatString(BlobRepository.INVALID_JAR_MSG, url, sha512, digest) );
}
log.info("dynamic library verified {}, sha512: {}", url, sha512);
}
}
public RuntimeLib(SolrCore core) {
coreContainer = core.getCoreContainer();
}
public String getUrl(){
return url;
}
void loadJar() {
if (jarContent != null) return;
synchronized (this) {
if (jarContent != null) return;
jarContent = url == null?
coreContainer.getBlobRepository().getBlobIncRef(name + "/" + version):
coreContainer.getBlobRepository().getBlobIncRef(name, null,url,sha512);
}
}
public static boolean isEnabled() {
return Boolean.getBoolean("enable.runtime.lib");
}
public String getName() {
return name;
}
public String getVersion() {
return version;
}
public String getSig() {
return sig;
}
public ByteBuffer getFileContent(String entryName) throws IOException {
if (jarContent == null)
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "jar not available: " + name );
return getFileContent(jarContent.blob, entryName);
}
public ByteBuffer getFileContent(BlobRepository.BlobContent<ByteBuffer> blobContent, String entryName) throws IOException {
ByteBuffer buff = blobContent.get();
ByteArrayInputStream zipContents = new ByteArrayInputStream(buff.array(), buff.arrayOffset(), buff.limit());
ZipInputStream zis = new ZipInputStream(zipContents);
try {
ZipEntry entry;
while ((entry = zis.getNextEntry()) != null) {
if (entryName == null || entryName.equals(entry.getName())) {
SimplePostTool.BAOS out = new SimplePostTool.BAOS();
byte[] buffer = new byte[2048];
int size;
while ((size = zis.read(buffer, 0, buffer.length)) != -1) {
out.write(buffer, 0, size);
}
out.close();
return out.getByteBuffer();
}
}
} finally {
zis.closeEntry();
}
return null;
}
@Override
public void close() throws Exception {
if (jarContent != null) coreContainer.getBlobRepository().decrementBlobRefCount(jarContent);
}
public static List<RuntimeLib> getLibObjects(SolrCore core, List<PluginInfo> libs) {
List<RuntimeLib> l = new ArrayList<>(libs.size());
for (PluginInfo lib : libs) {
RuntimeLib rtl = new RuntimeLib(core);
try {
rtl.init(lib);
} catch (Exception e) {
log.error("error loading runtime library", e);
}
l.add(rtl);
}
return l;
}
public void verify() throws Exception {
if (verified) return;
if (jarContent == null) {
log.error("Calling verify before loading the jar");
return;
}
if (!coreContainer.isZooKeeperAware())
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Signing jar is possible only in cloud");
Map<String, byte[]> keys = CloudUtil.getTrustedKeys(coreContainer.getZkController().getZkClient(), "exe");
if (keys.isEmpty()) {
if (sig == null) {
verified = true;
return;
} else {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "No public keys are available in ZK to verify signature for runtime lib " + name);
}
} else if (sig == null) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, StrUtils.formatString("runtimelib {0} should be signed with one of the keys in ZK /keys/exe ", name));
}
try {
String matchedKey = new CryptoKeys(keys).verify(sig, jarContent.blob.get());
if (matchedKey == null)
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "No key matched signature for jar : " + name + " version: " + version);
log.info("Jar {} signed with {} successfully verified", name, matchedKey);
} catch (Exception e) {
if (e instanceof SolrException) throw e;
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error verifying key ", e);
}
}
}
public Api v2lookup(String path, String method, Map<String, String> parts) {
if (apiBag == null) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "this should not happen, looking up for v2 API at the wrong place");

View File

@ -0,0 +1,220 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.core;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import org.apache.solr.cloud.CloudUtil;
import org.apache.solr.common.MapWriter;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.util.CryptoKeys;
import org.apache.solr.util.SimplePostTool;
import org.apache.solr.util.plugin.PluginInfoInitialized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.solr.common.params.CommonParams.NAME;
/**
* This represents a Runtime Jar. A jar requires two details , name and version
*/
/**
 * This represents a Runtime Jar. A lib is identified either by (name, version),
 * resolved through the blob store, or by an explicit {@code url} -- in which case a
 * {@code sha512} checksum is mandatory and is verified (along with an optional
 * signature) during {@link #init(PluginInfo)}.
 */
public class RuntimeLib implements PluginInfoInitialized, AutoCloseable, MapWriter {
  public static final String TYPE = "runtimeLib";
  public static final String SHA512 = "sha512";
  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
  private final CoreContainer coreContainer;
  private String name, version, sig, sha512, url;
  // reference-counted handle on the jar bytes; null until loadJar() is called
  private BlobRepository.BlobContentRef<ByteBuffer> jarContent;
  private boolean verified = false;

  @Override
  public void writeMap(EntryWriter ew) throws IOException {
    ew.putIfNotNull(NAME, name);
    ew.putIfNotNull("url", url);
    // fix: use the literal attribute names as keys -- the previous code passed the
    // field *values* (putIfNotNull(version, version)) as the map keys
    ew.putIfNotNull("version", version);
    ew.putIfNotNull(SHA512, sha512);
    ew.putIfNotNull("sig", sig);
  }

  public RuntimeLib(CoreContainer coreContainer) {
    this.coreContainer = coreContainer;
  }

  /** Runtime libs are opt-in via the {@code enable.runtime.lib} system property. */
  public static boolean isEnabled() {
    return "true".equals(System.getProperty("enable.runtime.lib"));
  }

  /**
   * Builds RuntimeLib objects from plugin infos. Init failures are logged, and the
   * (partially initialized) lib is still added to the returned list.
   */
  public static List<RuntimeLib> getLibObjects(SolrCore core, List<PluginInfo> libs) {
    List<RuntimeLib> l = new ArrayList<>(libs.size());
    for (PluginInfo lib : libs) {
      RuntimeLib rtl = new RuntimeLib(core.getCoreContainer());
      try {
        rtl.init(lib);
      } catch (Exception e) {
        log.error("error loading runtime library", e);
      }
      l.add(rtl);
    }
    return l;
  }

  /**
   * Reads name/url/sig/version/sha512 attributes. When a url is given, the jar is
   * fetched immediately, its sha512 is checked, and the signature is verified.
   *
   * @throws SolrException if mandatory attributes are missing or verification fails
   */
  @Override
  public void init(PluginInfo info) {
    name = info.attributes.get(NAME);
    url = info.attributes.get("url");
    sig = info.attributes.get("sig");
    if (url == null) {
      // blob-store mode: name + version identify the jar
      Object v = info.attributes.get("version");
      if (name == null || v == null) {
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "runtimeLib must have name and version");
      }
      version = String.valueOf(v);
    } else {
      // url mode: sha512 is mandatory and checked against the downloaded bytes
      sha512 = info.attributes.get(SHA512);
      if (sha512 == null) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "runtimeLib with url must have a 'sha512' attribute");
      }
      ByteBuffer buf = coreContainer.getBlobRepository().fetchFromUrl(name, url);
      String digest = BlobRepository.sha512Digest(buf);
      if (!sha512.equals(digest)) {
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, StrUtils.formatString(BlobRepository.INVALID_JAR_MSG, url, sha512, digest));
      }
      verifyJarSignature(buf);
      log.debug("dynamic library verified {}, sha512: {}", url, sha512);
    }
  }

  public String getUrl() {
    return url;
  }

  /** Acquires a ref-counted handle on the jar content; idempotent and thread-safe. */
  void loadJar() {
    if (jarContent != null) return;
    synchronized (this) {
      if (jarContent != null) return;
      jarContent = url == null ?
          coreContainer.getBlobRepository().getBlobIncRef(name + "/" + version) :
          coreContainer.getBlobRepository().getBlobIncRef(name, null, url, sha512);
    }
  }

  public String getName() {
    return name;
  }

  public String getVersion() {
    return version;
  }

  public String getSig() {
    return sig;
  }

  public String getSha512() {
    return sha512;
  }

  /**
   * Returns the bytes of {@code entryName} inside the loaded jar.
   *
   * @throws SolrException if {@link #loadJar()} has not been called yet
   */
  public ByteBuffer getFileContent(String entryName) throws IOException {
    if (jarContent == null)
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "jar not available: " + name);
    return getFileContent(jarContent.blob, entryName);
  }

  /**
   * Scans the zip in {@code blobContent} and returns the bytes of the first entry
   * matching {@code entryName} (or the first entry when {@code entryName} is null);
   * returns null when no entry matches.
   */
  public ByteBuffer getFileContent(BlobRepository.BlobContent<ByteBuffer> blobContent, String entryName) throws IOException {
    ByteBuffer buff = blobContent.get();
    ByteArrayInputStream zipContents = new ByteArrayInputStream(buff.array(), buff.arrayOffset(), buff.limit());
    ZipInputStream zis = new ZipInputStream(zipContents);
    try {
      ZipEntry entry;
      while ((entry = zis.getNextEntry()) != null) {
        if (entryName == null || entryName.equals(entry.getName())) {
          SimplePostTool.BAOS out = new SimplePostTool.BAOS();
          byte[] buffer = new byte[2048];
          int size;
          while ((size = zis.read(buffer, 0, buffer.length)) != -1) {
            out.write(buffer, 0, size);
          }
          out.close();
          return out.getByteBuffer();
        }
      }
    } finally {
      zis.closeEntry();
    }
    return null;
  }

  /** Releases the blob reference acquired by {@link #loadJar()}. */
  @Override
  public void close() throws Exception {
    if (jarContent != null) coreContainer.getBlobRepository().decrementBlobRefCount(jarContent);
  }

  /**
   * Verifies the loaded jar's signature against the trusted keys in ZK. A no-op
   * when already verified; logs and returns when the jar is not loaded yet.
   *
   * @throws Exception when not running in cloud mode or verification fails
   */
  public void verify() throws Exception {
    if (verified) return;
    if (jarContent == null) {
      log.error("Calling verify before loading the jar");
      return;
    }
    if (!coreContainer.isZooKeeperAware())
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Signing jar is possible only in cloud");
    verifyJarSignature(jarContent.blob.get());
  }

  /**
   * Checks {@code buf} against the trusted keys under ZK /keys/exe. With no keys in
   * ZK, an unsigned jar is accepted; once keys exist, a valid signature is required.
   */
  void verifyJarSignature(ByteBuffer buf) {
    Map<String, byte[]> keys = CloudUtil.getTrustedKeys(coreContainer.getZkController().getZkClient(), "exe");
    if (keys.isEmpty()) {
      if (sig == null) {
        verified = true;
        return;
      } else {
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "No public keys are available in ZK to verify signature for runtime lib " + name);
      }
    } else if (sig == null) {
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, StrUtils.formatString("runtimelib {0} should be signed with one of the keys in ZK /keys/exe ", name));
    }
    try {
      String matchedKey = new CryptoKeys(keys).verify(sig, buf);
      if (matchedKey == null)
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "No key matched signature for jar : " + name + " version: " + version);
      log.info("Jar {} signed with {} successfully verified", name, matchedKey);
    } catch (Exception e) {
      log.error("Signature verifying error ", e);
      if (e instanceof SolrException) throw (SolrException) e;
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error verifying key ", e);
    }
  }
}

View File

@ -377,7 +377,7 @@ public class SolrConfig extends XmlConfigFile implements MapSerializable {
// and even then -- only if there is a single SpellCheckComponent
// because of queryConverter.setIndexAnalyzer
.add(new SolrPluginInfo(QueryConverter.class, "queryConverter", REQUIRE_NAME, REQUIRE_CLASS))
.add(new SolrPluginInfo(PluginBag.RuntimeLib.class, "runtimeLib", REQUIRE_NAME, MULTI_OK))
.add(new SolrPluginInfo(RuntimeLib.class, RuntimeLib.TYPE, REQUIRE_NAME, MULTI_OK))
// this is hackish, since it picks up all SolrEventListeners,
// regardless of when/how/why they are used (or even if they are
// declared outside of the appropriate context) but there's no nice

View File

@ -966,7 +966,7 @@ public final class SolrCore implements SolrInfoBean, SolrMetricProducer, Closeab
this.codec = initCodec(solrConfig, this.schema);
memClassLoader = new MemClassLoader(
PluginBag.RuntimeLib.getLibObjects(this, solrConfig.getPluginInfos(PluginBag.RuntimeLib.class.getName())),
RuntimeLib.getLibObjects(this, solrConfig.getPluginInfos(RuntimeLib.class.getName())),
getResourceLoader());
initIndex(prev != null, reload);

View File

@ -36,6 +36,7 @@ import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Consumer;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
@ -47,6 +48,7 @@ import org.apache.solr.client.solrj.SolrResponse;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.cloud.ZkController;
import org.apache.solr.cloud.ZkSolrResourceLoader;
import org.apache.solr.common.MapWriter;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocCollection;
@ -62,9 +64,9 @@ import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.common.util.Utils;
import org.apache.solr.core.ConfigOverlay;
import org.apache.solr.core.PluginBag;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.RequestParams;
import org.apache.solr.core.RuntimeLib;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrResourceLoader;
@ -150,11 +152,258 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
public static boolean getImmutable(SolrCore core) {
NamedList configSetProperties = core.getConfigSetProperties();
if(configSetProperties == null) return false;
if (configSetProperties == null) return false;
Object immutable = configSetProperties.get(IMMUTABLE_CONFIGSET_ARG);
return immutable != null ? Boolean.parseBoolean(immutable.toString()) : false;
}
/**
 * Validates that a config element name contains only the characters
 * [A-Za-z0-9], '_', '-' and '.'.
 *
 * @param s the candidate name
 * @return null when the name is valid, otherwise a human-readable error message
 */
public static String validateName(String s) {
  for (char c : s.toCharArray()) {
    boolean allowed = (c >= 'A' && c <= 'Z')
        || (c >= 'a' && c <= 'z')
        || (c >= '0' && c <= '9')
        || c == '_'
        || c == '-'
        || c == '.';
    if (!allowed) {
      return formatString("''{0}'' name should only have chars [a-zA-Z_-.0-9] ", s);
    }
  }
  return null;
}
/**
 * Block up to a specified maximum time until we see agreement on the schema
 * version in ZooKeeper across all replicas for a collection.
 *
 * @param collection      the collection whose active replicas are polled
 * @param zkController    used to read cluster state and locate active replicas
 * @param prop            the version property each replica must report
 * @param expectedVersion the minimum version every replica must reach
 * @param maxWaitSecs     upper bound on the wait; replicas still behind after
 *                        this raise a SERVER_ERROR
 */
public static void waitForAllReplicasState(String collection,
ZkController zkController,
String prop,
int expectedVersion,
int maxWaitSecs) {
final RTimer timer = new RTimer();
// get a list of active replica cores to query for the schema zk version (skipping this core of course)
List<PerReplicaCallable> concurrentTasks = new ArrayList<>();
for (String coreUrl : getActiveReplicaCoreUrls(zkController, collection)) {
PerReplicaCallable e = new PerReplicaCallable(coreUrl, prop, expectedVersion, maxWaitSecs);
concurrentTasks.add(e);
}
if (concurrentTasks.isEmpty()) return; // nothing to wait for ...
log.info(formatString("Waiting up to {0} secs for {1} replicas to set the property {2} to be of version {3} for collection {4}",
maxWaitSecs, concurrentTasks.size(), prop, expectedVersion, collection));
// use an executor service to invoke schema zk version requests in parallel with a max wait time
execInparallel(concurrentTasks, parallelExecutor -> {
try {
// executeAll returns null on success, otherwise the URLs of the lagging cores
List<String> failedList = executeAll(expectedVersion, maxWaitSecs, concurrentTasks, parallelExecutor);
// if any tasks haven't completed within the specified timeout, it's an error
if (failedList != null)
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
formatString("{0} out of {1} the property {2} to be of version {3} within {4} seconds! Failed cores: {5}",
failedList.size(), concurrentTasks.size() + 1, prop, expectedVersion, maxWaitSecs, failedList));
} catch (InterruptedException e) {
log.warn(formatString(
"Core was interrupted . trying to set the property {0} to version {1} to propagate to {2} replicas for collection {3}",
prop, expectedVersion, concurrentTasks.size(), collection));
// restore the interrupt flag so callers can observe it
Thread.currentThread().interrupt();
}
});
log.info("Took {}ms to set the property {} to be of version {} for collection {}",
timer.getTime(), prop, expectedVersion, collection);
}
/**
 * Runs {@code fun} with a bounded MDC-aware thread pool (at most 10 threads,
 * never more than one per task) and always shuts the pool down afterwards.
 *
 * @param concurrentTasks the tasks the consumer is expected to submit; only
 *                        their count is used here, to size the pool
 * @param fun             callback that receives the live executor
 */
public static void execInparallel( List<? extends PerReplicaCallable> concurrentTasks, Consumer<ExecutorService> fun) {
  ExecutorService pool = ExecutorUtil.newMDCAwareFixedThreadPool(
      Math.min(concurrentTasks.size(), 10),
      new DefaultSolrThreadFactory("solrHandlerExecutor"));
  try {
    fun.accept(pool);
  } finally {
    // always release the threads, even if fun throws
    ExecutorUtil.shutdownAndAwaitTermination(pool);
  }
}
@Override
public SolrRequestHandler getSubHandler(String path) {
  // this handler also serves the fixed sub-paths and anything under /params/
  return (subPaths.contains(path) || path.startsWith("/params/")) ? this : null;
}
// sub-paths of /config that this handler answers directly
private static Set<String> subPaths = new HashSet<>(Arrays.asList("/overlay", "/params", "/updateHandler",
"/query", "/jmx", "/requestDispatcher", "/znodeVersion"));
static {
// also expose one sub-path per registered plugin tag (e.g. /requestHandler)
for (SolrConfig.SolrPluginInfo solrPluginInfo : SolrConfig.plugins)
subPaths.add("/" + solrPluginInfo.getCleanTag());
}
//////////////////////// SolrInfoMBeans methods //////////////////////
@Override
public String getDescription() {
return "Edit solrconfig.xml";
}
@Override
public Category getCategory() {
return Category.ADMIN;
}
// command names accepted by the /config edit API
public static final String SET_PROPERTY = "set-property";
public static final String UNSET_PROPERTY = "unset-property";
public static final String SET_USER_PROPERTY = "set-user-property";
public static final String UNSET_USER_PROPERTY = "unset-user-property";
public static final String SET = "set";
public static final String UPDATE = "update";
public static final String CREATE = "create";
// verbs that may prefix a plugin command, e.g. "create-requesthandler"
private static Set<String> cmdPrefixes = ImmutableSet.of(CREATE, UPDATE, "delete", "add");
/**
 * Invokes all per-replica tasks in parallel with a hard timeout and collects
 * the core URLs of the ones that did not report success.
 *
 * @param expectedVersion the version the replicas were asked to reach (used for logging)
 * @param maxWaitSecs     overall timeout passed to {@link ExecutorService#invokeAll}
 * @param concurrentTasks the tasks to run
 * @param parallelExecutor executor that runs the tasks
 * @return null when every task succeeded, otherwise the list of failed core URLs
 * @throws InterruptedException if the calling thread is interrupted while waiting
 */
public static List<String> executeAll(int expectedVersion, int maxWaitSecs, List<? extends PerReplicaCallable> concurrentTasks, ExecutorService parallelExecutor) throws InterruptedException {
  List<Future<Boolean>> results =
      parallelExecutor.invokeAll(concurrentTasks, maxWaitSecs, TimeUnit.SECONDS);
  // determine whether all replicas have the update
  List<String> failedList = null; // lazily init'd
  for (int f = 0; f < results.size(); f++) {
    boolean success = false;
    Future<Boolean> next = results.get(f);
    if (next.isDone() && !next.isCancelled()) {
      // looks to have finished, but need to check if it succeeded
      try {
        success = Boolean.TRUE.equals(next.get());
      } catch (ExecutionException e) {
        // shouldn't happen since we checked isCancelled; record it instead of swallowing
        log.warn("Task for {} threw an exception", concurrentTasks.get(f).coreUrl, e);
      }
    }
    if (!success) {
      String coreUrl = concurrentTasks.get(f).coreUrl;
      // fixed: original message lacked a space after the URL and used string concat
      log.warn("Core {} could not get the expected version {}", coreUrl, expectedVersion);
      if (failedList == null) failedList = new ArrayList<>();
      failedList.add(coreUrl);
    }
  }
  return failedList;
}
/**
 * Polls a single replica's /config/znodeVersion endpoint until it reports the
 * expected version or the per-task deadline expires. Designed to be run from
 * an executor whose own timeout will also kill the loop.
 */
public static class PerReplicaCallable extends SolrRequest implements Callable<Boolean> {
// base URL of the core being polled
protected String coreUrl;
// name of the version property to check in the response
String prop;
// minimum version the replica must report for success
protected int expectedZkVersion;
// last version value seen from the remote node, if any
protected Number remoteVersion = null;
// per-task deadline in seconds
int maxWait;
public PerReplicaCallable(String coreUrl, String prop, int expectedZkVersion, int maxWait) {
super(METHOD.GET, "/config/" + ZNODEVER);
this.coreUrl = coreUrl;
this.expectedZkVersion = expectedZkVersion;
this.prop = prop;
this.maxWait = maxWait;
}
@Override
public SolrParams getParams() {
return new ModifiableSolrParams()
.set(prop, expectedZkVersion)
.set(CommonParams.WT, CommonParams.JAVABIN);
}
/**
 * Returns false when the deadline elapsed without the replica reaching the
 * expected version; true when it did (or when the poll was interrupted).
 */
@Override
public Boolean call() throws Exception {
final RTimer timer = new RTimer();
int attempts = 0;
try (HttpSolrClient solr = new HttpSolrClient.Builder(coreUrl).build()) {
// eventually, this loop will get killed by the ExecutorService's timeout
while (true) {
try {
long timeElapsed = (long) timer.getTime() / 1000;
if (timeElapsed >= maxWait) {
return false;
}
log.info("Time elapsed : {} secs, maxWait {}", timeElapsed, maxWait);
// brief pause between polls to avoid hammering the replica
Thread.sleep(100);
MapWriter resp = solr.httpUriRequest(this).future.get();
if (verifyResponse(resp, attempts)) break;
attempts++;
} catch (Exception e) {
if (e instanceof InterruptedException) {
break; // stop looping
} else {
log.warn("Failed to get /schema/zkversion from " + coreUrl + " due to: " + e);
}
}
}
}
return true;
}
/**
 * Checks one poll response; records the reported version in
 * {@link #remoteVersion} and returns true once it meets the expectation.
 */
protected boolean verifyResponse(MapWriter mw, int attempts) {
NamedList resp = (NamedList) mw;
if (resp != null) {
Map m = (Map) resp.get(ZNODEVER);
if (m != null) {
remoteVersion = (Number) m.get(prop);
if (remoteVersion != null && remoteVersion.intValue() >= expectedZkVersion) return true;
log.info(formatString("Could not get expectedVersion {0} from {1} for prop {2} after {3} attempts", expectedZkVersion, coreUrl, prop, attempts));
}
}
return false;
}
// no response object is needed; the raw NamedList is consumed in verifyResponse
@Override
protected SolrResponse createResponse(SolrClient client) {
return null;
}
}
/**
 * Collects the core URLs of every ACTIVE replica on a live node for the given
 * collection. Returns an empty list when the collection is unknown or has no
 * active slices.
 */
public static List<String> getActiveReplicaCoreUrls(ZkController zkController,
String collection) {
  List<String> urls = new ArrayList<>();
  ClusterState clusterState = zkController.getZkStateReader().getClusterState();
  final DocCollection docCollection = clusterState.getCollectionOrNull(collection);
  if (docCollection == null) return urls;
  final Collection<Slice> activeSlices = docCollection.getActiveSlices();
  if (activeSlices == null || activeSlices.isEmpty()) return urls;
  Set<String> liveNodes = clusterState.getLiveNodes();
  for (Slice slice : activeSlices) {
    Map<String, Replica> replicasMap = slice.getReplicasMap();
    if (replicasMap == null) continue;
    for (Replica replica : replicasMap.values()) {
      // a replica counts only if it is ACTIVE and its node is currently live
      if (replica.getState() == Replica.State.ACTIVE && liveNodes.contains(replica.getNodeName())) {
        urls.add(replica.getCoreUrl());
      }
    }
  }
  return urls;
}
/**
 * Maps HTTP method to the permission required: GET reads config,
 * POST edits it; other methods need no named permission.
 */
@Override
public Name getPermissionName(AuthorizationContext ctx) {
switch (ctx.getHttpMethod()) {
case "GET":
return Name.CONFIG_READ_PERM;
case "POST":
return Name.CONFIG_EDIT_PERM;
default:
return null;
}
}
private class Command {
private final SolrQueryRequest req;
@ -425,7 +674,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
List errs = CommandOperation.captureErrors(ops);
if (!errs.isEmpty()) {
throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST,"error processing params", errs);
throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST, "error processing params", errs);
}
SolrResourceLoader loader = req.getCore().getResourceLoader();
@ -488,7 +737,7 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
}
List errs = CommandOperation.captureErrors(ops);
if (!errs.isEmpty()) {
throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST,"error processing commands", errs);
throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST, "error processing commands", errs);
}
SolrResourceLoader loader = req.getCore().getResourceLoader();
@ -526,13 +775,13 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
op.getMap(PluginInfo.INVARIANTS, null);
op.getMap(PluginInfo.APPENDS, null);
if (op.hasError()) return overlay;
if(info.clazz == PluginBag.RuntimeLib.class) {
if(!PluginBag.RuntimeLib.isEnabled()){
if (info.clazz == RuntimeLib.class) {
if (!RuntimeLib.isEnabled()) {
op.addError("Solr not started with -Denable.runtime.lib=true");
return overlay;
}
try {
new PluginBag.RuntimeLib(req.getCore()).init(new PluginInfo(info.tag, op.getDataMap()));
new RuntimeLib(req.getCore().getCoreContainer()).init(new PluginInfo(info.tag, op.getDataMap()));
} catch (Exception e) {
op.addError(e.getMessage());
log.error("can't load this plugin ", e);
@ -559,13 +808,13 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
private boolean pluginExists(SolrConfig.SolrPluginInfo info, ConfigOverlay overlay, String name) {
List<PluginInfo> l = req.getCore().getSolrConfig().getPluginInfos(info.clazz.getName());
for (PluginInfo pluginInfo : l) if(name.equals( pluginInfo.name)) return true;
for (PluginInfo pluginInfo : l) if (name.equals(pluginInfo.name)) return true;
return overlay.getNamedPlugins(info.getCleanTag()).containsKey(name);
}
private boolean verifyClass(CommandOperation op, String clz, Class expected) {
if (clz == null) return true;
if (!"true".equals(String.valueOf(op.getStr("runtimeLib", null)))) {
if (!"true".equals(String.valueOf(op.getStr(RuntimeLib.TYPE, null)))) {
//this is not dynamically loaded so we can verify the class right away
try {
req.getCore().createInitInstance(new PluginInfo(SolrRequestHandler.TYPE, op.getDataMap()), expected, clz, "");
@ -666,235 +915,6 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa
}
public static String validateName(String s) {
for (int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
if ((c >= 'A' && c <= 'Z') ||
(c >= 'a' && c <= 'z') ||
(c >= '0' && c <= '9') ||
c == '_' ||
c == '-' ||
c == '.'
) continue;
else {
return formatString("''{0}'' name should only have chars [a-zA-Z_-.0-9] ", s);
}
}
return null;
}
@Override
public SolrRequestHandler getSubHandler(String path) {
if (subPaths.contains(path)) return this;
if (path.startsWith("/params/")) return this;
return null;
}
private static Set<String> subPaths = new HashSet<>(Arrays.asList("/overlay", "/params", "/updateHandler",
"/query", "/jmx", "/requestDispatcher", "/znodeVersion"));
static {
for (SolrConfig.SolrPluginInfo solrPluginInfo : SolrConfig.plugins)
subPaths.add("/" + solrPluginInfo.getCleanTag());
}
//////////////////////// SolrInfoMBeans methods //////////////////////
@Override
public String getDescription() {
return "Edit solrconfig.xml";
}
@Override
public Category getCategory() {
return Category.ADMIN;
}
public static final String SET_PROPERTY = "set-property";
public static final String UNSET_PROPERTY = "unset-property";
public static final String SET_USER_PROPERTY = "set-user-property";
public static final String UNSET_USER_PROPERTY = "unset-user-property";
public static final String SET = "set";
public static final String UPDATE = "update";
public static final String CREATE = "create";
private static Set<String> cmdPrefixes = ImmutableSet.of(CREATE, UPDATE, "delete", "add");
/**
* Block up to a specified maximum time until we see agreement on the schema
* version in ZooKeeper across all replicas for a collection.
*/
private static void waitForAllReplicasState(String collection,
ZkController zkController,
String prop,
int expectedVersion,
int maxWaitSecs) {
final RTimer timer = new RTimer();
// get a list of active replica cores to query for the schema zk version (skipping this core of course)
List<PerReplicaCallable> concurrentTasks = new ArrayList<>();
for (String coreUrl : getActiveReplicaCoreUrls(zkController, collection)) {
PerReplicaCallable e = new PerReplicaCallable(coreUrl, prop, expectedVersion, maxWaitSecs);
concurrentTasks.add(e);
}
if (concurrentTasks.isEmpty()) return; // nothing to wait for ...
log.info(formatString("Waiting up to {0} secs for {1} replicas to set the property {2} to be of version {3} for collection {4}",
maxWaitSecs, concurrentTasks.size(), prop, expectedVersion, collection));
// use an executor service to invoke schema zk version requests in parallel with a max wait time
int poolSize = Math.min(concurrentTasks.size(), 10);
ExecutorService parallelExecutor =
ExecutorUtil.newMDCAwareFixedThreadPool(poolSize, new DefaultSolrThreadFactory("solrHandlerExecutor"));
try {
List<Future<Boolean>> results =
parallelExecutor.invokeAll(concurrentTasks, maxWaitSecs, TimeUnit.SECONDS);
// determine whether all replicas have the update
List<String> failedList = null; // lazily init'd
for (int f = 0; f < results.size(); f++) {
Boolean success = false;
Future<Boolean> next = results.get(f);
if (next.isDone() && !next.isCancelled()) {
// looks to have finished, but need to check if it succeeded
try {
success = next.get();
} catch (ExecutionException e) {
// shouldn't happen since we checked isCancelled
}
}
if (!success) {
String coreUrl = concurrentTasks.get(f).coreUrl;
log.warn("Core " + coreUrl + "could not get the expected version " + expectedVersion);
if (failedList == null) failedList = new ArrayList<>();
failedList.add(coreUrl);
}
}
// if any tasks haven't completed within the specified timeout, it's an error
if (failedList != null)
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
formatString("{0} out of {1} the property {2} to be of version {3} within {4} seconds! Failed cores: {5}",
failedList.size(), concurrentTasks.size() + 1, prop, expectedVersion, maxWaitSecs, failedList));
} catch (InterruptedException ie) {
log.warn(formatString(
"Core was interrupted . trying to set the property {1} to version {2} to propagate to {3} replicas for collection {4}",
prop, expectedVersion, concurrentTasks.size(), collection));
Thread.currentThread().interrupt();
} finally {
ExecutorUtil.shutdownAndAwaitTermination(parallelExecutor);
}
log.info("Took {}ms to set the property {} to be of version {} for collection {}",
timer.getTime(), prop, expectedVersion, collection);
}
public static List<String> getActiveReplicaCoreUrls(ZkController zkController,
String collection) {
List<String> activeReplicaCoreUrls = new ArrayList<>();
ClusterState clusterState = zkController.getZkStateReader().getClusterState();
Set<String> liveNodes = clusterState.getLiveNodes();
final DocCollection docCollection = clusterState.getCollectionOrNull(collection);
if (docCollection != null && docCollection.getActiveSlices() != null && docCollection.getActiveSlices().size() > 0) {
final Collection<Slice> activeSlices = docCollection.getActiveSlices();
for (Slice next : activeSlices) {
Map<String, Replica> replicasMap = next.getReplicasMap();
if (replicasMap != null) {
for (Map.Entry<String, Replica> entry : replicasMap.entrySet()) {
Replica replica = entry.getValue();
if (replica.getState() == Replica.State.ACTIVE && liveNodes.contains(replica.getNodeName())) {
activeReplicaCoreUrls.add(replica.getCoreUrl());
}
}
}
}
}
return activeReplicaCoreUrls;
}
@Override
public Name getPermissionName(AuthorizationContext ctx) {
switch (ctx.getHttpMethod()) {
case "GET":
return Name.CONFIG_READ_PERM;
case "POST":
return Name.CONFIG_EDIT_PERM;
default:
return null;
}
}
private static class PerReplicaCallable extends SolrRequest implements Callable<Boolean> {
String coreUrl;
String prop;
int expectedZkVersion;
Number remoteVersion = null;
int maxWait;
PerReplicaCallable(String coreUrl, String prop, int expectedZkVersion, int maxWait) {
super(METHOD.GET, "/config/" + ZNODEVER);
this.coreUrl = coreUrl;
this.expectedZkVersion = expectedZkVersion;
this.prop = prop;
this.maxWait = maxWait;
}
@Override
public SolrParams getParams() {
return new ModifiableSolrParams()
.set(prop, expectedZkVersion)
.set(CommonParams.WT, CommonParams.JAVABIN);
}
@Override
public Boolean call() throws Exception {
final RTimer timer = new RTimer();
int attempts = 0;
try (HttpSolrClient solr = new HttpSolrClient.Builder(coreUrl).build()) {
// eventually, this loop will get killed by the ExecutorService's timeout
while (true) {
try {
long timeElapsed = (long) timer.getTime() / 1000;
if (timeElapsed >= maxWait) {
return false;
}
log.info("Time elapsed : {} secs, maxWait {}", timeElapsed, maxWait);
Thread.sleep(100);
NamedList<Object> resp = solr.httpUriRequest(this).future.get();
if (resp != null) {
Map m = (Map) resp.get(ZNODEVER);
if (m != null) {
remoteVersion = (Number) m.get(prop);
if (remoteVersion != null && remoteVersion.intValue() >= expectedZkVersion) break;
}
}
attempts++;
log.info(formatString("Could not get expectedVersion {0} from {1} for prop {2} after {3} attempts", expectedZkVersion, coreUrl, prop, attempts));
} catch (Exception e) {
if (e instanceof InterruptedException) {
break; // stop looping
} else {
log.warn("Failed to get /schema/zkversion from " + coreUrl + " due to: " + e);
}
}
}
}
return true;
}
@Override
protected SolrResponse createResponse(SolrClient client) {
return null;
}
}
@Override
public Collection<Api> getApis() {
return ApiBag.wrapRequestHandlers(this,

View File

@ -18,39 +18,65 @@
package org.apache.solr.handler.admin;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.solr.api.ApiBag;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.request.CollectionApiMapping;
import org.apache.solr.client.solrj.request.CollectionApiMapping.CommandMeta;
import org.apache.solr.client.solrj.request.CollectionApiMapping.Meta;
import org.apache.solr.client.solrj.request.CollectionApiMapping.V2EndPoint;
import org.apache.solr.common.Callable;
import org.apache.solr.common.MapWriter;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.cloud.ClusterProperties;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.util.CommandOperation;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.common.util.Utils;
import org.apache.solr.core.ConfigOverlay;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.RuntimeLib;
import org.apache.solr.handler.SolrConfigHandler;
import org.apache.solr.handler.admin.CollectionsHandler.CollectionOperation;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.util.RTimer;
import org.apache.zookeeper.data.Stat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static org.apache.solr.common.util.CommandOperation.captureErrors;
import static org.apache.solr.common.util.StrUtils.formatString;
public class CollectionHandlerApi extends BaseHandlerApiSupport {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
final CollectionsHandler handler;
static Collection<ApiCommand> apiCommands = createCollMapping();
public CollectionHandlerApi(CollectionsHandler handler) {
this.handler = handler;
}
private static Collection<ApiCommand> createCollMapping() {
Map<Meta, ApiCommand> result = new EnumMap<>(Meta.class);
Map<Meta, ApiCommand> apiMapping = new EnumMap<>(Meta.class);
for (Meta meta : Meta.values()) {
for (CollectionOperation op : CollectionOperation.values()) {
if (op.action == meta.action) {
result.put(meta, new ApiCommand() {
apiMapping.put(meta, new ApiCommand() {
@Override
public CommandMeta meta() {
return meta;
@ -65,30 +91,196 @@ public class CollectionHandlerApi extends BaseHandlerApiSupport {
}
}
//The following APIs have only V2 implementations
addApi(result, Meta.GET_NODES, params -> params.rsp.add("nodes", ((CollectionHandlerApi) params.apiHandler).handler.coreContainer.getZkController().getClusterState().getLiveNodes()));
addApi(result, Meta.SET_CLUSTER_PROPERTY_OBJ, params -> {
List<CommandOperation> commands = params.req.getCommands(true);
if (commands == null || commands.isEmpty()) throw new RuntimeException("Empty commands");
ClusterProperties clusterProperties = new ClusterProperties(((CollectionHandlerApi) params.apiHandler).handler.coreContainer.getZkController().getZkClient());
try {
clusterProperties.setClusterProperties(commands.get(0).getDataMap());
} catch (Exception e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error in API", e);
}
});
addApi(apiMapping, Meta.GET_NODES, CollectionHandlerApi::getNodes);
addApi(apiMapping, Meta.SET_CLUSTER_PROPERTY_OBJ, CollectionHandlerApi::setClusterObj);
addApi(apiMapping, Meta.ADD_RUNTIME_LIB, wrap(CollectionHandlerApi::addUpdateRuntimeLib));
addApi(apiMapping, Meta.UPDATE_RUNTIME_LIB, wrap(CollectionHandlerApi::addUpdateRuntimeLib));
addApi(apiMapping, Meta.DELETE_RUNTIME_LIB, wrap(CollectionHandlerApi::deleteRuntimeLib));
addApi(apiMapping, Meta.ADD_REQ_HANDLER, wrap(CollectionHandlerApi::addRequestHandler));
addApi(apiMapping, Meta.DELETE_REQ_HANDLER, wrap(CollectionHandlerApi::deleteReqHandler));
for (Meta meta : Meta.values()) {
if (result.get(meta) == null) {
if (apiMapping.get(meta) == null) {
log.error("ERROR_INIT. No corresponding API implementation for : " + meta.commandName);
}
}
return result.values();
return apiMapping.values();
}
private static void addApi(Map<Meta, ApiCommand> result, Meta metaInfo, Callable<ApiParams> fun) {
result.put(metaInfo, new ApiCommand() {
/**
 * Decorates a mutating command: when the delegate reports that it modified
 * cluster properties, the current clusterprops.json is re-read, the local
 * listener is notified, and every live node is asked to catch up to the new
 * znode version before the call returns.
 */
static Command wrap(Command cmd) {
return info -> {
CoreContainer cc = ((CollectionHandlerApi) info.apiHandler).handler.coreContainer;
boolean modified = cmd.call(info);
if (modified) {
Stat stat = new Stat();
// re-read the properties so we know the exact version the ZK write produced
Map<String, Object> clusterProperties = new ClusterProperties(cc.getZkController().getZkClient()).getClusterProperties(stat);
// apply the change locally before waiting on the rest of the cluster
cc.getClusterPropertiesListener().onChange(clusterProperties);
log.info("current version of clusterprops.json is {} , trying to get every node to update ", stat.getVersion());
log.debug("The current clusterprops.json: {}",clusterProperties );
((CollectionHandlerApi) info.apiHandler).waitForStateSync(stat.getVersion(), cc);
}
if (info.op != null && info.op.hasError()) {
log.error("Error in running command {} , current clusterprops.json : {}", Utils.toJSONString(info.op), Utils.toJSONString(new ClusterProperties(cc.getZkController().getZkClient()).getClusterProperties()));
}
return modified;
};
}
/** Adds the set of live node names to the response. Read-only, so returns false. */
private static boolean getNodes(ApiInfo params) {
  CoreContainer cc = ((CollectionHandlerApi) params.apiHandler).handler.coreContainer;
  params.rsp.add("nodes", cc.getZkController().getClusterState().getLiveNodes());
  return false;
}
/**
 * Deletes a cluster-level request handler entry from clusterprops.json.
 *
 * @return true when the cluster properties were modified
 * @throws Exception if reading or writing cluster properties fails
 */
private static boolean deleteReqHandler(ApiInfo params) throws Exception {
  String name = params.op.getStr("");
  ClusterProperties clusterProperties = new ClusterProperties(((CollectionHandlerApi) params.apiHandler).handler.coreContainer.getZkController().getZkClient());
  Map<String, Object> map = clusterProperties.getClusterProperties();
  if (Utils.getObjectByPath(map, false, asList(SolrRequestHandler.TYPE, name)) == null) {
    // fixed: original message ended with a dangling ':' and never included the name
    params.op.addError("No such requestHandler with name: " + name);
    return false;
  }
  Map m = new LinkedHashMap();
  // a null value at the path removes the entry
  Utils.setObjectByPath(m, asList(SolrRequestHandler.TYPE, name), null, true);
  clusterProperties.setClusterProperties(m);
  return true;
}
/**
 * Registers a new cluster-level request handler in clusterprops.json.
 * Fails (with an error on the command) if one with the same name exists.
 *
 * @return true when the cluster properties were modified
 */
private static boolean addRequestHandler(ApiInfo params) throws Exception {
  Map data = params.op.getDataMap();
  String name = (String) data.get("name");
  CoreContainer coreContainer = ((CollectionHandlerApi) params.apiHandler).handler.coreContainer;
  ClusterProperties clusterProperties = new ClusterProperties(coreContainer.getZkController().getZkClient());
  Map<String, Object> existingProps = clusterProperties.getClusterProperties();
  // refuse to clobber an existing handler of the same name
  if (Utils.getObjectByPath(existingProps, false, asList(SolrRequestHandler.TYPE, name)) != null) {
    params.op.addError("A requestHandler already exists with the said name");
    return false;
  }
  Map delta = new LinkedHashMap();
  Utils.setObjectByPath(delta, asList(SolrRequestHandler.TYPE, name), data, true);
  clusterProperties.setClusterProperties(delta);
  return true;
}
/**
 * Removes a runtime library entry from clusterprops.json. Requires the node
 * to have been started with enable.runtime.lib=true.
 *
 * @return true when the cluster properties were modified
 */
private static boolean deleteRuntimeLib(ApiInfo params) throws Exception {
  if (!RuntimeLib.isEnabled()) {
    params.op.addError("node not started with enable.runtime.lib=true");
    return false;
  }
  String name = params.op.getStr(CommandOperation.ROOT_OBJ);
  ClusterProperties clusterProperties = new ClusterProperties(((CollectionHandlerApi) params.apiHandler).handler.coreContainer.getZkController().getZkClient());
  List<String> pathToLib = asList(RuntimeLib.TYPE, name);
  Map existing = (Map) Utils.getObjectByPath(clusterProperties.getClusterProperties(), false, pathToLib);
  if (existing == null) {
    params.op.addError("No such runtimeLib : " + name);
    return false;
  }
  // writing a null at the path removes the entry
  Map delta = new LinkedHashMap();
  Utils.setObjectByPath(delta, pathToLib, null, true);
  clusterProperties.setClusterProperties(delta);
  return true;
}
/**
 * Adds or updates a runtime library (jar) entry in clusterprops.json.
 * "add" fails if an entry with the same name exists; "update" fails if it
 * does not. The jar is validated via {@code RuntimeLib#init} before the
 * properties are written.
 *
 * @return true when the cluster properties were modified
 * @throws Exception if reading or writing cluster properties fails
 */
private static boolean addUpdateRuntimeLib(ApiInfo params) throws Exception {
  if (!RuntimeLib.isEnabled()) {
    params.op.addError("node not started with enable.runtime.lib=true");
    return false;
  }
  CollectionHandlerApi handler = (CollectionHandlerApi) params.apiHandler;
  RuntimeLib lib = new RuntimeLib(handler.handler.coreContainer);
  CommandOperation op = params.op;
  String name = op.getStr("name");
  ClusterProperties clusterProperties = new ClusterProperties(((CollectionHandlerApi) params.apiHandler).handler.coreContainer.getZkController().getZkClient());
  Map<String, Object> props = clusterProperties.getClusterProperties();
  List<String> pathToLib = asList(RuntimeLib.TYPE, name);
  Map existing = (Map) Utils.getObjectByPath(props, false, pathToLib);
  if (Meta.ADD_RUNTIME_LIB.commandName.equals(op.name)) {
    // "add" must not overwrite an existing entry
    if (existing != null) {
      op.addError(StrUtils.formatString("The jar with a name ''{0}'' already exists ", name));
      return false;
    }
  } else {
    // "update" requires the entry to already exist
    if (existing == null) {
      // fixed typo: was "doesn not exist"
      op.addError(StrUtils.formatString("The jar with a name ''{0}'' does not exist", name));
      return false;
    }
  }
  try {
    // validate the jar before publishing the config change
    // NOTE(review): PluginInfo is built with SolrRequestHandler.TYPE here — confirm this
    // should not be RuntimeLib.TYPE
    lib.init(new PluginInfo(SolrRequestHandler.TYPE, op.getDataMap()));
  } catch (SolrException e) {
    log.error("Error loading runtimelib ", e);
    op.addError(e.getMessage());
    return false;
  }
  Map delta = new LinkedHashMap();
  Utils.setObjectByPath(delta, pathToLib, op.getDataMap(), true);
  clusterProperties.setClusterProperties(delta);
  return true;
}
/**
 * Writes the command payload into cluster properties. Returns false: the
 * caller does not treat this as a change requiring node-version sync.
 */
private static boolean setClusterObj(ApiInfo params) {
  CollectionHandlerApi api = (CollectionHandlerApi) params.apiHandler;
  ClusterProperties clusterProperties = new ClusterProperties(api.handler.coreContainer.getZkController().getZkClient());
  try {
    clusterProperties.setClusterProperties(params.op.getDataMap());
  } catch (Exception e) {
    throw new SolrException(ErrorCode.SERVER_ERROR, "Error in API", e);
  }
  return false;
}
/**
 * Blocks up to 30 seconds until every live node reports that its cached
 * clusterprops.json has reached {@code expectedVersion}. Throws a
 * SERVER_ERROR listing the nodes that did not catch up in time.
 */
private void waitForStateSync(int expectedVersion, CoreContainer coreContainer) {
  final RTimer timer = new RTimer();
  int waitTimeSecs = 30;
  // build one polling task per live node
  List<PerNodeCallable> concurrentTasks = new ArrayList<>();
  ZkStateReader zkStateReader = coreContainer.getZkController().getZkStateReader();
  for (String nodeName : zkStateReader.getClusterState().getLiveNodes()) {
    PerNodeCallable e = new PerNodeCallable(zkStateReader.getBaseUrlForNodeName(nodeName), expectedVersion, waitTimeSecs);
    concurrentTasks.add(e);
  }
  if (concurrentTasks.isEmpty()) return; // nothing to wait for ...
  log.info("Waiting up to {} secs for {} nodes to update clusterprops to be of version {} ",
      waitTimeSecs, concurrentTasks.size(), expectedVersion);
  SolrConfigHandler.execInparallel(concurrentTasks, parallelExecutor -> {
    try {
      List<String> failedList = SolrConfigHandler.executeAll(expectedVersion, waitTimeSecs, concurrentTasks, parallelExecutor);
      // if any tasks haven't completed within the specified timeout, it's an error
      if (failedList != null)
        // fixed: original message had placeholders {0}-{5} but only five arguments,
        // leaving {5} unformatted, and spoke of "cores"/"the property"
        // NOTE(review): the "+ 1" in the node count is kept from the original — confirm intent
        throw new SolrException(ErrorCode.SERVER_ERROR,
            formatString("{0} out of {1} nodes could not update clusterprops to be of version {2} within {3} seconds! Failed nodes: {4}",
                failedList.size(), concurrentTasks.size() + 1, expectedVersion, waitTimeSecs, failedList));
    } catch (InterruptedException e) {
      log.warn(formatString(
          "Request was interrupted . trying to set the clusterprops to version {0} to propagate to {1} nodes ",
          expectedVersion, concurrentTasks.size()));
      Thread.currentThread().interrupt();
    }
  });
  log.info("Took {}ms to update the clusterprops to be of version {} on {} nodes",
      timer.getTime(), expectedVersion, concurrentTasks.size());
}
/**
 * A V2 API operation. Implementations return true when they modified cluster
 * properties, which {@code wrap} uses to trigger a cluster-wide version sync.
 */
interface Command {
boolean call(ApiInfo info) throws Exception;
}
private static void addApi(Map<Meta, ApiCommand> mapping, Meta metaInfo, Command fun) {
mapping.put(metaInfo, new ApiCommand() {
@Override
public CommandMeta meta() {
return metaInfo;
@ -96,25 +288,25 @@ public class CollectionHandlerApi extends BaseHandlerApiSupport {
@Override
public void invoke(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler) throws Exception {
fun.call(new ApiParams(req, rsp, apiHandler));
CommandOperation op = null;
if (metaInfo.method == SolrRequest.METHOD.POST) {
List<CommandOperation> commands = req.getCommands(true);
if (commands == null || commands.size() != 1)
throw new SolrException(ErrorCode.BAD_REQUEST, "should have exactly one command");
op = commands.get(0);
}
fun.call(new ApiInfo(req, rsp, apiHandler, op));
if (op != null && op.hasError()) {
throw new ApiBag.ExceptionWithErrObject(ErrorCode.BAD_REQUEST, "error processing commands", captureErrors(singletonList(op)));
}
}
});
}
/**
 * Immutable holder bundling the request, the response and the owning handler
 * that an API operation needs in order to execute.
 */
static class ApiParams {
  final SolrQueryRequest req;
  final SolrQueryResponse rsp;
  final BaseHandlerApiSupport apiHandler;

  ApiParams(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler) {
    this.apiHandler = apiHandler;
    this.rsp = rsp;
    this.req = req;
  }
}
public CollectionHandlerApi(CollectionsHandler handler) {
this.handler = handler;
@Override
protected List<V2EndPoint> getEndPoints() {
return asList(CollectionApiMapping.EndPoint.values());
}
@Override
@ -122,9 +314,39 @@ public class CollectionHandlerApi extends BaseHandlerApiSupport {
return apiCommands;
}
@Override
protected List<V2EndPoint> getEndPoints() {
return Arrays.asList(CollectionApiMapping.EndPoint.values());
/**
 * Polls a node's {@code /____v2/node/ext} endpoint until the clusterprops.json
 * version it reports catches up with an expected ZooKeeper version (or the
 * configured wait time elapses).
 */
public static class PerNodeCallable extends SolrConfigHandler.PerReplicaCallable {
  static final List<String> path = Arrays.asList("metadata", CommonParams.VERSION);

  PerNodeCallable(String baseUrl, int expectedversion, int waitTime) {
    super(baseUrl, ConfigOverlay.ZNODEVER, expectedversion, waitTime);
  }

  @Override
  protected boolean verifyResponse(MapWriter mw, int attempts) {
    remoteVersion = (Number) mw._get(path, -1);
    // the node is in sync once its reported version reaches the expected one
    boolean caughtUp = remoteVersion.intValue() >= expectedZkVersion;
    if (!caughtUp) {
      log.info(formatString("Could not get expectedVersion {0} from {1} , remote val= {2} after {3} attempts", expectedZkVersion, coreUrl, remoteVersion, attempts));
    }
    return caughtUp;
  }

  public String getPath() {
    return "/____v2/node/ext";
  }
}
/**
 * Context passed to an API operation: the request/response pair, the owning
 * handler, and the single parsed command operation (null for GET requests).
 */
static class ApiInfo {
  final SolrQueryRequest req;
  final SolrQueryResponse rsp;
  final BaseHandlerApiSupport apiHandler;
  final CommandOperation op;

  ApiInfo(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler, CommandOperation op) {
    this.op = op;
    this.apiHandler = apiHandler;
    this.rsp = rsp;
    this.req = req;
  }
}
}

View File

@ -39,6 +39,7 @@ public interface PermissionNameProvider {
CORE_READ_PERM("core-admin-read", null),
CORE_EDIT_PERM("core-admin-edit", null),
READ_PERM("read", "*"),
CUSTOM_PERM("custom-op", null),//custom operation , user-defined
UPDATE_PERM("update", "*"),
CONFIG_EDIT_PERM("config-edit", unmodifiableSet(new HashSet<>(asList("*", null)))),
CONFIG_READ_PERM("config-read", "*"),

View File

@ -21,7 +21,6 @@ import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import java.lang.invoke.MethodHandles;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
@ -104,22 +103,17 @@ public final class CryptoKeys implements CLIO {
* @param data The data that is signed
*/
/**
 * Verifies an RSA/SHA1 signature over the given data.
 * <p>
 * The buffer is re-wrapped from its backing array up to {@code limit()} so that
 * {@link Signature#update(ByteBuffer)} cannot disturb the caller's position.
 *
 * @param publicKey the key to verify against
 * @param sig       the raw signature bytes
 * @param data      the signed payload (backed by an accessible array)
 * @return true if the signature matches the data
 */
public static boolean verify(PublicKey publicKey, byte[] sig, ByteBuffer data) throws InvalidKeyException, SignatureException {
  data = ByteBuffer.wrap(data.array(), data.arrayOffset(), data.limit());
  try {
    Signature signature = Signature.getInstance("SHA1withRSA");
    signature.initVerify(publicKey);
    signature.update(data);
    return signature.verify(sig);
  } catch (NoSuchAlgorithmException e) {
    // SHA1withRSA is required to be present by the JCA spec, so this cannot happen
    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
  }
}
private static byte[][] evpBytesTokey(int key_len, int iv_len, MessageDigest md,

View File

@ -0,0 +1,27 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEpQIBAAKCAQEA1fSq/8iz1sIppHhSKrC0g2uzfFKZzWZAbcvVQbyS/pwxC7VB
hR93DVINyGGT3XHnpemt/h0wrifCIEMyqSLTIhiu5bRJpfE7UO9vGgTcP5+i2wTe
cKHqrxDvbQ4D7co96Gvu2cShySbOHsFjZXL4eaqU2W2x8S7U+OjRBwtwMxB4vstX
5u75WtwVXwNRj+uXIfTTisplE/nA/slqByW4Q9QAg+du+Ejh4W7nF+Z9GRMR7MZe
c1TeGOYZd8YzYku7WyUZ1SRQ6JjaZrdphlLtysMgqP0MMajEoFs/ajeNHef0iCz0
TnB05PQd+GPO5+JrLPZ399mucl/jM+cbixn9pwIDAQABAoIBAQCpfA51XryvU9F+
+t1D+gSU0p00z44MeoJxN3WwhDwBOyNS/ftwA/Pf9m76m+lxEwkIkesFkIP+gXoy
6mhYOUD9EoaBaeapcEWnoHPnLZB3SfLNArdei7UHhyfSwLZ2CK4vzkg/85ahbH79
N/6P35pbbrhI4K+DubB1mJ/0r6fqmh6/04L47/liAPvsSM9ZJIMwbuZbYY21ggI9
ZGk+kO0C/CyzxplaVLJ8P86KnRloEfjSmMhP72z7bja/BE2NX42G12YbjY7tVMn7
duTWU2F4JWYriWAHr+4GwODDdtvn/R5jPirDIJeHCd6Bg1t7KibHRTcgYgtwDBqG
F65g4zqRAoGBAP2fry+6uXe3rAJDJrCSKPQVTv5QhOvG1466xsOaWRSe/rx1Mvnd
Z4pe+T8bdvsvqFnNMAkZKzzPjJ+oCSVKHhcerzMm2Cw6Gpv2yywA/1VykIoZmdNM
/vHjC7w35q7xwEUHxB/rt2vvijrAYnhaq86uIXzoiqTGaKJ/z34QsCppAoGBANf1
1wsISnZPjIipMIYtC7Co3GCUhsQ+ksVBhtsOHaKfon3Q69Qbz93l7dbCwgFbL6td
HW/ppnABZLVFHnoLJ5YrriVZ1Wizx90+RFGdNj74UTV8bfqr/C32UKTjqoYjPAZO
vEOzHkmpc9I1mrxm1Mcff5EHDFmXGXoZ2GLCpEWPAoGAOXroVFPoVtacuEKJ0Ti+
6Vqu9XpANcNx9RollA02JTNHnmSdcf2YysZtjLznwVPyvq9/NICsyPJs93443Geo
3CqLIHesRJHCmBhdwZJUTART98iHkVkA6sc/UKAGux11Ku/wph9hCahXVqtlZct+
5q+WTV3SljeVXUbEOtkDZAkCgYEArnd0R/xls5jmbs1IX01q4Ug56Wh0S3xFtEgQ
u013EZcnfb9Xld72Gk0TzOlANDpHk4hBLNU02c22X188lNoIHCCjqpcdel2rPIh+
RvTcCxku+ifQ7a8dpsAUPHGUpJM4fdwD6il9cYMNB6i4njXw9gDzXOW1y3bvZR4W
GwsmDO8CgYEA5vG0TdwkvdDcsJYimm3WQJ/VnYidE6JfjnAxnPwFFPjQoDRIS32f
TMMJFTHSSH4xgQLEhEfaAbrkptpPORM9QAjjRx2RXoa5yu2GMpDWua4MxpHdqiSY
v/rOw+6fZbe8YC9bZ8AE+GPuHdJDQFoSU7ieCGiF/iwWB2jhwCm7OyY=
-----END RSA PRIVATE KEY-----

View File

@ -0,0 +1,9 @@
-----BEGIN RSA PRIVATE KEY-----
MIIBOQIBAAJBAMgmSVfl+E2Nj/oKAP2TceWh17pk6Ugsw5A5nLH+OeMB/WeWJZg/
NEDda8SXfQDEVRmw5P+2IZypPASzfCrc6yECAwEAAQJAbZFwEztky+fUSrhRRIAE
GQaZV4PIpWdEA99WJaabv+YsWN5UUd7y+Evu50mhH3RQIxQd+R6SYs1ke9OlHlV2
cQIhAP8367gybVEu2A+Cg1fE9vbHfnHrurpDQrh9r0ZKooTtAiEAyMMxvlHlSh6Q
2cUTSxuyUEaQfN+W4efehgfIWBVlzIUCIEHBMZ0qeNnCvO36DUbuu0ZHjb9iIaDd
tXH9B8yPbCHdAiAaV3o0ZZx3MDGDUVdpuHWaENguekva0kihP24rGIul3QIgNqZS
EzA2aoQdNPl5oDfkhqAGjs5pb7qLgtmXJvVhi/Q=
-----END RSA PRIVATE KEY-----

Binary file not shown.

Binary file not shown.

View File

@ -0,0 +1,48 @@
================priv_key2048.pem===================
openssl dgst -sha1 -sign ../cryptokeys/priv_key2048.pem runtimelibs.jar.bin | openssl enc -base64
NaTm3+i99/ZhS8YRsLc3NLz2Y6VuwEbu7DihY8GAWwWIGm+jpXgn1JiuaenfxFCc
fNKCC9WgZmEgbTZTzmV/OZMVn90u642YJbF3vTnzelW1pHB43ZRAJ1iesH0anM37
w03n3es+vFWQtuxc+2Go888fJoMkUX2C6Zk6Jn116KE45DWjeyPM4mp3vvGzwGvd
RxP5K9Q3suA+iuI/ULXM7m9mV4ruvs/MZvL+ELm5Jnmk1bBtixVJhQwJP2z++8tQ
KJghhyBxPIC/2fkAHobQpkhZrXu56JjP+v33ul3Ku4bbvfVMY/LVwCAEnxlvhk+C
6uRCKCeFMrzQ/k5inasXLw==
openssl dgst -sha1 -sign ../cryptokeys/priv_key2048.pem runtimelibs_v2.jar.bin | openssl enc -base64
jsPpNMs74ogRbx9M4n/OH3j3s85KOq9dOtgGJkUf6O5D8T9d9zU2lDwxnTYjQCaW
cRTLGH3Z8vpc0wyT3g4aXepgLUTSnrepbPffSFhQtFrCNxurPOLzbp6ERhwjZ0RL
GvZrlbbjR2SxqZ3BpHiGxslj0tPCkdevNCEy1glLhl8RWG5xsLCrRL1mrEtLg97A
53oCCrfGAHLEvW+olGeB1r7jqUaSrbfAUfDMSIvZfOIV+xdlvabkNiuzvsAc+B6Q
pXWm+Em2f5TO/bkOh2m/UInGXcNHCa0oqRMGKP1H252Cv9eXm/d0h3Dqxv+f80Gz
LfyA6/OKQ9FfskY4pltCsQ==
openssl dgst -sha1 -sign ../cryptokeys/priv_key2048.pem runtimelibs_v3.jar.bin | openssl enc -base64
BSx/v0eKWX+LzkWF+iIAzwGL9rezWMePsyRzi4TvV6boATZ9cSfeUAqUgRW50f/h
AHX4/hrHr2Piy8za9tIUoXbLqn3xJNNroOqpcVEgwh1Zii4c7zPwUSB9gtd9zlAK
4LAPLdjxILS8NXpTD2zLycc8kSpcyTpSTITqz6HA3HsPGC81WIq2k3IRqYAkacn4
6viW+nnEjA7OxDCOqoL//evjxDWQ6R1YggTGh4u5MSWZJCiCPJNQnTlPRzUZOAJj
tX7PblDrKeiunKGbjtiOhFLYkupe1lSlIRLiJV/qqopO4TQGO1bhbxeCKAX2vEz5
Ch5bGOa+VZLJJGaDo318UQ==
=====================priv_key512.pem=====================
openssl dgst -sha1 -sign ../cryptokeys/priv_key512.pem runtimelibs.jar.bin | openssl enc -base64
L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1
f/U3bOlMPINlSOM6LK3JpQ==
L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ==
openssl dgst -sha1 -sign ../cryptokeys/priv_key512.pem runtimelibs_v2.jar.bin | openssl enc -base64
j+Rflxi64tXdqosIhbusqi6GTwZq8znunC/dzwcWW0/dHlFGKDurOaE1Nz9FSPJu
XbHkVLj638yZ0Lp1ssnoYA==
openssl dgst -sha1 -sign ../cryptokeys/priv_key512.pem runtimelibs_v3.jar.bin | openssl enc -base64
pnH8uDHsTF0HWyQqABqVWmvo3rM/Mp2qpuo6S9YXZA9Ifg8NjHX8WzPe6EzlaqBc
YcusrEV0b+5NCBx4AS0TGA==
pnH8uDHsTF0HWyQqABqVWmvo3rM/Mp2qpuo6S9YXZA9Ifg8NjHX8WzPe6EzlaqBcYcusrEV0b+5NCBx4AS0TGA==

View File

@ -27,7 +27,7 @@ import org.junit.Test;
public class TestClusterProperties extends SolrCloudTestCase {
private ClusterProperties props;
@BeforeClass
public static void setupCluster() throws Exception {
configureCluster(1).configure();
@ -49,7 +49,7 @@ public class TestClusterProperties extends SolrCloudTestCase {
CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, "false").process(cluster.getSolrClient());
assertEquals("false", props.getClusterProperty(ZkStateReader.LEGACY_CLOUD, "true"));
}
@Test
public void testSetPluginClusterProperty() throws Exception {
String propertyName = ClusterProperties.EXT_PROPRTTY_PREFIX + "pluginA.propertyA";
@ -57,7 +57,7 @@ public class TestClusterProperties extends SolrCloudTestCase {
.process(cluster.getSolrClient());
assertEquals("valueA", props.getClusterProperty(propertyName, null));
}
@Test(expected = SolrException.class)
public void testSetInvalidPluginClusterProperty() throws Exception {
String propertyName = "pluginA.propertyA";

View File

@ -195,7 +195,7 @@ public class TestCryptoKeys extends AbstractFullDistribZkTestBase {
}
private byte[] readFile(String fname) throws IOException {
public static byte[] readFile(String fname) throws IOException {
byte[] buf = null;
try (FileInputStream fis = new FileInputStream(getFile(fname))) {
buf = new byte[fis.available()];

View File

@ -97,7 +97,7 @@ public class TestDynamicLoading extends AbstractFullDistribZkTestBase {
assertNotNull(map = (Map) map.get("error"));
assertTrue("full output " + map, map.get("msg").toString().contains("no such blob or version available: colltest/1" ));
assertTrue("full output " + map, map.get("msg").toString().contains("no such resource available: colltest/1" ));
payload = " {\n" +
" 'set' : {'watched': {" +
" 'x':'X val',\n" +

View File

@ -0,0 +1,431 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Predicate;
import com.carrotsearch.ant.tasks.junit4.dependencies.com.google.common.collect.ImmutableMap;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrResponse;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.BaseHttpSolrClient;
import org.apache.solr.client.solrj.request.V2Request;
import org.apache.solr.client.solrj.response.V2Response;
import org.apache.solr.cloud.MiniSolrCloudCluster;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.apache.solr.common.cloud.ClusterProperties;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.params.MapSolrParams;
import org.apache.solr.common.util.Pair;
import org.apache.solr.common.util.Utils;
import org.apache.solr.core.MemClassLoader;
import org.apache.solr.core.RuntimeLib;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.util.LogLevel;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.data.Stat;
import org.eclipse.jetty.server.Server;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.solr.cloud.TestCryptoKeys.readFile;
import static org.apache.solr.common.util.Utils.getObjectByPath;
import static org.apache.solr.core.TestDynamicLoading.getFileContent;
import static org.apache.solr.core.TestDynamicLoadingUrl.runHttpServer;
@SolrTestCaseJ4.SuppressSSL
@LogLevel("org.apache.solr.common.cloud.ZkStateReader=DEBUG;org.apache.solr.handler.admin.CollectionHandlerApi=DEBUG;org.apache.solr.core.LibListener=DEBUG;org.apache.solr.common.cloud.ClusterProperties=DEBUG")
public class TestContainerReqHandler extends SolrCloudTestCase {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@BeforeClass
public static void setupCluster() throws Exception {
// runtime-lib loading is gated behind this system property; every test in this class needs it
System.setProperty("enable.runtime.lib", "true");
}
/**
 * Repeatedly executes {@code req} (up to {@code repeats} attempts, 100ms apart) until the
 * response matches every entry in {@code vals}. An expected value that is a {@link Predicate}
 * is applied to the raw response object at that key; any other expected value is compared
 * against the <em>string form</em> of the response object. The last failing attempt's
 * exception/assertion is rethrown.
 */
static void assertResponseValues(int repeats, SolrClient client, SolrRequest req, Map vals) throws Exception {
  for (int i = 0; i < repeats; i++) {
    if (i > 0) {
      Thread.sleep(100);
    }
    try {
      SolrResponse rsp = req.process(client);
      try {
        for (Object e : vals.entrySet()) {
          Map.Entry entry = (Map.Entry) e;
          String key = (String) entry.getKey();
          Object val = entry.getValue();
          Predicate p = val instanceof Predicate ? (Predicate) val : o -> {
            // compare on the string form so an expected "3" also matches a numeric 3;
            // previously 'v' was computed but the raw object was compared, leaving it dead
            String v = o == null ? null : String.valueOf(o);
            return Objects.equals(val, v);
          };
          assertTrue("attempt: " + i + " Mismatch for value : '" + key + "' in response " + Utils.toJSONString(rsp),
              p.test(rsp.getResponse()._get(key, null)));
        }
        return;
      } catch (Exception e) {
        // values not there yet; retry unless attempts are exhausted
        if (i >= repeats - 1) throw e;
        continue;
      }
    } catch (Exception e) {
      if (i >= repeats - 1) throw e;
      log.error("exception in request", e);
      continue;
    }
  }
}
/**
 * Asserts that the version reported by the /node/ext endpoint equals the current
 * clusterprops.json znode version, and returns the cluster properties map.
 */
private static Map<String, Object> assertVersionInSync(SolrZkClient zkClient, SolrClient solrClient) throws SolrServerException, IOException {
Stat stat = new Stat();
Map<String, Object> map = new ClusterProperties(zkClient).getClusterProperties(stat);
assertEquals(String.valueOf(stat.getVersion()), getExtResponse(solrClient)._getStr("metadata/version", null));
return map;
}
/** Issues a GET to the container-level /node/ext endpoint and returns the raw response. */
private static V2Response getExtResponse(SolrClient solrClient) throws SolrServerException, IOException {
  V2Request req = new V2Request.Builder("/node/ext").withMethod(SolrRequest.METHOD.GET).build();
  return req.process(solrClient);
}
/**
 * End-to-end lifecycle of a runtime lib without signature enforcement:
 * bad-checksum and bad-url adds are rejected; a valid add registers the lib in
 * clusterprops.json; a handler class from the lib is served via the in-memory
 * classloader; updating the lib picks up the new version; deletes empty both lists.
 */
@Test
public void testRuntimeLib() throws Exception {
// serve the three jar versions over a local embedded jetty
Map<String, Object> jars = Utils.makeMap(
"/jar1.jar", getFileContent("runtimecode/runtimelibs.jar.bin"),
"/jar2.jar", getFileContent("runtimecode/runtimelibs_v2.jar.bin"),
"/jar3.jar", getFileContent("runtimecode/runtimelibs_v3.jar.bin"));
Pair<Server, Integer> server = runHttpServer(jars);
int port = server.second();
MiniSolrCloudCluster cluster = configureCluster(4).configure();
try {
String payload = null;
// 1. adding a lib with a wrong sha512 must be rejected
try {
payload = "{add-runtimelib:{name : 'foo', url: 'http://localhost:" + port + "/jar1.jar', " +
"sha512 : 'wrong-sha512'}}";
new V2Request.Builder("/cluster")
.withPayload(payload)
.withMethod(SolrRequest.METHOD.POST)
.build().process(cluster.getSolrClient());
fail("Expected error");
} catch (BaseHttpSolrClient.RemoteExecutionException e) {
assertTrue("actual output : " + Utils.toJSONString(e.getMetaData()), e.getMetaData()._getStr("error/details[0]/errorMessages[0]", "").contains("expected sha512 hash :"));
}
// 2. a URL that does not resolve to a jar must also be rejected
try {
payload = "{add-runtimelib:{name : 'foo', url: 'http://localhost:" + port + "/jar0.jar', " +
"sha512 : 'd01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420'}}";
new V2Request.Builder("/cluster")
.withPayload(payload)
.withMethod(SolrRequest.METHOD.POST)
.build().process(cluster.getSolrClient());
fail("Expected error");
} catch (BaseHttpSolrClient.RemoteExecutionException e) {
assertTrue("Actual output : " + Utils.toJSONString(e.getMetaData()), e.getMetaData()._getStr("error/details[0]/errorMessages[0]", "").contains("no such resource available: foo"));
}
// 3. a valid url + sha512 is accepted and persisted in clusterprops.json
payload = "{add-runtimelib:{name : 'foo', url: 'http://localhost:" + port + "/jar1.jar', " +
"sha512 : 'd01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420'}}";
new V2Request.Builder("/cluster")
.withPayload(payload)
.withMethod(SolrRequest.METHOD.POST)
.build().process(cluster.getSolrClient());
assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "add-runtimelib/sha512"),
getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "runtimeLib/foo/sha512"));
// register a handler class that lives inside the runtime lib
new V2Request.Builder("/cluster")
.withPayload("{add-requesthandler:{name : 'bar', class : 'org.apache.solr.core.RuntimeLibReqHandler'}}")
.withMethod(SolrRequest.METHOD.POST)
.build().process(cluster.getSolrClient());
Map<String, Object> map = new ClusterProperties(cluster.getZkClient()).getClusterProperties();
V2Request request = new V2Request.Builder("/node/ext/bar")
.withMethod(SolrRequest.METHOD.POST)
.build();
// the handler must be loaded via MemClassLoader; jar1 reports no version
assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
"class", "org.apache.solr.core.RuntimeLibReqHandler",
"loader", MemClassLoader.class.getName(),
"version", null));
assertEquals("org.apache.solr.core.RuntimeLibReqHandler",
getObjectByPath(map, true, Arrays.asList("requestHandler", "bar", "class")));
// 4. update the lib to jar3 and verify the new version is served
payload = "{update-runtimelib:{name : 'foo', url: 'http://localhost:" + port + "/jar3.jar', " +
"sha512 : 'f67a7735a89b4348e273ca29e4651359d6d976ba966cb871c4b468ea1dbd452e42fcde9d188b7788e5a1ef668283c690606032922364759d19588666d5862653'}}";
new V2Request.Builder("/cluster")
.withPayload(payload)
.withMethod(SolrRequest.METHOD.POST)
.build().process(cluster.getSolrClient());
assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "update-runtimelib/sha512"),
getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "runtimeLib/foo/sha512"));
request = new V2Request.Builder("/node/ext/bar")
.withMethod(SolrRequest.METHOD.POST)
.build();
assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
"class", "org.apache.solr.core.RuntimeLibReqHandler",
"loader", MemClassLoader.class.getName(),
"version", "3")
);
// 5. deleting the handler and then the lib should empty the respective lists
new V2Request.Builder("/cluster")
.withPayload("{delete-requesthandler: 'bar'}")
.withMethod(SolrRequest.METHOD.POST)
.build().process(cluster.getSolrClient());
request = new V2Request.Builder("/node/ext")
.withMethod(SolrRequest.METHOD.POST)
.build();
assertResponseValues(10, cluster.getSolrClient(), request, ImmutableMap.of(SolrRequestHandler.TYPE,
(Predicate<Object>) o -> o instanceof List && ((List) o).isEmpty()));
new V2Request.Builder("/cluster")
.withPayload("{delete-runtimelib : 'foo'}")
.withMethod(SolrRequest.METHOD.POST)
.build().process(cluster.getSolrClient());
assertResponseValues(10, cluster.getSolrClient(), request, ImmutableMap.of(RuntimeLib.TYPE,
(Predicate<Object>) o -> o instanceof List && ((List) o).isEmpty()));
} finally {
server.first().stop();
cluster.shutdown();
}
}
/**
 * Same flow as {@link #testRuntimeLib()} but with signature enforcement: a 2048-bit RSA
 * public key is published under /keys/exe in ZK, after which a jar with a wrong 'sig'
 * must be rejected and a correctly signed jar must be accepted and updatable.
 */
@Test
public void testRuntimeLibWithSig2048() throws Exception {
  Map<String, Object> jars = Utils.makeMap(
      "/jar1.jar", getFileContent("runtimecode/runtimelibs.jar.bin"),
      "/jar2.jar", getFileContent("runtimecode/runtimelibs_v2.jar.bin"),
      "/jar3.jar", getFileContent("runtimecode/runtimelibs_v3.jar.bin"));
  Pair<Server, Integer> server = runHttpServer(jars);
  int port = server.second();
  MiniSolrCloudCluster cluster = configureCluster(4).configure();
  try {
    // publish the trusted public key; all add/update ops must now carry a valid sig
    byte[] derFile = readFile("cryptokeys/pub_key2048.der");
    cluster.getZkClient().makePath("/keys/exe", true);
    cluster.getZkClient().create("/keys/exe/pub_key2048.der", derFile, CreateMode.PERSISTENT, true);
    String signature = "NaTm3+i99/ZhS8YRsLc3NLz2Y6VuwEbu7DihY8GAWwWIGm+jpXgn1JiuaenfxFCcfNKCC9WgZmEgbTZTzmV/OZMVn90u642YJbF3vTnzelW1pHB43ZRAJ1iesH0anM37w03n3es+vFWQtuxc+2Go888fJoMkUX2C6Zk6Jn116KE45DWjeyPM4mp3vvGzwGvdRxP5K9Q3suA+iuI/ULXM7m9mV4ruvs/MZvL+ELm5Jnmk1bBtixVJhQwJP2z++8tQKJghhyBxPIC/2fkAHobQpkhZrXu56JjP+v33ul3Ku4bbvfVMY/LVwCAEnxlvhk+C6uRCKCeFMrzQ/k5inasXLw==";
    String payload = "{add-runtimelib:{name : 'foo', url: 'http://localhost:" + port + "/jar1.jar', " +
        "sig : 'EdYkvRpMZbvElN93/xUmyKXcj6xHP16AVk71TlTascEwCb5cFQ2AeKhPIlwYpkLWXEOcLZKfeXoWwOLaV5ZNhg==' ," +
        "sha512 : 'd01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420'}}";
    try {
      new V2Request.Builder("/cluster")
          .withPayload(payload)
          .withMethod(SolrRequest.METHOD.POST)
          .build().process(cluster.getSolrClient());
      // BUGFIX: without this, the test silently passes if the bad signature is accepted
      fail("Expected error");
    } catch (BaseHttpSolrClient.RemoteExecutionException e) {
      //No key matched signature for jar
      assertTrue(e.getMetaData()._getStr("error/details[0]/errorMessages[0]", "").contains("No key matched signature for jar"));
    }
    // a correctly signed jar is accepted and recorded in clusterprops.json
    payload = "{add-runtimelib:{name : 'foo', url: 'http://localhost:" + port + "/jar1.jar', " +
        "sig : '" + signature + "'," +
        "sha512 : 'd01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420'}}";
    new V2Request.Builder("/cluster")
        .withPayload(payload)
        .withMethod(SolrRequest.METHOD.POST)
        .build().process(cluster.getSolrClient());
    assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "add-runtimelib/sha512"),
        getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "runtimeLib/foo/sha512"));
    new V2Request.Builder("/cluster")
        .withPayload("{add-requesthandler:{name : 'bar', class : 'org.apache.solr.core.RuntimeLibReqHandler'}}")
        .withMethod(SolrRequest.METHOD.POST)
        .build().process(cluster.getSolrClient());
    Map<String, Object> map = new ClusterProperties(cluster.getZkClient()).getClusterProperties();
    V2Request request = new V2Request.Builder("/node/ext/bar")
        .withMethod(SolrRequest.METHOD.POST)
        .build();
    // handler is served from the in-memory classloader; jar1 reports no version
    assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
        "class", "org.apache.solr.core.RuntimeLibReqHandler",
        "loader", MemClassLoader.class.getName(),
        "version", null));
    assertEquals("org.apache.solr.core.RuntimeLibReqHandler",
        getObjectByPath(map, true, Arrays.asList("requestHandler", "bar", "class")));
    // update to jar3 with its matching signature; new version must be picked up
    payload = "{update-runtimelib:{name : 'foo', url: 'http://localhost:" + port + "/jar3.jar', " +
        "sig : 'BSx/v0eKWX+LzkWF+iIAzwGL9rezWMePsyRzi4TvV6boATZ9cSfeUAqUgRW50f/hAHX4/hrHr2Piy8za9tIUoXbLqn3xJNNroOqpcVEgwh1Zii4c7zPwUSB9gtd9zlAK4LAPLdjxILS8NXpTD2zLycc8kSpcyTpSTITqz6HA3HsPGC81WIq2k3IRqYAkacn46viW+nnEjA7OxDCOqoL//evjxDWQ6R1YggTGh4u5MSWZJCiCPJNQnTlPRzUZOAJjtX7PblDrKeiunKGbjtiOhFLYkupe1lSlIRLiJV/qqopO4TQGO1bhbxeCKAX2vEz5Ch5bGOa+VZLJJGaDo318UQ==' ," +
        "sha512 : 'f67a7735a89b4348e273ca29e4651359d6d976ba966cb871c4b468ea1dbd452e42fcde9d188b7788e5a1ef668283c690606032922364759d19588666d5862653'}}";
    new V2Request.Builder("/cluster")
        .withPayload(payload)
        .withMethod(SolrRequest.METHOD.POST)
        .build().process(cluster.getSolrClient());
    assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "update-runtimelib/sha512"),
        getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "runtimeLib/foo/sha512"));
    request = new V2Request.Builder("/node/ext/bar")
        .withMethod(SolrRequest.METHOD.POST)
        .build();
    assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
        "class", "org.apache.solr.core.RuntimeLibReqHandler",
        "loader", MemClassLoader.class.getName(),
        "version", "3"));
  } finally {
    server.first().stop();
    cluster.shutdown();
  }
}
/**
 * Signature-enforcement flow using a 512-bit RSA public key published under
 * /keys/exe: a correctly signed jar is accepted, the handler it provides is
 * served via the in-memory classloader, and a signed update is picked up.
 */
@Test
public void testRuntimeLibWithSig512() throws Exception {
Map<String, Object> jars = Utils.makeMap(
"/jar1.jar", getFileContent("runtimecode/runtimelibs.jar.bin"),
"/jar2.jar", getFileContent("runtimecode/runtimelibs_v2.jar.bin"),
"/jar3.jar", getFileContent("runtimecode/runtimelibs_v3.jar.bin"));
Pair<Server, Integer> server = runHttpServer(jars);
int port = server.second();
MiniSolrCloudCluster cluster = configureCluster(4).configure();
try {
// publish the trusted 512-bit public key; add/update ops must now be signed
byte[] derFile = readFile("cryptokeys/pub_key512.der");
cluster.getZkClient().makePath("/keys/exe", true);
cluster.getZkClient().create("/keys/exe/pub_key512.der", derFile, CreateMode.PERSISTENT, true);
String signature = "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ==";
String payload = "{add-runtimelib:{name : 'foo', url: 'http://localhost:" + port + "/jar1.jar', " +
"sig : '" + signature + "'," +
"sha512 : 'd01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420'}}";
new V2Request.Builder("/cluster")
.withPayload(payload)
.withMethod(SolrRequest.METHOD.POST)
.build().process(cluster.getSolrClient());
// the accepted sha512 must be persisted in clusterprops.json
assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "add-runtimelib/sha512"),
getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "runtimeLib/foo/sha512"));
new V2Request.Builder("/cluster")
.withPayload("{add-requesthandler:{name : 'bar', class : 'org.apache.solr.core.RuntimeLibReqHandler'}}")
.withMethod(SolrRequest.METHOD.POST)
.build().process(cluster.getSolrClient());
Map<String, Object> map = new ClusterProperties(cluster.getZkClient()).getClusterProperties();
V2Request request = new V2Request.Builder("/node/ext/bar")
.withMethod(SolrRequest.METHOD.POST)
.build();
// handler loaded via MemClassLoader; jar1 reports no version
assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
"class", "org.apache.solr.core.RuntimeLibReqHandler",
"loader", MemClassLoader.class.getName(),
"version", null));
assertEquals("org.apache.solr.core.RuntimeLibReqHandler",
getObjectByPath(map, true, Arrays.asList("requestHandler", "bar", "class")));
// update to jar3 with its matching signature; new version must be served
payload = "{update-runtimelib:{name : 'foo', url: 'http://localhost:" + port + "/jar3.jar', " +
"sig : 'pnH8uDHsTF0HWyQqABqVWmvo3rM/Mp2qpuo6S9YXZA9Ifg8NjHX8WzPe6EzlaqBcYcusrEV0b+5NCBx4AS0TGA==' ," +
"sha512 : 'f67a7735a89b4348e273ca29e4651359d6d976ba966cb871c4b468ea1dbd452e42fcde9d188b7788e5a1ef668283c690606032922364759d19588666d5862653'}}";
new V2Request.Builder("/cluster")
.withPayload(payload)
.withMethod(SolrRequest.METHOD.POST)
.build().process(cluster.getSolrClient());
assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "update-runtimelib/sha512"),
getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "runtimeLib/foo/sha512"));
request = new V2Request.Builder("/node/ext/bar")
.withMethod(SolrRequest.METHOD.POST)
.build();
assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
"class", "org.apache.solr.core.RuntimeLibReqHandler",
"loader", MemClassLoader.class.getName(),
"version", "3"));
} finally {
server.first().stop();
cluster.shutdown();
}
}
/**
 * Registers a cluster-level request handler, verifies it is persisted in
 * clusterprops.json and live on the nodes, then deletes it and verifies removal.
 */
@Test
public void testSetClusterReqHandler() throws Exception {
  MiniSolrCloudCluster cluster = configureCluster(4).configure();
  try {
    SolrZkClient zkClient = cluster.getZkClient();
    new V2Request.Builder("/cluster")
        .withPayload("{add-requesthandler:{name : 'foo', class : 'org.apache.solr.handler.DumpRequestHandler'}}")
        .withMethod(SolrRequest.METHOD.POST)
        .build().process(cluster.getSolrClient());
    Map<String, Object> map = assertVersionInSync(zkClient, cluster.getSolrClient());
    assertEquals("org.apache.solr.handler.DumpRequestHandler",
        getObjectByPath(map, true, Arrays.asList("requestHandler", "foo", "class")));
    assertVersionInSync(zkClient, cluster.getSolrClient());
    // the handler should be live on the nodes and echo request params back
    V2Response rsp = new V2Request.Builder("/node/ext/foo")
        .withMethod(SolrRequest.METHOD.GET)
        .withParams(new MapSolrParams((Map) Utils.makeMap("testkey", "testval")))
        .build().process(cluster.getSolrClient());
    assertEquals("testval", rsp._getStr("params/testkey", null));
    new V2Request.Builder("/cluster")
        .withPayload("{delete-requesthandler: 'foo'}")
        .withMethod(SolrRequest.METHOD.POST)
        .build().process(cluster.getSolrClient());
    // BUGFIX: re-read the cluster properties — the previous 'map' is a stale
    // pre-delete snapshot, so asserting against it could never detect a failed delete
    map = new ClusterProperties(zkClient).getClusterProperties();
    assertNull(getObjectByPath(map, true, Arrays.asList("requestHandler", "foo")));
  } finally {
    cluster.shutdown();
  }
}
}

View File

@ -207,6 +207,23 @@ public class CollectionApiMapping {
POST,
null,
"set-obj-property", null),
ADD_RUNTIME_LIB(CLUSTER_CMD,
POST,null,
"add-runtimelib",null ),
UPDATE_RUNTIME_LIB(CLUSTER_CMD,
POST,null,
"update-runtimelib",null ),
DELETE_RUNTIME_LIB(CLUSTER_CMD,
POST,null,
"delete-runtimelib",null ),
ADD_REQ_HANDLER(CLUSTER_CMD,
POST,null,
"add-requesthandler",null ),
DELETE_REQ_HANDLER(CLUSTER_CMD,
POST,null,
"delete-requesthandler",null ),
UTILIZE_NODE(CLUSTER_CMD,
POST,
UTILIZENODE,

View File

@ -89,14 +89,18 @@ public class ClusterProperties {
return value;
}
/** Reads the cluster properties, discarding the znode version {@link Stat}. */
public Map<String, Object> getClusterProperties() throws IOException {
return getClusterProperties(new Stat());
}
/**
* Return the cluster properties
* @throws IOException if there is an error reading properties from the cluster
*/
@SuppressWarnings("unchecked")
public Map<String, Object> getClusterProperties() throws IOException {
public Map<String, Object> getClusterProperties(Stat stat) throws IOException {
try {
Map<String, Object> properties = (Map<String, Object>) Utils.fromJSON(client.getData(ZkStateReader.CLUSTER_PROPS, null, new Stat(), true));
Map<String, Object> properties = (Map<String, Object>) Utils.fromJSON(client.getData(ZkStateReader.CLUSTER_PROPS, null, stat, true));
return convertCollectionDefaultsToNestedFormat(properties);
} catch (KeeperException.NoNodeException e) {
return Collections.emptyMap();
@ -105,6 +109,12 @@ public class ClusterProperties {
}
}
/** This applies the new map over the existing map. It's a merge operation, not an overwrite
* This applies the changes atomically over an existing object tree even if multiple nodes are
* trying to update this simultaneously
*
* @param properties The partial Object tree that needs to be applied
*/
public void setClusterProperties(Map<String, Object> properties) throws IOException, KeeperException, InterruptedException {
client.atomicUpdate(ZkStateReader.CLUSTER_PROPS, zkData -> {
if (zkData == null) return Utils.toJSON(convertCollectionDefaultsToNestedFormat(properties));

View File

@ -196,6 +196,7 @@ public class ZkStateReader implements SolrCloseable {
private final ConcurrentHashMap<String, PropsWatcher> collectionPropsWatchers = new ConcurrentHashMap<>();
private volatile SortedSet<String> liveNodes = emptySortedSet();
private volatile int clusterPropsVersion = -1;
private volatile Map<String, Object> clusterProperties = Collections.emptyMap();
@ -495,40 +496,20 @@ public class ZkStateReader implements SolrCloseable {
return collection.getZNodeVersion();
}
public synchronized void createClusterStateWatchersAndUpdate() throws KeeperException,
InterruptedException {
// We need to fetch the current cluster state and the set of live nodes
log.debug("Updating cluster state from ZooKeeper... ");
// Sanity check ZK structure.
if (!zkClient.exists(CLUSTER_STATE, true)) {
throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE,
"Cannot connect to cluster at " + zkClient.getZkServerAddress() + ": cluster not found/not ready");
private final Watcher clusterPropertiesWatcher = event -> {
// session events are not change events, and do not remove the watcher
if (Watcher.Event.EventType.None.equals(event.getType())) {
return;
}
// on reconnect of SolrZkClient force refresh and re-add watches.
loadClusterProperties();
refreshLiveNodes(new LiveNodeWatcher());
refreshLegacyClusterState(new LegacyClusterStateWatcher());
refreshStateFormat2Collections();
refreshCollectionList(new CollectionsChildWatcher());
refreshAliases(aliasesManager);
};
if (securityNodeListener != null) {
addSecurityNodeWatcher(pair -> {
ConfigData cd = new ConfigData();
cd.data = pair.first() == null || pair.first().length == 0 ? EMPTY_MAP : Utils.getDeepCopy((Map) fromJSON(pair.first()), 4, false);
cd.version = pair.second() == null ? -1 : pair.second().getVersion();
securityData = cd;
securityNodeListener.run();
});
securityData = getSecurityProps(true);
public void forceRefreshClusterProps(int expectedVersion) {
log.debug("Expected version of clusterprops.json is {} , my version is {}", expectedVersion, clusterPropsVersion);
if (expectedVersion > clusterPropsVersion) {
log.info("reloading clusterprops.json");
loadClusterProperties();
}
collectionPropsObservers.forEach((k, v) -> {
collectionPropsWatchers.computeIfAbsent(k, PropsWatcher::new).refreshAndWatch(true);
});
}
private void addSecurityNodeWatcher(final Callable<Pair<byte[], Stat>> callback)
@ -1104,22 +1085,52 @@ public class ZkStateReader implements SolrCloseable {
return Collections.unmodifiableMap(clusterProperties);
}
private final Watcher clusterPropertiesWatcher = event -> {
// session events are not change events, and do not remove the watcher
if (Watcher.Event.EventType.None.equals(event.getType())) {
return;
public synchronized void createClusterStateWatchersAndUpdate() throws KeeperException,
InterruptedException {
// We need to fetch the current cluster state and the set of live nodes
log.debug("Updating cluster state from ZooKeeper... ");
// Sanity check ZK structure.
if (!zkClient.exists(CLUSTER_STATE, true)) {
throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE,
"Cannot connect to cluster at " + zkClient.getZkServerAddress() + ": cluster not found/not ready");
}
// on reconnect of SolrZkClient force refresh and re-add watches.
loadClusterProperties();
};
refreshLiveNodes(new LiveNodeWatcher());
refreshLegacyClusterState(new LegacyClusterStateWatcher());
refreshStateFormat2Collections();
refreshCollectionList(new CollectionsChildWatcher());
refreshAliases(aliasesManager);
if (securityNodeListener != null) {
addSecurityNodeWatcher(pair -> {
ConfigData cd = new ConfigData();
cd.data = pair.first() == null || pair.first().length == 0 ? EMPTY_MAP : Utils.getDeepCopy((Map) fromJSON(pair.first()), 4, false);
cd.version = pair.second() == null ? -1 : pair.second().getVersion();
securityData = cd;
securityNodeListener.run();
});
securityData = getSecurityProps(true);
}
collectionPropsObservers.forEach((k, v) -> {
collectionPropsWatchers.computeIfAbsent(k, PropsWatcher::new).refreshAndWatch(true);
});
}
@SuppressWarnings("unchecked")
private void loadClusterProperties() {
try {
while (true) {
try {
byte[] data = zkClient.getData(ZkStateReader.CLUSTER_PROPS, clusterPropertiesWatcher, new Stat(), true);
Stat stat = new Stat();
byte[] data = zkClient.getData(ZkStateReader.CLUSTER_PROPS, clusterPropertiesWatcher, stat, true);
this.clusterProperties = ClusterProperties.convertCollectionDefaultsToNestedFormat((Map<String, Object>) Utils.fromJSON(data));
log.debug("Loaded cluster properties: {}", this.clusterProperties);
this.clusterPropsVersion = stat.getVersion();
log.debug("Loaded cluster properties: {} to version {}", this.clusterProperties, clusterPropsVersion);
for (ClusterPropertiesListener listener : clusterPropertiesListeners) {
listener.onChange(getClusterProperties());
@ -1127,6 +1138,7 @@ public class ZkStateReader implements SolrCloseable {
return;
} catch (KeeperException.NoNodeException e) {
this.clusterProperties = Collections.emptyMap();
this.clusterPropsVersion = -1;
log.debug("Loaded empty cluster properties");
// set an exists watch, and if the node has been created since the last call,
// read the data again
@ -1139,6 +1151,10 @@ public class ZkStateReader implements SolrCloseable {
}
}
/**
 * Returns the ZooKeeper znode version of the last clusterprops.json read,
 * or -1 if the properties have not yet been loaded or the node is absent.
 */
public int getClusterPropsVersion() {
  return clusterPropsVersion;
}
/**
* Get collection properties for a given collection. If the collection is watched, simply return it from the cache,
* otherwise fetch it directly from zookeeper. This is a convenience for {@code getCollectionProperties(collection,0)}

View File

@ -91,7 +91,7 @@ public class Utils {
public static final Function NEW_SYNCHRONIZED_ARRAYLIST_FUN = o -> Collections.synchronizedList(new ArrayList<>());
public static final Function NEW_HASHSET_FUN = o -> new HashSet<>();
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
public static Map getDeepCopy(Map map, int maxDepth) {
return getDeepCopy(map, maxDepth, true, false);
}
@ -100,21 +100,18 @@ public class Utils {
return getDeepCopy(map, maxDepth, mutable, false);
}
public static Map getDeepCopy(Map map, int maxDepth, boolean mutable, boolean sorted) {
if(map == null) return null;
if (maxDepth < 1) return map;
Map copy;
if (sorted) {
copy = new TreeMap();
} else {
copy = map instanceof LinkedHashMap? new LinkedHashMap(map.size()): new HashMap(map.size());
public static final Function<JSONParser, ObjectBuilder> MAPWRITEROBJBUILDER = jsonParser -> {
try {
return new ObjectBuilder(jsonParser) {
@Override
public Object newObject() {
return new LinkedHashMapWriter();
}
};
} catch (IOException e) {
throw new RuntimeException(e);
}
for (Object o : map.entrySet()) {
Map.Entry e = (Map.Entry) o;
copy.put(e.getKey(), makeDeepCopy(e.getValue(),maxDepth, mutable, sorted));
}
return mutable ? copy : Collections.unmodifiableMap(copy);
}
};
public static void forEachMapEntry(Object o, String path, BiConsumer fun) {
Object val = Utils.getObjectByPath(o, false, path);
@ -144,6 +141,40 @@ public class Utils {
((Map) o).forEach((k, v) -> fun.accept(k, v));
}
}
// ObjectBuilder factory that materializes every parsed JSON object as a plain
// HashMap (no insertion-order guarantee). The checked IOException from the
// ObjectBuilder constructor is wrapped in a RuntimeException because Function
// cannot declare checked exceptions.
public static final Function<JSONParser, ObjectBuilder> MAPOBJBUILDER = jsonParser -> {
  try {
    return new ObjectBuilder(jsonParser) {
      @Override
      public Object newObject() {
        // each JSON object literal becomes a raw HashMap
        return new HashMap();
      }
    };
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
};
// Matches a path segment with a trailing list index, e.g. "books[0]" or "x[-1]":
// group(1) = the name part, group(2) = the (possibly negative) index.
public static final Pattern ARRAY_ELEMENT_INDEX = Pattern
    .compile("(\\S*?)\\[([-]?\\d+)\\]");
/**
 * Returns a deep copy of {@code map}, recursing at most {@code maxDepth} levels.
 *
 * @param map      the map to copy; null yields null
 * @param maxDepth recursion budget; when &lt; 1 the original map is returned uncopied
 * @param mutable  when false the returned map is wrapped unmodifiable
 * @param sorted   when true keys are sorted via a TreeMap (nested lists are sorted too)
 */
public static Map getDeepCopy(Map map, int maxDepth, boolean mutable, boolean sorted) {
  if (map == null) return null;
  if (maxDepth < 1) return map;
  final Map result;
  if (sorted) {
    result = new TreeMap();
  } else if (map instanceof LinkedHashMap) {
    // preserve insertion order when the source does
    result = new LinkedHashMap(map.size());
  } else {
    result = new HashMap(map.size());
  }
  for (Object entryObj : map.entrySet()) {
    Map.Entry entry = (Map.Entry) entryObj;
    result.put(entry.getKey(), makeDeepCopy(entry.getValue(), maxDepth, mutable, sorted));
  }
  if (mutable) return result;
  return Collections.unmodifiableMap(result);
}
/** Deep-copies {@code c} without sorting; see {@link #getDeepCopy(Collection, int, boolean, boolean)}. */
public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable) {
  return getDeepCopy(c, maxDepth, mutable, false);
}
private static Object makeDeepCopy(Object v, int maxDepth, boolean mutable, boolean sorted) {
if (v instanceof MapWriter && maxDepth > 1) {
@ -151,7 +182,7 @@ public class Utils {
} else if (v instanceof IteratorWriter && maxDepth > 1) {
v = ((IteratorWriter) v).toList(new ArrayList<>());
if (sorted) {
Collections.sort((List)v);
Collections.sort((List) v);
}
}
@ -163,29 +194,6 @@ public class Utils {
return v;
}
/**
 * Serializes {@code o} with {@link JavaBinCodec} and returns the bytes as an
 * InputStream. The codec is closed via try-with-resources; the returned stream
 * wraps the codec's internal buffer directly, without copying.
 */
public static InputStream toJavabin(Object o) throws IOException {
  try (final JavaBinCodec jbc = new JavaBinCodec()) {
    BinaryRequestWriter.BAOS baos = new BinaryRequestWriter.BAOS();
    jbc.marshal(o,baos);
    return new ByteBufferInputStream(ByteBuffer.wrap(baos.getbuf(),0,baos.size()));
  }
}
public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable) {
return getDeepCopy(c, maxDepth, mutable, false);
}
public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable, boolean sorted) {
if (c == null || maxDepth < 1) return c;
Collection result = c instanceof Set ?
( sorted? new TreeSet() : new HashSet()) : new ArrayList();
for (Object o : c) result.add(makeDeepCopy(o, maxDepth, mutable, sorted));
if (sorted && (result instanceof List)) {
Collections.sort((List)result);
}
return mutable ? result : result instanceof Set ? unmodifiableSet((Set) result) : unmodifiableList((List) result);
}
public static void writeJson(Object o, OutputStream os, boolean indent) throws IOException {
writeJson(o, new OutputStreamWriter(os, UTF_8), indent)
.flush();
@ -199,35 +207,12 @@ public class Utils {
return writer;
}
private static class MapWriterJSONWriter extends JSONWriter {
public MapWriterJSONWriter(CharArr out, int indentSize) {
super(out, indentSize);
public static InputStream toJavabin(Object o) throws IOException {
try (final JavaBinCodec jbc = new JavaBinCodec()) {
BinaryRequestWriter.BAOS baos = new BinaryRequestWriter.BAOS();
jbc.marshal(o, baos);
return new ByteBufferInputStream(ByteBuffer.wrap(baos.getbuf(), 0, baos.size()));
}
@Override
public void handleUnknownClass(Object o) {
if (o instanceof MapWriter) {
Map m = ((MapWriter)o).toMap(new LinkedHashMap<>());
write(m);
} else {
super.handleUnknownClass(o);
}
}
}
public static byte[] toJSON(Object o) {
if(o == null) return new byte[0];
CharArr out = new CharArr();
if (!(o instanceof List) && !(o instanceof Map)) {
if (o instanceof MapWriter) {
o = ((MapWriter)o).toMap(new LinkedHashMap<>());
} else if(o instanceof IteratorWriter){
o = ((IteratorWriter)o).toList(new ArrayList<>());
}
}
new MapWriterJSONWriter(out, 2).write(o); // indentation by default
return toUTF8(out);
}
public static String toJSONString(Object o) {
@ -274,15 +259,29 @@ public class Utils {
return propMap;
}
public static Object fromJSON(InputStream is){
return fromJSON(new InputStreamReader(is, UTF_8));
}
public static Object fromJSON(Reader is){
try {
return STANDARDOBJBUILDER.apply(getJSONParser(is)).getVal();
} catch (IOException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Parse error", e);
/**
 * Returns a deep copy of {@code c}, recursing at most {@code maxDepth} levels.
 * A Set input produces a Set (TreeSet when sorted); anything else produces a
 * List, which is sorted in place when {@code sorted} is true.
 *
 * @param c        the collection to copy; null or an exhausted depth budget returns {@code c} itself
 * @param mutable  when false the result is wrapped unmodifiable
 * @param sorted   when true the result is ordered
 */
public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable, boolean sorted) {
  if (c == null || maxDepth < 1) return c;
  final Collection result;
  if (c instanceof Set) {
    result = sorted ? new TreeSet() : new HashSet();
  } else {
    result = new ArrayList();
  }
  for (Object item : c) {
    result.add(makeDeepCopy(item, maxDepth, mutable, sorted));
  }
  if (sorted && result instanceof List) {
    Collections.sort((List) result);
  }
  if (mutable) return result;
  return result instanceof Set ? unmodifiableSet((Set) result) : unmodifiableList((List) result);
}
/**
 * Serializes {@code o} as indented JSON and returns the UTF-8 bytes.
 * A null argument yields an empty byte array. A top-level MapWriter or
 * IteratorWriter is first materialized into a LinkedHashMap / ArrayList;
 * MapWriters nested deeper are handled by MapWriterJSONWriter.
 */
public static byte[] toJSON(Object o) {
  if (o == null) return new byte[0];
  CharArr out = new CharArr();
  if (!(o instanceof List) && !(o instanceof Map)) {
    if (o instanceof MapWriter) {
      o = ((MapWriter) o).toMap(new LinkedHashMap<>());
    } else if (o instanceof IteratorWriter) {
      o = ((IteratorWriter) o).toList(new ArrayList<>());
    }
  }
  new MapWriterJSONWriter(out, 2).write(o); // indentation by default
  return toUTF8(out);
}
@ -293,35 +292,14 @@ public class Utils {
throw new RuntimeException(e);
}
};
public static final Function<JSONParser, ObjectBuilder> MAPWRITEROBJBUILDER = jsonParser -> {
try {
return new ObjectBuilder(jsonParser){
@Override
public Object newObject() {
return new LinkedHashMapWriter();
}
};
} catch (IOException e) {
throw new RuntimeException(e);
}
};
public static final Function<JSONParser, ObjectBuilder> MAPOBJBUILDER = jsonParser -> {
try {
return new ObjectBuilder(jsonParser){
@Override
public Object newObject() {
return new HashMap();
}
};
} catch (IOException e) {
throw new RuntimeException(e);
}
};
/**
 * Parses JSON from the stream, decoding the bytes as UTF-8.
 * Delegates to the Reader-based overload.
 */
public static Object fromJSON(InputStream is) {
  Reader jsonReader = new InputStreamReader(is, UTF_8);
  return fromJSON(jsonReader);
}
public static Object fromJSON(InputStream is, Function<JSONParser, ObjectBuilder> objBuilderProvider) {
public static Object fromJSON(Reader is) {
try {
return objBuilderProvider.apply(getJSONParser((new InputStreamReader(is, StandardCharsets.UTF_8)))).getVal();
return STANDARDOBJBUILDER.apply(getJSONParser(is)).getVal();
} catch (IOException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Parse error", e);
}
@ -336,10 +314,19 @@ public class Utils {
return fromJSON(stream);
} catch (IOException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
"Resource error: " + e.getMessage(), e);
"Resource error: " + e.getMessage(), e);
}
}
public static JSONParser getJSONParser(Reader reader){
/**
 * Parses JSON from the stream (decoded as UTF-8) using a caller-supplied
 * ObjectBuilder factory, which decides what container types the parsed
 * objects are materialized into (e.g. MAPOBJBUILDER, MAPWRITEROBJBUILDER).
 *
 * @throws SolrException (SERVER_ERROR) wrapping any IOException raised while parsing
 */
public static Object fromJSON(InputStream is, Function<JSONParser, ObjectBuilder> objBuilderProvider) {
  try {
    return objBuilderProvider.apply(getJSONParser((new InputStreamReader(is, StandardCharsets.UTF_8)))).getVal();
  } catch (IOException e) {
    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Parse error", e);
  }
}
public static JSONParser getJSONParser(Reader reader) {
JSONParser parser = new JSONParser(reader);
parser.setFlags(parser.getFlags() |
JSONParser.ALLOW_MISSING_COLON_COMMA_BEFORE_OBJECT |
@ -347,11 +334,11 @@ public class Utils {
return parser;
}
public static Object fromJSONString(String json) {
public static Object fromJSONString(String json) {
try {
return STANDARDOBJBUILDER.apply(getJSONParser(new StringReader(json))).getVal();
} catch (Exception e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Parse error : "+ json, e );
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Parse error : " + json, e);
}
}
@ -363,10 +350,10 @@ public class Utils {
public static boolean setObjectByPath(Object root, String hierarchy, Object value) {
List<String> parts = StrUtils.splitSmart(hierarchy, '/', true);
return setObjectByPath(root, parts, value);
return setObjectByPath(root, parts, value, true);
}
public static boolean setObjectByPath(Object root, List<String> hierarchy, Object value) {
public static boolean setObjectByPath(Object root, List<String> hierarchy, Object value, boolean insertMissing) {
if (root == null) return false;
if (!isMapLike(root)) throw new RuntimeException("must be a Map or NamedList");
Object obj = root;
@ -382,7 +369,10 @@ public class Utils {
}
if (i < hierarchy.size() - 1) {
Object o = getVal(obj, s, -1);
if (o == null) return false;
if (o == null) {
if (insertMissing) insertItem(o = new LinkedHashMap<>(), obj, s);
else return false;
}
if (idx > -1) {
List l = (List) o;
o = idx < l.size() ? l.get(idx) : null;
@ -391,14 +381,7 @@ public class Utils {
obj = o;
} else {
if (idx == -2) {
if (obj instanceof NamedList) {
NamedList namedList = (NamedList) obj;
int location = namedList.indexOf(s, 0);
if (location == -1) namedList.add(s, value);
else namedList.setVal(location, value);
} else if (obj instanceof Map) {
((Map) obj).put(s, value);
}
insertItem(value, obj, s);
return true;
} else {
Object v = getVal(obj, s, -1);
@ -422,10 +405,20 @@ public class Utils {
}
/**
 * Inserts {@code value} under {@code name} into a map-like container.
 * For a NamedList an existing entry is overwritten in place, otherwise a new
 * entry is appended; for a Map this is a plain put. Other container types are
 * silently ignored, matching the callers' expectations.
 */
private static void insertItem(Object value, Object container, String name) {
  if (container instanceof NamedList) {
    NamedList nl = (NamedList) container;
    int existing = nl.indexOf(name, 0);
    if (existing == -1) {
      nl.add(name, value);
    } else {
      nl.setVal(existing, value);
    }
  } else if (container instanceof Map) {
    ((Map) container).put(name, value);
  }
}
public static Object getObjectByPath(Object root, boolean onlyPrimitive, List<String> hierarchy) {
if(root == null) return null;
if(!isMapLike(root)) return null;
if (root == null) return null;
if (!isMapLike(root)) return null;
Object obj = root;
for (int i = 0; i < hierarchy.size(); i++) {
int idx = -1;
@ -518,6 +511,7 @@ public class Utils {
try {
((MapWriter) obj).writeMap(new MapWriter.EntryWriter() {
int count = -1;
@Override
public MapWriter.EntryWriter put(CharSequence k, Object v) {
if (result[0] != null) return this;
@ -533,15 +527,14 @@ public class Utils {
throw new RuntimeException(e);
}
return result[0];
}
else if (obj instanceof Map) return ((Map) obj).get(key);
} else if (obj instanceof Map) return ((Map) obj).get(key);
else throw new RuntimeException("must be a NamedList or Map");
}
/**
* If the passed entity has content, make sure it is fully
* read and closed.
*
*
* @param entity to consume or null
*/
public static void consumeFully(HttpEntity entity) {
@ -562,31 +555,21 @@ public class Utils {
/**
* Make sure the InputStream is fully read.
*
*
* @param is to read
* @throws IOException on problem with IO
*/
private static void readFully(InputStream is) throws IOException {
is.skip(is.available());
while (is.read() != -1) {}
}
public static Map<String, Object> getJson(DistribStateManager distribStateManager, String path) throws InterruptedException, IOException, KeeperException {
VersionedData data = null;
try {
data = distribStateManager.getData(path);
} catch (KeeperException.NoNodeException | NoSuchElementException e) {
return Collections.emptyMap();
while (is.read() != -1) {
}
if (data == null || data.getData() == null || data.getData().length == 0) return Collections.emptyMap();
return (Map<String, Object>) Utils.fromJSON(data.getData());
}
/**
* Assumes data in ZooKeeper is a JSON string, deserializes it and returns as a Map
*
* @param zkClient the zookeeper client
* @param path the path to the znode being read
* @param zkClient the zookeeper client
* @param path the path to the znode being read
* @param retryOnConnLoss whether to retry the operation automatically on connection loss, see {@link org.apache.solr.common.cloud.ZkCmdExecutor#retryOperation(ZkOperation)}
* @return a Map if the node exists and contains valid JSON or an empty map if znode does not exist or has a null data
*/
@ -602,39 +585,23 @@ public class Utils {
return Collections.emptyMap();
}
public static final Pattern ARRAY_ELEMENT_INDEX = Pattern
.compile("(\\S*?)\\[([-]?\\d+)\\]");
public static SpecProvider getSpec(final String name) {
return () -> {
return ValidatingJsonMap.parse(CommonParams.APISPEC_LOCATION + name + ".json", CommonParams.APISPEC_LOCATION);
};
}
public static String parseMetricsReplicaName(String collectionName, String coreName) {
if (collectionName == null || !coreName.startsWith(collectionName)) {
return null;
} else {
// split "collection1_shard1_1_replica1" into parts
if (coreName.length() > collectionName.length()) {
String str = coreName.substring(collectionName.length() + 1);
int pos = str.lastIndexOf("_replica");
if (pos == -1) { // ?? no _replicaN part ??
return str;
} else {
return str.substring(pos + 1);
}
} else {
return null;
}
/**
 * Reads the znode at {@code path} via the state manager and deserializes its
 * contents as a JSON map. Returns an empty map when the node is missing
 * (NoNodeException / NoSuchElementException) or its data is null/empty.
 */
public static Map<String, Object> getJson(DistribStateManager distribStateManager, String path) throws InterruptedException, IOException, KeeperException {
  VersionedData data = null;
  try {
    data = distribStateManager.getData(path);
  } catch (KeeperException.NoNodeException | NoSuchElementException e) {
    return Collections.emptyMap();
  }
  if (data == null || data.getData() == null || data.getData().length == 0) return Collections.emptyMap();
  return (Map<String, Object>) Utils.fromJSON(data.getData());
}
/**Applies one json over other. The 'input' is applied over the sink
* The values in input isapplied over the values in 'sink' . If a value is 'null'
/**
* Applies one json over other. The 'input' is applied over the sink
* The values in input are applied over the values in 'sink' . If a value is 'null'
* that value is removed from sink
*
* @param sink the original json object to start with. Ensure that this Map is mutable
* @param sink the original json object to start with. Ensure that this Map is mutable
* @param input the json with new values
* @return whether there was any change made to sink or not.
*/
@ -672,20 +639,62 @@ public class Utils {
return isModified;
}
/**
 * Returns a provider that lazily parses the API spec JSON named {@code name}
 * from the APISPEC_LOCATION classpath directory on each invocation.
 */
public static SpecProvider getSpec(final String name) {
  return () -> ValidatingJsonMap.parse(CommonParams.APISPEC_LOCATION + name + ".json", CommonParams.APISPEC_LOCATION);
}
/**
 * Extracts the replica portion of a core name for metrics reporting,
 * e.g. "collection1_shard1_1_replica1" with collection "collection1"
 * yields "replica1".
 *
 * @param collectionName the collection the core should belong to
 * @param coreName       the core name to parse
 * @return the "replicaN" suffix, the full remainder when no "_replica" marker
 *         exists, or null when the core does not extend the collection name
 */
public static String parseMetricsReplicaName(String collectionName, String coreName) {
  // the core must belong to the collection, i.e. share its name as a prefix
  if (collectionName == null || !coreName.startsWith(collectionName)) {
    return null;
  }
  // nothing after the collection prefix -> no replica part to extract
  if (coreName.length() <= collectionName.length()) {
    return null;
  }
  // e.g. "collection1_shard1_1_replica1" -> "shard1_1_replica1"
  String remainder = coreName.substring(collectionName.length() + 1);
  int replicaIdx = remainder.lastIndexOf("_replica");
  // without a "_replicaN" marker the whole remainder is returned as-is
  return replicaIdx == -1 ? remainder : remainder.substring(replicaIdx + 1);
}
public static String getBaseUrlForNodeName(final String nodeName, String urlScheme) {
final int _offset = nodeName.indexOf("_");
if (_offset < 0) {
throw new IllegalArgumentException("nodeName does not contain expected '_' separator: " + nodeName);
}
final String hostAndPort = nodeName.substring(0,_offset);
final String hostAndPort = nodeName.substring(0, _offset);
try {
final String path = URLDecoder.decode(nodeName.substring(1+_offset), "UTF-8");
final String path = URLDecoder.decode(nodeName.substring(1 + _offset), "UTF-8");
return urlScheme + "://" + hostAndPort + (path.isEmpty() ? "" : ("/" + path));
} catch (UnsupportedEncodingException e) {
throw new IllegalStateException("JVM Does not seem to support UTF-8", e);
}
}
/**
 * JSONWriter that can serialize {@link MapWriter} instances by materializing
 * them into a LinkedHashMap first; every other unknown class falls through to
 * the default JSONWriter handling.
 */
private static class MapWriterJSONWriter extends JSONWriter {
  public MapWriterJSONWriter(CharArr out, int indentSize) {
    super(out, indentSize);
  }
  @Override
  public void handleUnknownClass(Object o) {
    if (o instanceof MapWriter) {
      Map m = ((MapWriter) o).toMap(new LinkedHashMap<>());
      write(m);
    } else {
      super.handleUnknownClass(o);
    }
  }
}
/** Current time of {@code timeSource}, converted from nanoseconds to {@code unit}. */
public static long time(TimeSource timeSource, TimeUnit unit) {
  return unit.convert(timeSource.getTimeNs(), TimeUnit.NANOSECONDS);
}

View File

@ -169,6 +169,43 @@
"required": [
"name"
]
},
"add-runtimelib": {
"documentation": "",
"description" : "Add a remote jar to the classpath",
"#include": "cluster.Commands.runtimelib.properties"
},
"update-runtimelib": {
"documentation": "",
"description" : "Update the jar details",
"#include": "cluster.Commands.runtimelib.properties"
},
"delete-runtimelib": {
"documentation": "",
"description" : "delete a lib",
"type": "string"
},
"add-requesthandler": {
"type": "object",
"documentation": "",
"description" : "Create a node level request handler",
"properties": {
"name": {
"type": "string",
"description": "Name of the request handler. This is the path"
},
"class": {
"type": "string",
"description": "The class name"
}
},
"required": ["name", "class"],
"additionalProperties": true
},
"delete-requesthandler" : {
"description" : "delete a requesthandler",
"type": "string"
}
}
}

View File

@ -0,0 +1,23 @@
{
"type": "object",
"properties": {
"name": {
"type": "string",
"description": "A name for the library"
},
"url": {
"type": "string",
"description": "The remote url"
},
"sha512": {
"type": "string",
"description": "The sha512 hash of the jar"
},
"sig": {
"type": "string",
"description": "the signature of the jar"
}
},
"required" : ["name","url","sha512"]
}

View File

@ -0,0 +1,13 @@
{
"methods": [
"POST",
"GET",
"DELETE"
],
"url": {
"paths": [
"/node/ext/{handlerName}",
"/node/ext"
]
}
}

View File

@ -16,8 +16,6 @@
*/
package org.apache.solr.cloud;
import static org.apache.solr.common.util.Utils.makeMap;
import java.io.File;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
@ -108,6 +106,8 @@ import org.noggit.JSONWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.solr.common.util.Utils.makeMap;
/**
* TODO: we should still test this works as a custom update chain as well as
* what we test now - the default update chain

View File

@ -200,8 +200,8 @@ public class SolrCloudTestCase extends SolrTestCaseJ4 {
* Configure and run the {@link MiniSolrCloudCluster}
* @throws Exception if an error occurs on startup
*/
public void configure() throws Exception {
cluster = build();
public MiniSolrCloudCluster configure() throws Exception {
return cluster = build();
}
/**