SOLR-5653: Create a RestManager to provide REST API endpoints for reconfigurable plugins

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1576939 13f79535-47bb-0310-9956-ffa450edef68
Steven Rowe 2014-03-12 21:52:49 +00:00
parent 110097483b
commit 979230aca4
39 changed files with 3075 additions and 126 deletions

View File: SolrConfig.java

@@ -29,6 +29,7 @@ import org.apache.solr.handler.component.SearchComponent;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.response.QueryResponseWriter;
import org.apache.solr.response.transform.TransformerFactory;
import org.apache.solr.rest.RestManager;
import org.apache.solr.search.CacheConfig;
import org.apache.solr.search.FastLRUCache;
import org.apache.solr.search.QParserPlugin;
@@ -267,7 +268,7 @@ public class SolrConfig extends Config {
loadPluginInfo(UpdateLog.class,"updateHandler/updateLog");
loadPluginInfo(IndexSchemaFactory.class,"schemaFactory",
REQUIRE_CLASS);
loadPluginInfo(RestManager.class, "restManager");
updateHandlerInfo = loadUpdatehandlerInfo();
multipartUploadLimitKB = getInt(

View File: SolrCore.java

@@ -100,6 +100,9 @@ import org.apache.solr.response.SchemaXmlResponseWriter;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.response.XMLResponseWriter;
import org.apache.solr.response.transform.TransformerFactory;
import org.apache.solr.rest.ManagedResourceStorage;
import org.apache.solr.rest.RestManager;
import org.apache.solr.rest.ManagedResourceStorage.StorageIO;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.IndexSchemaFactory;
@@ -167,11 +170,17 @@ public final class SolrCore implements SolrInfoMBean {
private DirectoryFactory directoryFactory;
private IndexReaderFactory indexReaderFactory;
private final Codec codec;
private final ReentrantLock ruleExpiryLock;
public long getStartTime() { return startTime; }
private RestManager restManager;
public RestManager getRestManager() {
return restManager;
}
static int boolean_query_max_clause_count = Integer.MIN_VALUE;
// only change the BooleanQuery maxClauseCount once for ALL cores...
void booleanQueryMaxClauseCount() {
@@ -184,8 +193,7 @@ public final class SolrCore implements SolrInfoMBean {
}
}
}
/**
* The SolrResourceLoader used to load all resources for this core.
* @since solr 1.3
@@ -831,6 +839,9 @@ public final class SolrCore implements SolrInfoMBean {
if (iwRef != null) iwRef.decref();
}
// Initialize the RestManager
restManager = initRestManager();
// Finally tell anyone who wants to know
resourceLoader.inform(resourceLoader);
resourceLoader.inform(this); // last call before the latch is released.
@@ -2286,7 +2297,44 @@ public final class SolrCore implements SolrInfoMBean {
"update your config to use <string name='facet.sort'>.");
}
}
/**
* Creates and initializes a RestManager based on configuration args in solrconfig.xml.
* RestManager provides basic storage support for managed resource data, such as to
* persist stopwords to ZooKeeper if running in SolrCloud mode.
*/
@SuppressWarnings("unchecked")
protected RestManager initRestManager() throws SolrException {
PluginInfo restManagerPluginInfo =
getSolrConfig().getPluginInfo(RestManager.class.getName());
NamedList<String> initArgs = null;
RestManager mgr = null;
if (restManagerPluginInfo != null) {
if (restManagerPluginInfo.className != null) {
mgr = resourceLoader.newInstance(restManagerPluginInfo.className, RestManager.class);
}
if (restManagerPluginInfo.initArgs != null) {
initArgs = (NamedList<String>)restManagerPluginInfo.initArgs;
}
}
if (mgr == null)
mgr = new RestManager();
if (initArgs == null)
initArgs = new NamedList<>();
String collection = coreDescriptor.getCollectionName();
StorageIO storageIO =
ManagedResourceStorage.newStorageIO(collection, resourceLoader, initArgs);
mgr.init(resourceLoader, initArgs, storageIO);
return mgr;
}
public CoreDescriptor getCoreDescriptor() {
return coreDescriptor;
}
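The new restManager plugin hook means a core can swap in its own RestManager implementation from solrconfig.xml. A rough sketch of what that could look like follows; the class name, the log message, and the XML fragment in the comment are illustrative assumptions (not part of this commit), and it assumes RestManager.init() is overridable.

package com.example.solr;

import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.rest.ManagedResourceStorage.StorageIO;
import org.apache.solr.rest.RestManager;

/*
 * Hypothetical solrconfig.xml entry that initRestManager() above would pick up:
 *
 *   <restManager class="com.example.solr.LoggingRestManager">
 *     <str name="storageDir">/var/data/solr/managed</str>
 *   </restManager>
 */
public class LoggingRestManager extends RestManager {
  @Override
  public void init(SolrResourceLoader loader, NamedList<String> initArgs, StorageIO storageIO) {
    // log which storage backend was chosen, then defer to the default initialization
    log.info("Initializing custom RestManager with storage: {}", storageIO.getInfo());
    super.init(loader, initArgs, storageIO);
  }
}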

View File: SolrResourceLoader.java

@@ -33,6 +33,7 @@ import org.apache.solr.handler.component.SearchComponent;
import org.apache.solr.handler.component.ShardHandlerFactory;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.response.QueryResponseWriter;
import org.apache.solr.rest.RestManager;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.ManagedIndexSchemaFactory;
import org.apache.solr.schema.SimilarityFactory;
@@ -79,7 +80,11 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
static final String project = "solr";
static final String base = "org.apache" + "." + project;
static final String[] packages = {"","analysis.","schema.","handler.","search.","update.","core.","response.","request.","update.processor.","util.", "spelling.", "handler.component.", "handler.dataimport.", "spelling.suggest.", "spelling.suggest.fst." };
static final String[] packages = {
"", "analysis.", "schema.", "handler.", "search.", "update.", "core.", "response.", "request.",
"update.processor.", "util.", "spelling.", "handler.component.", "handler.dataimport.",
"spelling.suggest.", "spelling.suggest.fst.", "rest.schema.analysis."
};
protected URLClassLoader classLoader;
private final String instanceDir;
@@ -94,7 +99,20 @@ public class SolrResourceLoader implements ResourceLoader,Closeable
private final Properties coreProperties;
private volatile boolean live;
// Provide a registry so that managed resources can register themselves while the XML configuration
// documents are being parsed ... after all are registered, they are asked by the RestManager to
// initialize themselves. This two-step process is required because not all resources are available
// (such as the SolrZkClient) when XML docs are being parsed.
private RestManager.Registry managedResourceRegistry;
public synchronized RestManager.Registry getManagedResourceRegistry() {
if (managedResourceRegistry == null) {
managedResourceRegistry = new RestManager.Registry();
}
return managedResourceRegistry;
}
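For illustration, here is roughly how a Solr component would use this registry during config parsing: it registers an endpoint plus itself as an observer, and the RestManager constructs the ManagedResource and calls back once the stored data is loaded. All class names here are hypothetical; ManagedWordList is sketched after the ManagedResource base class further down in this diff.

package com.example.solr;

import java.io.IOException;
import org.apache.lucene.analysis.util.ResourceLoader;
import org.apache.lucene.analysis.util.ResourceLoaderAware;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.rest.ManagedResource;
import org.apache.solr.rest.ManagedResourceObserver;

/** Hypothetical component whose word list should be editable over the REST API. */
public class ManagedWordListAware implements ResourceLoaderAware, ManagedResourceObserver {

  private volatile ManagedWordList wordList; // set once the managed data is loaded

  @Override
  public void inform(ResourceLoader loader) throws IOException {
    // step 1: register while the XML configuration documents are being parsed
    ((SolrResourceLoader) loader).getManagedResourceRegistry()
        .registerManagedResource("/config/wordlist", ManagedWordList.class, this);
  }

  @Override
  public void onManagedResourceInitialized(NamedList<?> args, ManagedResource res) {
    // step 2: called back once during core initialization, after data is loaded from storage
    this.wordList = (ManagedWordList) res;
  }
}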
/**
* <p>
* This loader will delegate to the context classloader when possible,

View File: BaseSchemaResource.java → BaseSolrResource.java (renamed)

@@ -1,4 +1,4 @@
package org.apache.solr.rest.schema;
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@@ -48,31 +48,27 @@ import java.nio.charset.Charset;
/**
* Base class of all Solr Schema Restlet resource classes.
* Base class of all Solr Restlet server resource classes.
*/
abstract class BaseSchemaResource extends ServerResource {
private static final Charset UTF8 = Charset.forName("UTF-8");
public abstract class BaseSolrResource extends ServerResource {
protected static final Charset UTF8 = Charset.forName("UTF-8");
protected static final String SHOW_DEFAULTS = "showDefaults";
private SolrCore solrCore;
private IndexSchema schema;
private SolrQueryRequest solrRequest;
private SolrQueryResponse solrResponse;
private QueryResponseWriter responseWriter;
private String contentType;
private boolean doIndent;
protected SolrCore getSolrCore() { return solrCore; }
protected IndexSchema getSchema() { return schema; }
protected SolrQueryRequest getSolrRequest() { return solrRequest; }
protected SolrQueryResponse getSolrResponse() { return solrResponse; }
protected String getContentType() { return contentType; }
public SolrCore getSolrCore() { return solrCore; }
public IndexSchema getSchema() { return schema; }
public SolrQueryRequest getSolrRequest() { return solrRequest; }
public SolrQueryResponse getSolrResponse() { return solrResponse; }
public String getContentType() { return contentType; }
protected BaseSchemaResource() {
protected BaseSolrResource() {
super();
doIndent = true; // default to indenting
}
/**
@@ -113,9 +109,8 @@ abstract class BaseSchemaResource extends ServerResource {
responseWriterName = "json"; // Default to json writer
}
String indent = solrRequest.getParams().get("indent");
if (null != indent && ("".equals(indent) || "off".equals(indent))) {
doIndent = false;
} else { // indent by default
if (null == indent || ! ("off".equals(indent) || "false".equals(indent))) {
// indent by default
ModifiableSolrParams newParams = new ModifiableSolrParams(solrRequest.getParams());
newParams.remove(indent);
newParams.add("indent", "on");
@@ -124,7 +119,8 @@ abstract class BaseSchemaResource extends ServerResource {
responseWriter = solrCore.getQueryResponseWriter(responseWriterName);
contentType = responseWriter.getContentType(solrRequest, solrResponse);
final String path = getRequest().getRootRef().getPath();
if ( ! "/schema".equals(path)) {
if ( ! RestManager.SCHEMA_BASE_PATH.equals(path)
&& ! RestManager.CONFIG_BASE_PATH.equals(path)) {
// don't set webapp property on the request when context and core/collection are excluded
final int cutoffPoint = path.indexOf("/", 1);
final String firstPathElement = -1 == cutoffPoint ? path : path.substring(0, cutoffPoint);
@@ -148,7 +144,7 @@ abstract class BaseSchemaResource extends ServerResource {
*/
public class SolrOutputRepresentation extends OutputRepresentation {
SolrOutputRepresentation() {
public SolrOutputRepresentation() {
// No normalization, in case of a custom media type
super(MediaType.valueOf(contentType));
// TODO: For now, don't send the Vary: header, but revisit if/when content negotiation is added

View File: DELETEable.java (new)

@@ -0,0 +1,27 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.restlet.representation.Representation;
import org.restlet.resource.Delete;
/** Marker interface for resource classes that handle DELETE requests. */
public interface DELETEable {
@Delete
public Representation delete();
}

View File: ManagedResource.java (new)

@@ -0,0 +1,439 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.util.DateUtil;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.rest.ManagedResourceStorage.StorageIO;
import org.restlet.data.Status;
import org.restlet.representation.Representation;
import org.restlet.resource.ResourceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Supports Solr components that have external data that
* needs to be managed using the REST API.
*/
public abstract class ManagedResource {
public static final Logger log = LoggerFactory.getLogger(ManagedResource.class);
/**
* Marker interface to indicate a ManagedResource implementation class also supports
* managing child resources at path: /&lt;resource&gt;/{child}
*/
public static interface ChildResourceSupport {}
public static final String INIT_ARGS_JSON_FIELD = "initArgs";
public static final String MANAGED_JSON_LIST_FIELD = "managedList";
public static final String MANAGED_JSON_MAP_FIELD = "managedMap";
public static final String INITIALIZED_ON_JSON_FIELD = "initializedOn";
public static final String UPDATED_SINCE_INIT_JSON_FIELD = "updatedSinceInit";
private final String resourceId;
protected final SolrResourceLoader solrResourceLoader;
protected final ManagedResourceStorage storage;
protected NamedList<Object> managedInitArgs;
protected Date initializedOn;
protected Date lastUpdateSinceInitialization;
/**
* Initializes this managed resource, including setting up JSON-based storage using
* the provided storageIO implementation, such as ZK.
*/
protected ManagedResource(String resourceId, SolrResourceLoader loader, StorageIO storageIO)
throws SolrException {
this.resourceId = resourceId;
this.solrResourceLoader = loader;
this.storage = createStorage(storageIO, loader);
}
/**
* Called once during core initialization to get the managed
* data loaded from storage and notify observers.
*/
public void loadManagedDataAndNotify(List<ManagedResourceObserver> observers)
throws SolrException {
// load managed data from storage
reloadFromStorage();
// important!!! only affect the Solr component once during core initialization
// also, since most analysis components alter the initArgs as they process them,
// we need to clone the managed initArgs
notifyObserversDuringInit(managedInitArgs, observers);
// some basic date tracking around when the data was initialized and updated
initializedOn = new Date();
lastUpdateSinceInitialization = null;
}
/**
* Notifies all registered observers that the ManagedResource is initialized.
* This event only occurs once when the core is loaded. Thus, you need to
* reload the core to get updates applied to the analysis components that
* depend on the ManagedResource data.
*/
@SuppressWarnings("unchecked")
protected void notifyObserversDuringInit(NamedList<?> args, List<ManagedResourceObserver> observers)
throws SolrException {
if (observers == null || observers.isEmpty()) {
log.warn("No registered observers for {}", getResourceId());
return;
}
for (ManagedResourceObserver observer : observers) {
// clone the args for each observer as some components
// remove args as they process them, e.g. AbstractAnalysisFactory
NamedList<?> clonedArgs = args.clone();
observer.onManagedResourceInitialized(clonedArgs,this);
}
log.info("Notified {} observers of {}", observers.size(), getResourceId());
}
/**
* Potential extension point allowing concrete implementations to supply their own storage
* implementation. The default implementation uses JSON as the storage format and delegates
* the loading and saving of JSON bytes to the supplied StorageIO class.
*/
protected ManagedResourceStorage createStorage(StorageIO storageIO, SolrResourceLoader loader)
throws SolrException {
return new ManagedResourceStorage.JsonStorage(storageIO, loader);
}
/**
* Returns the resource loader used by this resource.
*/
public SolrResourceLoader getResourceLoader() {
return solrResourceLoader;
}
/**
* Gets the resource ID for this managed resource.
*/
public String getResourceId() {
return resourceId;
}
/**
* Gets the ServerResource class to register this endpoint with the Rest API router;
* in most cases, the default RestManager.ManagedEndpoint class is sufficient but
* ManagedResource implementations can override this method if a different ServerResource
* class is needed.
*/
public Class<? extends BaseSolrResource> getServerResourceClass() {
return RestManager.ManagedEndpoint.class;
}
/**
* Called from {@link #doPut(BaseSolrResource,Representation,Object)}
* to update this resource's init args using the given updatedArgs
*/
@SuppressWarnings("unchecked")
protected boolean updateInitArgs(NamedList<?> updatedArgs) {
if (updatedArgs == null || updatedArgs.size() == 0) {
return false;
}
boolean madeChanges = false;
if ( ! managedInitArgs.equals(updatedArgs)) {
managedInitArgs = (NamedList<Object>)updatedArgs.clone();
madeChanges = true;
}
return madeChanges;
}
/**
* Invoked when this object determines it needs to reload the stored data.
*/
@SuppressWarnings("unchecked")
protected synchronized void reloadFromStorage() throws SolrException {
String resourceId = getResourceId();
Object data = null;
try {
data = storage.load(resourceId);
} catch (FileNotFoundException fnf) {
log.warn("No stored data found for {}", resourceId);
} catch (IOException ioExc) {
throw new SolrException(ErrorCode.SERVER_ERROR,
"Failed to load stored data for "+resourceId+" due to: "+ioExc, ioExc);
}
Object managedData = null;
if (data != null) {
if (!(data instanceof Map)) {
throw new SolrException(ErrorCode.SERVER_ERROR,
"Stored data for "+resourceId+" is not a valid JSON object!");
}
Map<String,Object> jsonMap = (Map<String,Object>)data;
Map<String,Object> initArgsMap = (Map<String,Object>)jsonMap.get(INIT_ARGS_JSON_FIELD);
managedInitArgs = new NamedList<>(initArgsMap);
log.info("Loaded initArgs {} for {}", managedInitArgs, resourceId);
if (jsonMap.containsKey(MANAGED_JSON_LIST_FIELD)) {
Object jsonList = jsonMap.get(MANAGED_JSON_LIST_FIELD);
if (!(jsonList instanceof List)) {
String errMsg =
String.format(Locale.ROOT,
"Expected JSON array as value for %s but client sent a %s instead!",
MANAGED_JSON_LIST_FIELD, jsonList.getClass().getName());
throw new SolrException(ErrorCode.SERVER_ERROR, errMsg);
}
managedData = jsonList;
} else if (jsonMap.containsKey(MANAGED_JSON_MAP_FIELD)) {
Object jsonObj = jsonMap.get(MANAGED_JSON_MAP_FIELD);
if (!(jsonObj instanceof Map)) {
String errMsg =
String.format(Locale.ROOT,
"Expected JSON map as value for %s but client sent a %s instead!",
MANAGED_JSON_MAP_FIELD, jsonObj.getClass().getName());
throw new SolrException(ErrorCode.SERVER_ERROR, errMsg);
}
managedData = jsonObj;
}
}
if (managedInitArgs == null) {
managedInitArgs = new NamedList<>();
}
onManagedDataLoadedFromStorage(managedInitArgs, managedData);
}
/**
* Method called after data has been loaded from storage to give the concrete
* implementation a chance to post-process the data.
*/
protected abstract void onManagedDataLoadedFromStorage(NamedList<?> managedInitArgs, Object managedData)
throws SolrException;
/**
* Persists managed data to the configured storage IO as a JSON object.
*/
public synchronized void storeManagedData(Object managedData) {
Map<String,Object> toStore = buildMapToStore(managedData);
String resourceId = getResourceId();
try {
storage.store(resourceId, toStore);
// keep track that the managed data has been updated
lastUpdateSinceInitialization = new Date();
} catch (Throwable storeErr) {
// store failed, so try to reset the state of this object by reloading
// from storage and then failing the store request
try {
reloadFromStorage();
} catch (Exception reloadExc) {
// note: the data we're managing now remains in a dubious state
// however the text analysis component remains unaffected
// (at least until core reload)
log.error("Failed to load stop words from storage due to: "+reloadExc);
}
String errMsg = String.format(Locale.ROOT,
"Failed to store data for %s due to: %s",
resourceId, storeErr.toString());
log.error(errMsg, storeErr);
throw new ResourceException(Status.SERVER_ERROR_INTERNAL, errMsg, storeErr);
}
}
/**
* Returns this resource's initialization timestamp.
*/
public String getInitializedOn() {
StringBuilder dateBuf = new StringBuilder();
try {
DateUtil.formatDate(initializedOn, null, dateBuf);
} catch (IOException e) {
// safe to ignore
}
return dateBuf.toString();
}
/**
* Returns the timestamp of the most recent update,
* or null if this resource has not been updated since initialization.
*/
public String getUpdatedSinceInitialization() {
String dateStr = null;
if (lastUpdateSinceInitialization != null) {
StringBuilder dateBuf = new StringBuilder();
try {
DateUtil.formatDate(lastUpdateSinceInitialization, null, dateBuf);
dateStr = dateBuf.toString();
} catch (IOException e) {
// safe to ignore here
}
}
return dateStr;
}
/**
* Returns true if this resource has been changed since initialization.
*/
public boolean hasChangesSinceInitialization() {
return (lastUpdateSinceInitialization != null);
}
/**
* Builds the JSON object to be stored, containing initArgs and managed data fields.
*/
protected Map<String,Object> buildMapToStore(Object managedData) {
Map<String,Object> toStore = new LinkedHashMap<>();
toStore.put(INIT_ARGS_JSON_FIELD, convertNamedListToMap(managedInitArgs));
// report important dates when data was init'd / updated
toStore.put(INITIALIZED_ON_JSON_FIELD, getInitializedOn());
// if the managed data has been updated since initialization (i.e. it's dirty)
// return that in the response as well ... which gives a good hint that the
// client needs to re-load the collection / core to apply the updates
if (hasChangesSinceInitialization()) {
toStore.put(UPDATED_SINCE_INIT_JSON_FIELD, getUpdatedSinceInitialization());
}
if (managedData != null) {
if (managedData instanceof List || managedData instanceof Set) {
toStore.put(MANAGED_JSON_LIST_FIELD, managedData);
} else if (managedData instanceof Map) {
toStore.put(MANAGED_JSON_MAP_FIELD, managedData);
} else {
throw new IllegalArgumentException(
"Invalid managed data type "+managedData.getClass().getName()+
"! Only List, Set, or Map objects are supported by this ManagedResource!");
}
}
return toStore;
}
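To make the stored layout concrete, this small sketch builds the same shape of map by hand and serializes it with noggit's JSONUtil, which is also what the JsonStorage implementation further down uses; the field values are made up for illustration.

import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import org.noggit.JSONUtil;

public class StoredShapeSketch {
  public static void main(String[] args) {
    Map<String,Object> toStore = new LinkedHashMap<>();
    toStore.put("initArgs", Collections.singletonMap("ignoreCase", true));
    toStore.put("initializedOn", "2014-03-12T21:52:49.000Z");
    toStore.put("managedList", Arrays.asList("a", "an", "the"));
    // prints roughly:
    // {"initArgs":{"ignoreCase":true},"initializedOn":"2014-03-12T21:52:49.000Z","managedList":["a","an","the"]}
    System.out.println(JSONUtil.toJSON(toStore));
  }
}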
/**
* Converts a NamedList&lt;?&gt; into an ordered Map for returning as JSON.
*/
protected Map<String,Object> convertNamedListToMap(NamedList<?> args) {
Map<String,Object> argsMap = new LinkedHashMap<>();
if (args != null) {
for (Map.Entry<String,?> entry : args) {
argsMap.put(entry.getKey(), entry.getValue());
}
}
return argsMap;
}
/**
* Just calls {@link #doPut(BaseSolrResource,Representation,Object)};
* override to change the behavior of POST handling.
*/
public void doPost(BaseSolrResource endpoint, Representation entity, Object json) {
doPut(endpoint, entity, json);
}
/**
* Applies changes to initArgs or managed data.
*/
@SuppressWarnings("unchecked")
public synchronized void doPut(BaseSolrResource endpoint, Representation entity, Object json) {
log.info("Processing update to {}: {} is a "+json.getClass().getName(), getResourceId(), json);
boolean updatedInitArgs = false;
Object managedData = null;
if (json instanceof Map) {
// hmmmm ... not sure how flexible we want to be here?
Map<String,Object> jsonMap = (Map<String,Object>)json;
if (jsonMap.containsKey(INIT_ARGS_JSON_FIELD) ||
jsonMap.containsKey(MANAGED_JSON_LIST_FIELD) ||
jsonMap.containsKey(MANAGED_JSON_MAP_FIELD))
{
Map<String,Object> initArgsMap = (Map<String,Object>)jsonMap.get(INIT_ARGS_JSON_FIELD);
updatedInitArgs = updateInitArgs(new NamedList<>(initArgsMap));
if (jsonMap.containsKey(MANAGED_JSON_LIST_FIELD)) {
managedData = jsonMap.get(MANAGED_JSON_LIST_FIELD);
} else if (jsonMap.containsKey(MANAGED_JSON_MAP_FIELD)) {
managedData = jsonMap.get(MANAGED_JSON_MAP_FIELD);
}
} else {
managedData = jsonMap;
}
} else if (json instanceof List) {
managedData = json;
} else {
throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST,
"Unsupported update format "+json.getClass().getName());
}
Object updated = null;
if (managedData != null) {
updated = applyUpdatesToManagedData(managedData);
}
if (updatedInitArgs || updated != null) {
storeManagedData(updated);
}
// PUT just returns success status code with an empty body
}
/**
* Called by the RestManager framework after this resource has been deleted
* to allow it to close and clean up any resources it uses.
*
* @throws IOException if an error occurs in the underlying storage when
* trying to delete
*/
public void onResourceDeleted() throws IOException {
storage.delete(resourceId);
}
/**
* Called during PUT/POST processing to apply updates to the managed data passed from the client.
*/
protected abstract Object applyUpdatesToManagedData(Object updates);
/**
* Called by {@link RestManager.ManagedEndpoint#delete()}
* to delete a named part (the given childId) of the
* resource at the given endpoint
*/
public abstract void doDeleteChild(BaseSolrResource endpoint, String childId);
/**
* Called by {@link RestManager.ManagedEndpoint#get()}
* to retrieve a named part (the given childId) of the
* resource at the given endpoint
*/
public abstract void doGet(BaseSolrResource endpoint, String childId);
}
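Below is a minimal sketch of a concrete subclass, the hypothetical ManagedWordList referenced earlier: it keeps a flat word list stored under the managedList field and assumes the RestManager instantiates it reflectively through the three-argument constructor shown. It is an illustration, not code from this commit.

package com.example.solr;

import java.util.ArrayList;
import java.util.List;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.rest.BaseSolrResource;
import org.apache.solr.rest.ManagedResource;
import org.apache.solr.rest.ManagedResourceStorage.StorageIO;

/** Hypothetical managed resource backing the /config/wordlist endpoint. */
public class ManagedWordList extends ManagedResource
    implements ManagedResource.ChildResourceSupport {

  private final List<String> words = new ArrayList<>();

  public ManagedWordList(String resourceId, SolrResourceLoader loader, StorageIO storageIO)
      throws SolrException {
    super(resourceId, loader, storageIO);
  }

  public List<String> getWords() {
    return words;
  }

  @SuppressWarnings("unchecked")
  @Override
  protected void onManagedDataLoadedFromStorage(NamedList<?> initArgs, Object managedData) {
    words.clear();
    if (managedData != null) {
      words.addAll((List<String>) managedData); // stored under the managedList field
    }
  }

  @SuppressWarnings("unchecked")
  @Override
  protected Object applyUpdatesToManagedData(Object updates) {
    // the client PUT/POSTed a JSON array of words; merge it and return what should be stored
    words.addAll((List<String>) updates);
    return words;
  }

  @Override
  public void doGet(BaseSolrResource endpoint, String childId) {
    if (childId == null) {
      endpoint.getSolrResponse().add("wordList", words);
    } else {
      endpoint.getSolrResponse().add(childId, words.contains(childId));
    }
  }

  @Override
  public void doDeleteChild(BaseSolrResource endpoint, String childId) {
    if (words.remove(childId)) {
      storeManagedData(words);
    }
  }
}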

View File: ManagedResourceObserver.java (new)

@@ -0,0 +1,38 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
/**
* Allows a Solr component to register as an observer of important
* ManagedResource events, such as when the managed data is loaded.
*/
public interface ManagedResourceObserver {
/**
* Event notification raised once during core initialization to notify
* listeners that a ManagedResource is fully initialized. The most
* common implementation of this method is to pull the managed data from
* the concrete ManagedResource and use it to initialize an analysis component.
* For example, the ManagedStopFilterFactory implements this method to
* receive the list of managed stop words needed to create a CharArraySet
* for the StopFilter.
*/
void onManagedResourceInitialized(NamedList<?> args, ManagedResource res)
throws SolrException;
}

View File: ManagedResourceStorage.java (new)

@@ -0,0 +1,500 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import org.apache.lucene.util.BytesRef;
import org.apache.solr.cloud.ZkSolrResourceLoader;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrResourceLoader;
import org.noggit.JSONParser;
import org.noggit.JSONUtil;
import org.noggit.ObjectBuilder;
import org.restlet.data.Status;
import org.restlet.resource.ResourceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Abstract base class that provides most of the functionality needed
* to store arbitrary data for managed resources. Concrete implementations
* need to decide the underlying format that data is stored in, such as JSON.
*
* The underlying storage I/O layer will be determined by the environment
* Solr is running in; e.g., in cloud mode, data will be stored in and loaded
* from ZooKeeper.
*/
public abstract class ManagedResourceStorage {
/**
* Hides the underlying storage implementation for data being managed
* by a ManagedResource. For instance, a ManagedResource may use JSON as
* the data format and an instance of this class to persist and load
* the JSON bytes to/from some backing store, such as ZooKeeper.
*/
public static interface StorageIO {
String getInfo();
void configure(SolrResourceLoader loader, NamedList<String> initArgs) throws SolrException;
boolean exists(String storedResourceId) throws IOException;
InputStream openInputStream(String storedResourceId) throws IOException;
OutputStream openOutputStream(String storedResourceId) throws IOException;
boolean delete(String storedResourceId) throws IOException;
}
public static final String STORAGE_IO_CLASS_INIT_ARG = "storageIO";
public static final String STORAGE_DIR_INIT_ARG = "storageDir";
/**
* Creates a new StorageIO instance for a Solr core, taking into account
* whether the core is running in cloud mode as well as initArgs.
*/
public static StorageIO newStorageIO(String collection, SolrResourceLoader resourceLoader, NamedList<String> initArgs) {
StorageIO storageIO = null;
SolrZkClient zkClient = null;
String zkConfigName = null;
if (resourceLoader instanceof ZkSolrResourceLoader) {
zkClient = ((ZkSolrResourceLoader)resourceLoader).getZkController().getZkClient();
try {
zkConfigName = ((ZkSolrResourceLoader)resourceLoader).getZkController().
getZkStateReader().readConfigName(collection);
} catch (Exception e) {
log.error("Failed to get config name for collection {} due to: {}",
collection, e.toString());
}
if (zkConfigName == null) {
throw new SolrException(ErrorCode.SERVER_ERROR,
"Could not find config name for collection:" + collection);
}
}
if (initArgs.get(STORAGE_IO_CLASS_INIT_ARG) != null) {
storageIO = resourceLoader.newInstance(initArgs.get(STORAGE_IO_CLASS_INIT_ARG), StorageIO.class);
} else {
if (zkClient != null) {
String znodeBase = "/configs/"+zkConfigName;
log.info("Setting up ZooKeeper-based storage for the RestManager with znodeBase: "+znodeBase);
storageIO = new ManagedResourceStorage.ZooKeeperStorageIO(zkClient, znodeBase);
} else {
storageIO = new FileStorageIO();
}
}
if (storageIO instanceof FileStorageIO) {
// using the local filesystem; if storageDir is not set in solrconfig.xml, default to the core's config dir
if (initArgs.get(STORAGE_DIR_INIT_ARG) == null) {
initArgs.add(STORAGE_DIR_INIT_ARG, resourceLoader.getConfigDir());
}
}
storageIO.configure(resourceLoader, initArgs);
return storageIO;
}
/**
* Local file-based storage implementation.
*/
public static class FileStorageIO implements StorageIO {
private String storageDir;
@Override
public void configure(SolrResourceLoader loader, NamedList<String> initArgs) throws SolrException {
String storageDirArg = initArgs.get("storageDir");
if (storageDirArg == null || storageDirArg.trim().length() == 0)
throw new IllegalArgumentException("Required configuration parameter 'storageDir' not provided!");
File dir = new File(storageDirArg);
if (!dir.isDirectory())
dir.mkdirs();
storageDir = dir.getAbsolutePath();
log.info("File-based storage initialized to use dir: "+storageDir);
}
@Override
public boolean exists(String storedResourceId) throws IOException {
return (new File(storageDir, storedResourceId)).exists();
}
@Override
public InputStream openInputStream(String storedResourceId) throws IOException {
return new FileInputStream(storageDir+"/"+storedResourceId);
}
@Override
public OutputStream openOutputStream(String storedResourceId) throws IOException {
return new FileOutputStream(storageDir+"/"+storedResourceId);
}
@Override
public boolean delete(String storedResourceId) throws IOException {
File storedFile = new File(storageDir, storedResourceId);
return storedFile.isFile() ? storedFile.delete() : false;
}
@Override
public String getInfo() {
return "file:dir="+storageDir;
}
} // end FileStorageIO
/**
* ZooKeeper based storage implementation that uses the SolrZkClient provided
* by the CoreContainer.
*/
public static class ZooKeeperStorageIO implements StorageIO {
protected SolrZkClient zkClient;
protected String znodeBase;
protected boolean retryOnConnLoss = true;
public ZooKeeperStorageIO(SolrZkClient zkClient, String znodeBase) {
this.zkClient = zkClient;
this.znodeBase = znodeBase;
}
@Override
public void configure(SolrResourceLoader loader, NamedList<String> initArgs) throws SolrException {
// validate connectivity and the configured znode base
try {
if (!zkClient.exists(znodeBase, retryOnConnLoss)) {
zkClient.makePath(znodeBase, retryOnConnLoss);
}
} catch (Exception exc) {
String errMsg = String.format
(Locale.ROOT, "Failed to verify znode at %s due to: %s", znodeBase, exc.toString());
log.error(errMsg, exc);
throw new SolrException(ErrorCode.SERVER_ERROR, errMsg, exc);
}
log.info("Configured ZooKeeperStorageIO with znodeBase: "+znodeBase);
}
@Override
public boolean exists(String storedResourceId) throws IOException {
final String znodePath = getZnodeForResource(storedResourceId);
try {
return zkClient.exists(znodePath, retryOnConnLoss);
} catch (Exception e) {
if (e instanceof IOException) {
throw (IOException)e;
} else {
throw new IOException("Failed to read data at "+znodePath, e);
}
}
}
@Override
public InputStream openInputStream(String storedResourceId) throws IOException {
final String znodePath = getZnodeForResource(storedResourceId);
byte[] znodeData = null;
try {
if (zkClient.exists(znodePath, retryOnConnLoss)) {
znodeData = zkClient.getData(znodePath, null, null, retryOnConnLoss);
}
} catch (Exception e) {
if (e instanceof IOException) {
throw (IOException)e;
} else {
throw new IOException("Failed to read data at "+znodePath, e);
}
}
if (znodeData != null) {
log.info("Read {} bytes from znode {}", znodeData.length, znodePath);
} else {
znodeData = new byte[0];
log.info("No data found for znode {}", znodePath);
}
return new ByteArrayInputStream(znodeData);
}
@Override
public OutputStream openOutputStream(String storedResourceId) throws IOException {
final String znodePath = getZnodeForResource(storedResourceId);
final boolean retryOnConnLoss = this.retryOnConnLoss;
ByteArrayOutputStream baos = new ByteArrayOutputStream() {
@Override
public void close() {
byte[] znodeData = toByteArray();
try {
if (zkClient.exists(znodePath, retryOnConnLoss)) {
zkClient.setData(znodePath, znodeData, retryOnConnLoss);
log.info("Wrote {} bytes to existing znode {}", znodeData.length, znodePath);
} else {
zkClient.makePath(znodePath, znodeData, retryOnConnLoss);
log.info("Wrote {} bytes to new znode {}", znodeData.length, znodePath);
}
} catch (Exception e) {
// have to throw an unchecked exception here as we're in close(),
// which doesn't throw IOException
if (e instanceof RuntimeException) {
throw (RuntimeException)e;
} else {
throw new ResourceException(Status.SERVER_ERROR_INTERNAL,
"Failed to save data to ZooKeeper znode: "+znodePath+" due to: "+e, e);
}
}
}
};
return baos;
}
/**
* Returns the Znode for the given storedResourceId by combining it
* with the znode base.
*/
protected String getZnodeForResource(String storedResourceId) {
return String.format(Locale.ROOT, "%s/%s", znodeBase, storedResourceId);
}
@Override
public boolean delete(String storedResourceId) throws IOException {
boolean wasDeleted = false;
final String znodePath = getZnodeForResource(storedResourceId);
// this might be overkill for a delete operation
try {
if (zkClient.exists(znodePath, retryOnConnLoss)) {
log.info("Attempting to delete znode {}", znodePath);
zkClient.delete(znodePath, -1, retryOnConnLoss);
wasDeleted = ! zkClient.exists(znodePath, retryOnConnLoss); // deleted if the znode is gone
if (wasDeleted) {
log.info("Deleted znode {}", znodePath);
} else {
log.warn("Failed to delete znode {}", znodePath);
}
} else {
log.warn("Znode {} does not exist; delete operation ignored.", znodePath);
}
} catch (Exception e) {
if (e instanceof IOException) {
throw (IOException)e;
} else {
throw new IOException("Failed to read data at "+znodePath, e);
}
}
return wasDeleted;
}
@Override
public String getInfo() {
return "ZooKeeperStorageIO:path="+znodeBase;
}
} // end ZooKeeperStorageIO
/**
* Memory-backed storage IO; not really intended for storing large amounts
* of data in production, but useful for testing and other transient workloads.
*/
public static class InMemoryStorageIO implements StorageIO {
Map<String,BytesRef> storage = new HashMap<>();
@Override
public void configure(SolrResourceLoader loader, NamedList<String> initArgs)
throws SolrException {}
@Override
public boolean exists(String storedResourceId) throws IOException {
return storage.containsKey(storedResourceId);
}
@Override
public InputStream openInputStream(String storedResourceId)
throws IOException {
BytesRef storedVal = storage.get(storedResourceId);
if (storedVal == null)
throw new FileNotFoundException(storedResourceId);
return new ByteArrayInputStream(storedVal.bytes, storedVal.offset, storedVal.length);
}
@Override
public OutputStream openOutputStream(final String storedResourceId)
throws IOException {
ByteArrayOutputStream boas = new ByteArrayOutputStream() {
@Override
public void close() {
storage.put(storedResourceId, new BytesRef(toByteArray()));
}
};
return boas;
}
@Override
public boolean delete(String storedResourceId) throws IOException {
return (storage.remove(storedResourceId) != null);
}
@Override
public String getInfo() {
return "InMemoryStorage";
}
} // end InMemoryStorageIO class
/**
* Default storage implementation that uses JSON as the storage format for managed data.
*/
public static class JsonStorage extends ManagedResourceStorage {
public JsonStorage(StorageIO storageIO, SolrResourceLoader loader) {
super(storageIO, loader);
}
/**
* Determines the relative path (from the storage root) for the given resource.
* In this case, it returns a file named with the .json extension.
*/
@Override
public String getStoredResourceId(String resourceId) {
return resourceId.replace('/','_')+".json";
}
@Override
protected Object parseText(Reader reader, String resourceId) throws IOException {
return ObjectBuilder.getVal(new JSONParser(reader));
}
@Override
public void store(String resourceId, Object toStore) throws IOException {
String json = JSONUtil.toJSON(toStore);
String storedResourceId = getStoredResourceId(resourceId);
OutputStreamWriter writer = null;
try {
writer = new OutputStreamWriter(storageIO.openOutputStream(storedResourceId), UTF_8);
writer.write(json);
writer.flush();
} finally {
if (writer != null) {
try {
writer.close();
} catch (Exception ignore){}
}
}
log.info("Saved JSON object to path {} using {}",
storedResourceId, storageIO.getInfo());
}
} // end JsonStorage
public static final Logger log = LoggerFactory.getLogger(ManagedResourceStorage.class);
public static final Charset UTF_8 = Charset.forName("UTF-8");
protected StorageIO storageIO;
protected SolrResourceLoader loader;
protected ManagedResourceStorage(StorageIO storageIO, SolrResourceLoader loader) {
this.storageIO = storageIO;
this.loader = loader;
}
/** Returns the resource loader used by this storage instance */
public SolrResourceLoader getResourceLoader() {
return loader;
}
/** Returns the storageIO instance used by this storage instance */
public StorageIO getStorageIO() {
return storageIO;
}
/**
* Gets the unique identifier for a stored resource, typically based
* on the resourceId and some storage-specific information, such as
* file extension and storage root directory.
*/
public abstract String getStoredResourceId(String resourceId);
/**
* Loads a resource from storage; the default implementation makes
* the assumption that the data is stored as UTF-8 encoded text,
* such as JSON. This method should be overridden if that assumption
* is invalid.
*/
public Object load(String resourceId) throws IOException {
String storedResourceId = getStoredResourceId(resourceId);
log.info("Reading {} using {}", storedResourceId, storageIO.getInfo());
InputStream inputStream = storageIO.openInputStream(storedResourceId);
if (inputStream == null) {
return null;
}
Object parsed = null;
InputStreamReader reader = null;
try {
reader = new InputStreamReader(inputStream, UTF_8);
parsed = parseText(reader, resourceId);
} finally {
if (reader != null) {
try {
reader.close();
} catch (Exception ignore){}
}
}
String objectType = (parsed != null) ? parsed.getClass().getSimpleName() : "null";
log.info(String.format(Locale.ROOT, "Loaded %s at path %s using %s",
objectType, storedResourceId, storageIO.getInfo()));
return parsed;
}
/**
* Called by {@link ManagedResourceStorage#load(String)} to convert the
* serialized resource into its in-memory representation.
*/
protected Object parseText(Reader reader, String resourceId) throws IOException {
// no-op: subclasses should override this if they deal with text.
return null;
}
/** Persists the given toStore object with the given resourceId. */
public abstract void store(String resourceId, Object toStore) throws IOException;
/** Removes the given resourceId's persisted representation. */
public boolean delete(String resourceId) throws IOException {
return storageIO.delete(getStoredResourceId(resourceId));
}
}
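As a quick usage sketch of the storage layer (handy in tests): pair the in-memory StorageIO with JsonStorage, store a map, and read it back. Passing a null SolrResourceLoader is an assumption made for brevity, since JsonStorage in this commit only holds onto the loader.

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.solr.rest.ManagedResourceStorage;
import org.apache.solr.rest.ManagedResourceStorage.InMemoryStorageIO;
import org.apache.solr.rest.ManagedResourceStorage.JsonStorage;

public class JsonStorageSketch {
  public static void main(String[] args) throws Exception {
    InMemoryStorageIO storageIO = new InMemoryStorageIO(); // configure() is a no-op here
    ManagedResourceStorage storage = new JsonStorage(storageIO, null);

    Map<String,Object> data = new LinkedHashMap<>();
    data.put("managedList", Arrays.asList("a", "an", "the"));

    // stored in memory as "_config_wordlist.json" per JsonStorage.getStoredResourceId()
    storage.store("/config/wordlist", data);

    Object loaded = storage.load("/config/wordlist"); // parsed back into a Map
    System.out.println(loaded);
  }
}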

View File: RestManager.java (new)

@@ -0,0 +1,749 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrRequestInfo;
import org.apache.solr.rest.ManagedResourceStorage.StorageIO;
import org.noggit.ObjectBuilder;
import org.restlet.Request;
import org.restlet.data.MediaType;
import org.restlet.data.Method;
import org.restlet.data.Status;
import org.restlet.representation.Representation;
import org.restlet.resource.ResourceException;
import org.restlet.routing.Router;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Supports runtime mapping of REST API endpoints to ManagedResource
* implementations; endpoints can be registered at either the /schema
* or /config base paths, depending on which base path is more appropriate
* for the type of managed resource.
*/
public class RestManager {
public static final Logger log = LoggerFactory.getLogger(RestManager.class);
public static final String SCHEMA_BASE_PATH = "/schema";
public static final String CONFIG_BASE_PATH = "/config";
public static final String MANAGED_ENDPOINT = "/managed";
// used for validating resourceIds provided during registration
private static final Pattern resourceIdRegex = Pattern.compile("(/config|/schema)(/.*)");
/**
* Used internally to keep track of registrations during core initialization
*/
private static class ManagedResourceRegistration {
String resourceId;
Class<? extends ManagedResource> implClass;
List<ManagedResourceObserver> observers = new ArrayList<>();
private ManagedResourceRegistration(String resourceId,
Class<? extends ManagedResource> implClass,
ManagedResourceObserver observer)
{
this.resourceId = resourceId;
this.implClass = implClass;
if (observer != null) {
this.observers.add(observer);
}
}
/** Returns resourceId, class, and number of observers of this registered resource */
public Map<String,String> getInfo() {
Map<String,String> info = new HashMap<>();
info.put("resourceId", resourceId);
info.put("class", implClass.getName());
info.put("numObservers", String.valueOf(observers.size()));
return info;
}
}
/**
* Per-core registry of ManagedResources found during core initialization.
*
* Registration of managed resources can happen before the RestManager is
* fully initialized. To avoid timing issues, resources register themselves
* and then the RestManager initializes all ManagedResources before the core
* is activated.
*/
public static class Registry {
private Map<String,ManagedResourceRegistration> registered = new TreeMap<>();
// REST API endpoints that need to be protected against dynamic endpoint creation
private final Set<String> reservedEndpoints = new HashSet<>();
private final Pattern reservedEndpointsPattern;
public Registry() {
reservedEndpoints.add(CONFIG_BASE_PATH + MANAGED_ENDPOINT);
reservedEndpoints.add(SCHEMA_BASE_PATH + MANAGED_ENDPOINT);
for (String reservedEndpoint : SolrSchemaRestApi.getReservedEndpoints()) {
reservedEndpoints.add(reservedEndpoint);
}
for (String reservedEndpoint : SolrConfigRestApi.getReservedEndpoints()) {
reservedEndpoints.add(reservedEndpoint);
}
reservedEndpointsPattern = getReservedEndpointsPattern();
}
/**
* Returns the set of non-registerable endpoints.
*/
public Set<String> getReservedEndpoints() {
return Collections.unmodifiableSet(reservedEndpoints);
}
/**
* Returns a Pattern, to be used with Matcher.matches(), that will recognize
* prefixes or full matches against reserved endpoints that need to be protected
* against dynamic endpoint registration. group(1) will contain the match
* regardless of whether it's a full match or a prefix.
*/
private Pattern getReservedEndpointsPattern() {
// Match any of the reserved endpoints exactly, or followed by a slash and more stuff
StringBuilder builder = new StringBuilder();
builder.append("(");
boolean notFirst = false;
for (String reservedEndpoint : reservedEndpoints) {
if (notFirst) {
builder.append("|");
} else {
notFirst = true;
}
builder.append(reservedEndpoint);
}
builder.append(")(?:|/.*)");
return Pattern.compile(builder.toString());
}
/**
* Get a view of the currently registered resources.
*/
public Collection<ManagedResourceRegistration> getRegistered() {
return Collections.unmodifiableCollection(registered.values());
}
/**
* Register the need to use a ManagedResource; this method is typically called
* by a Solr component during core initialization to register itself as an
* observer of a specific type of ManagedResource. As many Solr components may
* share the same ManagedResource, this method only serves to associate the
* observer with an endpoint and implementation class. The actual construction
* of the ManagedResource and loading of data from storage occurs later once
* the RestManager is fully initialized.
* @param resourceId - An endpoint in the Rest API to manage the resource; must
* start with /config or /schema.
* @param implClass - Class that implements ManagedResource.
* @param observer - Solr component that needs to know when the data being managed
* by the ManagedResource is loaded, such as a TokenFilter.
*/
public synchronized void registerManagedResource(String resourceId,
Class<? extends ManagedResource> implClass, ManagedResourceObserver observer) {
if (resourceId == null)
throw new IllegalArgumentException(
"Must provide a non-null resourceId to register a ManagedResource!");
Matcher resourceIdValidator = resourceIdRegex.matcher(resourceId);
if (!resourceIdValidator.matches()) {
String errMsg = String.format(Locale.ROOT,
"Invalid resourceId '%s'; must start with %s or %s.",
resourceId, CONFIG_BASE_PATH, SCHEMA_BASE_PATH);
throw new SolrException(ErrorCode.SERVER_ERROR, errMsg);
}
// protect reserved REST API endpoints from being used by a managed resource
Matcher reservedEndpointsMatcher = reservedEndpointsPattern.matcher(resourceId);
if (reservedEndpointsMatcher.matches()) {
throw new SolrException(ErrorCode.SERVER_ERROR,
reservedEndpointsMatcher.group(1)
+ " is a reserved endpoint used by the Solr REST API!");
}
// IMPORTANT: this code should assume there is no RestManager at this point
// it's ok to re-register the same class for an existing path
ManagedResourceRegistration reg = registered.get(resourceId);
if (reg != null) {
if (!reg.implClass.equals(implClass)) {
String errMsg = String.format(Locale.ROOT,
"REST API path %s already registered to instances of %s",
resourceId, reg.implClass.getName());
throw new SolrException(ErrorCode.SERVER_ERROR, errMsg);
}
if (observer != null) {
reg.observers.add(observer);
log.info("Added observer of type {} to existing ManagedResource {}",
observer.getClass().getName(), resourceId);
}
} else {
registered.put(resourceId,
new ManagedResourceRegistration(resourceId, implClass, observer));
log.info("Registered ManagedResource impl {} for path {}",
implClass.getName(), resourceId);
}
}
}
/**
* Locates the RestManager using ThreadLocal SolrRequestInfo.
*/
public static RestManager getRestManager(SolrRequestInfo solrRequestInfo) {
if (solrRequestInfo == null)
throw new ResourceException(Status.SERVER_ERROR_INTERNAL,
"No SolrRequestInfo in this Thread!");
SolrQueryRequest req = solrRequestInfo.getReq();
RestManager restManager =
(req != null) ? req.getCore().getRestManager() : null;
if (restManager == null)
throw new ResourceException(Status.SERVER_ERROR_INTERNAL,
"No RestManager found!");
return restManager;
}
/**
* The Restlet router needs a lightweight extension of ServerResource to delegate a request
* to. ManagedResource implementations are heavy-weight objects that live for the duration of
* a SolrCore, so this class acts as the proxy between Restlet and a ManagedResource when
* doing request processing.
*/
public static class ManagedEndpoint extends BaseSolrResource
implements GETable, PUTable, POSTable, DELETEable
{
/**
* Determines the ManagedResource resourceId from the Restlet request.
*/
public static String resolveResourceId(Request restletReq) {
String resourceId = restletReq.getResourceRef().
getRelativeRef(restletReq.getRootRef().getParentRef()).getPath();
// all resources are registered with the leading slash
if (!resourceId.startsWith("/"))
resourceId = "/"+resourceId;
return resourceId;
}
protected ManagedResource managedResource;
protected String childId;
/**
* Initialize objects needed to handle a request to the REST API. Specifically,
* we look up the RestManager using the ThreadLocal SolrRequestInfo and then
* dynamically locate the ManagedResource associated with the request URI.
*/
@Override
public void doInit() throws ResourceException {
super.doInit();
// get the relative path to the requested resource, which is
// needed to locate ManagedResource impls at runtime
String resourceId = resolveResourceId(getRequest());
// supports a request for a registered resource or its child
RestManager restManager =
RestManager.getRestManager(SolrRequestInfo.getRequestInfo());
managedResource = restManager.getManagedResourceOrNull(resourceId);
if (managedResource == null) {
// see if we have a registered endpoint one-level up ...
int lastSlashAt = resourceId.lastIndexOf('/');
if (lastSlashAt != -1) {
String parentResourceId = resourceId.substring(0,lastSlashAt);
log.info("Resource not found for {}, looking for parent: {}",
resourceId, parentResourceId);
managedResource = restManager.getManagedResourceOrNull(parentResourceId);
if (managedResource != null) {
// verify this resource supports child resources
if (!(managedResource instanceof ManagedResource.ChildResourceSupport)) {
String errMsg = String.format(Locale.ROOT,
"%s does not support child resources!", managedResource.getResourceId());
throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, errMsg);
}
childId = resourceId.substring(lastSlashAt+1);
log.info("Found parent resource {} for child: {}",
parentResourceId, childId);
}
}
}
if (managedResource == null) {
if (Method.PUT.equals(getMethod()) || Method.POST.equals(getMethod())) {
// delegate create requests to the RestManager
managedResource = restManager.endpoint;
} else {
throw new ResourceException(Status.CLIENT_ERROR_NOT_FOUND,
"No REST managed resource registered for path "+resourceId);
}
}
log.info("Found ManagedResource ["+managedResource+"] for "+resourceId);
}
@Override
public Representation put(Representation entity) {
try {
managedResource.doPut(this, entity, parseJsonFromRequestBody(entity));
} catch (Exception e) {
getSolrResponse().setException(e);
}
handlePostExecution(log);
return new SolrOutputRepresentation();
}
@Override
public Representation post(Representation entity) {
try {
managedResource.doPost(this, entity, parseJsonFromRequestBody(entity));
} catch (Exception e) {
getSolrResponse().setException(e);
}
handlePostExecution(log);
return new SolrOutputRepresentation();
}
@Override
public Representation delete() {
// only delegate deleting child resources to the ManagedResource,
// as deleting the resource itself is best handled by the
// RestManager
if (childId != null) {
try {
managedResource.doDeleteChild(this, childId);
} catch (Exception e) {
getSolrResponse().setException(e);
}
} else {
try {
RestManager restManager =
RestManager.getRestManager(SolrRequestInfo.getRequestInfo());
restManager.deleteManagedResource(managedResource);
} catch (Exception e) {
getSolrResponse().setException(e);
}
}
handlePostExecution(log);
return new SolrOutputRepresentation();
}
@Override
public Representation get() {
try {
managedResource.doGet(this, childId);
} catch (Exception e) {
getSolrResponse().setException(e);
}
handlePostExecution(log);
return new SolrOutputRepresentation();
}
/**
* Parses and validates the JSON passed from the client to the ManagedResource.
*/
protected Object parseJsonFromRequestBody(Representation entity) {
if (entity.getMediaType() == null) {
entity.setMediaType(MediaType.APPLICATION_JSON);
}
if (!entity.getMediaType().equals(MediaType.APPLICATION_JSON, true)) {
String errMsg = String.format(Locale.ROOT,
"Invalid content type %s; only %s is supported.",
entity.getMediaType(), MediaType.APPLICATION_JSON.toString());
log.error(errMsg);
throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, errMsg);
}
String text = null;
try {
text = entity.getText();
} catch (IOException ioExc) {
String errMsg = "Failed to read entity text due to: "+ioExc;
log.error(errMsg, ioExc);
throw new ResourceException(Status.SERVER_ERROR_INTERNAL, errMsg, ioExc);
}
if (text == null || text.trim().length() == 0) {
throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, "Empty request body!");
}
Object parsedJson = null;
try {
parsedJson = ObjectBuilder.fromJSON(text);
} catch (IOException ioExc) {
String errMsg = String.format(Locale.ROOT,
"Failed to parse request [%s] into JSON due to: %s",
text, ioExc.toString());
log.error(errMsg, ioExc);
throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, errMsg, ioExc);
}
return parsedJson;
}
} // end ManagedEndpoint class
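Once a ManagedResource such as the hypothetical /config/wordlist endpoint is registered, ManagedEndpoint routes plain HTTP calls to it. A rough client-side sketch follows; the host, core name, and path are assumptions, and the JSON array body exercises the List branch of ManagedResource.doPut().

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Scanner;

public class ManagedEndpointClientSketch {
  public static void main(String[] args) throws Exception {
    String base = "http://localhost:8983/solr/collection1"; // assumed core URL

    // PUT a JSON array; ManagedEndpoint.put() parses the body and calls the resource's doPut()
    HttpURLConnection put =
        (HttpURLConnection) new URL(base + "/config/wordlist").openConnection();
    put.setRequestMethod("PUT");
    put.setDoOutput(true);
    put.setRequestProperty("Content-Type", "application/json");
    try (OutputStream out = put.getOutputStream()) {
      out.write("[\"foo\",\"bar\"]".getBytes("UTF-8"));
    }
    System.out.println("PUT status: " + put.getResponseCode());

    // GET the managed data back; appending "/foo" instead would exercise the childId path
    HttpURLConnection get =
        (HttpURLConnection) new URL(base + "/config/wordlist").openConnection();
    try (Scanner body = new Scanner(get.getInputStream(), "UTF-8").useDelimiter("\\A")) {
      System.out.println(body.hasNext() ? body.next() : "");
    }
  }
}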
/**
* The RestManager itself supports some endpoints for creating and listing managed resources.
* Effectively, this resource provides the API endpoint for doing CRUD on the registry.
*/
private static class RestManagerManagedResource extends ManagedResource {
private static final String REST_MANAGER_STORAGE_ID = "/rest/managed";
private final RestManager restManager;
public RestManagerManagedResource(RestManager restManager) throws SolrException {
super(REST_MANAGER_STORAGE_ID, restManager.loader, restManager.storageIO);
this.restManager = restManager;
}
/**
* Loads and initializes any ManagedResources that have been created but
* are not associated with any Solr components.
*/
@SuppressWarnings("unchecked")
@Override
protected void onManagedDataLoadedFromStorage(NamedList<?> managedInitArgs, Object managedData)
throws SolrException {
if (managedData == null) {
return; // this is OK, just means there are no stored registrations
}
Map<String,Object> storedMap = (Map<String,Object>)managedData;
List<Object> managedList = (List<Object>)storedMap.get(MANAGED_JSON_LIST_FIELD);
for (Object next : managedList) {
Map<String,String> info = (Map<String,String>)next;
String implClass = info.get("class");
String resourceId = info.get("resourceId");
Class<? extends ManagedResource> clazz = solrResourceLoader.findClass(implClass, ManagedResource.class);
ManagedResourceRegistration existingReg = restManager.registry.registered.get(resourceId);
if (existingReg == null) {
restManager.registry.registerManagedResource(resourceId, clazz, null);
} // else already registered, no need to take any action
}
}
/**
* Creates a new ManagedResource in the RestManager.
*/
@SuppressWarnings("unchecked")
@Override
public synchronized void doPut(BaseSolrResource endpoint, Representation entity, Object json) {
if (json instanceof Map) {
String resourceId = ManagedEndpoint.resolveResourceId(endpoint.getRequest());
Map<String,String> info = (Map<String,String>)json;
info.put("resourceId", resourceId);
storeManagedData(applyUpdatesToManagedData(json));
} else {
throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST,
"Expected Map to create a new ManagedResource but received a "+json.getClass().getName());
}
// PUT just returns success status code with an empty body
}
/**
     * Called during PUT/POST processing to apply updates to the managed data passed
     * from the client; for this endpoint, the update registers a new {@link ManagedResource}.
*/
@SuppressWarnings("unchecked")
@Override
protected Object applyUpdatesToManagedData(Object updates) {
Map<String,String> info = (Map<String,String>)updates;
// this is where we'd register a new ManagedResource
String implClass = info.get("class");
String resourceId = info.get("resourceId");
log.info("Creating a new ManagedResource of type {} at path {}",
implClass, resourceId);
Class<? extends ManagedResource> clazz =
solrResourceLoader.findClass(implClass, ManagedResource.class);
// add this new resource to the RestManager
restManager.addManagedResource(resourceId, clazz);
// we only store ManagedResources that don't have observers as those that do
// are already implicitly defined
List<Map<String,String>> managedList = new ArrayList<>();
for (ManagedResourceRegistration reg : restManager.registry.getRegistered()) {
if (reg.observers.isEmpty()) {
managedList.add(reg.getInfo());
}
}
return managedList;
}
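// Illustrative, standalone sketch (not part of this patch; the class name is made up for
// illustration): the PUT body that doPut() and this method expect when creating a new
// managed resource is a JSON object naming the implementation class. The endpoint and
// class name mirror TestRestManager further down.

import java.util.Collections;

import org.noggit.JSONUtil;

public class CreateManagedResourcePayloadSketch {
  public static void main(String[] args) {
    // PUT this body to e.g. /schema/analysis/protwords/english to register the resource
    String body = JSONUtil.toJSON(
        Collections.singletonMap("class", "solr.ManagedWordSetResource"));
    System.out.println(body); // a JSON object like {"class":"solr.ManagedWordSetResource"}
  }
}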
/**
     * Deleting child resources is not supported by this implementation.
*/
@Override
public void doDeleteChild(BaseSolrResource endpoint, String childId) {
throw new ResourceException(Status.SERVER_ERROR_NOT_IMPLEMENTED);
}
@Override
public void doGet(BaseSolrResource endpoint, String childId) {
// filter results by /schema or /config
String path = ManagedEndpoint.resolveResourceId(endpoint.getRequest());
Matcher resourceIdMatcher = resourceIdRegex.matcher(path);
if (!resourceIdMatcher.matches()) {
// extremely unlikely but didn't want to squelch it either
throw new ResourceException(Status.SERVER_ERROR_NOT_IMPLEMENTED, path);
}
String filter = resourceIdMatcher.group(1);
List<Map<String,String>> regList = new ArrayList<>();
for (ManagedResourceRegistration reg : restManager.registry.getRegistered()) {
if (!reg.resourceId.startsWith(filter))
continue; // doesn't match filter
if (RestManagerManagedResource.class.isAssignableFrom(reg.implClass))
continue; // internal, no need to expose to outside
regList.add(reg.getInfo());
}
endpoint.getSolrResponse().add("managedResources", regList);
}
} // end RestManagerManagedResource
protected StorageIO storageIO;
protected Registry registry;
protected Map<String,ManagedResource> managed = new TreeMap<>();
protected RestManagerManagedResource endpoint;
protected SolrResourceLoader loader;
// refs to these are needed to bind new ManagedResources created using the API
protected Router schemaRouter;
protected Router configRouter;
/**
   * Initializes the RestManager. The StorageIO may be created outside of this implementation,
   * for example to use ZooKeeper instead of the local FS.
*/
public void init(SolrResourceLoader loader,
NamedList<String> initArgs,
StorageIO storageIO)
throws SolrException
{
log.info("Initializing RestManager with initArgs: "+initArgs);
if (storageIO == null)
throw new IllegalArgumentException(
"Must provide a valid StorageIO implementation to the RestManager!");
this.storageIO = storageIO;
this.loader = loader;
registry = loader.getManagedResourceRegistry();
// the RestManager provides metadata about managed resources via the /managed endpoint
// and allows you to create new ManagedResources dynamically by PUT'ing to this endpoint
endpoint = new RestManagerManagedResource(this);
endpoint.loadManagedDataAndNotify(null); // no observers for my endpoint
// responds to requests to /config/managed and /schema/managed
managed.put(CONFIG_BASE_PATH+MANAGED_ENDPOINT, endpoint);
managed.put(SCHEMA_BASE_PATH+MANAGED_ENDPOINT, endpoint);
// init registered managed resources
log.info("Initializing {} registered ManagedResources", registry.registered.size());
for (ManagedResourceRegistration reg : registry.registered.values()) {
// keep track of this for lookups during request processing
managed.put(reg.resourceId, createManagedResource(reg));
}
}
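// Illustrative, standalone sketch (not part of this patch; the class name is made up for
// illustration): bootstrapping a RestManager by hand, as the unit tests further down do,
// using the in-memory StorageIO so nothing touches disk or ZooKeeper.

import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.rest.ManagedResourceStorage;
import org.apache.solr.rest.RestManager;

public class RestManagerBootstrapSketch {
  public static void main(String[] args) throws Exception {
    SolrResourceLoader loader = new SolrResourceLoader("./");
    RestManager restManager = new RestManager();
    // resources registered beforehand via loader.getManagedResourceRegistry() get created here
    restManager.init(loader, new NamedList<>(), new ManagedResourceStorage.InMemoryStorageIO());
    // look up a resource by its registered path, e.g.:
    // ManagedResource res = restManager.getManagedResource("/config/test/foo");
  }
}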
/**
* If not already registered, registers the given {@link ManagedResource} subclass
* at the given resourceId, creates an instance, and attaches it to the appropriate
* Restlet router. Returns the corresponding instance.
*/
public synchronized ManagedResource addManagedResource(String resourceId, Class<? extends ManagedResource> clazz) {
ManagedResource res = null;
ManagedResourceRegistration existingReg = registry.registered.get(resourceId);
if (existingReg == null) {
registry.registerManagedResource(resourceId, clazz, null);
res = createManagedResource(registry.registered.get(resourceId));
managed.put(resourceId, res);
log.info("Registered new managed resource {}", resourceId);
// attach this new resource to the Restlet router
Matcher resourceIdValidator = resourceIdRegex.matcher(resourceId);
boolean validated = resourceIdValidator.matches();
assert validated : "managed resourceId '" + resourceId
+ "' should already be validated by registerManagedResource()";
String routerPath = resourceIdValidator.group(1);
String path = resourceIdValidator.group(2);
Router router = SCHEMA_BASE_PATH.equals(routerPath) ? schemaRouter : configRouter;
if (router != null) {
attachManagedResource(res, path, router);
}
} else {
res = getManagedResource(resourceId);
}
return res;
}
/**
* Creates a ManagedResource using registration information.
*/
protected ManagedResource createManagedResource(ManagedResourceRegistration reg) throws SolrException {
ManagedResource res = null;
try {
Constructor<? extends ManagedResource> ctor =
reg.implClass.getConstructor(String.class, SolrResourceLoader.class, StorageIO.class);
res = ctor.newInstance(reg.resourceId, loader, storageIO);
res.loadManagedDataAndNotify(reg.observers);
} catch (Exception e) {
String errMsg =
String.format(Locale.ROOT,
"Failed to create new ManagedResource %s of type %s due to: %s",
reg.resourceId, reg.implClass.getName(), e);
throw new SolrException(ErrorCode.SERVER_ERROR, errMsg, e);
}
return res;
}
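// Illustrative, standalone sketch (not part of this patch; the class name is hypothetical):
// the reflective lookup above means every ManagedResource subclass must expose a public
// (String, SolrResourceLoader, StorageIO) constructor. A minimal subclass skeleton,
// mirroring the mock resources in the tests further down:

import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.rest.BaseSolrResource;
import org.apache.solr.rest.ManagedResource;
import org.apache.solr.rest.ManagedResourceStorage.StorageIO;

public class MyManagedResource extends ManagedResource {

  public MyManagedResource(String resourceId, SolrResourceLoader loader, StorageIO storageIO)
      throws SolrException {
    super(resourceId, loader, storageIO);
  }

  @Override
  protected void onManagedDataLoadedFromStorage(NamedList<?> managedInitArgs, Object managedData)
      throws SolrException {
    // initialize in-memory state from storage; managedData may be null on first use
  }

  @Override
  protected Object applyUpdatesToManagedData(Object updates) {
    // merge client updates into the managed state; return the object to persist, or null if unchanged
    return null;
  }

  @Override
  public void doDeleteChild(BaseSolrResource endpoint, String childId) {}

  @Override
  public void doGet(BaseSolrResource endpoint, String childId) {}
}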
/**
* Returns the {@link ManagedResource} subclass instance corresponding
* to the given resourceId from the registry.
*
* @throws ResourceException if no managed resource is registered with
* the given resourceId.
*/
public ManagedResource getManagedResource(String resourceId) {
ManagedResource res = getManagedResourceOrNull(resourceId);
if (res == null) {
throw new ResourceException(Status.SERVER_ERROR_INTERNAL,
"No ManagedResource registered for path: "+resourceId);
}
return res;
}
/**
* Returns the {@link ManagedResource} subclass instance corresponding
* to the given resourceId from the registry, or null if no resource
* has been registered with the given resourceId.
*/
public synchronized ManagedResource getManagedResourceOrNull(String resourceId) {
return managed.get(resourceId);
}
/**
* Deletes a managed resource if it is not being used by any Solr components.
*/
public synchronized void deleteManagedResource(ManagedResource res) {
String resourceId = res.getResourceId();
ManagedResourceRegistration existingReg = registry.registered.get(resourceId);
int numObservers = existingReg.observers.size();
if (numObservers > 0) {
String errMsg =
String.format(Locale.ROOT,
"Cannot delete managed resource %s as it is being used by %d Solr components",
resourceId, numObservers);
throw new SolrException(ErrorCode.FORBIDDEN, errMsg);
}
registry.registered.remove(resourceId);
managed.remove(resourceId);
try {
res.onResourceDeleted();
} catch (IOException e) {
// the resource is already deleted so just log this
log.error("Error when trying to clean-up after deleting "+resourceId, e);
}
}
/**
   * Attaches the paths of registered managed resources under the given base path to the given Restlet Router.
   * @param routerPath - base path served by the router, either /config or /schema
   * @param router - Restlet Router
*/
public synchronized void attachManagedResources(String routerPath, Router router) {
if (CONFIG_BASE_PATH.equals(routerPath)) {
this.configRouter = router;
} else if (SCHEMA_BASE_PATH.equals(routerPath)) {
this.schemaRouter = router;
} else {
throw new SolrException(ErrorCode.SERVER_ERROR,
routerPath+" not supported by the RestManager");
}
int numAttached = 0;
for (String resourceId : managed.keySet()) {
if (resourceId.startsWith(routerPath)) {
        // Restlet attaches paths relative to the router, i.e. without the routerPath prefix
String path = resourceId.substring(routerPath.length());
attachManagedResource(managed.get(resourceId), path, router);
++numAttached;
}
}
log.info("Attached {} ManagedResource endpoints to Restlet router: {}",
numAttached, routerPath);
}
/**
* Attaches a ManagedResource and optionally a path for child resources
* to the given Restlet Router.
*/
protected void attachManagedResource(ManagedResource res, String path, Router router) {
router.attach(path, res.getServerResourceClass());
log.info("Attached managed resource at path: {}",path);
// Determine if we should also route requests for child resources
// ManagedResource.ChildResourceSupport is a marker interface that
// indicates the ManagedResource also manages child resources at
// a path one level down from the main resourceId
if (ManagedResource.ChildResourceSupport.class.isAssignableFrom(res.getClass())) {
router.attach(path+"/{child}", res.getServerResourceClass());
}
}
}

View File

@ -0,0 +1,75 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.request.SolrRequestInfo;
import org.restlet.Application;
import org.restlet.Restlet;
import org.restlet.routing.Router;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.Set;
/**
* Restlet servlet handling /&lt;context&gt;/&lt;collection&gt;/config/* URL paths
*/
public class SolrConfigRestApi extends Application {
public static final Logger log = LoggerFactory.getLogger(SolrConfigRestApi.class);
private Router router;
public SolrConfigRestApi() {
router = new Router(getContext());
}
/**
* TODO: If and when this API has reserved endpoints, add them to the set returned here.
* @see SolrSchemaRestApi#getReservedEndpoints()
*/
public static Set<String> getReservedEndpoints() {
return Collections.emptySet();
}
@Override
public void stop() throws Exception {
if (null != router) {
router.stop();
}
}
/**
* Bind URL paths to the appropriate ServerResource subclass.
*/
@Override
public synchronized Restlet createInboundRoot() {
log.info("createInboundRoot started for /config");
router.attachDefault(RestManager.ManagedEndpoint.class);
// attach all the dynamically registered /config resources
RestManager restManager =
RestManager.getRestManager(SolrRequestInfo.getRequestInfo());
restManager.attachManagedResources(RestManager.CONFIG_BASE_PATH, router);
log.info("createInboundRoot complete for /config");
return router;
}
}

View File

@ -16,8 +16,8 @@ package org.apache.solr.rest;
* limitations under the License.
*/
import org.apache.solr.request.SolrRequestInfo;
import org.apache.solr.rest.schema.CopyFieldCollectionResource;
import org.apache.solr.rest.schema.DefaultSchemaResource;
import org.apache.solr.rest.schema.SchemaResource;
import org.apache.solr.rest.schema.DefaultSearchFieldResource;
import org.apache.solr.rest.schema.DynamicFieldCollectionResource;
@ -39,10 +39,16 @@ import org.restlet.routing.Router;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
public class SolrRestApi extends Application {
public static final Logger log = LoggerFactory.getLogger(SolrRestApi.class);
/**
* Restlet servlet handling /&lt;context&gt;/&lt;collection&gt;/schema/* URL paths
*/
public class SolrSchemaRestApi extends Application {
public static final Logger log = LoggerFactory.getLogger(SolrSchemaRestApi.class);
public static final String FIELDS_PATH = "/" + IndexSchema.FIELDS;
public static final String DYNAMIC_FIELDS = IndexSchema.DYNAMIC_FIELDS.toLowerCase(Locale.ROOT);
@ -73,9 +79,28 @@ public class SolrRestApi extends Application {
public static final String UNIQUE_KEY_FIELD = IndexSchema.UNIQUE_KEY.toLowerCase(Locale.ROOT);
public static final String UNIQUE_KEY_FIELD_PATH = "/" + UNIQUE_KEY_FIELD;
/**
* Returns reserved endpoints under /schema
*/
public static Set<String> getReservedEndpoints() {
Set<String> reservedEndpoints = new HashSet<>();
reservedEndpoints.add(RestManager.SCHEMA_BASE_PATH + FIELDS_PATH);
reservedEndpoints.add(RestManager.SCHEMA_BASE_PATH + DYNAMIC_FIELDS_PATH);
reservedEndpoints.add(RestManager.SCHEMA_BASE_PATH + FIELDTYPES_PATH);
reservedEndpoints.add(RestManager.SCHEMA_BASE_PATH + NAME_PATH);
reservedEndpoints.add(RestManager.SCHEMA_BASE_PATH + COPY_FIELDS_PATH);
reservedEndpoints.add(RestManager.SCHEMA_BASE_PATH + VERSION_PATH);
reservedEndpoints.add(RestManager.SCHEMA_BASE_PATH + DEFAULT_SEARCH_FIELD_PATH);
reservedEndpoints.add(RestManager.SCHEMA_BASE_PATH + SIMILARITY_PATH);
reservedEndpoints.add(RestManager.SCHEMA_BASE_PATH + SOLR_QUERY_PARSER_PATH);
reservedEndpoints.add(RestManager.SCHEMA_BASE_PATH + DEFAULT_OPERATOR_PATH);
reservedEndpoints.add(RestManager.SCHEMA_BASE_PATH + UNIQUE_KEY_FIELD_PATH);
return Collections.unmodifiableSet(reservedEndpoints);
}
private Router router;
public SolrRestApi() {
public SolrSchemaRestApi() {
router = new Router(getContext());
}
@ -92,8 +117,8 @@ public class SolrRestApi extends Application {
@Override
public synchronized Restlet createInboundRoot() {
log.info("createInboundRoot started");
log.info("createInboundRoot started for /schema");
router.attach("", SchemaResource.class);
// Allow a trailing slash on full-schema requests
router.attach("/", SchemaResource.class);
@ -131,10 +156,14 @@ public class SolrRestApi extends Application {
router.attach(SOLR_QUERY_PARSER_PATH, SolrQueryParserResource.class);
router.attach(DEFAULT_OPERATOR_PATH, SolrQueryParserDefaultOperatorResource.class);
router.attachDefault(DefaultSchemaResource.class);
router.attachDefault(RestManager.ManagedEndpoint.class);
// attach all the dynamically registered schema resources
RestManager.getRestManager(SolrRequestInfo.getRequestInfo())
.attachManagedResources(RestManager.SCHEMA_BASE_PATH, router);
log.info("createInboundRoot complete");
log.info("createInboundRoot complete for /schema");
return router;
}
}
}

View File

@ -18,6 +18,7 @@ package org.apache.solr.rest.schema;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.rest.BaseSolrResource;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import org.restlet.resource.ResourceException;
@ -28,7 +29,7 @@ import java.util.LinkedHashSet;
/**
* Base class for Schema Field and DynamicField requests.
*/
abstract class BaseFieldResource extends BaseSchemaResource {
abstract class BaseFieldResource extends BaseSolrResource {
protected static final String INCLUDE_DYNAMIC_PARAM = "includeDynamic";
private static final String DYNAMIC_BASE = "dynamicBase";

View File

@ -18,6 +18,7 @@ package org.apache.solr.rest.schema;
*/
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.rest.BaseSolrResource;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.IndexSchema;
import org.restlet.resource.ResourceException;
@ -27,7 +28,7 @@ import java.util.List;
/**
* Base class for the FieldType resource classes.
*/
abstract class BaseFieldTypeResource extends BaseSchemaResource {
abstract class BaseFieldTypeResource extends BaseSolrResource {
private boolean showDefaults;
protected BaseFieldTypeResource() {

View File

@ -1,58 +0,0 @@
package org.apache.solr.rest.schema;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.rest.GETable;
import org.restlet.representation.Representation;
import org.restlet.resource.ResourceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class is invoked when a request URL starts with /schema/
* but then further path elements don't match any defined resources.
*/
public class DefaultSchemaResource extends BaseSchemaResource implements GETable {
private static final Logger log = LoggerFactory.getLogger(DefaultSchemaResource.class);
public DefaultSchemaResource() {
super();
}
@Override
public void doInit() throws ResourceException {
super.doInit();
}
@Override
public Representation get() {
try {
final String path = getRequest().getOriginalRef().getPath();
final String message = "Unknown path '" + path + "'";
throw new SolrException(ErrorCode.NOT_FOUND, message);
} catch (Exception e) {
getSolrResponse().setException(e);
}
handlePostExecution(log);
return new SolrOutputRepresentation();
}
}

View File

@ -18,6 +18,7 @@ package org.apache.solr.rest.schema;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.rest.BaseSolrResource;
import org.apache.solr.rest.GETable;
import org.apache.solr.schema.IndexSchema;
import org.restlet.representation.Representation;
@ -29,7 +30,7 @@ import org.slf4j.LoggerFactory;
/**
* This class responds to requests at /solr/(corename)/schema/defaultsearchfield
*/
public class DefaultSearchFieldResource extends BaseSchemaResource implements GETable {
public class DefaultSearchFieldResource extends BaseSolrResource implements GETable {
private static final Logger log = LoggerFactory.getLogger(DefaultSearchFieldResource.class);
public DefaultSearchFieldResource() {

View File

@ -17,6 +17,7 @@ package org.apache.solr.rest.schema;
*/
import org.apache.solr.common.SolrException;
import org.apache.solr.rest.BaseSolrResource;
import org.apache.solr.rest.GETable;
import org.apache.solr.schema.IndexSchema;
import org.restlet.representation.Representation;
@ -28,7 +29,7 @@ import org.slf4j.LoggerFactory;
/**
* This class responds to requests at /solr/(corename)/schema/name
*/
public class SchemaNameResource extends BaseSchemaResource implements GETable {
public class SchemaNameResource extends BaseSolrResource implements GETable {
private static final Logger log = LoggerFactory.getLogger(SchemaNameResource.class);
public SchemaNameResource() {

View File

@ -16,6 +16,7 @@ package org.apache.solr.rest.schema;
* limitations under the License.
*/
import org.apache.solr.rest.BaseSolrResource;
import org.apache.solr.rest.GETable;
import org.apache.solr.schema.IndexSchema;
import org.restlet.representation.Representation;
@ -26,7 +27,7 @@ import org.slf4j.LoggerFactory;
/**
* This class responds to requests at /solr/(corename)/schema
*/
public class SchemaResource extends BaseSchemaResource implements GETable {
public class SchemaResource extends BaseSolrResource implements GETable {
private static final Logger log = LoggerFactory.getLogger(SchemaResource.class);
public SchemaResource() {

View File

@ -16,8 +16,8 @@ package org.apache.solr.rest.schema;
* limitations under the License.
*/
import org.apache.solr.rest.BaseSolrResource;
import org.apache.solr.rest.GETable;
import org.apache.solr.rest.SolrRestApi;
import org.apache.solr.schema.IndexSchema;
import org.restlet.representation.Representation;
import org.restlet.resource.ResourceException;
@ -28,7 +28,7 @@ import org.slf4j.LoggerFactory;
/**
* This class responds to requests at /solr/(corename)/schema/similarity
*/
public class SchemaSimilarityResource extends BaseSchemaResource implements GETable {
public class SchemaSimilarityResource extends BaseSolrResource implements GETable {
private static final Logger log = LoggerFactory.getLogger(SchemaSimilarityResource.class);
public SchemaSimilarityResource() {

View File

@ -16,8 +16,8 @@ package org.apache.solr.rest.schema;
* limitations under the License.
*/
import org.apache.solr.rest.BaseSolrResource;
import org.apache.solr.rest.GETable;
import org.apache.solr.rest.SolrRestApi;
import org.apache.solr.schema.IndexSchema;
import org.restlet.representation.Representation;
import org.restlet.resource.ResourceException;
@ -28,7 +28,7 @@ import org.slf4j.LoggerFactory;
/**
* This class responds to requests at /solr/(corename)/schema/version
*/
public class SchemaVersionResource extends BaseSchemaResource implements GETable {
public class SchemaVersionResource extends BaseSolrResource implements GETable {
private static final Logger log = LoggerFactory.getLogger(SchemaVersionResource.class);
public SchemaVersionResource() {

View File

@ -16,6 +16,7 @@ package org.apache.solr.rest.schema;
* limitations under the License.
*/
import org.apache.solr.rest.BaseSolrResource;
import org.apache.solr.rest.GETable;
import org.apache.solr.schema.IndexSchema;
import org.restlet.representation.Representation;
@ -27,7 +28,7 @@ import org.slf4j.LoggerFactory;
/**
* This class responds to requests at /solr/(corename)/schema/solrqueryparser/defaultoperator
*/
public class SolrQueryParserDefaultOperatorResource extends BaseSchemaResource implements GETable {
public class SolrQueryParserDefaultOperatorResource extends BaseSolrResource implements GETable {
private static final Logger log = LoggerFactory.getLogger(SolrQueryParserDefaultOperatorResource.class);
public SolrQueryParserDefaultOperatorResource() {

View File

@ -17,6 +17,7 @@ package org.apache.solr.rest.schema;
*/
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.rest.BaseSolrResource;
import org.apache.solr.rest.GETable;
import org.apache.solr.schema.IndexSchema;
import org.restlet.representation.Representation;
@ -28,7 +29,7 @@ import org.slf4j.LoggerFactory;
/**
* This class responds to requests at /solr/(corename)/schema/solrqueryparser
*/
public class SolrQueryParserResource extends BaseSchemaResource implements GETable {
public class SolrQueryParserResource extends BaseSolrResource implements GETable {
private static final Logger log = LoggerFactory.getLogger(SolrQueryParserResource.class);
public SolrQueryParserResource() {

View File

@ -16,6 +16,7 @@ package org.apache.solr.rest.schema;
* limitations under the License.
*/
import org.apache.solr.rest.BaseSolrResource;
import org.apache.solr.rest.GETable;
import org.apache.solr.schema.IndexSchema;
import org.restlet.representation.Representation;
@ -27,7 +28,7 @@ import org.slf4j.LoggerFactory;
/**
* This class responds to requests at /solr/(corename)/schema/uniquekey
*/
public class UniqueKeyFieldResource extends BaseSchemaResource implements GETable {
public class UniqueKeyFieldResource extends BaseSolrResource implements GETable {
private static final Logger log = LoggerFactory.getLogger(UniqueKeyFieldResource.class);
public UniqueKeyFieldResource() {

View File

@ -0,0 +1,193 @@
package org.apache.solr.rest.schema.analysis;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.rest.BaseSolrResource;
import org.apache.solr.rest.ManagedResource;
import org.apache.solr.rest.ManagedResourceStorage.StorageIO;
/**
* ManagedResource implementation for managing a set of words using the REST API;
* useful for managing stop words and/or protected words for analysis components
* like the KeywordMarkerFilter.
*/
public class ManagedWordSetResource extends ManagedResource
implements ManagedResource.ChildResourceSupport {
public static final String WORD_SET_JSON_FIELD = "wordSet";
public static final String IGNORE_CASE_INIT_ARG = "ignoreCase";
private SortedSet<String> managedWords = null;
public ManagedWordSetResource(String resourceId, SolrResourceLoader loader, StorageIO storageIO)
throws SolrException {
super(resourceId, loader, storageIO);
}
/**
* Returns the set of words in this managed word set.
*/
public Set<String> getWordSet() {
return Collections.unmodifiableSet(managedWords);
}
/**
* Returns the boolean value of the {@link #IGNORE_CASE_INIT_ARG} init arg,
* or the default value (false) if it has not been specified
*/
public boolean getIgnoreCase() {
return getIgnoreCase(managedInitArgs);
}
/**
* Returns the boolean value of the {@link #IGNORE_CASE_INIT_ARG} init arg,
* or the default value (false) if it has not been specified
*/
public boolean getIgnoreCase(NamedList<?> initArgs) {
Boolean ignoreCase = initArgs.getBooleanArg(IGNORE_CASE_INIT_ARG);
// ignoreCase = false by default
return null == ignoreCase ? false : ignoreCase;
}
/**
* Invoked when loading data from storage to initialize the
* list of words managed by this instance. A load of the
* data can happen many times throughout the life cycle of this
* object.
*/
@SuppressWarnings("unchecked")
@Override
protected void onManagedDataLoadedFromStorage(NamedList<?> initArgs, Object data)
throws SolrException {
    // the default behavior is to not ignore case
boolean ignoreCase = getIgnoreCase(initArgs);
if (null == initArgs.get(IGNORE_CASE_INIT_ARG)) {
// Explicitly include the default value of ignoreCase
((NamedList<Object>)initArgs).add(IGNORE_CASE_INIT_ARG, false);
}
managedWords = new TreeSet<>();
if (data != null) {
List<String> wordList = (List<String>)data;
if (ignoreCase) {
// if we're ignoring case, just lowercase all terms as we add them
for (String word : wordList) {
managedWords.add(word.toLowerCase(Locale.ROOT));
}
} else {
managedWords.addAll(wordList);
}
}
log.info("Loaded "+managedWords.size()+" words for "+getResourceId());
}
/**
* Implements the GET request to provide the list of words to the client.
* Alternatively, if a specific word is requested, then it is returned
* or a 404 is raised, indicating that the requested word does not exist.
*/
@Override
public void doGet(BaseSolrResource endpoint, String childId) {
SolrQueryResponse response = endpoint.getSolrResponse();
if (childId != null) {
// downcase arg if we're configured to ignoreCase
String key = getIgnoreCase() ? childId.toLowerCase(Locale.ROOT) : childId;
if (!managedWords.contains(key))
throw new SolrException(ErrorCode.NOT_FOUND,
String.format(Locale.ROOT, "%s not found in %s", childId, getResourceId()));
response.add(childId, key);
} else {
response.add(WORD_SET_JSON_FIELD, buildMapToStore(managedWords));
}
}
/**
   * Deletes a single word from the set managed by this resource.
*/
@Override
public synchronized void doDeleteChild(BaseSolrResource endpoint, String childId) {
// downcase arg if we're configured to ignoreCase
String key = getIgnoreCase() ? childId.toLowerCase(Locale.ROOT) : childId;
if (!managedWords.contains(key))
throw new SolrException(ErrorCode.NOT_FOUND,
String.format(Locale.ROOT, "%s not found in %s", childId, getResourceId()));
managedWords.remove(key);
storeManagedData(managedWords);
log.info("Removed word: {}", key);
}
/**
* Applies updates to the word set being managed by this resource.
*/
@SuppressWarnings("unchecked")
@Override
protected Object applyUpdatesToManagedData(Object updates) {
boolean madeChanges = false;
List<String> words = (List<String>)updates;
log.info("Applying updates: "+words);
boolean ignoreCase = getIgnoreCase();
for (String word : words) {
if (ignoreCase)
word = word.toLowerCase(Locale.ROOT);
if (managedWords.add(word)) {
madeChanges = true;
log.info("Added word: {}", word);
}
}
return madeChanges ? managedWords : null;
}
@Override
protected boolean updateInitArgs(NamedList<?> updatedArgs) {
if (updatedArgs == null || updatedArgs.size() == 0) {
return false;
}
boolean currentIgnoreCase = getIgnoreCase(managedInitArgs);
boolean updatedIgnoreCase = getIgnoreCase(updatedArgs);
    if (currentIgnoreCase && !updatedIgnoreCase) {
throw new SolrException(ErrorCode.BAD_REQUEST,
"Changing a managed word set's ignoreCase arg from true to false is not permitted.");
    } else if (!currentIgnoreCase && updatedIgnoreCase) {
// rebuild the word set on policy change from case-sensitive to case-insensitive
SortedSet<String> updatedWords = new TreeSet<>();
for (String word : managedWords) {
updatedWords.add(word.toLowerCase(Locale.ROOT));
}
managedWords = updatedWords;
}
// otherwise currentIgnoreCase == updatedIgnoreCase: nothing to do
return super.updateInitArgs(updatedArgs);
}
}
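// Illustrative, standalone sketch (not part of this patch; the class name is hypothetical):
// an analysis component would consume this resource through the ManagedResourceObserver
// callback, as the mock observers in the tests further down do. The callback signature
// matches ManagedResourceObserver.

import java.util.Set;

import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.rest.ManagedResource;
import org.apache.solr.rest.ManagedResourceObserver;
import org.apache.solr.rest.schema.analysis.ManagedWordSetResource;

public class WordSetConsumerSketch implements ManagedResourceObserver {

  private volatile Set<String> words;
  private volatile boolean ignoreCase;

  @Override
  public void onManagedResourceInitialized(NamedList<?> args, ManagedResource res)
      throws SolrException {
    // capture the managed words once the resource has loaded its data
    ManagedWordSetResource wordSet = (ManagedWordSetResource) res;
    words = wordSet.getWordSet();
    ignoreCase = wordSet.getIgnoreCase();
  }
}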

View File

@ -0,0 +1,29 @@
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
</head>
<body>
<p>
Analysis-related functionality for RESTful API access to the Solr Schema using Restlet.
</p>
</body>
</html>

View File

@ -20,6 +20,7 @@ package org.apache.solr.servlet;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpHead;
@ -364,8 +365,9 @@ public class SolrDispatchFilter implements Filter
// get or create/cache the parser for the core
SolrRequestParsers parser = config.getRequestParsers();
// Handle /schema/* paths via Restlet
if( path.startsWith("/schema") ) {
// Handle /schema/* and /config/* paths via Restlet
if( path.equals("/schema") || path.startsWith("/schema/")
|| path.equals("/config") || path.startsWith("/config/")) {
solrReq = parser.parse(core, path, req);
SolrRequestInfo.setRequestInfo(new SolrRequestInfo(solrReq, new SolrQueryResponse()));
if( path.equals(req.getServletPath()) ) {
@ -536,6 +538,9 @@ public class SolrDispatchFilter implements Filter
entityRequest.setEntity(entity);
method = entityRequest;
}
else if ("DELETE".equals(req.getMethod())) {
method = new HttpDelete(urlstr);
}
else {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
"Unexpected method type: " + req.getMethod());

View File

@ -665,7 +665,9 @@ public class SolrRequestParsers
{
String method = req.getMethod().toUpperCase(Locale.ROOT);
if ("GET".equals(method) || "HEAD".equals(method)
|| ("PUT".equals(method) && req.getRequestURI().contains("/schema"))) {
|| (("PUT".equals(method) || "DELETE".equals(method))
&& (req.getRequestURI().contains("/schema")
|| req.getRequestURI().contains("/config")))) {
return parseQueryString(req.getQueryString());
}
if ("POST".equals( method ) ) {

View File

@ -575,5 +575,13 @@
<processor class="solr.RunUpdateProcessorFactory" />
</updateRequestProcessorChain>
<restManager>
<!--
    IMPORTANT: Due to the Lucene SecurityManager, tests can only write to their runtime directory or below.
    It's easier to just keep everything in memory for testing so no remnants are left behind.
-->
<str name="storageIO">org.apache.solr.rest.ManagedResourceStorage$InMemoryStorageIO</str>
</restManager>
</config>

View File

@ -24,13 +24,26 @@ import org.restlet.ext.servlet.ServerServlet;
import java.util.SortedMap;
import java.util.TreeMap;
/**
* Base class for Solr Restlet-based tests. Creates test harness,
* including "extra" servlets for all Solr Restlet Application subclasses.
*/
abstract public class SolrRestletTestBase extends RestTestBase {
/**
* Creates test harness, including "extra" servlets for all
* Solr Restlet Application subclasses.
*/
@BeforeClass
public static void init() throws Exception {
final SortedMap<ServletHolder,String> extraServlets = new TreeMap<>();
final ServletHolder solrRestApi = new ServletHolder("SolrRestApi", ServerServlet.class);
solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrRestApi");
extraServlets.put(solrRestApi, "/schema/*"); // '/schema/*' matches '/schema', '/schema/', and '/schema/whatever...'
final ServletHolder solrSchemaRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class);
solrSchemaRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi");
extraServlets.put(solrSchemaRestApi, "/schema/*"); // '/schema/*' matches '/schema', '/schema/', and '/schema/whatever...'
final ServletHolder solrConfigRestApi = new ServletHolder("SolrConfigRestApi", ServerServlet.class);
solrConfigRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrConfigRestApi");
extraServlets.put(solrConfigRestApi, "/config/*");
createJettyAndHarness(TEST_HOME(), "solrconfig.xml", "schema-rest.xml", "/solr", true, extraServlets);
}

View File

@ -0,0 +1,327 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.lucene.util.BytesRef;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.rest.ManagedResourceStorage.StorageIO;
import org.junit.Test;
import org.noggit.JSONParser;
import org.noggit.ObjectBuilder;
/**
* Tests {@link ManagedResource} functionality.
*/
public class TestManagedResource extends SolrTestCaseJ4 {
/**
* Mock class that acts like an analysis component that depends on
* data managed by a ManagedResource
*/
private class MockAnalysisComponent implements ManagedResourceObserver {
private boolean wasNotified = false;
@SuppressWarnings("unchecked")
@Override
public void onManagedResourceInitialized(NamedList<?> args, ManagedResource res) throws SolrException {
assertEquals("someVal", args.get("someArg"));
assertTrue(res instanceof ManagedTestResource);
ManagedTestResource testRes = (ManagedTestResource)res;
List<String> data = (List<String>)testRes.managedData;
assertTrue(data.contains("1"));
assertTrue(data.contains("2"));
assertTrue(data.contains("3"));
wasNotified = true;
}
}
private class ManagedTestResource extends ManagedResource {
private Object managedData;
private ManagedTestResource(String resourceId, SolrResourceLoader loader,
StorageIO storageIO) throws SolrException {
super(resourceId, loader, storageIO);
}
@Override
protected void onManagedDataLoadedFromStorage(NamedList<?> managedInitArgs, Object managedData)
throws SolrException {
assertNotNull(managedData);
assertTrue(managedData instanceof List);
      // {'initArgs':{'someArg':'someVal', 'arg2':true, 'arg3':['one','two','three'],
      //  'arg4':18, 'arg5':0.9, 'arg6':{ 'uno':1, 'dos':2 }}, ...}
assertEquals("someVal", managedInitArgs.get("someArg"));
assertEquals(true, managedInitArgs.get("arg2"));
List<String> arg3List = Arrays.asList("one", "two", "three");
assertEquals(arg3List, managedInitArgs.get("arg3"));
assertEquals(18L, managedInitArgs.get("arg4"));
assertEquals(0.9, managedInitArgs.get("arg5"));
Map<String,Long> arg6map = new LinkedHashMap<>(2);
arg6map.put("uno", 1L);
arg6map.put("dos", 2L);
assertEquals(arg6map, managedInitArgs.get("arg6"));
this.managedData = managedData;
}
// NOTE: These methods are better tested from the REST API
// so they are stubbed out here and not used in this test
@Override
protected Object applyUpdatesToManagedData(Object updates) {
return null;
}
@Override
public void doDeleteChild(BaseSolrResource endpoint, String childId) {}
@Override
public void doGet(BaseSolrResource endpoint, String childId) {}
}
/**
* Implements a Java serialization based storage format.
*/
private class SerializableStorage extends ManagedResourceStorage {
SerializableStorage(StorageIO storageIO, SolrResourceLoader loader) {
super(storageIO, loader);
}
@Override
public Object load(String resourceId) throws IOException {
String storedId = getStoredResourceId(resourceId);
InputStream inputStream = storageIO.openInputStream(storedId);
if (inputStream == null) {
return null;
}
Object serialized = null;
ObjectInputStream ois = null;
try {
ois = new ObjectInputStream(inputStream);
serialized = ois.readObject();
} catch (ClassNotFoundException e) {
// unlikely
throw new IOException(e);
} finally {
if (ois != null) {
try {
ois.close();
} catch (Exception ignore){}
}
}
return serialized;
}
@Override
public void store(String resourceId, Object toStore) throws IOException {
if (!(toStore instanceof Serializable))
throw new IOException("Instance of "+
toStore.getClass().getName()+" is not Serializable!");
String storedId = getStoredResourceId(resourceId);
ObjectOutputStream oos = null;
try {
oos = new ObjectOutputStream(storageIO.openOutputStream(storedId));
oos.writeObject(toStore);
oos.flush();
} finally {
if (oos != null) {
try {
oos.close();
} catch (Exception ignore){}
}
}
}
@Override
public String getStoredResourceId(String resourceId) {
return resourceId.replace('/','_')+".bin";
}
}
private class CustomStorageFormatResource extends ManagedTestResource {
private CustomStorageFormatResource(String resourceId, SolrResourceLoader loader,
StorageIO storageIO) throws SolrException {
super(resourceId, loader, storageIO);
}
@Override
protected ManagedResourceStorage createStorage(StorageIO storageIO, SolrResourceLoader loader)
throws SolrException
{
return new SerializableStorage(storageIO, loader);
}
}
/**
   * Tests storing managed data to, and loading it from, {@link ManagedResourceStorage.InMemoryStorageIO}.
*/
@SuppressWarnings("unchecked")
@Test
public void testLoadingAndStoringOfManagedData() throws Exception {
String resourceId = "/config/test/foo";
String storedResourceId = "_config_test_foo.json";
MockAnalysisComponent observer = new MockAnalysisComponent();
List<ManagedResourceObserver> observers =
Arrays.asList((ManagedResourceObserver)observer);
// put some data in the storage impl so that we can test
// initialization of managed data from storage
String storedJson = "{'initArgs':{'someArg':'someVal', 'arg2':true, 'arg3':['one','two','three'],"
+ " 'arg4':18, 'arg5':0.9, 'arg6':{ 'uno':1, 'dos':2}},'"
+ ManagedResource.MANAGED_JSON_LIST_FIELD+"':['1','2','3']}";
ManagedResourceStorage.InMemoryStorageIO storageIO =
new ManagedResourceStorage.InMemoryStorageIO();
storageIO.storage.put(storedResourceId, new BytesRef(json(storedJson)));
ManagedTestResource res =
new ManagedTestResource(resourceId, new SolrResourceLoader("./"), storageIO);
res.loadManagedDataAndNotify(observers);
assertTrue("Observer was not notified by ManagedResource!", observer.wasNotified);
// now update the managed data (as if it came from the REST API)
List<String> updatedData = new ArrayList<>();
updatedData.add("1");
updatedData.add("2");
updatedData.add("3");
updatedData.add("4");
res.storeManagedData(updatedData);
StringReader stringReader =
new StringReader(storageIO.storage.get(storedResourceId).utf8ToString());
Map<String,Object> jsonObject =
(Map<String,Object>) ObjectBuilder.getVal(new JSONParser(stringReader));
List<String> jsonList =
(List<String>)jsonObject.get(ManagedResource.MANAGED_JSON_LIST_FIELD);
assertTrue("Managed data was not updated correctly!", jsonList.contains("4"));
}
/**
   * The ManagedResource storage framework allows the end developer to use a storage
   * format other than JSON, as demonstrated by this test.
*/
@SuppressWarnings("rawtypes")
@Test
public void testCustomStorageFormat() throws Exception {
String resourceId = "/schema/test/foo";
String storedResourceId = "_schema_test_foo.bin";
MockAnalysisComponent observer = new MockAnalysisComponent();
List<ManagedResourceObserver> observers =
Arrays.asList((ManagedResourceObserver)observer);
// put some data in the storage impl so that we can test
// initialization of managed data from storage
Map<String,Object> storedData = new HashMap<>();
Map<String,Object> initArgs = new HashMap<>();
    // {'initArgs':{'someArg':'someVal', 'arg2':true, 'arg3':['one','two','three'],
    //  'arg4':18, 'arg5':0.9, 'arg6':{ 'uno':1, 'dos':2 }}, ...}
initArgs.put("someArg", "someVal");
initArgs.put("arg2", Boolean.TRUE);
List<String> arg3list = Arrays.asList("one", "two", "three");
initArgs.put("arg3", arg3list);
initArgs.put("arg4", 18L);
initArgs.put("arg5", 0.9);
Map<String,Long> arg6map = new HashMap<>();
arg6map.put("uno", 1L);
arg6map.put("dos", 2L);
initArgs.put("arg6", arg6map);
storedData.put("initArgs", initArgs);
List<String> managedList = new ArrayList<>();
managedList.add("1");
managedList.add("2");
managedList.add("3");
storedData.put(ManagedResource.MANAGED_JSON_LIST_FIELD, managedList);
ManagedResourceStorage.InMemoryStorageIO storageIO =
new ManagedResourceStorage.InMemoryStorageIO();
storageIO.storage.put(storedResourceId, ser2bytes((Serializable)storedData));
CustomStorageFormatResource res =
new CustomStorageFormatResource(resourceId, new SolrResourceLoader("./"), storageIO);
res.loadManagedDataAndNotify(observers);
assertTrue("Observer was not notified by ManagedResource!", observer.wasNotified);
// now store some data (as if it came from the REST API)
List<String> updatedData = new ArrayList<>();
updatedData.add("1");
updatedData.add("2");
updatedData.add("3");
updatedData.add("4");
res.storeManagedData(updatedData);
Object stored = res.storage.load(resourceId);
assertNotNull(stored);
assertTrue(stored instanceof Map);
Map storedMap = (Map)stored;
assertNotNull(storedMap.get("initArgs"));
List storedList = (List)storedMap.get(ManagedResource.MANAGED_JSON_LIST_FIELD);
assertTrue(storedList.contains("4"));
}
/**
* Converts the given Serializable object to bytes
*/
private BytesRef ser2bytes(Serializable ser) throws Exception {
ByteArrayOutputStream out = new ByteArrayOutputStream();
ObjectOutputStream oos = null;
try {
oos = new ObjectOutputStream(out);
oos.writeObject(ser);
oos.flush();
} finally {
if (oos != null) {
try {
oos.close();
} catch (Exception ignore){}
}
}
return new BytesRef(out.toByteArray());
}
}

View File

@ -0,0 +1,134 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.solr.cloud.AbstractZkTestCase;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.rest.ManagedResourceStorage.StorageIO;
import org.apache.solr.rest.ManagedResourceStorage.FileStorageIO;
import org.apache.solr.rest.ManagedResourceStorage.ZooKeeperStorageIO;
import org.apache.solr.rest.ManagedResourceStorage.JsonStorage;
import org.junit.Test;
/**
* Depends on ZK for testing ZooKeeper backed storage logic.
*/
@Slow
public class TestManagedResourceStorage extends AbstractZkTestCase {
/**
   * Runs persisted managed resource creation and update tests on ZooKeeper-backed storage.
*/
@Test
public void testZkBasedJsonStorage() throws Exception {
// test using ZooKeeper
assertTrue("Not using ZooKeeper", h.getCoreContainer().isZooKeeperAware());
SolrZkClient zkClient = h.getCoreContainer().getZkController().getZkClient();
SolrResourceLoader loader = new SolrResourceLoader("./");
// Solr unit tests can only write to their working directory due to
// a custom Java Security Manager installed in the test environment
NamedList<String> initArgs = new NamedList<>();
try {
ZooKeeperStorageIO zkStorageIO = new ZooKeeperStorageIO(zkClient, "/test");
zkStorageIO.configure(loader, initArgs);
doStorageTests(loader, zkStorageIO);
} finally {
zkClient.close();
}
}
/**
   * Runs persisted managed resource creation and update tests on file-based JSON storage.
*/
@Test
public void testFileBasedJsonStorage() throws Exception {
SolrResourceLoader loader = new SolrResourceLoader("./");
// Solr unit tests can only write to their working directory due to
// a custom Java Security Manager installed in the test environment
NamedList<String> initArgs = new NamedList<>();
initArgs.add(ManagedResourceStorage.STORAGE_DIR_INIT_ARG, "./managed");
FileStorageIO fileStorageIO = new FileStorageIO();
fileStorageIO.configure(loader, initArgs);
doStorageTests(loader, fileStorageIO);
}
/**
* Called from tests for each storage type to run creation and update tests
* on a persisted managed resource.
*/
@SuppressWarnings("unchecked")
private void doStorageTests(SolrResourceLoader loader, StorageIO storageIO) throws Exception {
String resourceId = "/test/foo";
JsonStorage jsonStorage = new JsonStorage(storageIO, loader);
Map<String,String> managedInitArgs = new HashMap<>();
managedInitArgs.put("ignoreCase","true");
managedInitArgs.put("dontIgnoreCase", "false");
List<String> managedList = new ArrayList<>(); // we need a mutable List for this test
managedList.addAll(Arrays.asList("a","b","c","d","e"));
Map<String,Object> toStore = new HashMap<>();
toStore.put(ManagedResource.INIT_ARGS_JSON_FIELD, managedInitArgs);
toStore.put(ManagedResource.MANAGED_JSON_LIST_FIELD, managedList);
jsonStorage.store(resourceId, toStore);
String storedResourceId = jsonStorage.getStoredResourceId(resourceId);
assertTrue(storedResourceId+" file not found!", storageIO.exists(storedResourceId));
Object fromStorage = jsonStorage.load(resourceId);
assertNotNull(fromStorage);
Map<String,Object> storedMap = (Map<String,Object>)fromStorage;
Map<String,Object> storedArgs = (Map<String,Object>)storedMap.get(ManagedResource.INIT_ARGS_JSON_FIELD);
assertNotNull(storedArgs);
assertEquals("true", storedArgs.get("ignoreCase"));
List<String> storedList = (List<String>)storedMap.get(ManagedResource.MANAGED_JSON_LIST_FIELD);
assertNotNull(storedList);
assertTrue(storedList.size() == managedList.size());
assertTrue(storedList.contains("a"));
// now verify you can update existing data
managedInitArgs.put("anotherArg", "someValue");
managedList.add("f");
jsonStorage.store(resourceId, toStore);
fromStorage = jsonStorage.load(resourceId);
assertNotNull(fromStorage);
storedMap = (Map<String,Object>)fromStorage;
storedArgs = (Map<String,Object>)storedMap.get(ManagedResource.INIT_ARGS_JSON_FIELD);
assertNotNull(storedArgs);
assertEquals("someValue", storedArgs.get("anotherArg"));
storedList = (List<String>)storedMap.get(ManagedResource.MANAGED_JSON_LIST_FIELD);
assertNotNull(storedList);
assertTrue(storedList.size() == managedList.size());
assertTrue(storedList.contains("e"));
}
}

View File

@ -0,0 +1,225 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.Arrays;
import java.util.Locale;
import java.util.Set;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.rest.ManagedResourceStorage.StorageIO;
import org.apache.solr.rest.schema.analysis.ManagedWordSetResource;
import org.junit.Test;
import org.noggit.JSONUtil;
/**
 * Tests {@link RestManager} functionality, including resource registration
 * and REST API requests and responses.
*/
public class TestRestManager extends SolrRestletTestBase {
private class BogusManagedResource extends ManagedResource {
protected BogusManagedResource(String resourceId,
SolrResourceLoader loader, StorageIO storageIO) throws SolrException {
super(resourceId, loader, storageIO);
}
@Override
protected void onManagedDataLoadedFromStorage(NamedList<?> managedInitArgs, Object managedData)
throws SolrException {}
@Override
protected Object applyUpdatesToManagedData(Object updates) {
return null;
}
@Override
public void doDeleteChild(BaseSolrResource endpoint, String childId) {}
@Override
public void doGet(BaseSolrResource endpoint, String childId) {}
}
private class MockAnalysisComponent implements ManagedResourceObserver {
@Override
public void onManagedResourceInitialized(NamedList<?> args, ManagedResource res)
throws SolrException {
assertTrue(res instanceof ManagedWordSetResource);
}
}
/**
* Test RestManager initialization and handling of registered ManagedResources.
*/
@Test
public void testManagedResourceRegistrationAndInitialization() throws Exception {
// first, we need to register some ManagedResources, which is done with the registry
// provided by the SolrResourceLoader
SolrResourceLoader loader = new SolrResourceLoader("./");
RestManager.Registry registry = loader.getManagedResourceRegistry();
assertNotNull("Expected a non-null RestManager.Registry from the SolrResourceLoader!", registry);
String resourceId = "/config/test/foo";
registry.registerManagedResource(resourceId,
ManagedWordSetResource.class,
new MockAnalysisComponent());
    // verify that two different components can register the same ManagedResource in the registry
registry.registerManagedResource(resourceId,
ManagedWordSetResource.class,
new MockAnalysisComponent());
// verify we can register another resource under a different resourceId
registry.registerManagedResource("/config/test/foo2",
ManagedWordSetResource.class,
new MockAnalysisComponent());
ignoreException("REST API path .* already registered to instances of ");
    String failureMessage = "Should not be able to register a different"
        + " ManagedResource implementation for %s";
// verify that some other hooligan cannot register another type
// of ManagedResource implementation under the same resourceId
try {
registry.registerManagedResource(resourceId,
BogusManagedResource.class,
new MockAnalysisComponent());
fail(String.format(Locale.ROOT, failureMessage, resourceId));
} catch (SolrException solrExc) {
// expected output
}
resetExceptionIgnores();
ignoreException("is a reserved endpoint used by the Solr REST API!");
failureMessage = "Should not be able to register reserved endpoint {}";
// verify that already-spoken-for REST API endpoints can't be registered
Set<String> reservedEndpoints = registry.getReservedEndpoints();
assertTrue(reservedEndpoints.size() > 2);
assertTrue(reservedEndpoints.contains(RestManager.SCHEMA_BASE_PATH + RestManager.MANAGED_ENDPOINT));
assertTrue(reservedEndpoints.contains(RestManager.CONFIG_BASE_PATH + RestManager.MANAGED_ENDPOINT));
for (String endpoint : reservedEndpoints) {
try {
registry.registerManagedResource
(endpoint, BogusManagedResource.class, new MockAnalysisComponent());
fail(String.format(Locale.ROOT, failureMessage, endpoint));
} catch (SolrException solrExc) {
// expected output
}
// also try to register already-spoken-for REST API endpoints with a child segment
endpoint += "/kid";
try {
registry.registerManagedResource
(endpoint, BogusManagedResource.class, new MockAnalysisComponent());
fail(String.format(Locale.ROOT, failureMessage, endpoint));
} catch (SolrException solrExc) {
// expected output
}
}
resetExceptionIgnores();
NamedList<String> initArgs = new NamedList<>();
RestManager restManager = new RestManager();
restManager.init(loader, initArgs, new ManagedResourceStorage.InMemoryStorageIO());
ManagedResource res = restManager.getManagedResource(resourceId);
assertTrue(res instanceof ManagedWordSetResource);
assertEquals(resourceId, res.getResourceId());
restManager.getManagedResource("/config/test/foo2"); // exception if it isn't registered
}
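// Illustrative sketch (not part of the original patch): a plugin typically registers its managed
// data through the registry obtained from its SolrResourceLoader and captures the initialized
// instance in the observer callback. The class name and resourceId below are hypothetical; only
// the registry/observer APIs exercised by the test above are assumed.
private static class SketchStopwordAwareComponent implements ManagedResourceObserver {
  volatile ManagedWordSetResource stopwords; // set once the RestManager initializes the resource
  void register(SolrResourceLoader loader) {
    loader.getManagedResourceRegistry().registerManagedResource(
        "/schema/analysis/stopwords/sketch", ManagedWordSetResource.class, this);
  }
  @Override
  public void onManagedResourceInitialized(NamedList<?> args, ManagedResource res) throws SolrException {
    stopwords = (ManagedWordSetResource)res;
  }
}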
/**
* Tests {@link RestManager}'s responses to REST API requests on /config/managed
* and /schema/managed. Also tests {@link ManagedWordSetResource} functionality
* through the REST API.
*/
@Test
public void testRestManagerEndpoints() throws Exception {
// relies on these ManagedResources being activated in the schema-rest.xml used by this test
assertJQ("/schema/managed",
"/responseHeader/status==0");
/*
* TODO: can't assume these will be here unless schema-rest.xml includes these declarations
*
"/managedResources/[0]/class=='org.apache.solr.rest.schema.analysis.ManagedWordSetResource'",
"/managedResources/[0]/resourceId=='/schema/analysis/stopwords/english'",
"/managedResources/[1]/class=='org.apache.solr.rest.schema.analysis.ManagedSynonymFilterFactory$SynonymManager'",
"/managedResources/[1]/resourceId=='/schema/analysis/synonyms/english'");
*/
// no pre-existing managed config components
assertJQ("/config/managed", "/managedResources==[]");
// add a ManagedWordSetResource for managing protected words (for stemming)
String newEndpoint = "/schema/analysis/protwords/english";
assertJPut(newEndpoint, json("{ 'class':'solr.ManagedWordSetResource' }"), "/responseHeader/status==0");
assertJQ("/schema/managed"
,"/managedResources/[0]/class=='org.apache.solr.rest.schema.analysis.ManagedWordSetResource'"
,"/managedResources/[0]/resourceId=='/schema/analysis/protwords/english'");
// query the resource we just created
assertJQ(newEndpoint, "/wordSet/managedList==[]");
// add some words to this new word list manager
assertJPut(newEndpoint, JSONUtil.toJSON(Arrays.asList("this", "is", "a", "test")), "/responseHeader/status==0");
assertJQ(newEndpoint
,"/wordSet/managedList==['a','is','test','this']"
,"/wordSet/initArgs=={'ignoreCase':false}"); // make sure the default is serialized even if not specified
// Test for case-sensitivity - "Test" lookup should fail
assertJQ(newEndpoint + "/Test", "/responseHeader/status==404");
// Switch to case-insensitive
assertJPut(newEndpoint, json("{ 'initArgs':{ 'ignoreCase':'true' } }"), "/responseHeader/status==0");
// Test for case-insensitivity - "Test" lookup should succeed
assertJQ(newEndpoint + "/Test", "/responseHeader/status==0");
// Switch to case-sensitive - this request should fail: changing ignoreCase from true to false is not permitted
assertJPut(newEndpoint, json("{ 'initArgs':{ 'ignoreCase':false } }"), "/responseHeader/status==400");
// Test XML response format
assertQ(newEndpoint + "?wt=xml"
,"/response/lst[@name='responseHeader']/int[@name='status']=0"
,"/response/lst[@name='wordSet']/arr[@name='managedList']/str[1]='a'"
,"/response/lst[@name='wordSet']/arr[@name='managedList']/str[2]='is'"
,"/response/lst[@name='wordSet']/arr[@name='managedList']/str[3]='test'"
,"/response/lst[@name='wordSet']/arr[@name='managedList']/str[4]='this'");
// delete the one we created above
assertJDelete(newEndpoint, "/responseHeader/status==0");
// make sure it's really gone
assertJQ("/config/managed", "/managedResources==[]");
}
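/*
 * For reference (not part of the original patch): the REST interactions exercised above reduce
 * to the following HTTP sequence against a core's base URL; host, port, and core name are
 * whatever the test harness (or a live Solr instance) provides.
 *
 *   PUT    /schema/analysis/protwords/english   {"class":"solr.ManagedWordSetResource"}
 *   GET    /schema/analysis/protwords/english   -> {"wordSet":{"managedList":[]}, ...}
 *   PUT    /schema/analysis/protwords/english   ["this","is","a","test"]
 *   PUT    /schema/analysis/protwords/english   {"initArgs":{"ignoreCase":"true"}}
 *   DELETE /schema/analysis/protwords/english
 */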
}

View File

@ -31,8 +31,8 @@ public class TestClassNameShortening extends RestTestBase {
@BeforeClass
public static void init() throws Exception {
final SortedMap<ServletHolder,String> extraServlets = new TreeMap<>();
final ServletHolder solrRestApi = new ServletHolder("SolrRestApi", ServerServlet.class);
solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrRestApi");
final ServletHolder solrRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class);
solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi");
extraServlets.put(solrRestApi, "/schema/*"); // '/schema/*' matches '/schema', '/schema/', and '/schema/whatever...'
createJettyAndHarness(TEST_HOME(), "solrconfig-minimal.xml", "schema-class-name-shortening-on-serialization.xml",

View File

@ -47,8 +47,8 @@ public class TestManagedSchemaFieldResource extends RestTestBase {
FileUtils.copyDirectory(new File(TEST_HOME()), tmpSolrHome.getAbsoluteFile());
final SortedMap<ServletHolder,String> extraServlets = new TreeMap<>();
final ServletHolder solrRestApi = new ServletHolder("SolrRestApi", ServerServlet.class);
solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrRestApi");
final ServletHolder solrRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class);
solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi");
extraServlets.put(solrRestApi, "/schema/*"); // '/schema/*' matches '/schema', '/schema/', and '/schema/whatever...'
System.setProperty("managed.schema.mutable", "true");

View File

@ -31,8 +31,8 @@ public class TestSerializedLuceneMatchVersion extends RestTestBase {
@BeforeClass
public static void init() throws Exception {
final SortedMap<ServletHolder,String> extraServlets = new TreeMap<>();
final ServletHolder solrRestApi = new ServletHolder("SolrRestApi", ServerServlet.class);
solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrRestApi");
final ServletHolder solrRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class);
solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi");
extraServlets.put(solrRestApi, "/schema/*"); // '/schema/*' matches '/schema', '/schema/', and '/schema/whatever...'
createJettyAndHarness(TEST_HOME(), "solrconfig-minimal.xml", "schema-rest-lucene-match-version.xml",

View File

@ -58,8 +58,8 @@ public class TestCloudManagedSchemaAddField extends AbstractFullDistribZkTestBas
@Override
public SortedMap<ServletHolder,String> getExtraServlets() {
final SortedMap<ServletHolder,String> extraServlets = new TreeMap<>();
final ServletHolder solrRestApi = new ServletHolder("SolrRestApi", ServerServlet.class);
solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrRestApi");
final ServletHolder solrRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class);
solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi");
extraServlets.put(solrRestApi, "/schema/*"); // '/schema/*' matches '/schema', '/schema/', and '/schema/whatever...'
return extraServlets;
}

View File

@ -80,6 +80,31 @@ public class NamedList<T> implements Cloneable, Serializable, Iterable<Map.Entry
nvPairs = nameValueMapToList(nameValuePairs);
}
/**
* Creates a NamedList instance containing the "name,value" pairs from the given Map.
*
* <p>
* Modifying the contents of the Map after calling this constructor may or may not
* affect this NamedList; that behavior is unspecified and should not be relied upon.
* To modify this NamedList, use {@link #add(String, Object)} or {@link #remove(String)}.
* </p>
*
* @param nameValueMap the name value pairs
*/
public NamedList(Map<String,? extends T> nameValueMap) {
if (null == nameValueMap) {
nvPairs = new ArrayList<>();
} else {
nvPairs = new ArrayList<>(nameValueMap.size());
for (Map.Entry<String,? extends T> ent : nameValueMap.entrySet()) {
nvPairs.add(ent.getKey());
nvPairs.add(ent.getValue());
}
}
}
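// Usage sketch (illustrative; the values are hypothetical): the Map's entries become the initial
// name/value pairs, and later changes go through add()/remove() rather than the original Map.
//
//   Map<String,String> args = new LinkedHashMap<>();
//   args.put("ignoreCase", "true");
//   NamedList<String> initArgs = new NamedList<>(args);
//   initArgs.add("managedList", "protwords.txt");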
/**
* Creates an instance backed by an explicitly specified list of
* pairwise names/values.
@ -528,6 +553,30 @@ public class NamedList<T> implements Cloneable, Serializable, Iterable<Map.Entry
* not a Boolean or a String.
*/
public Boolean removeBooleanArg(final String name) {
Boolean bool = getBooleanArg(name);
if (null != bool) {
remove(name);
}
return bool;
}
/**
* Used for getting a boolean argument from a NamedList object. If the name
* is not present, returns null. If there is more than one value with that
* name, or if the value found is not a Boolean or a String, throws an
* exception. If there is only one value present and it is a Boolean or a
* String, the value is returned as a Boolean. The NamedList is not
* modified. See {@link #remove(String)}, {@link #removeAll(String)}
* and {@link #removeConfigArgs(String)} for additional ways of gathering
* configuration information from a NamedList.
*
* @param name The key to look up in the NamedList.
* @return The boolean value found.
* @throws SolrException
* If multiple values are found for the name or the value found is
* not a Boolean or a String.
*/
public Boolean getBooleanArg(final String name) {
Boolean bool;
List<T> values = getAll(name);
if (0 == values.size()) {
@ -544,12 +593,11 @@ public class NamedList<T> implements Cloneable, Serializable, Iterable<Map.Entry
bool = Boolean.parseBoolean(o.toString());
} else {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
"'" + name + "' must have type 'bool' or 'str'; found " + o.getClass());
"'" + name + "' must have type Boolean or CharSequence; found " + o.getClass());
}
remove(name);
return bool;
}
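// Illustrative sketch: getBooleanArg() only reads, while removeBooleanArg() also removes the entry.
//
//   NamedList<Object> args = new NamedList<>();
//   args.add("ignoreCase", "true");
//   Boolean read    = args.getBooleanArg("ignoreCase");    // Boolean.TRUE; entry still present
//   Boolean removed = args.removeBooleanArg("ignoreCase"); // Boolean.TRUE; entry now gone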
/**
* Used for getting one or many arguments from NamedList objects that hold
* configuration parameters. Finds all entries in the NamedList that match

View File

@ -422,8 +422,73 @@ abstract public class RestTestBase extends SolrJettyTestBase {
}
}
}
/**
* Deletes a resource and then matches some JSON test expressions against the
* response using the default double delta tolerance.
* @see org.apache.solr.JSONTestUtil#DEFAULT_DELTA
* @see #assertJDelete(String,double,String...)
*/
public static void assertJDelete(String request, String... tests) throws Exception {
assertJDelete(request, JSONTestUtil.DEFAULT_DELTA, tests);
}
/**
* Deletes a resource and then matches some JSON test expressions against the
* response using the specified double delta tolerance.
*/
public static void assertJDelete(String request, double delta, String... tests) throws Exception {
int queryStartPos = request.indexOf('?');
String query;
String path;
if (-1 == queryStartPos) {
query = "";
path = request;
} else {
query = request.substring(queryStartPos + 1);
path = request.substring(0, queryStartPos);
}
query = setParam(query, "wt", "json");
request = path + '?' + setParam(query, "indent", "on");
String response;
boolean failed = true;
try {
response = restTestHarness.delete(request);
failed = false;
} finally {
if (failed) {
log.error("REQUEST FAILED: " + request);
}
}
for (String test : tests) {
if (null == test || 0 == test.length()) continue;
String testJSON = json(test);
try {
failed = true;
String err = JSONTestUtil.match(response, testJSON, delta);
failed = false;
if (err != null) {
log.error("query failed JSON validation. error=" + err +
"\n expected =" + testJSON +
"\n response = " + response +
"\n request = " + request + "\n"
);
throw new RuntimeException(err);
}
} finally {
if (failed) {
log.error("JSON query validation threw an exception." +
"\n expected =" + testJSON +
"\n response = " + response +
"\n request = " + request + "\n"
);
}
}
}
}
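// Usage sketch, mirroring the other assertJ* helpers (the endpoint shown is the one exercised
// by TestRestManager):
//
//   assertJDelete("/schema/analysis/protwords/english", "/responseHeader/status==0");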
/**
* Ensures that the given param is included in the query with the given value.

View File

@ -23,6 +23,7 @@ import javax.xml.xpath.XPathExpressionException;
import org.apache.http.HttpEntity;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
@ -109,6 +110,18 @@ public class RestTestHarness extends BaseTestHarness {
return getResponse(httpPut);
}
/**
* Processes a DELETE request using a URL path (with no context path) + optional query params,
* e.g. "/schema/analysis/protwords/english", and returns the response content.
*
* @param request the URL path and optional query params
* @return The response to the DELETE request
*/
public String delete(String request) throws IOException {
HttpDelete httpDelete = new HttpDelete(getBaseURL() + request);
return getResponse(httpDelete);
}
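// Usage sketch (the resource path is just an example):
//
//   RestTestHarness harness = ...; // created by the test setup with the core's base URL
//   String json = harness.delete("/schema/analysis/protwords/english?indent=on");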
/**
* Processes a POST request using a URL path (with no context path) + optional query params,
* e.g. "/schema/fields/newfield", PUTs the given content, and returns the response content.
@ -160,7 +173,10 @@ public class RestTestHarness extends BaseTestHarness {
throw new RuntimeException(e);
}
}
/**
* Executes the given request and returns the response.
*/
private String getResponse(HttpUriRequest request) throws IOException {
HttpEntity entity = null;
try {

View File

@ -131,7 +131,16 @@
<servlet-class>org.restlet.ext.servlet.ServerServlet</servlet-class>
<init-param>
<param-name>org.restlet.application</param-name>
<param-value>org.apache.solr.rest.SolrRestApi</param-value>
<param-value>org.apache.solr.rest.SolrSchemaRestApi</param-value>
</init-param>
</servlet>
<servlet>
<servlet-name>SolrConfigRestApi</servlet-name>
<servlet-class>org.restlet.ext.servlet.ServerServlet</servlet-class>
<init-param>
<param-name>org.restlet.application</param-name>
<param-value>org.apache.solr.rest.SolrConfigRestApi</param-value>
</init-param>
</servlet>
@ -168,6 +177,11 @@
<url-pattern>/schema/*</url-pattern>
</servlet-mapping>
<servlet-mapping>
<servlet-name>SolrConfigRestApi</servlet-name>
<url-pattern>/config/*</url-pattern>
</servlet-mapping>
<mime-mapping>
<extension>.xsl</extension>
<!-- per http://www.w3.org/TR/2006/PR-xslt20-20061121/ -->