SOLR-4503: Add REST API methods to get schema information: fields, dynamicFields, fieldTypes, and copyFields. Restlet 2.1.1 is integrated and is used to service these requests.

Also fixes bugs in dynamic copyField logic described in SOLR-3798.
Also fixes a bug with proxied SolrCloud requests (SOLR-4210) when using the GET method.

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1453161 13f79535-47bb-0310-9956-ffa450edef68
Steven Rowe 2013-03-06 04:50:33 +00:00
parent f607a0caaa
commit 4618a5a0fe
58 changed files with 4556 additions and 589 deletions
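For illustration, the new read-only endpoints can be exercised with plain HTTP GETs. In the sketch below, the core name "collection1", the port, and the field and type names ("id", "cat", "text_general") are assumptions based on a stock example setup rather than anything in this commit; responses come back through the JSON response writer unless wt is specified.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;

public class SchemaRestApiDemo {
  public static void main(String[] args) throws Exception {
    String base = "http://localhost:8983/solr/collection1/schema";  // assumed core and port
    String[] paths = {
        "/fields",                          // all explicitly declared fields
        "/fields/id?showDefaults=true",     // a single field, including field type defaults
        "/dynamicfields",                   // all dynamic field patterns
        "/fieldtypes/text_general",         // a single field type, plus the fields that use it
        "/copyfields?source.fl=cat"         // copyFields restricted by source field
    };
    for (String path : paths) {
      BufferedReader in = new BufferedReader(
          new InputStreamReader(new URL(base + path).openStream(), "UTF-8"));
      try {
        for (String line; (line = in.readLine()) != null; ) {
          System.out.println(line);         // JSON by default; pass wt=xml etc. to change
        }
      } finally {
        in.close();
      }
    }
  }
}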

View File

@@ -49,6 +49,7 @@
<tika.version>1.3</tika.version>
<httpcomponents.version>4.2.3</httpcomponents.version>
<commons-io.version>2.1</commons-io.version>
<restlet.version>2.1.1</restlet.version>
<!-- RandomizedTesting library system properties -->
<tests.iters>1</tests.iters>
@@ -390,6 +391,16 @@
<artifactId>jetty-webapp</artifactId>
<version>${jetty.version}</version>
</dependency>
<dependency>
<groupId>org.restlet.jee</groupId>
<artifactId>org.restlet</artifactId>
<version>${restlet.version}</version>
</dependency>
<dependency>
<groupId>org.restlet.jee</groupId>
<artifactId>org.restlet.ext.servlet</artifactId>
<version>${restlet.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>

View File

@@ -135,6 +135,16 @@
<groupId>commons-fileupload</groupId>
<artifactId>commons-fileupload</artifactId>
</dependency>
<dependency>
<groupId>org.restlet.jee</groupId>
<artifactId>org.restlet</artifactId>
<version>${restlet.version}</version>
</dependency>
<dependency>
<groupId>org.restlet.jee</groupId>
<artifactId>org.restlet.ext.servlet</artifactId>
<version>${restlet.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>

View File

@@ -76,6 +76,13 @@
</mailingList>
</mailingLists>
<inceptionYear>2006</inceptionYear>
<repositories>
<repository>
<id>maven-restlet</id>
<name>Public online Restlet repository</name>
<url>http://maven.restlet.org</url>
</repository>
</repositories>
<dependencies>
<dependency>
<groupId>org.slf4j</groupId>

View File

@@ -29,6 +29,7 @@ import java.nio.charset.CharsetDecoder;
import java.nio.charset.CodingErrorAction;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
@@ -49,6 +50,9 @@ import java.util.regex.PatternSyntaxException;
*/
public abstract class AbstractAnalysisFactory {
/** The original args, before init() processes them */
private Map<String,String> originalArgs;
/** The init args */
protected Map<String,String> args;
@@ -59,13 +63,18 @@ public abstract class AbstractAnalysisFactory {
* Initialize this factory via a set of key-value pairs.
*/
public void init(Map<String,String> args) {
this.args = args;
originalArgs = Collections.unmodifiableMap(args);
this.args = new HashMap<String,String>(args);
}
public Map<String,String> getArgs() {
return args;
}
public Map<String,String> getOriginalArgs() {
return originalArgs;
}
/** This method may be called from the {@link org.apache.lucene.analysis.util.TokenizerFactory#create(java.io.Reader)}
* or {@link org.apache.lucene.analysis.util.TokenFilterFactory#create(org.apache.lucene.analysis.TokenStream)} methods
* to inform the user that a {@link #luceneMatchVersion} is required for this factory */
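A minimal sketch of what the preserved argument map enables; the factory and its "verbose" parameter are hypothetical, not part of this commit. init() now snapshots the caller's key/value pairs immutably while subclasses remain free to consume entries from the mutable copy returned by getArgs():

import java.util.Map;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.util.TokenFilterFactory;

public class ExampleFilterFactory extends TokenFilterFactory {
  private boolean verbose;

  @Override
  public void init(Map<String,String> args) {
    super.init(args);  // snapshots originalArgs and copies args into a mutable map
    verbose = "true".equals(getArgs().remove("verbose"));  // consumed from the copy
    // getOriginalArgs() still reports "verbose"; getArgs() no longer contains it.
  }

  @Override
  public TokenStream create(TokenStream input) {
    return input;  // pass-through for brevity; a real factory would wrap the stream
  }
}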

View File

@@ -30,6 +30,7 @@
<resolvers>
<ibiblio name="sonatype-releases" root="http://oss.sonatype.org/content/repositories/releases" m2compatible="true" />
<ibiblio name="maven.restlet.org" root="http://maven.restlet.org" m2compatible="true" />
<!-- you might need to tweak this from china so it works -->
<ibiblio name="working-chinese-mirror" root="http://mirror.netcologne.de/maven2" m2compatible="true" />
@@ -48,6 +49,7 @@
<!-- <resolver ref="local-maven-2" /> -->
<resolver ref="main"/>
<resolver ref="sonatype-releases" />
<resolver ref="maven.restlet.org" />
<resolver ref="working-chinese-mirror" />
</chain>
</resolvers>

View File

@@ -106,6 +106,10 @@ New Features
override the default for the field type.
(hossman)
* SOLR-4503: Add REST API methods, via Restlet integration, for reading schema
elements, at /schema/fields/, /schema/dynamicfields/, /schema/fieldtypes/,
and /schema/copyfields/. (Steve Rowe)
Bug Fixes
----------------------
@@ -210,6 +214,10 @@ Bug Fixes
* SOLR-4518: Improved CurrencyField error messages when attempting to
use a Currency that is not supported by the current JVM. (hossman)
* SOLR-3798: Fix copyField implementation in IndexSchema to handle
dynamic field references that aren't string-equal to the name of
the referenced dynamic field. (Steve Rowe)
Optimizations
----------------------

View File

@@ -503,3 +503,15 @@ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
=========================================================================
== Restlet Notice ==
=========================================================================
Copyright (C) 2005-2013 Restlet S.A.S.
Restlet is a registered trademark of Restlet S.A.S.
This product contains software developed by the Restlet project.
See http://www.restlet.org/

View File

@@ -30,6 +30,8 @@
<dependency org="org.objenesis" name="objenesis" rev="1.2" transitive="false"/>
<dependency org="com.spatial4j" name="spatial4j" rev="0.3" transitive="false"/>
<dependency org="javax.servlet" name="javax.servlet-api" rev="3.0.1" transitive="false"/>
<dependency org="org.restlet.jee" name="org.restlet" rev="2.1.1" transitive="false"/>
<dependency org="org.restlet.jee" name="org.restlet.ext.servlet" rev="2.1.1" transitive="false"/>
<exclude org="*" ext="*" matcher="regexp" type="${ivy.exclude.types}"/>
</dependencies>
</ivy-module>

View File

@@ -18,9 +18,13 @@
package org.apache.solr.client.solrj.embedded;
import java.io.IOException;
import java.util.Collections;
import java.util.EnumSet;
import java.util.LinkedList;
import java.util.Map;
import java.util.Random;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicLong;
import java.net.URL;
@@ -39,8 +43,10 @@ import javax.servlet.http.HttpServletResponse;
import org.apache.solr.servlet.SolrDispatchFilter;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.bio.SocketConnector;
import org.eclipse.jetty.server.handler.ContextHandlerCollection;
import org.eclipse.jetty.server.nio.SelectChannelConnector;
import org.eclipse.jetty.server.ssl.SslConnector;
import org.eclipse.jetty.server.ssl.SslSocketConnector;
@@ -49,6 +55,7 @@ import org.eclipse.jetty.server.handler.GzipHandler;
import org.eclipse.jetty.server.session.HashSessionIdManager;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.util.component.LifeCycle;
import org.eclipse.jetty.util.log.Logger;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
@@ -87,6 +94,9 @@ public class JettySolrRunner {
private String coreNodeName;
/** Maps servlet holders (i.e. factories: class + init params) to path specs */
private SortedMap<ServletHolder,String> extraServlets = new TreeMap<ServletHolder,String>();
public static class DebugFilter implements Filter {
public int requestsToKeep = 10;
private AtomicLong nRequests = new AtomicLong();
@@ -151,6 +161,19 @@
this.schemaFilename = schemaFileName;
}
/**
* Constructor taking an ordered list of additional (servlet holder -> path spec) mappings
* to add to the servlet context
*/
public JettySolrRunner(String solrHome, String context, int port,
String solrConfigFilename, String schemaFileName, boolean stopAtShutdown,
SortedMap<ServletHolder,String> extraServlets) {
if (null != extraServlets) { this.extraServlets.putAll(extraServlets); }
this.init(solrHome, context, port, stopAtShutdown);
this.solrConfigFilename = solrConfigFilename;
this.schemaFilename = schemaFileName;
}
private void init(String solrHome, String context, int port, boolean stopAtShutdown) {
this.context = context;
server = new Server(port);
@@ -285,6 +308,10 @@
// FilterHolder fh = new FilterHolder(filter);
debugFilter = root.addFilter(DebugFilter.class, "*", EnumSet.of(DispatcherType.REQUEST) );
dispatchFilter = root.addFilter(SolrDispatchFilter.class, "*", EnumSet.of(DispatcherType.REQUEST) );
for (ServletHolder servletHolder : extraServlets.keySet()) {
String pathSpec = extraServlets.get(servletHolder);
root.addServlet(servletHolder, pathSpec);
}
if (solrConfigFilename != null) System.clearProperty("solrconfig");
if (schemaFilename != null) System.clearProperty("schema");
System.clearProperty("solr.solr.home");
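A sketch of the new constructor in use; the solr home path, port, and servlet holder name are placeholders, and wiring Restlet's ServerServlet to host SchemaRestApi under /schema/* is one plausible way for tests to expose the new endpoints alongside SolrDispatchFilter:

import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
import org.eclipse.jetty.servlet.ServletHolder;
import org.restlet.ext.servlet.ServerServlet;

public class ExtraServletsExample {
  public static void main(String[] args) throws Exception {
    SortedMap<ServletHolder,String> extraServlets = new TreeMap<ServletHolder,String>();
    ServletHolder schemaRestApi = new ServletHolder("SchemaRestApi", ServerServlet.class);
    schemaRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SchemaRestApi");
    extraServlets.put(schemaRestApi, "/schema/*");             // path spec for the extra servlet

    JettySolrRunner jetty = new JettySolrRunner("/path/to/solr/home", "/solr", 8983,
        "solrconfig.xml", "schema.xml", true, extraServlets);  // the new constructor
    jetty.start();
  }
}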

View File

@@ -1781,7 +1781,6 @@ public final class SolrCore implements SolrInfoMBean {
}
}
public void execute(SolrRequestHandler handler, SolrQueryRequest req, SolrQueryResponse rsp) {
if (handler==null) {
String msg = "Null Request Handler '" +
@@ -1791,45 +1790,40 @@
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, msg);
}
// setup response header and handle request
final NamedList<Object> responseHeader = new SimpleOrderedMap<Object>();
rsp.add("responseHeader", responseHeader);
// toLog is a local ref to the same NamedList used by the request
NamedList<Object> toLog = rsp.getToLog();
// for back compat, we set these now just in case other code
// is expecting them during handleRequest
toLog.add("webapp", req.getContext().get("webapp"));
toLog.add("path", req.getContext().get("path"));
toLog.add("params", "{" + req.getParamString() + "}");
preDecorateResponse(req, rsp);
// TODO: this doesn't seem to be working correctly and causes problems with the example server and distrib (for example /spell)
// if (req.getParams().getBool(ShardParams.IS_SHARD,false) && !(handler instanceof SearchHandler))
// throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,"isShard is only acceptable with search handlers");
handler.handleRequest(req,rsp);
setResponseHeaderValues(handler,req,rsp);
if (log.isInfoEnabled() && toLog.size() > 0) {
StringBuilder sb = new StringBuilder(logid);
for (int i=0; i<toLog.size(); i++) {
String name = toLog.getName(i);
Object val = toLog.getVal(i);
if (name != null) {
sb.append(name).append('=');
postDecorateResponse(handler, req, rsp);
if (log.isInfoEnabled() && rsp.getToLog().size() > 0) {
log.info(rsp.getToLogAsString(logid));
}
sb.append(val).append(' ');
}
log.info(sb.toString());
public static void preDecorateResponse(SolrQueryRequest req, SolrQueryResponse rsp) {
// setup response header
final NamedList<Object> responseHeader = new SimpleOrderedMap<Object>();
rsp.add("responseHeader", responseHeader);
// toLog is a local ref to the same NamedList used by the response
NamedList<Object> toLog = rsp.getToLog();
// for back compat, we set these now just in case other code
// is expecting them during handleRequest
toLog.add("webapp", req.getContext().get("webapp"));
toLog.add("path", req.getContext().get("path"));
toLog.add("params", "{" + req.getParamString() + "}");
}
}
public static void setResponseHeaderValues(SolrRequestHandler handler, SolrQueryRequest req, SolrQueryResponse rsp) {
/** Put status, QTime, and possibly request handler and params, in the response header */
public static void postDecorateResponse
(SolrRequestHandler handler, SolrQueryRequest req, SolrQueryResponse rsp) {
// TODO should check that responseHeader has not been replaced by handler
NamedList<Object> responseHeader = rsp.getResponseHeader();
final int qtime=(int)(rsp.getEndTime() - req.getStartTime());
@@ -1850,7 +1844,7 @@
}
SolrParams params = req.getParams();
if( params.getBool(CommonParams.HEADER_ECHO_HANDLER, false) ) {
if( null != handler && params.getBool(CommonParams.HEADER_ECHO_HANDLER, false) ) {
responseHeader.add("handler", handler.getName() );
}
@@ -1870,7 +1864,6 @@
}
}
final public static void log(Throwable e) {
SolrException.log(log,null,e);
}
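The split makes the response header and logging bookkeeping callable outside execute(). A rough sketch of a hypothetical caller, mirroring the pattern the new Restlet resources below follow:

import org.apache.solr.core.SolrCore;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.slf4j.Logger;

public class DecorateResponseSketch {
  /** Hypothetical request-servicing code that does not go through SolrCore.execute(). */
  public static void serve(SolrCore core, SolrQueryRequest req, SolrQueryResponse rsp, Logger log) {
    SolrCore.preDecorateResponse(req, rsp);         // responseHeader plus webapp/path/params for the log
    try {
      // ... populate rsp here ...
    } catch (Exception e) {
      rsp.setException(e);
    }
    SolrCore.postDecorateResponse(null, req, rsp);  // status and QTime; a null handler is tolerated
    if (log.isInfoEnabled() && rsp.getToLog().size() > 0) {
      log.info(rsp.getToLogAsString(core.getLogId()));
    }
  }
}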

View File

@@ -216,6 +216,20 @@ public class SolrQueryResponse {
return toLog;
}
/** Returns a string of the form "logid name1=value1 name2=value2 ..." */
public String getToLogAsString(String logid) {
StringBuilder sb = new StringBuilder(logid);
for (int i=0; i<toLog.size(); i++) {
String name = toLog.getName(i);
Object val = toLog.getVal(i);
if (name != null) {
sb.append(name).append('=');
}
sb.append(val).append(' ');
}
return sb.toString();
}
/**
* Enables or disables the emission of HTTP caching headers for this response.
* @param httpCaching true=emit caching headers, false otherwise

View File

@@ -0,0 +1,106 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.schema.CopyField;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import org.restlet.resource.ResourceException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
/**
* Base class for Schema Field and DynamicField requests.
*/
abstract class BaseFieldResource extends BaseSchemaResource {
protected static final String INCLUDE_DYNAMIC_PARAM = "includeDynamic";
private static final String DYNAMIC_BASE = "dynamicBase";
private static final String UNIQUE_KEY = "uniqueKey";
private static final String SHOW_DEFAULTS = "showDefaults";
private LinkedHashMap<String,SimpleOrderedMap<Object>> requestedFields;
private boolean showDefaults;
protected LinkedHashMap<String,SimpleOrderedMap<Object>> getRequestedFields() {
return requestedFields;
}
protected BaseFieldResource() {
super();
}
/**
* Pulls the "fl" param from the request and splits it to get the
* requested list of fields. The (Dynamic)FieldCollectionResource classes
* will then restrict the fields sent back in the response to those
* on this list. The (Dynamic)FieldResource classes ignore this list,
* since the (dynamic) field is specified in the URL path, rather than
* in a query parameter.
* <p/>
* Also pulls the "showDefaults" param from the request, for use by all
* subclasses to include default values from the associated field type
* in the response. By default this param is off.
*/
@Override
public void doInit() throws ResourceException {
super.doInit();
if (isExisting()) {
String flParam = getSolrRequest().getParams().get(CommonParams.FL);
if (null != flParam) {
String[] fields = flParam.trim().split("[,\\s]+");
if (fields.length > 0) {
requestedFields = new LinkedHashMap<String,SimpleOrderedMap<Object>>();
for (String field : fields) {
if ( ! field.isEmpty()) {
requestedFields.put(field, null);
}
}
}
}
showDefaults = getSolrRequest().getParams().getBool(SHOW_DEFAULTS, false);
}
}
/** Get the properties for a given field.
*
* @param field not required to exist in the schema
*/
protected SimpleOrderedMap<Object> getFieldProperties(SchemaField field) {
if (null == field) {
return null;
}
SimpleOrderedMap<Object> properties = field.getNamedPropertyValues(showDefaults);
if ( ! getSchema().getFields().containsKey(field.getName())) {
String dynamicBase = getSchema().getDynamicPattern(field.getName());
if ( ! field.getName().equals(dynamicBase)) {
// Don't add dynamicBase property if it's identical to the field name.
properties.add(DYNAMIC_BASE, getSchema().getDynamicPattern(field.getName()));
}
}
if (field == getSchema().getUniqueKeyField()) {
properties.add(UNIQUE_KEY, true);
}
return properties;
}
}

View File

@@ -0,0 +1,61 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.schema.FieldType;
import org.restlet.resource.ResourceException;
import java.util.LinkedHashMap;
import java.util.List;
/**
* Base class for the FieldType resource classes.
*/
abstract class BaseFieldTypeResource extends BaseSchemaResource {
private static final String FIELDS = "fields";
private static final String DYNAMIC_FIELDS = "dynamicFields";
private boolean showDefaults;
protected BaseFieldTypeResource() {
super();
}
@Override
public void doInit() throws ResourceException {
super.doInit();
showDefaults = getSolrRequest().getParams().getBool(SHOW_DEFAULTS, false);
}
/** Used by subclasses to collect field type properties */
protected SimpleOrderedMap<Object> getFieldTypeProperties(FieldType fieldType) {
SimpleOrderedMap<Object> properties = fieldType.getNamedPropertyValues(showDefaults);
properties.add(FIELDS, getFieldsWithFieldType(fieldType));
properties.add(DYNAMIC_FIELDS, getDynamicFieldsWithFieldType(fieldType));
return properties;
}
/** Return a list of names of Fields that have the given FieldType */
protected abstract List<String> getFieldsWithFieldType(FieldType fieldType);
/** Return a list of names of DynamicFields that have the given FieldType */
protected abstract List<String> getDynamicFieldsWithFieldType(FieldType fieldType);
}

View File

@@ -0,0 +1,215 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.ContentStreamBase;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrRequestInfo;
import org.apache.solr.response.BinaryQueryResponseWriter;
import org.apache.solr.response.QueryResponseWriter;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.servlet.ResponseUtils;
import org.apache.solr.util.FastWriter;
import org.restlet.data.MediaType;
import org.restlet.data.Method;
import org.restlet.data.Status;
import org.restlet.representation.OutputRepresentation;
import org.restlet.resource.ResourceException;
import org.restlet.resource.ServerResource;
import org.slf4j.Logger;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.io.Writer;
import java.net.URLDecoder;
import java.nio.charset.Charset;
/**
* Base class of all Solr Schema Restlet resource classes.
*/
abstract class BaseSchemaResource extends ServerResource {
private static final Charset UTF8 = Charset.forName("UTF-8");
protected static final String SHOW_DEFAULTS = "showDefaults";
private SolrCore solrCore;
private IndexSchema schema;
private SolrQueryRequest solrRequest;
private SolrQueryResponse solrResponse;
private QueryResponseWriter responseWriter;
private String contentType;
private boolean doIndent;
protected SolrCore getSolrCore() { return solrCore; }
protected IndexSchema getSchema() { return schema; }
protected SolrQueryRequest getSolrRequest() { return solrRequest; }
protected SolrQueryResponse getSolrResponse() { return solrResponse; }
protected String getContentType() { return contentType; }
protected BaseSchemaResource() {
super();
doIndent = true; // default to indenting
}
/**
* Pulls the SolrQueryRequest constructed in SolrDispatchFilter
* from the SolrRequestInfo thread local, then gets the SolrCore
* and IndexSchema and sets up the response writer.
* <p/>
* If an error occurs during initialization, setExisting(false) is
* called and an error status code and message are set; in this case,
* Restlet will not continue servicing the request (by calling the
* method annotated to associate it with GET, etc.), but rather will
* send an error response.
*/
@Override
public void doInit() throws ResourceException {
super.doInit();
setNegotiated(false); // Turn off content negotiation for now
if (isExisting()) {
try {
SolrRequestInfo solrRequestInfo = SolrRequestInfo.getRequestInfo();
if (null == solrRequestInfo) {
final String message = "No handler or core found in " + getRequest().getOriginalRef().getPath();
doError(Status.CLIENT_ERROR_BAD_REQUEST, message);
setExisting(false);
} else {
solrRequest = solrRequestInfo.getReq();
if (null == solrRequest) {
final String message = "No handler or core found in " + getRequest().getOriginalRef().getPath();
doError(Status.CLIENT_ERROR_BAD_REQUEST, message);
setExisting(false);
} else {
solrResponse = solrRequestInfo.getRsp();
solrCore = solrRequest.getCore();
schema = solrCore.getSchema();
String responseWriterName = solrRequest.getParams().get(CommonParams.WT);
if (null == responseWriterName) {
responseWriterName = "json"; // Default to json writer
}
String indent = solrRequest.getParams().get("indent");
if (null != indent && ("".equals(indent) || "off".equals(indent))) {
doIndent = false;
} else { // indent by default
ModifiableSolrParams newParams = new ModifiableSolrParams(solrRequest.getParams());
newParams.remove("indent"); // drop any existing value before forcing indentation on
newParams.add("indent", "on");
solrRequest.setParams(newParams);
}
responseWriter = solrCore.getQueryResponseWriter(responseWriterName);
contentType = responseWriter.getContentType(solrRequest, solrResponse);
final String path = getRequest().getRootRef().getPath();
final String firstPathElement = path.substring(0, path.indexOf("/", 1));
solrRequest.getContext().put("webapp", firstPathElement); // Context path
SolrCore.preDecorateResponse(solrRequest, solrResponse);
}
}
} catch (Throwable t) {
setExisting(false);
throw new ResourceException(t);
}
}
}
/**
* This class serves as an adapter between Restlet and Solr's response writers.
*/
public class SolrOutputRepresentation extends OutputRepresentation {
SolrOutputRepresentation() {
// No normalization, in case of a custom media type
super(MediaType.valueOf(contentType));
// TODO: For now, don't send the Vary: header, but revisit if/when content negotiation is added
getDimensions().clear();
}
/** Called by Restlet to get the response body */
@Override
public void write(OutputStream outputStream) throws IOException {
if (getRequest().getMethod() != Method.HEAD) {
if (responseWriter instanceof BinaryQueryResponseWriter) {
BinaryQueryResponseWriter binWriter = (BinaryQueryResponseWriter)responseWriter;
binWriter.write(outputStream, solrRequest, solrResponse);
} else {
String charset = ContentStreamBase.getCharsetFromContentType(contentType);
Writer out = (charset == null || charset.equalsIgnoreCase("UTF-8"))
? new OutputStreamWriter(outputStream, UTF8)
: new OutputStreamWriter(outputStream, charset);
out = new FastWriter(out);
responseWriter.write(out, solrRequest, solrResponse);
out.flush();
}
}
}
}
/**
* Deal with an exception on the SolrResponse, fill in response header info,
* and log the accumulated messages on the SolrResponse.
*/
protected void handlePostExecution(Logger log) {
handleException(log);
// TODO: should status=0 (success?) be left as-is in the response header?
SolrCore.postDecorateResponse(null, solrRequest, solrResponse);
if (log.isInfoEnabled() && solrResponse.getToLog().size() > 0) {
log.info(solrResponse.getToLogAsString(solrCore.getLogId()));
}
}
/**
* If there is an exception on the SolrResponse:
* <ul>
* <li>error info is added to the SolrResponse;</li>
* <li>the response status code is set to the error code from the exception; and</li>
* <li>the exception message is added to the list of things to be logged.</li>
* </ul>
*/
protected void handleException(Logger log) {
Exception exception = getSolrResponse().getException();
if (null != exception) {
NamedList info = new SimpleOrderedMap();
int code = ResponseUtils.getErrorInfo(exception, info, log);
setStatus(Status.valueOf(code));
getSolrResponse().add("error", info);
String message = (String)info.get("msg");
if (null != message && ! message.trim().isEmpty()) {
getSolrResponse().getToLog().add("msg", "{" + message.trim() + "}");
}
}
}
/** Decode URL-encoded strings as UTF-8, and avoid converting "+" to space */
protected static String urlDecode(String str) throws UnsupportedEncodingException {
return URLDecoder.decode(str.replace("+", "%2B"), "UTF-8");
}
}
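A worked example of the plus-preserving decode above; the input string is illustrative. Plain URLDecoder.decode() turns a literal "+" in a field name into a space, while the helper keeps it and still decodes percent escapes:

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;

public class UrlDecodeExample {
  /** Same logic as the urlDecode() helper above, repeated so the example is self-contained. */
  static String urlDecode(String str) throws UnsupportedEncodingException {
    return URLDecoder.decode(str.replace("+", "%2B"), "UTF-8");
  }

  public static void main(String[] args) throws Exception {
    String raw = "foo+bar%2Fbaz";                         // e.g. a field name in a URL path segment
    System.out.println(URLDecoder.decode(raw, "UTF-8"));  // prints "foo bar/baz": "+" became a space
    System.out.println(urlDecode(raw));                   // prints "foo+bar/baz": "+" is preserved
  }
}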

View File

@@ -0,0 +1,160 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.schema.CopyField;
import org.apache.solr.schema.IndexSchema;
import org.restlet.representation.Representation;
import org.restlet.resource.ResourceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
/**
* This class responds to requests at /solr/(corename)/schema/copyfields
*
* <p/>
*
* To restrict the set of copyFields in the response, specify one or both
* of the following as query parameters, with values as space and/or comma
* separated dynamic or explicit field names:
*
* <ul>
* <li>dest.fl: include copyFields that have one of these as a destination</li>
* <li>source.fl: include copyFields that have one of these as a source</li>
* </ul>
*
* If both dest.fl and source.fl are given as query parameters, the copyfields
* in the response will be restricted to those that match any of the destinations
* in dest.fl and also match any of the sources in source.fl.
*/
public class CopyFieldCollectionResource extends BaseFieldResource implements GETable {
private static final Logger log = LoggerFactory.getLogger(CopyFieldCollectionResource.class);
private static final String SOURCE = "source";
private static final String DESTINATION = "dest";
private static final String SOURCE_FIELD_LIST = SOURCE + "." + CommonParams.FL;
private static final String DESTINATION_FIELD_LIST = DESTINATION + "." + CommonParams.FL;
private static final String MAX_CHARS = "maxChars";
private static final String SOURCE_DYNAMIC_BASE = "sourceDynamicBase";
private static final String DESTINATION_DYNAMIC_BASE = "destDynamicBase";
private Set<String> sourceFields;
private Set<String> destinationFields;
public CopyFieldCollectionResource() {
super();
}
@Override
public void doInit() throws ResourceException {
super.doInit();
if (isExisting()) {
String sourceFieldListParam = getSolrRequest().getParams().get(SOURCE_FIELD_LIST);
if (null != sourceFieldListParam) {
String[] fields = sourceFieldListParam.trim().split("[,\\s]+");
if (fields.length > 0) {
sourceFields = new HashSet<String>(Arrays.asList(fields));
sourceFields.remove(""); // Remove empty values, if any
}
}
String destinationFieldListParam = getSolrRequest().getParams().get(DESTINATION_FIELD_LIST);
if (null != destinationFieldListParam) {
String[] fields = destinationFieldListParam.trim().split("[,\\s]+");
if (fields.length > 0) {
destinationFields = new HashSet<String>(Arrays.asList(fields));
destinationFields.remove(""); // Remove empty values, if any
}
}
}
}
@Override
public Representation get() {
try {
final List<SimpleOrderedMap<Object>> props = new ArrayList<SimpleOrderedMap<Object>>();
SortedMap<String,List<CopyField>> sortedCopyFields
= new TreeMap<String, List<CopyField>>(getSchema().getCopyFieldsMap());
for (List<CopyField> copyFields : sortedCopyFields.values()) {
Collections.sort(copyFields, new Comparator<CopyField>() {
@Override
public int compare(CopyField cf1, CopyField cf2) {
// source should all be the same => already sorted
return cf1.getDestination().getName().compareTo(cf2.getDestination().getName());
}
});
for (CopyField copyField : copyFields) {
final String source = copyField.getSource().getName();
final String destination = copyField.getDestination().getName();
if ( (null == sourceFields || sourceFields.contains(source))
&& (null == destinationFields || destinationFields.contains(destination))) {
SimpleOrderedMap<Object> copyFieldProps = new SimpleOrderedMap<Object>();
copyFieldProps.add(SOURCE, source);
copyFieldProps.add(DESTINATION, destination);
if (0 != copyField.getMaxChars()) {
copyFieldProps.add(MAX_CHARS, copyField.getMaxChars());
}
props.add(copyFieldProps);
}
}
}
for (IndexSchema.DynamicCopy dynamicCopy : getSchema().getDynamicCopyFields()) {
final String source = dynamicCopy.getRegex();
final String destination = dynamicCopy.getDestFieldName();
if ( (null == sourceFields || sourceFields.contains(source))
&& (null == destinationFields || destinationFields.contains(destination))) {
SimpleOrderedMap<Object> dynamicCopyProps = new SimpleOrderedMap<Object>();
dynamicCopyProps.add(SOURCE, dynamicCopy.getRegex());
IndexSchema.DynamicField sourceDynamicBase = dynamicCopy.getSourceDynamicBase();
if (null != sourceDynamicBase) {
dynamicCopyProps.add(SOURCE_DYNAMIC_BASE, sourceDynamicBase.getRegex());
}
dynamicCopyProps.add(DESTINATION, dynamicCopy.getDestFieldName());
IndexSchema.DynamicField destDynamicBase = dynamicCopy.getDestDynamicBase();
if (null != destDynamicBase) {
dynamicCopyProps.add(DESTINATION_DYNAMIC_BASE, destDynamicBase.getRegex());
}
if (0 != dynamicCopy.getMaxChars()) {
dynamicCopyProps.add(MAX_CHARS, dynamicCopy.getMaxChars());
}
props.add(dynamicCopyProps);
}
}
getSolrResponse().add(SchemaRestApi.COPY_FIELDS, props);
} catch (Exception e) {
getSolrResponse().setException(e);
}
handlePostExecution(log);
return new SolrOutputRepresentation();
}
}

View File

@@ -0,0 +1,57 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.restlet.representation.Representation;
import org.restlet.resource.ResourceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class is invoked when a request URL starts with /schema/
* but then further path elements don't match any defined resources.
*/
public class DefaultSchemaResource extends BaseSchemaResource implements GETable {
private static final Logger log = LoggerFactory.getLogger(DefaultSchemaResource.class);
public DefaultSchemaResource() {
super();
}
@Override
public void doInit() throws ResourceException {
super.doInit();
}
@Override
public Representation get() {
try {
final String path = getRequest().getOriginalRef().getPath();
final String message = "Unknown path '" + path + "'";
throw new SolrException(ErrorCode.NOT_FOUND, message);
} catch (Exception e) {
getSolrResponse().setException(e);
}
handlePostExecution(log);
return new SolrOutputRepresentation();
}
}

View File

@@ -0,0 +1,95 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.SchemaField;
import org.restlet.representation.Representation;
import org.restlet.resource.ResourceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* This class responds to requests at /solr/(corename)/schema/dynamicfields
* <p/>
* To restrict the set of dynamic fields in the response, specify a comma
* and/or space separated list of dynamic field patterns in the "fl" query
* parameter.
*/
public class DynamicFieldCollectionResource extends BaseFieldResource implements GETable {
private static final Logger log = LoggerFactory.getLogger(DynamicFieldCollectionResource.class);
private final static String INTERNAL_POLY_FIELD_PREFIX = "*" + FieldType.POLY_FIELD_SEPARATOR;
public DynamicFieldCollectionResource() {
super();
}
@Override
public void doInit() throws ResourceException {
super.doInit();
}
@Override
public Representation get() {
try {
SchemaField[] dynamicFields = getSchema().getDynamicFieldPrototypes();
List<SimpleOrderedMap<Object>> props = new ArrayList<SimpleOrderedMap<Object>>(dynamicFields.length);
if (null != getRequestedFields()) {
if (0 == getRequestedFields().size()) {
String message = "Empty " + CommonParams.FL + " parameter value";
throw new SolrException(ErrorCode.BAD_REQUEST, message);
}
for (SchemaField prototype : dynamicFields) {
if (getRequestedFields().containsKey(prototype.getName())) {
getRequestedFields().put(prototype.getName(), getFieldProperties(prototype));
}
}
// Use the same order as the fl parameter
for (Map.Entry<String,SimpleOrderedMap<Object>> requestedField : getRequestedFields().entrySet()) {
SimpleOrderedMap<Object> fieldProperties = requestedField.getValue();
// Should there be some form of error condition
// if one or more of the requested fields were not found?
if (null != fieldProperties) {
props.add(fieldProperties);
}
}
} else {
for (SchemaField prototype : dynamicFields) {
// omit internal polyfields
if ( ! prototype.getName().startsWith(INTERNAL_POLY_FIELD_PREFIX)) {
props.add(getFieldProperties(prototype));
}
}
}
getSolrResponse().add(SchemaRestApi.DYNAMIC_FIELDS, props);
} catch (Exception e) {
getSolrResponse().setException(e);
}
handlePostExecution(log);
return new SolrOutputRepresentation();
}
}

View File

@@ -0,0 +1,89 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.schema.SchemaField;
import org.restlet.representation.Representation;
import org.restlet.resource.ResourceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.UnsupportedEncodingException;
/**
* This class responds to requests at /solr/(corename)/schema/dynamicfields/pattern
* where pattern is a field name pattern (with an asterisk at the beginning or the end).
*/
public class DynamicFieldResource extends BaseFieldResource implements GETable {
private static final Logger log = LoggerFactory.getLogger(DynamicFieldResource.class);
private static final String DYNAMIC_FIELD = "dynamicfield";
private String fieldNamePattern;
public DynamicFieldResource() {
super();
}
/**
* Gets the field name pattern from the request attribute where it's stored by Restlet.
*/
@Override
public void doInit() throws ResourceException {
super.doInit();
if (isExisting()) {
fieldNamePattern = (String)getRequestAttributes().get(SchemaRestApi.NAME_VARIABLE);
try {
fieldNamePattern = null == fieldNamePattern ? "" : urlDecode(fieldNamePattern.trim()).trim();
} catch (UnsupportedEncodingException e) {
throw new ResourceException(e);
}
}
}
@Override
public Representation get() {
try {
if (fieldNamePattern.isEmpty()) {
final String message = "Dynamic field name is missing";
throw new SolrException(ErrorCode.BAD_REQUEST, message);
} else {
SchemaField field = null;
for (SchemaField prototype : getSchema().getDynamicFieldPrototypes()) {
if (prototype.getName().equals(fieldNamePattern)) {
field = prototype;
break;
}
}
if (null == field) {
final String message = "Dynamic field '" + fieldNamePattern + "' not found.";
throw new SolrException(ErrorCode.NOT_FOUND, message);
} else {
getSolrResponse().add(DYNAMIC_FIELD, getFieldProperties(field));
}
}
} catch (Exception e) {
getSolrResponse().setException(e);
}
handlePostExecution(log);
return new SolrOutputRepresentation();
}
}

View File

@@ -0,0 +1,96 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.restlet.representation.Representation;
import org.restlet.resource.ResourceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* This class responds to requests at /solr/(corename)/schema/fields
* <p/>
* One query parameter is supported:
* <ul>
* <li>
* "fl": a comma- and/or space-separated list of fields to send properties
* for in the response, rather than the default: all of them.
* </li>
* </ul>
*/
public class FieldCollectionResource extends BaseFieldResource implements GETable {
private static final Logger log = LoggerFactory.getLogger(FieldCollectionResource.class);
public FieldCollectionResource() {
super();
}
@Override
public void doInit() throws ResourceException {
super.doInit();
}
@Override
public Representation get() {
try {
// Get all explicitly defined fields from the schema
Set<String> fieldNames = new HashSet<String>(getSchema().getFields().keySet());
final List<SimpleOrderedMap<Object>> fieldCollectionProperties = new ArrayList<SimpleOrderedMap<Object>>(fieldNames.size());
if (null == getRequestedFields()) {
for (String fieldName : fieldNames) {
fieldCollectionProperties.add(getFieldProperties(getSchema().getFieldOrNull(fieldName)));
}
} else {
if (0 == getRequestedFields().size()) {
String message = "Empty " + CommonParams.FL + " parameter value";
throw new SolrException(ErrorCode.BAD_REQUEST, message);
}
for (String field : fieldNames) {
if (getRequestedFields().containsKey(field)) {
getRequestedFields().put(field, getFieldProperties(getSchema().getFieldOrNull(field)));
}
}
// Use the same order as the fl parameter
for (SimpleOrderedMap<Object> fieldProperties : getRequestedFields().values()) {
// Should there be some form of error condition
// if one or more of the requested fields were not found?
if (null != fieldProperties) {
fieldCollectionProperties.add(fieldProperties);
}
}
}
getSolrResponse().add(SchemaRestApi.FIELDS, fieldCollectionProperties);
} catch (Exception e) {
getSolrResponse().setException(e);
}
handlePostExecution(log);
return new SolrOutputRepresentation();
}
}

View File

@@ -0,0 +1,92 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.schema.SchemaField;
import org.restlet.representation.Representation;
import org.restlet.resource.ResourceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.UnsupportedEncodingException;
/**
* This class responds to requests at /solr/(corename)/schema/fields/fieldname
* where "fieldname" is the name of a field.
* <p/>
* The GET method returns properties for the given fieldname.
* The "includeDynamic" query parameter, if specified, will cause the
* dynamic field matching the given fieldname to be returned if fieldname
* is not explicitly declared in the schema.
*/
public class FieldResource extends BaseFieldResource implements GETable {
private static final Logger log = LoggerFactory.getLogger(FieldResource.class);
private static final String FIELD = "field";
private boolean includeDynamic;
private String fieldName;
public FieldResource() {
super();
}
@Override
public void doInit() throws ResourceException {
super.doInit();
if (isExisting()) {
includeDynamic = getSolrRequest().getParams().getBool(INCLUDE_DYNAMIC_PARAM, false);
fieldName = (String)getRequestAttributes().get(SchemaRestApi.NAME_VARIABLE);
try {
fieldName = null == fieldName ? "" : urlDecode(fieldName.trim()).trim();
} catch (UnsupportedEncodingException e) {
throw new ResourceException(e);
}
}
}
@Override
public Representation get() {
try {
if (fieldName.isEmpty()) {
final String message = "Field name is missing";
throw new SolrException(ErrorCode.BAD_REQUEST, message);
} else {
SchemaField field;
if (includeDynamic) {
field = getSchema().getFieldOrNull(fieldName);
} else {
// Don't look for matches among dynamic fields
field = getSchema().getFields().get(fieldName);
}
if (null == field) {
final String message = "Field '" + fieldName + "' not found.";
throw new SolrException(ErrorCode.NOT_FOUND, message);
} else {
getSolrResponse().add(FIELD, getFieldProperties(field));
}
}
} catch (Exception e) {
getSolrResponse().setException(e);
}
handlePostExecution(log);
return new SolrOutputRepresentation();
}
}

View File

@@ -0,0 +1,135 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.SchemaField;
import org.restlet.representation.Representation;
import org.restlet.resource.ResourceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
/**
* This class responds to requests at /solr/(corename)/schema/fieldtypes
*
* The GET method returns properties for all field types defined in the schema.
*/
public class FieldTypeCollectionResource extends BaseFieldTypeResource implements GETable {
private static final Logger log = LoggerFactory.getLogger(FieldTypeCollectionResource.class);
private static final String FIELD_TYPES = "fieldTypes";
private Map<String,List<String>> fieldsByFieldType;
private Map<String,List<String>> dynamicFieldsByFieldType;
public FieldTypeCollectionResource() {
super();
}
@Override
public void doInit() throws ResourceException {
super.doInit();
if (isExisting()) {
fieldsByFieldType = getFieldsByFieldType();
dynamicFieldsByFieldType = getDynamicFieldsByFieldType();
}
}
@Override
public Representation get() {
try {
List<SimpleOrderedMap<Object>> props = new ArrayList<SimpleOrderedMap<Object>>();
Map<String,FieldType> sortedFieldTypes = new TreeMap<String, FieldType>(getSchema().getFieldTypes());
for (FieldType fieldType : sortedFieldTypes.values()) {
props.add(getFieldTypeProperties(fieldType));
}
getSolrResponse().add(FIELD_TYPES, props);
} catch (Exception e) {
getSolrResponse().setException(e);
}
handlePostExecution(log);
return new SolrOutputRepresentation();
}
/** Returns field lists from the map constructed in doInit() */
@Override
protected List<String> getFieldsWithFieldType(FieldType fieldType) {
List<String> fields = fieldsByFieldType.get(fieldType.getTypeName());
if (null == fields) {
fields = Collections.emptyList();
}
return fields;
}
/** Returns dynamic field lists from the map constructed in doInit() */
@Override
protected List<String> getDynamicFieldsWithFieldType(FieldType fieldType) {
List<String> dynamicFields = dynamicFieldsByFieldType.get(fieldType.getTypeName());
if (null == dynamicFields) {
dynamicFields = Collections.emptyList();
}
return dynamicFields;
}
/**
* Returns a map from field type names to a sorted list of fields that use the field type.
* The map only includes field types that are used by at least one field.
*/
private Map<String,List<String>> getFieldsByFieldType() {
Map<String,List<String>> fieldsByFieldType = new HashMap<String,List<String>>();
for (SchemaField schemaField : getSchema().getFields().values()) {
final String fieldType = schemaField.getType().getTypeName();
List<String> fields = fieldsByFieldType.get(fieldType);
if (null == fields) {
fields = new ArrayList<String>();
fieldsByFieldType.put(fieldType, fields);
}
fields.add(schemaField.getName());
}
for (List<String> fields : fieldsByFieldType.values()) {
Collections.sort(fields);
}
return fieldsByFieldType;
}
/**
* Returns a map from field type names to a list of dynamic fields that use the field type.
* The map only includes field types that are used by at least one dynamic field.
*/
private Map<String,List<String>> getDynamicFieldsByFieldType() {
Map<String,List<String>> dynamicFieldsByFieldType = new HashMap<String,List<String>>();
for (SchemaField schemaField : getSchema().getDynamicFieldPrototypes()) {
final String fieldType = schemaField.getType().getTypeName();
List<String> dynamicFields = dynamicFieldsByFieldType.get(fieldType);
if (null == dynamicFields) {
dynamicFields = new ArrayList<String>();
dynamicFieldsByFieldType.put(fieldType, dynamicFields);
}
dynamicFields.add(schemaField.getName());
}
return dynamicFieldsByFieldType;
}
}

View File

@@ -0,0 +1,114 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.SchemaField;
import org.restlet.representation.Representation;
import org.restlet.resource.ResourceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* This class responds to requests at /solr/(corename)/schema/fieldtypes/typename
* where "typename" is the name of a field type in the schema.
*
* The GET method returns properties for the named field type.
*/
public class FieldTypeResource extends BaseFieldTypeResource implements GETable {
private static final Logger log = LoggerFactory.getLogger(FieldTypeResource.class);
private static final String FIELD_TYPE = "fieldType";
private String typeName;
public FieldTypeResource() {
super();
}
@Override
public void doInit() throws ResourceException {
super.doInit();
if (isExisting()) {
typeName = (String)getRequestAttributes().get(SchemaRestApi.NAME_VARIABLE);
try {
typeName = null == typeName ? "" : urlDecode(typeName.trim()).trim();
} catch (UnsupportedEncodingException e) {
throw new ResourceException(e);
}
}
}
@Override
public Representation get() {
try {
if (typeName.isEmpty()) {
final String message = "Field type name is missing";
throw new SolrException(ErrorCode.BAD_REQUEST, message);
} else {
FieldType fieldType = getSchema().getFieldTypes().get(typeName);
if (null == fieldType) {
final String message = "Field type '" + typeName + "' not found.";
throw new SolrException(ErrorCode.NOT_FOUND, message);
}
getSolrResponse().add(FIELD_TYPE, getFieldTypeProperties(fieldType));
}
} catch (Exception e) {
getSolrResponse().setException(e);
}
handlePostExecution(log);
return new SolrOutputRepresentation();
}
/**
* Returns a field list using the given field type by iterating over all fields
* defined in the schema.
*/
@Override
protected List<String> getFieldsWithFieldType(FieldType fieldType) {
List<String> fields = new ArrayList<String>();
for (SchemaField schemaField : getSchema().getFields().values()) {
if (schemaField.getType().getTypeName().equals(fieldType.getTypeName())) {
fields.add(schemaField.getName());
}
}
Collections.sort(fields);
return fields;
}
/**
* Returns a dynamic field list using the given field type by iterating over all
* dynamic fields defined in the schema.
*/
@Override
protected List<String> getDynamicFieldsWithFieldType(FieldType fieldType) {
List<String> dynamicFields = new ArrayList<String>();
for (SchemaField prototype : getSchema().getDynamicFieldPrototypes()) {
if (prototype.getType().getTypeName().equals(fieldType.getTypeName())) {
dynamicFields.add(prototype.getName());
}
}
return dynamicFields; // Don't sort these - they're matched in order
}
}

View File

@@ -0,0 +1,27 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.restlet.representation.Representation;
import org.restlet.resource.Get;
/** Marker interface for resource classes that handle GET requests. */
public interface GETable {
@Get
public Representation get();
}

View File

@@ -0,0 +1,84 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.restlet.Application;
import org.restlet.Restlet;
import org.restlet.routing.Router;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class SchemaRestApi extends Application {
public static final Logger log = LoggerFactory.getLogger(SchemaRestApi.class);
public static final String FIELDS = "fields";
public static final String FIELDS_PATH = "/" + FIELDS;
public static final String DYNAMIC_FIELDS = "dynamicfields";
public static final String DYNAMIC_FIELDS_PATH = "/" + DYNAMIC_FIELDS;
public static final String FIELDTYPES = "fieldtypes";
public static final String FIELDTYPES_PATH = "/" + FIELDTYPES;
public static final String NAME_VARIABLE = "name";
public static final String NAME_SEGMENT = "/{" + NAME_VARIABLE + "}";
public static final String COPY_FIELDS = "copyfields";
public static final String COPY_FIELDS_PATH = "/" + COPY_FIELDS;
private Router router;
public SchemaRestApi() {
router = new Router(getContext());
}
@Override
public void stop() throws Exception {
if (router != null) {
router.stop();
}
}
/**
* Bind URL paths to the appropriate ServerResource subclass.
*/
@Override
public synchronized Restlet createInboundRoot() {
log.info("createInboundRoot started");
router.attachDefault(DefaultSchemaResource.class);
router.attach(FIELDS_PATH, FieldCollectionResource.class);
// Allow a trailing slash on collection requests
router.attach(FIELDS_PATH + "/", FieldCollectionResource.class);
router.attach(FIELDS_PATH + NAME_SEGMENT, FieldResource.class);
router.attach(DYNAMIC_FIELDS_PATH, DynamicFieldCollectionResource.class);
// Allow a trailing slash on collection requests
router.attach(DYNAMIC_FIELDS_PATH + "/", DynamicFieldCollectionResource.class);
router.attach(DYNAMIC_FIELDS_PATH + NAME_SEGMENT, DynamicFieldResource.class);
router.attach(FIELDTYPES_PATH, FieldTypeCollectionResource.class);
// Allow a trailing slash on collection requests
router.attach(FIELDTYPES_PATH + "/", FieldTypeCollectionResource.class);
router.attach(FIELDTYPES_PATH + NAME_SEGMENT, FieldTypeResource.class);
router.attach(COPY_FIELDS_PATH, CopyFieldCollectionResource.class);
// Allow a trailing slash on collection requests
router.attach(COPY_FIELDS_PATH + "/", CopyFieldCollectionResource.class);
log.info("createInboundRoot complete");
return router;
}
}
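Each attach() above binds a GET endpoint under the core's /schema path, so the field, dynamic field, field type and copyField listings become plain HTTP resources. A minimal client sketch (not part of this commit), assuming a local core named "collection1" on the default port 8983; adjust the host, port and core name to your setup:
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
public class SchemaRestGetSketch {
  public static void main(String[] args) throws Exception {
    // FIELDS_PATH ("/fields") attached above, reached through the core's /schema prefix
    URL url = new URL("http://localhost:8983/solr/collection1/schema/fields");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("GET");
    BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream(), "UTF-8"));
    for (String line; (line = in.readLine()) != null; ) {
      System.out.println(line); // response body produced by FieldCollectionResource
    }
    in.close();
    conn.disconnect();
  }
}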

View File

@ -0,0 +1,27 @@
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
</head>
<body>
<p>
Provides RESTful API access to the Solr Schema using Restlet.
</p>
</body>
</html>

View File

@ -89,7 +89,7 @@ public abstract class AbstractSubTypeFieldType extends FieldType implements Sche
int p = SchemaField.calcProps(name, type, props);
SchemaField proto = SchemaField.create(name,
type, p, null);
schema.registerDynamicField(proto);
schema.registerDynamicFields(proto);
return proto;
}

View File

@ -205,7 +205,7 @@ public class CurrencyField extends FieldType implements SchemaAware, ResourceLoa
props.put("multiValued", "false");
props.put("omitNorms", "true");
int p = SchemaField.calcProps(name, type, props);
schema.registerDynamicField(SchemaField.create(name, type, p, null));
schema.registerDynamicFields(SchemaField.create(name, type, p, null));
}
/**

View File

@ -19,17 +19,23 @@ package org.apache.solr.schema;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.util.CharFilterFactory;
import org.apache.lucene.analysis.util.TokenFilterFactory;
import org.apache.lucene.analysis.util.TokenizerFactory;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.FieldInfo.DocValuesType;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.index.Term;
@ -49,8 +55,12 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.UnicodeUtil;
import org.apache.solr.analysis.SolrAnalyzer;
import org.apache.solr.analysis.TokenizerChain;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.response.TextResponseWriter;
import org.apache.solr.search.QParser;
import org.apache.solr.search.Sorting;
@ -82,6 +92,8 @@ public abstract class FieldType extends FieldProperties {
/** properties explicitly set to false */
protected int falseProperties;
protected int properties;
private boolean isExplicitQueryAnalyzer;
private boolean isExplicitAnalyzer;
/** Returns true if fields of this type should be tokenized */
@ -138,7 +150,7 @@ public abstract class FieldType extends FieldProperties {
args.remove("compressThreshold");
}
this.args=args;
this.args = Collections.unmodifiableMap(args);
Map<String,String> initArgs = new HashMap<String,String>(args);
trueProperties = FieldProperties.parseProperties(initArgs,true);
@ -151,25 +163,25 @@ public abstract class FieldType extends FieldProperties {
init(schema, initArgs);
String positionInc = initArgs.get("positionIncrementGap");
String positionInc = initArgs.get(POSITION_INCREMENT_GAP);
if (positionInc != null) {
Analyzer analyzer = getAnalyzer();
if (analyzer instanceof SolrAnalyzer) {
((SolrAnalyzer)analyzer).setPositionIncrementGap(Integer.parseInt(positionInc));
} else {
throw new RuntimeException("Can't set positionIncrementGap on custom analyzer " + analyzer.getClass());
throw new RuntimeException("Can't set " + POSITION_INCREMENT_GAP + " on custom analyzer " + analyzer.getClass());
}
analyzer = getQueryAnalyzer();
if (analyzer instanceof SolrAnalyzer) {
((SolrAnalyzer)analyzer).setPositionIncrementGap(Integer.parseInt(positionInc));
} else {
throw new RuntimeException("Can't set positionIncrementGap on custom analyzer " + analyzer.getClass());
throw new RuntimeException("Can't set " + POSITION_INCREMENT_GAP + " on custom analyzer " + analyzer.getClass());
}
initArgs.remove("positionIncrementGap");
initArgs.remove(POSITION_INCREMENT_GAP);
}
this.postingsFormat = initArgs.remove("postingsFormat");
this.docValuesFormat = initArgs.remove("docValuesFormat");
this.postingsFormat = initArgs.remove(POSTINGS_FORMAT);
this.docValuesFormat = initArgs.remove(DOC_VALUES_FORMAT);
if (initArgs.size() > 0) {
throw new RuntimeException("schema fieldtype " + typeName
@ -385,6 +397,22 @@ public abstract class FieldType extends FieldProperties {
UnicodeUtil.UTF16toUTF8(internal, 0, internal.length(), result);
}
public void setIsExplicitQueryAnalyzer(boolean isExplicitQueryAnalyzer) {
this.isExplicitQueryAnalyzer = isExplicitQueryAnalyzer;
}
public boolean isExplicitQueryAnalyzer() {
return isExplicitQueryAnalyzer;
}
public void setIsExplicitAnalyzer(boolean explicitAnalyzer) {
isExplicitAnalyzer = explicitAnalyzer;
}
public boolean isExplicitAnalyzer() {
return isExplicitAnalyzer;
}
/**
* Default analyzer for types that only produce 1 verbatim token...
* A maximum size of chars to be read must be specified
@ -499,6 +527,9 @@ public abstract class FieldType extends FieldProperties {
" (" + typeName + ") does not support specifying an analyzer");
}
/** @lucene.internal */
protected SimilarityFactory similarityFactory;
/** @lucene.internal */
protected Similarity similarity;
@ -516,6 +547,21 @@ public abstract class FieldType extends FieldProperties {
return similarity;
}
/**
* Gets the factory for the Similarity used when scoring fields of this type
*
* <p>
* The default implementation returns null, which means this type
* has no custom similarity factory associated with it.
* </p>
*
* @lucene.internal
*/
public SimilarityFactory getSimilarityFactory() {
return similarityFactory;
}
/** Return the numeric type of this field, or null if this field is not a
* numeric field. */
public org.apache.lucene.document.FieldType.NumericType getNumericType() {
@ -526,8 +572,9 @@ public abstract class FieldType extends FieldProperties {
* Sets the Similarity used when scoring fields of this type
* @lucene.internal
*/
public void setSimilarity(Similarity similarity) {
this.similarity = similarity;
public void setSimilarity(SimilarityFactory similarityFactory) {
this.similarityFactory = similarityFactory;
this.similarity = similarityFactory.getSimilarity();
}
/**
@ -675,4 +722,188 @@ public abstract class FieldType extends FieldProperties {
}
}
private static final String TYPE_NAME = "name";
private static final String CLASS_NAME = "class";
private static final String ANALYZER = "analyzer";
private static final String INDEX_ANALYZER = "indexAnalyzer";
private static final String QUERY_ANALYZER = "queryAnalyzer";
private static final String MULTI_TERM_ANALYZER = "multiTermAnalyzer";
private static final String SIMILARITY = "similarity";
private static final String POSTINGS_FORMAT = "postingsFormat";
private static final String DOC_VALUES_FORMAT = "docValuesFormat";
private static final String AUTO_GENERATE_PHRASE_QUERIES = "autoGeneratePhraseQueries";
private static final String ARGS = "args";
private static final String CHAR_FILTERS = "charFilters";
private static final String TOKENIZER = "tokenizer";
private static final String FILTERS = "filters";
private static final String POSITION_INCREMENT_GAP = "positionIncrementGap";
/**
* Get a map of property name -> value for this field type.
* @param showDefaults if true, include default properties.
*/
public SimpleOrderedMap<Object> getNamedPropertyValues(boolean showDefaults) {
SimpleOrderedMap<Object> namedPropertyValues = new SimpleOrderedMap<Object>();
namedPropertyValues.add(TYPE_NAME, getTypeName());
namedPropertyValues.add(CLASS_NAME, normalizeSPIname(getClass().getName()));
if (showDefaults) {
Map<String,String> fieldTypeArgs = getNonFieldPropertyArgs();
if (null != fieldTypeArgs) {
for (String key : fieldTypeArgs.keySet()) {
namedPropertyValues.add(key, fieldTypeArgs.get(key));
}
}
if (this instanceof TextField) {
namedPropertyValues.add(AUTO_GENERATE_PHRASE_QUERIES, ((TextField) this).getAutoGeneratePhraseQueries());
}
namedPropertyValues.add(getPropertyName(INDEXED), hasProperty(INDEXED));
namedPropertyValues.add(getPropertyName(STORED), hasProperty(STORED));
namedPropertyValues.add(getPropertyName(DOC_VALUES), hasProperty(DOC_VALUES));
namedPropertyValues.add(getPropertyName(STORE_TERMVECTORS), hasProperty(STORE_TERMVECTORS));
namedPropertyValues.add(getPropertyName(STORE_TERMPOSITIONS), hasProperty(STORE_TERMPOSITIONS));
namedPropertyValues.add(getPropertyName(STORE_TERMOFFSETS), hasProperty(STORE_TERMOFFSETS));
namedPropertyValues.add(getPropertyName(OMIT_NORMS), hasProperty(OMIT_NORMS));
namedPropertyValues.add(getPropertyName(OMIT_TF_POSITIONS), hasProperty(OMIT_TF_POSITIONS));
namedPropertyValues.add(getPropertyName(OMIT_POSITIONS), hasProperty(OMIT_POSITIONS));
namedPropertyValues.add(getPropertyName(STORE_OFFSETS), hasProperty(STORE_OFFSETS));
namedPropertyValues.add(getPropertyName(MULTIVALUED), hasProperty(MULTIVALUED));
if (hasProperty(SORT_MISSING_FIRST)) {
namedPropertyValues.add(getPropertyName(SORT_MISSING_FIRST), true);
} else if (hasProperty(SORT_MISSING_LAST)) {
namedPropertyValues.add(getPropertyName(SORT_MISSING_LAST), true);
}
namedPropertyValues.add(getPropertyName(TOKENIZED), isTokenized());
// The BINARY property is always false
// namedPropertyValues.add(getPropertyName(BINARY), hasProperty(BINARY));
} else { // Don't show defaults
Set<String> fieldProperties = new HashSet<String>();
for (String propertyName : FieldProperties.propertyNames) {
fieldProperties.add(propertyName);
}
for (String key : args.keySet()) {
if (fieldProperties.contains(key)) {
namedPropertyValues.add(key, StrUtils.parseBool(args.get(key)));
} else {
namedPropertyValues.add(key, args.get(key));
}
}
}
if (isExplicitAnalyzer()) {
String analyzerProperty = isExplicitQueryAnalyzer() ? INDEX_ANALYZER : ANALYZER;
namedPropertyValues.add(analyzerProperty, getAnalyzerProperties(getAnalyzer()));
}
if (isExplicitQueryAnalyzer()) {
String analyzerProperty = isExplicitAnalyzer() ? QUERY_ANALYZER : ANALYZER;
namedPropertyValues.add(analyzerProperty, getAnalyzerProperties(getQueryAnalyzer()));
}
if (this instanceof TextField) {
if (((TextField)this).isExplicitMultiTermAnalyzer()) {
namedPropertyValues.add(MULTI_TERM_ANALYZER, getAnalyzerProperties(((TextField) this).getMultiTermAnalyzer()));
}
}
if (null != getSimilarity()) {
namedPropertyValues.add(SIMILARITY, getSimilarityProperties());
}
if (null != getPostingsFormat()) {
namedPropertyValues.add(POSTINGS_FORMAT, getPostingsFormat());
}
if (null != getDocValuesFormat()) {
namedPropertyValues.add(DOC_VALUES_FORMAT, getDocValuesFormat());
}
return namedPropertyValues;
}
/** Returns args to this field type that aren't standard field properties */
protected Map<String,String> getNonFieldPropertyArgs() {
Map<String,String> initArgs = new HashMap<String,String>(args);
for (String prop : FieldProperties.propertyNames) {
initArgs.remove(prop);
}
return initArgs;
}
/**
* Returns a description of the given analyzer: the analyzer's class name if it is not
* a TokenizerChain; otherwise, the name and args of each analysis factory in the chain.
*/
protected static SimpleOrderedMap<Object> getAnalyzerProperties(Analyzer analyzer) {
SimpleOrderedMap<Object> analyzerProps = new SimpleOrderedMap<Object>();
analyzerProps.add(CLASS_NAME, normalizeSPIname(analyzer.getClass().getName()));
if (analyzer instanceof TokenizerChain) {
Map<String,String> factoryArgs;
TokenizerChain tokenizerChain = (TokenizerChain)analyzer;
CharFilterFactory[] charFilterFactories = tokenizerChain.getCharFilterFactories();
if (null != charFilterFactories && charFilterFactories.length > 0) {
List<SimpleOrderedMap<Object>> charFilterProps = new ArrayList<SimpleOrderedMap<Object>>();
for (CharFilterFactory charFilterFactory : charFilterFactories) {
SimpleOrderedMap<Object> props = new SimpleOrderedMap<Object>();
props.add(CLASS_NAME, normalizeSPIname(charFilterFactory.getClass().getName()));
factoryArgs = charFilterFactory.getOriginalArgs();
if (null != factoryArgs) {
for (String key : factoryArgs.keySet()) {
props.add(key, factoryArgs.get(key));
}
}
charFilterProps.add(props);
}
analyzerProps.add(CHAR_FILTERS, charFilterProps);
}
SimpleOrderedMap<Object> tokenizerProps = new SimpleOrderedMap<Object>();
TokenizerFactory tokenizerFactory = tokenizerChain.getTokenizerFactory();
tokenizerProps.add(CLASS_NAME, normalizeSPIname(tokenizerFactory.getClass().getName()));
factoryArgs = tokenizerFactory.getOriginalArgs();
if (null != factoryArgs) {
for (String key : factoryArgs.keySet()) {
tokenizerProps.add(key, factoryArgs.get(key));
}
}
analyzerProps.add(TOKENIZER, tokenizerProps);
TokenFilterFactory[] filterFactories = tokenizerChain.getTokenFilterFactories();
if (null != filterFactories && filterFactories.length > 0) {
List<SimpleOrderedMap<Object>> filterProps = new ArrayList<SimpleOrderedMap<Object>>();
for (TokenFilterFactory filterFactory : filterFactories) {
SimpleOrderedMap<Object> props = new SimpleOrderedMap<Object>();
props.add(CLASS_NAME, normalizeSPIname(filterFactory.getClass().getName()));
factoryArgs = filterFactory.getOriginalArgs();
if (null != factoryArgs) {
for (String key : factoryArgs.keySet()) {
props.add(key, factoryArgs.get(key));
}
}
filterProps.add(props);
}
analyzerProps.add(FILTERS, filterProps);
}
}
return analyzerProps;
}
private static String normalizeSPIname(String fullyQualifiedName) {
if (fullyQualifiedName.startsWith("org.apache.lucene.") || fullyQualifiedName.startsWith("org.apache.solr.")) {
return "solr" + fullyQualifiedName.substring(fullyQualifiedName.lastIndexOf('.'));
}
return fullyQualifiedName;
}
/** Returns a description of this field's similarity, if any */
protected SimpleOrderedMap<Object> getSimilarityProperties() {
SimpleOrderedMap<Object> props = new SimpleOrderedMap<Object>();
if (similarity != null) {
props.add(CLASS_NAME, normalizeSPIname(similarity.getClass().getName()));
SolrParams factoryParams = similarityFactory.getParams();
if (null != factoryParams) {
Iterator<String> iter = factoryParams.getParameterNamesIterator();
while (iter.hasNext()) {
String key = iter.next();
props.add(key, factoryParams.get(key));
}
}
}
return props;
}
}
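The normalizeSPIname() helper above abbreviates Lucene and Solr implementation classes to the "solr." shorthand used in schema.xml and in the REST responses; classes outside those packages are reported with their full name. A standalone sketch of the same rule (not part of the commit; the class names below are only examples):
public class NormalizeSpiNameSketch {
  // Mirrors FieldType.normalizeSPIname(): org.apache.lucene.* and org.apache.solr.*
  // class names collapse to the "solr." shorthand, everything else is left as-is.
  static String normalize(String fullyQualifiedName) {
    if (fullyQualifiedName.startsWith("org.apache.lucene.")
        || fullyQualifiedName.startsWith("org.apache.solr.")) {
      return "solr" + fullyQualifiedName.substring(fullyQualifiedName.lastIndexOf('.'));
    }
    return fullyQualifiedName;
  }
  public static void main(String[] args) {
    System.out.println(normalize("org.apache.solr.schema.TextField"));       // solr.TextField
    System.out.println(normalize("org.apache.lucene.analysis.core.WhitespaceTokenizerFactory")); // solr.WhitespaceTokenizerFactory
    System.out.println(normalize("com.example.MyFieldType"));                // unchanged
  }
}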

View File

@ -98,20 +98,35 @@ public final class FieldTypePluginLoader
expression = "./similarity";
anode = (Node)xpath.evaluate(expression, node, XPathConstants.NODE);
SimilarityFactory simFactory = IndexSchema.readSimilarity(loader, anode);
if (queryAnalyzer==null) queryAnalyzer=analyzer;
if (analyzer==null) analyzer=queryAnalyzer;
if (multiAnalyzer == null) {
multiAnalyzer = constructMultiTermAnalyzer(queryAnalyzer);
if (null != simFactory) {
ft.setSimilarity(simFactory);
}
if (analyzer!=null) {
if (null == queryAnalyzer) {
queryAnalyzer = analyzer;
ft.setIsExplicitQueryAnalyzer(false);
} else {
ft.setIsExplicitQueryAnalyzer(true);
}
if (null == analyzer) {
analyzer = queryAnalyzer;
ft.setIsExplicitAnalyzer(false);
} else {
ft.setIsExplicitAnalyzer(true);
}
if (null != analyzer) {
ft.setAnalyzer(analyzer);
ft.setQueryAnalyzer(queryAnalyzer);
if (ft instanceof TextField)
if (ft instanceof TextField) {
if (null == multiAnalyzer) {
multiAnalyzer = constructMultiTermAnalyzer(queryAnalyzer);
((TextField)ft).setIsExplicitMultiTermAnalyzer(false);
} else {
((TextField)ft).setIsExplicitMultiTermAnalyzer(true);
}
((TextField)ft).setMultiTermAnalyzer(multiAnalyzer);
}
if (simFactory!=null) {
ft.setSimilarity(simFactory.getSimilarity());
}
if (ft instanceof SchemaAware){
schemaAware.add((SchemaAware) ft);

View File

@ -24,8 +24,8 @@ import org.apache.lucene.index.StorableField;
import org.apache.lucene.index.StoredDocument;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.util.Version;
import org.apache.lucene.analysis.util.ResourceLoader;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.util.DOMUtil;
import org.apache.solr.util.SystemIdResolver;
@ -34,13 +34,17 @@ import org.apache.solr.core.Config;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.search.similarities.DefaultSimilarityFactory;
import org.apache.solr.util.plugin.SolrCoreAware;
import org.w3c.dom.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
@ -77,6 +81,7 @@ public final class IndexSchema {
private final List<SchemaField> fieldsWithDefaultValue = new ArrayList<SchemaField>();
private final Collection<SchemaField> requiredFields = new HashSet<SchemaField>();
private DynamicField[] dynamicFields;
public DynamicField[] getDynamicFields() { return dynamicFields; }
private Analyzer analyzer;
private Analyzer queryAnalyzer;
@ -86,13 +91,16 @@ public final class IndexSchema {
private final Map<String, List<CopyField>> copyFieldsMap = new HashMap<String, List<CopyField>>();
public Map<String,List<CopyField>> getCopyFieldsMap() { return Collections.unmodifiableMap(copyFieldsMap); }
private DynamicCopy[] dynamicCopyFields;
public DynamicCopy[] getDynamicCopyFields() { return dynamicCopyFields; }
/**
* keys are all fields copied to, count is num of copyField
* directives that target them.
*/
private Map<SchemaField, Integer> copyFieldTargetCounts
= new HashMap<SchemaField, Integer>();
private Map<SchemaField, Integer> copyFieldTargetCounts = new HashMap<SchemaField, Integer>();
/**
* Constructs a schema using the specified resource name and stream.
@ -122,8 +130,7 @@ public final class IndexSchema {
/**
* @since solr 1.4
*/
public SolrResourceLoader getResourceLoader()
{
public SolrResourceLoader getResourceLoader() {
return loader;
}
@ -209,8 +216,6 @@ public final class IndexSchema {
*/
public Analyzer getAnalyzer() { return analyzer; }
/**
* Returns the Analyzer used when searching this index
*
@ -287,8 +292,7 @@ public final class IndexSchema {
*
* @since solr 1.3
*/
public void refreshAnalyzers()
{
public void refreshAnalyzers() {
analyzer = new SolrIndexAnalyzer();
queryAnalyzer = new SolrQueryAnalyzer();
}
@ -389,7 +393,7 @@ public final class IndexSchema {
FieldType ft = fieldTypes.get(type);
if (ft==null) {
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"Unknown fieldtype '" + type + "' specified on field " + name);
throw new SolrException(ErrorCode.BAD_REQUEST,"Unknown fieldtype '" + type + "' specified on field " + name);
}
Map<String,String> args = DOMUtil.toMapExcept(attrs, "name", "type");
@ -404,7 +408,7 @@ public final class IndexSchema {
if( old != null ) {
String msg = "[schema.xml] Duplicate field definition for '"
+ f.getName() + "' [[["+old.toString()+"]]] and [[["+f.toString()+"]]]";
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, msg );
throw new SolrException(ErrorCode.SERVER_ERROR, msg );
}
log.debug("field defined: " + f);
if( f.getDefaultValue() != null ) {
@ -416,8 +420,14 @@ public final class IndexSchema {
requiredFields.add(f);
}
} else if (node.getNodeName().equals("dynamicField")) {
if (isValidDynamicFieldName(name)) {
// make sure nothing else has the same path
addDynamicField(dFields, f);
} else {
String msg = "Dynamic field name '" + name
+ "' should have either a leading or a trailing asterisk, and no others.";
throw new SolrException(ErrorCode.SERVER_ERROR, msg);
}
} else {
// we should never get here
throw new RuntimeException("Unknown field type");
@ -454,7 +464,7 @@ public final class IndexSchema {
if (null != ft.getSimilarity()) {
String msg = "FieldType '" + ft.getTypeName() + "' is configured with a similarity, but the global similarity does not support it: " + simFactory.getClass();
log.error(msg);
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, msg);
throw new SolrException(ErrorCode.SERVER_ERROR, msg);
}
}
}
@ -470,7 +480,7 @@ public final class IndexSchema {
SchemaField defaultSearchField = getFields().get(defaultSearchFieldName);
if ((defaultSearchField == null) || !defaultSearchField.indexed()) {
String msg = "default search field '" + defaultSearchFieldName + "' not defined or not indexed" ;
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, msg );
throw new SolrException(ErrorCode.SERVER_ERROR, msg);
}
}
log.info("default search field in schema is "+defaultSearchFieldName);
@ -494,7 +504,7 @@ public final class IndexSchema {
") can not be configured with a default value ("+
uniqueKeyField.getDefaultValue()+")";
log.error(msg);
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, msg );
throw new SolrException(ErrorCode.SERVER_ERROR, msg);
}
if (!uniqueKeyField.stored()) {
@ -504,7 +514,7 @@ public final class IndexSchema {
String msg = "uniqueKey field ("+uniqueKeyFieldName+
") can not be configured to be multivalued";
log.error(msg);
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, msg );
throw new SolrException(ErrorCode.SERVER_ERROR, msg);
}
uniqueKeyFieldName=uniqueKeyField.getName();
uniqueKeyFieldType=uniqueKeyField.getType();
@ -546,7 +556,7 @@ public final class IndexSchema {
String msg = "uniqueKey field ("+uniqueKeyFieldName+
") can not be the dest of a copyField (src="+source+")";
log.error(msg);
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, msg);
throw new SolrException(ErrorCode.SERVER_ERROR, msg);
}
@ -570,38 +580,44 @@ public final class IndexSchema {
throw e;
} catch(Exception e) {
// unexpected exception...
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,"Schema Parsing Failed: " + e.getMessage(), e);
throw new SolrException(ErrorCode.SERVER_ERROR, "Schema Parsing Failed: " + e.getMessage(), e);
}
// create the field analyzers
refreshAnalyzers();
}
/** Returns true if the given name has exactly one asterisk either at the start or end of the name */
private boolean isValidDynamicFieldName(String name) {
if (name.startsWith("*") || name.endsWith("*")) {
int count = 0;
for (int pos = 0 ; pos < name.length() && -1 != (pos = name.indexOf('*', pos)) ; ++pos) ++count;
if (1 == count) return true;
}
return false;
}
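// Illustrative examples only: "*_txt" and "attr_*" are accepted by the check above;
// "foo" (no asterisk) and "*_*" (two asterisks) are rejected.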
private void addDynamicField(List<DynamicField> dFields, SchemaField f) {
boolean dup = isDuplicateDynField(dFields, f);
if( !dup ) {
addDynamicFieldNoDupCheck(dFields, f);
if (isDuplicateDynField(dFields, f)) {
String msg = "[schema.xml] Duplicate DynamicField definition for '" + f.getName() + "'";
throw new SolrException(ErrorCode.SERVER_ERROR, msg);
} else {
String msg = "[schema.xml] Duplicate DynamicField definition for '"
+ f.getName() + "'";
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, msg);
addDynamicFieldNoDupCheck(dFields, f);
}
}
/**
* Register one or more new Dynamic Field with the Schema.
* @param f The {@link org.apache.solr.schema.SchemaField}
* Register one or more new Dynamic Fields with the Schema.
* @param fields The sequence of {@link org.apache.solr.schema.SchemaField}
*/
public void registerDynamicField(SchemaField ... f) {
public void registerDynamicFields(SchemaField... fields) {
List<DynamicField> dynFields = new ArrayList<DynamicField>(Arrays.asList(dynamicFields));
for (SchemaField field : f) {
if (isDuplicateDynField(dynFields, field) == false) {
for (SchemaField field : fields) {
if (isDuplicateDynField(dynFields, field)) {
log.debug("dynamic field already exists: dynamic field: [" + field.getName() + "]");
} else {
log.debug("dynamic field creation for schema field: " + field.getName());
addDynamicFieldNoDupCheck(dynFields, field);
} else {
log.debug("dynamic field already exists: dynamic field: [" + field.getName() + "]");
}
}
Collections.sort(dynFields);
@ -615,13 +631,12 @@ public final class IndexSchema {
private boolean isDuplicateDynField(List<DynamicField> dFields, SchemaField f) {
for (DynamicField df : dFields) {
if( df.regex.equals( f.name ) ) return true;
if (df.getRegex().equals(f.name)) return true;
}
return false;
}
public void registerCopyField( String source, String dest )
{
public void registerCopyField( String source, String dest ) {
registerCopyField(source, dest, CopyField.UNLIMITED);
}
@ -634,56 +649,87 @@ public final class IndexSchema {
*
* @see SolrCoreAware
*/
public void registerCopyField( String source, String dest, int maxChars )
{
boolean sourceIsPattern = isWildCard(source);
boolean destIsPattern = isWildCard(dest);
public void registerCopyField(String source, String dest, int maxChars) {
log.debug("copyField source='" + source + "' dest='" + dest + "' maxChars=" + maxChars);
log.debug("copyField source='"+source+"' dest='"+dest+"' maxChars='"+maxChars);
SchemaField d = getFieldOrNull(dest);
if(d == null){
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, "copyField destination :'"+dest+"' does not exist" );
}
DynamicField destDynamicField = null;
SchemaField destSchemaField = fields.get(dest);
SchemaField sourceSchemaField = fields.get(source);
if(sourceIsPattern) {
if( destIsPattern ) {
DynamicField df = null;
for( DynamicField dd : dynamicFields ) {
if( dd.regex.equals( dest ) ) {
df = dd;
break;
}
}
if( df == null ) {
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, "copyField dynamic destination must match a dynamicField." );
}
registerDynamicCopyField(new DynamicDestCopy(source, df, maxChars ));
}
else {
registerDynamicCopyField(new DynamicCopy(source, d, maxChars));
}
}
else if( destIsPattern ) {
String msg = "copyField only supports a dynamic destination if the source is also dynamic" ;
throw new SolrException( SolrException.ErrorCode.SERVER_ERROR, msg );
}
else {
// retrieve the field to force an exception if it doesn't exist
SchemaField f = getField(source);
DynamicField sourceDynamicBase = null;
DynamicField destDynamicBase = null;
boolean sourceIsDynamicFieldReference = false;
if (null == destSchemaField || null == sourceSchemaField) {
// Go through dynamicFields array only once, collecting info for both source and dest fields, if needed
for (DynamicField dynamicField : dynamicFields) {
if (null == sourceSchemaField && ! sourceIsDynamicFieldReference) {
if (dynamicField.matches(source)) {
sourceIsDynamicFieldReference = true;
if ( ! source.equals(dynamicField.getRegex())) {
sourceDynamicBase = dynamicField;
}
}
}
if (null == destSchemaField) {
if (dest.equals(dynamicField.getRegex())) {
destDynamicField = dynamicField;
destSchemaField = dynamicField.prototype;
} else if (dynamicField.matches(dest)) {
destSchemaField = dynamicField.makeSchemaField(dest);
destDynamicField = new DynamicField(destSchemaField);
destDynamicBase = dynamicField;
}
}
if (null != destSchemaField && (null != sourceSchemaField || sourceIsDynamicFieldReference)) break;
}
}
if (null == sourceSchemaField && ! sourceIsDynamicFieldReference) {
String msg = "copyField source :'" + source + "' is not an explicit field and doesn't match a dynamicField.";
throw new SolrException(ErrorCode.SERVER_ERROR, msg);
}
if (null == destSchemaField) {
String msg = "copyField dest :'" + dest + "' is not an explicit field and doesn't match a dynamicField.";
throw new SolrException(ErrorCode.SERVER_ERROR, msg);
}
if (sourceIsDynamicFieldReference) {
if (null != destDynamicField) { // source & dest: dynamic field references
registerDynamicCopyField(new DynamicCopy(source, destDynamicField, maxChars, sourceDynamicBase, destDynamicBase));
incrementCopyFieldTargetCount(destSchemaField);
} else { // source: dynamic field reference; dest: explicit field
destDynamicField = new DynamicField(destSchemaField);
registerDynamicCopyField(new DynamicCopy(source, destDynamicField, maxChars, sourceDynamicBase, null));
incrementCopyFieldTargetCount(destSchemaField);
}
} else {
if (null != destDynamicField) { // source: explicit field; dest: dynamic field reference
if (destDynamicField.pattern instanceof DynamicReplacement.DynamicPattern.NameEquals) {
// Dynamic dest with no asterisk is acceptable
registerDynamicCopyField(new DynamicCopy(source, destDynamicField, maxChars, sourceDynamicBase, destDynamicBase));
incrementCopyFieldTargetCount(destSchemaField);
} else {
String msg = "copyField only supports a dynamic destination with an asterisk "
+ "if the source is also dynamic with an asterisk";
throw new SolrException(ErrorCode.SERVER_ERROR, msg);
}
} else { // source & dest: explicit fields
List<CopyField> copyFieldList = copyFieldsMap.get(source);
if (copyFieldList == null) {
copyFieldList = new ArrayList<CopyField>();
copyFieldsMap.put(source, copyFieldList);
}
copyFieldList.add(new CopyField(f, d, maxChars));
copyFieldTargetCounts.put(d, (copyFieldTargetCounts.containsKey(d) ? copyFieldTargetCounts.get(d) + 1 : 1));
copyFieldList.add(new CopyField(sourceSchemaField, destSchemaField, maxChars));
incrementCopyFieldTargetCount(destSchemaField);
}
}
}
private void registerDynamicCopyField( DynamicCopy dcopy )
{
private void incrementCopyFieldTargetCount(SchemaField dest) {
copyFieldTargetCounts.put(dest, copyFieldTargetCounts.containsKey(dest) ? copyFieldTargetCounts.get(dest) + 1 : 1);
}
private void registerDynamicCopyField( DynamicCopy dcopy ) {
if( dynamicCopyFields == null ) {
dynamicCopyFields = new DynamicCopy[] {dcopy};
}
@ -696,13 +742,6 @@ public final class IndexSchema {
log.trace("Dynamic Copy Field:" + dcopy);
}
private static Object[] append(Object[] orig, Object item) {
Object[] newArr = (Object[])java.lang.reflect.Array.newInstance(orig.getClass().getComponentType(), orig.length+1);
System.arraycopy(orig, 0, newArr, 0, orig.length);
newArr[orig.length] = item;
return newArr;
}
static SimilarityFactory readSimilarity(SolrResourceLoader loader, Node node) {
if (node==null) {
return null;
@ -728,34 +767,58 @@ public final class IndexSchema {
}
static abstract class DynamicReplacement implements Comparable<DynamicReplacement> {
final static int STARTS_WITH=1;
final static int ENDS_WITH=2;
public static abstract class DynamicReplacement implements Comparable<DynamicReplacement> {
abstract protected static class DynamicPattern {
protected final String regex;
protected final String fixedStr;
final String regex;
final int type;
protected DynamicPattern(String regex, String fixedStr) { this.regex = regex; this.fixedStr = fixedStr; }
final String str;
static DynamicPattern createPattern(String regex) {
if (regex.startsWith("*")) { return new NameEndsWith(regex); }
else if (regex.endsWith("*")) { return new NameStartsWith(regex); }
else { return new NameEquals(regex); }
}
/** Returns true if the given name matches this pattern */
abstract boolean matches(String name);
/** Returns the remainder of the given name after removing this pattern's fixed string component */
abstract String remainder(String name);
/** Returns the result of combining this pattern's fixed string component with the given replacement */
abstract String subst(String replacement);
/** Returns the length of the original regex, including the asterisk, if any. */
public int length() { return regex.length(); }
private static class NameStartsWith extends DynamicPattern {
NameStartsWith(String regex) { super(regex, regex.substring(0, regex.length() - 1)); }
boolean matches(String name) { return name.startsWith(fixedStr); }
String remainder(String name) { return name.substring(fixedStr.length()); }
String subst(String replacement) { return fixedStr + replacement; }
}
private static class NameEndsWith extends DynamicPattern {
NameEndsWith(String regex) { super(regex, regex.substring(1)); }
boolean matches(String name) { return name.endsWith(fixedStr); }
String remainder(String name) { return name.substring(0, name.length() - fixedStr.length()); }
String subst(String replacement) { return replacement + fixedStr; }
}
private static class NameEquals extends DynamicPattern {
NameEquals(String regex) { super(regex, regex); }
boolean matches(String name) { return regex.equals(name); }
String remainder(String name) { return ""; }
String subst(String replacement) { return fixedStr; }
}
}
protected DynamicPattern pattern;
public boolean matches(String name) { return pattern.matches(name); }
protected DynamicReplacement(String regex) {
this.regex = regex;
if (regex.startsWith("*")) {
type=ENDS_WITH;
str=regex.substring(1);
}
else if (regex.endsWith("*")) {
type=STARTS_WITH;
str=regex.substring(0,regex.length()-1);
}
else {
throw new RuntimeException("dynamic field name must start or end with *");
}
}
public boolean matches(String name) {
if (type==STARTS_WITH && name.startsWith(str)) return true;
else if (type==ENDS_WITH && name.endsWith(str)) return true;
else return false;
pattern = DynamicPattern.createPattern(regex);
}
/**
@ -767,18 +830,17 @@ public final class IndexSchema {
*/
@Override
public int compareTo(DynamicReplacement other) {
return other.regex.length() - regex.length();
return other.pattern.length() - pattern.length();
}
/** Returns the regex used to create this instance's pattern */
public String getRegex() {
return pattern.regex;
}
}
//
// Instead of storing a type, this could be implemented as a hierarchy
// with a virtual matches().
// Given how often a search will be done, however, speed is the overriding
// concern and I'm not sure which is faster.
//
final static class DynamicField extends DynamicReplacement {
public final static class DynamicField extends DynamicReplacement {
final SchemaField prototype;
DynamicField(SchemaField prototype) {
@ -801,78 +863,47 @@ public final class IndexSchema {
}
}
static class DynamicCopy extends DynamicReplacement {
final SchemaField targetField;
final int maxChars;
public static class DynamicCopy extends DynamicReplacement {
private final DynamicField destination;
DynamicCopy(String regex, SchemaField targetField) {
this(regex, targetField, CopyField.UNLIMITED);
}
private final int maxChars;
public int getMaxChars() { return maxChars; }
DynamicCopy(String regex, SchemaField targetField, int maxChars) {
super(regex);
this.targetField = targetField;
final DynamicField sourceDynamicBase;
public DynamicField getSourceDynamicBase() { return sourceDynamicBase; }
final DynamicField destDynamicBase;
public DynamicField getDestDynamicBase() { return destDynamicBase; }
DynamicCopy(String sourceRegex, DynamicField destination, int maxChars,
DynamicField sourceDynamicBase, DynamicField destDynamicBase) {
super(sourceRegex);
this.destination = destination;
this.maxChars = maxChars;
this.sourceDynamicBase = sourceDynamicBase;
this.destDynamicBase = destDynamicBase;
}
public SchemaField getTargetField( String sourceField )
{
return targetField;
public String getDestFieldName() { return destination.getRegex(); }
/**
* Generates a destination field name based on this source pattern,
* by substituting the remainder of the matching source field name into
* the given destination pattern.
*/
public SchemaField getTargetField(String sourceField) {
String remainder = pattern.remainder(sourceField);
String targetFieldName = destination.pattern.subst(remainder);
return destination.makeSchemaField(targetFieldName);
}
@Override
public String toString() {
return targetField.toString();
return destination.prototype.toString();
}
}
static class DynamicDestCopy extends DynamicCopy
{
final DynamicField dynamic;
final int dtype;
final String dstr;
DynamicDestCopy(String source, DynamicField dynamic) {
this(source, dynamic, CopyField.UNLIMITED);
}
DynamicDestCopy(String source, DynamicField dynamic, int maxChars) {
super(source, dynamic.prototype, maxChars);
this.dynamic = dynamic;
String dest = dynamic.regex;
if (dest.startsWith("*")) {
dtype=ENDS_WITH;
dstr=dest.substring(1);
}
else if (dest.endsWith("*")) {
dtype=STARTS_WITH;
dstr=dest.substring(0,dest.length()-1);
}
else {
throw new RuntimeException("dynamic copyField destination name must start or end with *");
}
}
@Override
public SchemaField getTargetField( String sourceField )
{
String dyn = ( type==STARTS_WITH )
? sourceField.substring( str.length() )
: sourceField.substring( 0, sourceField.length()-str.length() );
String name = (dtype==STARTS_WITH) ? (dstr+dyn) : (dyn+dstr);
return dynamic.makeSchemaField( name );
}
@Override
public String toString() {
return targetField.toString();
}
}
public SchemaField[] getDynamicFieldPrototypes() {
SchemaField[] df = new SchemaField[dynamicFields.length];
for (int i=0;i<dynamicFields.length;i++) {
@ -883,16 +914,15 @@ public final class IndexSchema {
public String getDynamicPattern(String fieldName) {
for (DynamicField df : dynamicFields) {
if (df.matches(fieldName)) return df.regex;
if (df.matches(fieldName)) return df.getRegex();
}
return null;
}
/**
* Does the schema have the specified field defined explicitly, i.e.
* not as a result of a copyField declaration with a wildcard? We
* consider it explicitly defined if it matches a field or dynamicField
* declaration.
* Does the schema explicitly define the specified field, i.e. not as a result
* of a copyField declaration? We consider it explicitly defined if it matches
* a field name or a dynamicField name.
* @return true if explicitly declared in the schema.
*/
public boolean hasExplicitField(String fieldName) {
@ -901,7 +931,7 @@ public final class IndexSchema {
}
for (DynamicField df : dynamicFields) {
if (df.matches(fieldName)) return true;
if (fieldName.equals(df.getRegex())) return true;
}
return false;
@ -964,7 +994,7 @@ public final class IndexSchema {
/*** REMOVED -YCS
if (defaultFieldType != null) return new SchemaField(fieldName,defaultFieldType);
***/
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"undefined field: \""+fieldName+"\"");
throw new SolrException(ErrorCode.BAD_REQUEST,"undefined field: \""+fieldName+"\"");
}
/**
@ -976,7 +1006,7 @@ public final class IndexSchema {
* </p>
*
* @param fieldName may be an explicitly created field, or a name that
* excercies a dynamic field.
* exercises a dynamic field.
* @throws SolrException if no such field exists
* @see #getField(String)
* @see #getFieldTypeNoEx
@ -1007,7 +1037,7 @@ public final class IndexSchema {
* </p>
*
* @param fieldName may be an explicitly created field, or a name that
* excercies a dynamic field.
* exercises a dynamic field.
* @return null if field is not defined.
* @see #getField(String)
* @see #getFieldTypeNoEx
@ -1024,7 +1054,7 @@ public final class IndexSchema {
* the specified field name
*
* @param fieldName may be an explicitly created field, or a name that
* excercies a dynamic field.
* exercises a dynamic field.
* @throws SolrException if no such field exists
* @see #getField(String)
* @see #getFieldTypeNoEx
@ -1033,7 +1063,7 @@ public final class IndexSchema {
for (DynamicField df : dynamicFields) {
if (df.matches(fieldName)) return df.prototype.getType();
}
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"undefined field "+fieldName);
throw new SolrException(ErrorCode.BAD_REQUEST,"undefined field "+fieldName);
}
private FieldType dynFieldType(String fieldName) {
@ -1062,6 +1092,11 @@ public final class IndexSchema {
}
}
}
for (DynamicCopy dynamicCopy : dynamicCopyFields) {
if (dynamicCopy.getDestFieldName().equals(destField)) {
sf.add(getField(dynamicCopy.getRegex()));
}
}
return sf.toArray(new SchemaField[sf.size()]);
}
@ -1080,8 +1115,7 @@ public final class IndexSchema {
}
}
List<CopyField> fixedCopyFields = copyFieldsMap.get(sourceField);
if (fixedCopyFields != null)
{
if (null != fixedCopyFields) {
result.addAll(fixedCopyFields);
}
@ -1093,17 +1127,7 @@ public final class IndexSchema {
*
* @since solr 1.3
*/
public boolean isCopyFieldTarget( SchemaField f )
{
public boolean isCopyFieldTarget( SchemaField f ) {
return copyFieldTargetCounts.containsKey( f );
}
/**
* Is the given field name a wildcard? I.e. does it begin or end with *?
* @return true/false
*/
private static boolean isWildCard(String name) {
return name.startsWith("*") || name.endsWith("*");
}
}
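The reworked dynamic copyField machinery above resolves a concrete destination name by stripping the source pattern's fixed component from the matching field name and splicing the remainder into the destination pattern (see DynamicCopy.getTargetField and the DynamicPattern subclasses). A standalone sketch of that substitution, not part of the commit, using hypothetical patterns and field names:
public class DynamicCopySubstitutionSketch {
  /** Strip the fixed part of a leading- or trailing-asterisk pattern from a matching name. */
  static String remainder(String pattern, String name) {
    if (pattern.startsWith("*")) {                 // e.g. "*_t" matches "title_t"
      return name.substring(0, name.length() - (pattern.length() - 1));
    }
    return name.substring(pattern.length() - 1);   // e.g. "attr_*" matches "attr_color"
  }
  /** Re-insert the remainder into the destination pattern. */
  static String subst(String destPattern, String remainder) {
    if (destPattern.startsWith("*")) {
      return remainder + destPattern.substring(1);
    }
    return destPattern.substring(0, destPattern.length() - 1) + remainder;
  }
  public static void main(String[] args) {
    // copyField source="*_t" dest="*_s": "title_t" -> remainder "title" -> "title_s"
    System.out.println(subst("*_s", remainder("*_t", "title_t")));
  }
}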

View File

@ -20,10 +20,14 @@ package org.apache.solr.schema;
import org.apache.solr.common.SolrException;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.search.SortField;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.search.QParser;
import org.apache.solr.response.TextResponseWriter;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.io.IOException;
@ -34,12 +38,19 @@ import java.io.IOException;
*
*/
public final class SchemaField extends FieldProperties {
private static final String FIELD_NAME = "name";
private static final String TYPE_NAME = "type";
private static final String DEFAULT_VALUE = "default";
final String name;
final FieldType type;
final int properties;
final String defaultValue;
boolean required = false; // this can't be final since it may be changed dynamically
/** Declared field property overrides */
Map<String,String> args = Collections.emptyMap();
/** Create a new SchemaField with the given name and type,
* using all the default properties from the type.
@ -53,6 +64,7 @@ public final class SchemaField extends FieldProperties {
*/
public SchemaField(SchemaField prototype, String name) {
this(name, prototype.type, prototype.properties, prototype.defaultValue);
args = prototype.args;
}
/** Create a new SchemaField with the given name and type,
@ -186,10 +198,12 @@ public final class SchemaField extends FieldProperties {
static SchemaField create(String name, FieldType ft, Map<String,String> props) {
String defaultValue = null;
if( props.containsKey( "default" ) ) {
defaultValue = props.get( "default" );
if (props.containsKey(DEFAULT_VALUE)) {
defaultValue = props.get(DEFAULT_VALUE);
}
return new SchemaField(name, ft, calcProps(name, ft, props), defaultValue );
SchemaField field = new SchemaField(name, ft, calcProps(name, ft, props), defaultValue);
field.args = new HashMap<String,String>(props);
return field;
}
/**
@ -285,10 +299,51 @@ public final class SchemaField extends FieldProperties {
public boolean equals(Object obj) {
return(obj instanceof SchemaField) && name.equals(((SchemaField)obj).name);
}
/**
* Get a map of property name -> value for this field. If showDefaults is true,
* include default properties (those inherited from the declared field type and
* not overridden in the field declaration).
*/
public SimpleOrderedMap<Object> getNamedPropertyValues(boolean showDefaults) {
SimpleOrderedMap<Object> properties = new SimpleOrderedMap<Object>();
properties.add(FIELD_NAME, getName());
properties.add(TYPE_NAME, getType().getTypeName());
if (showDefaults) {
if (null != getDefaultValue()) {
properties.add(DEFAULT_VALUE, getDefaultValue());
}
properties.add(getPropertyName(INDEXED), indexed());
properties.add(getPropertyName(STORED), stored());
properties.add(getPropertyName(DOC_VALUES), hasDocValues());
properties.add(getPropertyName(STORE_TERMVECTORS), storeTermVector());
properties.add(getPropertyName(STORE_TERMPOSITIONS), storeTermPositions());
properties.add(getPropertyName(STORE_TERMOFFSETS), storeTermOffsets());
properties.add(getPropertyName(OMIT_NORMS), omitNorms());
properties.add(getPropertyName(OMIT_TF_POSITIONS), omitTermFreqAndPositions());
properties.add(getPropertyName(OMIT_POSITIONS), omitPositions());
properties.add(getPropertyName(STORE_OFFSETS), storeOffsetsWithPositions());
properties.add(getPropertyName(MULTIVALUED), multiValued());
if (sortMissingFirst()) {
properties.add(getPropertyName(SORT_MISSING_FIRST), sortMissingFirst());
} else if (sortMissingLast()) {
properties.add(getPropertyName(SORT_MISSING_LAST), sortMissingLast());
}
properties.add(getPropertyName(REQUIRED), isRequired());
properties.add(getPropertyName(TOKENIZED), isTokenized());
// The BINARY property is always false
// properties.add(getPropertyName(BINARY), isBinary());
} else {
for (Map.Entry<String,String> arg : args.entrySet()) {
String key = arg.getKey();
String value = arg.getValue();
if (key.equals(DEFAULT_VALUE)) {
properties.add(key, value);
} else {
properties.add(key, StrUtils.parseBool(value, false));
}
}
}
return properties;
}
}

View File

@ -19,8 +19,6 @@ package org.apache.solr.schema;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
import org.apache.lucene.search.*;
import org.apache.lucene.index.GeneralField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
@ -58,6 +56,7 @@ public class TextField extends FieldType {
* @see #setMultiTermAnalyzer
*/
protected Analyzer multiTermAnalyzer=null;
private boolean isExplicitMultiTermAnalyzer = false;
@Override
protected void init(IndexSchema schema, Map<String,String> args) {
@ -331,4 +330,11 @@ public class TextField extends FieldType {
}
public void setIsExplicitMultiTermAnalyzer(boolean isExplicitMultiTermAnalyzer) {
this.isExplicitMultiTermAnalyzer = isExplicitMultiTermAnalyzer;
}
public boolean isExplicitMultiTermAnalyzer() {
return isExplicitMultiTermAnalyzer;
}
}

View File

@ -0,0 +1,71 @@
package org.apache.solr.servlet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.slf4j.Logger;
import java.io.PrintWriter;
import java.io.StringWriter;
/**
* Response helper methods.
*/
public class ResponseUtils {
private ResponseUtils() {}
/**
* Adds the given Throwable's message to the given NamedList.
* <p/>
* If the response code is 500 or not a regular code, the Throwable's
* stack trace is both logged and added to the given NamedList.
* <p/>
* Status codes less than 100 are adjusted to be 500.
*/
public static int getErrorInfo(Throwable ex, NamedList info, Logger log) {
int code = 500;
if (ex instanceof SolrException) {
code = ((SolrException)ex).code();
}
for (Throwable th = ex; th != null; th = th.getCause()) {
String msg = th.getMessage();
if (msg != null) {
info.add("msg", msg);
break;
}
}
// For any regular code, don't include the stack trace
if (code == 500 || code < 100) {
StringWriter sw = new StringWriter();
ex.printStackTrace(new PrintWriter(sw));
SolrException.log(log, null, ex);
info.add("trace", sw.toString());
// non standard codes have undefined results with various servers
if (code < 100) {
log.warn("invalid return code: " + code);
code = 500;
}
}
info.add("code", code);
return code;
}
}
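A minimal caller-side sketch (not part of the commit) showing how the helper is used, mirroring the call sites in SolrDispatchFilter further down; the logger and the exception here are placeholders:
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.servlet.ResponseUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ResponseUtilsUsageSketch {
  private static final Logger log = LoggerFactory.getLogger(ResponseUtilsUsageSketch.class);
  public static void main(String[] args) {
    NamedList<Object> info = new SimpleOrderedMap<Object>();
    Throwable ex = new SolrException(ErrorCode.BAD_REQUEST, "Field type name is missing");
    // Populates "msg" and "code" on info; non-500 regular codes skip the stack trace.
    int httpStatus = ResponseUtils.getErrorInfo(ex, info, log);
    System.out.println(httpStatus + " " + info);
  }
}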

View File

@ -23,8 +23,6 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;
import java.net.HttpURLConnection;
import java.net.URL;
@ -248,6 +246,20 @@ public class SolrDispatchFilter implements Filter
parsers.put(config, parser );
}
// Handle /schema/* paths via Restlet
if( path.startsWith("/schema") ) {
solrReq = parser.parse(core, path, req);
SolrRequestInfo.setRequestInfo(new SolrRequestInfo(solrReq, new SolrQueryResponse()));
if( path.equals(req.getServletPath()) ) {
// avoid endless loop - pass through to Restlet via webapp
chain.doFilter(request, response);
} else {
// forward rewritten URI (without path prefix and core/collection name) to Restlet
req.getRequestDispatcher(path).forward(request, response);
}
return;
}
// Determine the handler from the url path if not set
// (we might already have selected the cores handler)
if( handler == null && path.length() > 1 ) { // don't match "" or "/" as valid path
@ -353,14 +365,18 @@ public class SolrDispatchFilter implements Filter
try {
con.connect();
InputStream is = req.getInputStream();
OutputStream os = con.getOutputStream();
InputStream is;
OutputStream os;
if ("POST".equals(req.getMethod())) {
is = req.getInputStream();
os = con.getOutputStream(); // side effect: method is switched to POST
try {
IOUtils.copyLarge(is, os);
} finally {
IOUtils.closeQuietly(os);
IOUtils.closeQuietly(is); // TODO: I thought we weren't supposed to explicitly close servlet streams
}
}
resp.setStatus(con.getResponseCode());
@ -491,19 +507,11 @@ public class SolrDispatchFilter implements Filter
private void handleAdminRequest(HttpServletRequest req, ServletResponse response, SolrRequestHandler handler,
SolrQueryRequest solrReq) throws IOException {
SolrQueryResponse solrResp = new SolrQueryResponse();
final NamedList<Object> responseHeader = new SimpleOrderedMap<Object>();
solrResp.add("responseHeader", responseHeader);
NamedList toLog = solrResp.getToLog();
toLog.add("webapp", req.getContextPath());
toLog.add("path", solrReq.getContext().get("path"));
toLog.add("params", "{" + solrReq.getParamString() + "}");
SolrCore.preDecorateResponse(solrReq, solrResp);
handler.handleRequest(solrReq, solrResp);
SolrCore.setResponseHeaderValues(handler, solrReq, solrResp);
StringBuilder sb = new StringBuilder();
for (int i = 0; i < toLog.size(); i++) {
String name = toLog.getName(i);
Object val = toLog.getVal(i);
sb.append(name).append("=").append(val).append(" ");
SolrCore.postDecorateResponse(handler, solrReq, solrResp);
if (log.isInfoEnabled() && solrResp.getToLog().size() > 0) {
log.info(solrResp.getToLogAsString("[admin] "));
}
QueryResponseWriter respWriter = SolrCore.DEFAULT_RESPONSE_WRITERS.get(solrReq.getParams().get(CommonParams.WT));
if (respWriter == null) respWriter = SolrCore.DEFAULT_RESPONSE_WRITERS.get("standard");
@ -521,7 +529,7 @@ public class SolrDispatchFilter implements Filter
if (solrRsp.getException() != null) {
NamedList info = new SimpleOrderedMap();
int code = getErrorInfo(solrRsp.getException(),info);
int code = ResponseUtils.getErrorInfo(solrRsp.getException(), info, log);
solrRsp.add("error", info);
((HttpServletResponse) response).setStatus(code);
}
@ -543,38 +551,6 @@ public class SolrDispatchFilter implements Filter
//else http HEAD request, nothing to write out, waited this long just to get ContentType
}
protected int getErrorInfo(Throwable ex, NamedList info) {
int code=500;
if( ex instanceof SolrException ) {
code = ((SolrException)ex).code();
}
String msg = null;
for (Throwable th = ex; th != null; th = th.getCause()) {
msg = th.getMessage();
if (msg != null) break;
}
if(msg != null) {
info.add("msg", msg);
}
// For any regular code, don't include the stack trace
if( code == 500 || code < 100 ) {
StringWriter sw = new StringWriter();
ex.printStackTrace(new PrintWriter(sw));
SolrException.log(log, null, ex);
info.add("trace", sw.toString());
// non standard codes have undefined results with various servers
if( code < 100 ) {
log.warn( "invalid return code: "+code );
code = 500;
}
}
info.add("code", new Integer(code));
return code;
}
protected void execute( HttpServletRequest req, SolrRequestHandler handler, SolrQueryRequest sreq, SolrQueryResponse rsp) {
// a custom filter could add more stuff to the request before passing it on.
// for example: sreq.getContext().put( "HttpServletRequest", req );
@ -615,7 +591,7 @@ public class SolrDispatchFilter implements Filter
}
catch( Throwable t ) { // This error really does not matter
SimpleOrderedMap info = new SimpleOrderedMap();
int code=getErrorInfo(ex, info);
int code = ResponseUtils.getErrorInfo(ex, info, log);
response.sendError( code, info.toString() );
}
}

View File

@ -180,22 +180,10 @@ class LogUpdateProcessor extends UpdateRequestProcessor {
// LOG A SUMMARY WHEN ALL DONE (INFO LEVEL)
if (log.isInfoEnabled()) {
StringBuilder sb = new StringBuilder(rsp.getToLogAsString(req.getCore().getLogId()));
NamedList<Object> stdLog = rsp.getToLog();
StringBuilder sb = new StringBuilder(req.getCore().getLogId());
for (int i=0; i<stdLog.size(); i++) {
String name = stdLog.getName(i);
Object val = stdLog.getVal(i);
if (name != null) {
sb.append(name).append('=');
}
sb.append(val).append(' ');
}
stdLog.clear(); // make it so SolrCore.exec won't log this again
rsp.getToLog().clear(); // make it so SolrCore.exec won't log this again
// if id lists were truncated, show how many more there were
if (adds != null && numAdds > maxNumToLog) {
@ -210,6 +198,7 @@ class LogUpdateProcessor extends UpdateRequestProcessor {
log.info(sb.toString());
}
}
}

View File

@ -0,0 +1,619 @@
<?xml version="1.0" ?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!-- The Solr schema file, version 1.5 -->
<schema name="test-rest" version="1.5">
<!-- attribute "name" is the name of this schema and is only used for display purposes.
Applications should change this to reflect the nature of the search collection.
version="x.y" is Solr's version number for the schema syntax and semantics. It should
not normally be changed by applications.
1.0: multiValued attribute did not exist, all fields are multiValued by nature
1.1: multiValued attribute introduced, false by default
1.2: omitTermFreqAndPositions attribute introduced, true by default except for text fields.
1.3: removed optional field compress feature
1.4: default auto-phrase (QueryParser feature) to off
1.5: omitNorms defaults to true for primitive field types (int, float, boolean, string...)
-->
<types>
<fieldType name="pint" class="solr.IntField"/>
<fieldType name="plong" class="solr.LongField"/>
<fieldtype name="pfloat" class="solr.FloatField"/>
<fieldType name="pdouble" class="solr.DoubleField"/>
<fieldType name="int" class="solr.TrieIntField" precisionStep="0" positionIncrementGap="0"/>
<fieldType name="float" class="solr.TrieFloatField" precisionStep="0" positionIncrementGap="0"/>
<fieldType name="long" class="solr.TrieLongField" precisionStep="0" positionIncrementGap="0"/>
<fieldType name="double" class="solr.TrieDoubleField" precisionStep="0" positionIncrementGap="0"/>
<fieldType name="tint" class="solr.TrieIntField" precisionStep="8" positionIncrementGap="0"/>
<fieldType name="tfloat" class="solr.TrieFloatField" precisionStep="8" positionIncrementGap="0"/>
<fieldType name="tlong" class="solr.TrieLongField" precisionStep="8" positionIncrementGap="0"/>
<fieldType name="tdouble" class="solr.TrieDoubleField" precisionStep="8" positionIncrementGap="0"/>
<!-- numeric field types that manipulate the value into
a string value that isn't human-readable in its internal form,
but sorts correctly and supports range queries.
If sortMissingLast="true" then a sort on this field will cause documents
without the field to come after documents with the field,
regardless of the requested sort order.
If sortMissingFirst="true" then a sort on this field will cause documents
without the field to come before documents with the field,
regardless of the requested sort order.
If sortMissingLast="false" and sortMissingFirst="false" (the default),
then default lucene sorting will be used which places docs without the field
first in an ascending sort and last in a descending sort.
-->
<fieldtype name="sint" class="solr.SortableIntField" sortMissingLast="true" />
<fieldtype name="slong" class="solr.SortableLongField" sortMissingLast="true"/>
<fieldtype name="sfloat" class="solr.SortableFloatField" sortMissingLast="true"/>
<fieldtype name="sdouble" class="solr.SortableDoubleField" sortMissingLast="true"/>
<!-- Field type demonstrating an Analyzer failure -->
<fieldtype name="failtype1" class="solr.TextField">
<analyzer type="index">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="0" catenateWords="0" catenateNumbers="0" catenateAll="0"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
</fieldtype>
<!-- Demonstrating ignoreCaseChange -->
<fieldtype name="wdf_nocase" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="0" catenateWords="0" catenateNumbers="0" catenateAll="0" splitOnCaseChange="0" preserveOriginal="0"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="wdf_preserve" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="0" generateNumberParts="1" catenateWords="0" catenateNumbers="0" catenateAll="0" splitOnCaseChange="0" preserveOriginal="1"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
</fieldtype>
<!-- HighlitText optimizes storage for (long) columns which will be highlit -->
<fieldtype name="highlittext" class="solr.TextField"/>
<fieldtype name="boolean" class="solr.BoolField" sortMissingLast="true"/>
<fieldtype name="string" class="solr.StrField" sortMissingLast="true"/>
<!-- format for date is 1995-12-31T23:59:59.999Z and only the fractional
seconds part (.999) is optional.
-->
<fieldtype name="date" class="solr.TrieDateField" sortMissingLast="true"/>
<fieldtype name="tdate" class="solr.TrieDateField" sortMissingLast="true" precisionStep="6"/>
<fieldtype name="pdate" class="solr.DateField" sortMissingLast="true"/>
<fieldType name="text" class="solr.TextField" positionIncrementGap="100" autoGeneratePhraseQueries="true" >
<analyzer type="index">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.StopFilterFactory"
ignoreCase="true"
words="stopwords.txt"
enablePositionIncrements="true"
/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="1" catenateNumbers="1" catenateAll="0" splitOnCaseChange="1"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
<analyzer type="query">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
<filter class="solr.StopFilterFactory"
ignoreCase="true"
words="stopwords.txt"
enablePositionIncrements="true"
/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="0" catenateNumbers="0" catenateAll="0" splitOnCaseChange="1"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
</fieldType>
<!-- field type that doesn't generate phrases from unquoted multiple tokens per analysis unit -->
<fieldType name="text_np" class="solr.TextField" positionIncrementGap="100">
<analyzer type="index">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.StopFilterFactory"
ignoreCase="true"
words="stopwords.txt"
enablePositionIncrements="true"
/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="1" catenateNumbers="1" catenateAll="0" splitOnCaseChange="1"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
<analyzer type="query">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
<filter class="solr.StopFilterFactory"
ignoreCase="true"
words="stopwords.txt"
enablePositionIncrements="true"
/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="0" catenateNumbers="0" catenateAll="0" splitOnCaseChange="1"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
</fieldType>
<fieldtype name="nametext" class="solr.TextField">
<analyzer class="org.apache.lucene.analysis.core.WhitespaceAnalyzer"/>
</fieldtype>
<fieldtype name="teststop" class="solr.TextField">
<analyzer>
<tokenizer class="solr.LowerCaseTokenizerFactory"/>
<filter class="solr.StandardFilterFactory"/>
<filter class="solr.StopFilterFactory" words="stopwords.txt"/>
</analyzer>
</fieldtype>
<!-- fieldtypes in this section isolate tokenizers and tokenfilters for testing -->
<fieldtype name="lowertok" class="solr.TextField">
<analyzer><tokenizer class="solr.LowerCaseTokenizerFactory"/></analyzer>
</fieldtype>
<fieldtype name="keywordtok" class="solr.TextField">
<analyzer><tokenizer class="solr.MockTokenizerFactory" pattern="keyword"/></analyzer>
</fieldtype>
<fieldtype name="standardtok" class="solr.TextField">
<analyzer><tokenizer class="solr.StandardTokenizerFactory"/></analyzer>
</fieldtype>
<fieldtype name="lettertok" class="solr.TextField">
<analyzer><tokenizer class="solr.LetterTokenizerFactory"/></analyzer>
</fieldtype>
<fieldtype name="whitetok" class="solr.TextField">
<analyzer><tokenizer class="solr.MockTokenizerFactory"/></analyzer>
</fieldtype>
<fieldtype name="HTMLstandardtok" class="solr.TextField">
<analyzer>
<charFilter class="solr.HTMLStripCharFilterFactory"/>
<tokenizer class="solr.StandardTokenizerFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="HTMLwhitetok" class="solr.TextField">
<analyzer>
<charFilter class="solr.HTMLStripCharFilterFactory"/>
<tokenizer class="solr.MockTokenizerFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="standardtokfilt" class="solr.TextField">
<analyzer>
<tokenizer class="solr.StandardTokenizerFactory"/>
<filter class="solr.StandardFilterFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="standardfilt" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.StandardFilterFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="lowerfilt" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="patternreplacefilt" class="solr.TextField">
<analyzer type="index">
<tokenizer class="solr.MockTokenizerFactory" pattern="keyword"/>
<filter class="solr.PatternReplaceFilterFactory"
pattern="([^a-zA-Z])" replacement="_" replace="all"
/>
</analyzer>
<analyzer type="query">
<tokenizer class="solr.MockTokenizerFactory" pattern="keyword"/>
</analyzer>
</fieldtype>
<fieldtype name="porterfilt" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="engporterfilt" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="custengporterfilt" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="stopfilt" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.StopFilterFactory" ignoreCase="true"/>
</analyzer>
</fieldtype>
<fieldtype name="custstopfilt" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.StopFilterFactory" words="stopwords.txt"/>
</analyzer>
</fieldtype>
<fieldtype name="lengthfilt" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.LengthFilterFactory" min="2" max="5"/>
</analyzer>
</fieldtype>
<fieldtype name="limitfilt" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.LimitTokenCountFilterFactory" maxTokenCount="100" />
</analyzer>
</fieldtype>
<fieldtype name="subword" class="solr.TextField" multiValued="true" positionIncrementGap="100">
<analyzer type="index">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="1" catenateNumbers="1" catenateAll="0"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
<analyzer type="query">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="0" catenateNumbers="0" catenateAll="0"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.StopFilterFactory"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="numericsubword" class="solr.TextField" multiValued="true" positionIncrementGap="100">
<analyzer type="index">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.WordDelimiterFilterFactory" protected="protwords.txt" splitOnNumerics="0" splitOnCaseChange="0" generateWordParts="1" generateNumberParts="0" catenateWords="0" catenateNumbers="0" catenateAll="0"/>
<filter class="solr.StopFilterFactory"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
<analyzer type="query">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.WordDelimiterFilterFactory" protected="protwords.txt" splitOnNumerics="0" splitOnCaseChange="0" generateWordParts="1" generateNumberParts="1" catenateWords="1" catenateNumbers="1" catenateAll="0"/>
<filter class="solr.StopFilterFactory"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
</fieldtype>
<fieldtype name="protectedsubword" class="solr.TextField" multiValued="true" positionIncrementGap="100">
<analyzer type="index">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.WordDelimiterFilterFactory" protected="protwords.txt" splitOnNumerics="0" splitOnCaseChange="0" generateWordParts="1" generateNumberParts="1" catenateWords="0" catenateNumbers="0" catenateAll="0"/>
</analyzer>
<analyzer type="query">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
</fieldtype>
<!-- more flexible in matching skus, but more chance of a false match -->
<fieldtype name="skutype1" class="solr.TextField">
<analyzer type="index">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="1" catenateNumbers="1" catenateAll="0"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
<analyzer type="query">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="0" generateNumberParts="0" catenateWords="1" catenateNumbers="1" catenateAll="0"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
</fieldtype>
<!-- less flexible in matching skus, but less chance of a false match -->
<fieldtype name="skutype2" class="solr.TextField">
<analyzer type="index">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="0" generateNumberParts="0" catenateWords="1" catenateNumbers="1" catenateAll="0"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
<analyzer type="query">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="0" generateNumberParts="0" catenateWords="1" catenateNumbers="1" catenateAll="0"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
</fieldtype>
<!-- a field type that applies synonym expansion from synonyms.txt -->
<fieldtype name="syn" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter name="syn" class="solr.SynonymFilterFactory" synonyms="synonyms.txt"/>
</analyzer>
</fieldtype>
<!-- a text field with the stop filter only on the query analyzer
-->
<fieldType name="text_sw" class="solr.TextField" positionIncrementGap="100">
<analyzer type="index">
<tokenizer class="solr.MockTokenizerFactory"/>
<!-- in this example, we will only use synonyms at query time
<filter class="solr.SynonymFilterFactory" synonyms="index_synonyms.txt" ignoreCase="true" expand="false"/>
-->
<!--<filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"/>-->
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="1"
catenateNumbers="1" catenateAll="0" splitOnCaseChange="0"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
<analyzer type="query">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
<filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"/>
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="0"
catenateNumbers="0" catenateAll="0" splitOnCaseChange="0"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.PorterStemFilterFactory"/>
</analyzer>
</fieldType>
<!-- Demonstrates How RemoveDuplicatesTokenFilter makes stemmed
synonyms "better"
-->
<fieldtype name="dedup" class="solr.TextField">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.SynonymFilterFactory"
synonyms="synonyms.txt" expand="true" />
<filter class="solr.PorterStemFilterFactory"/>
<filter class="solr.RemoveDuplicatesTokenFilterFactory" />
</analyzer>
</fieldtype>
<fieldtype name="unstored" class="solr.StrField" indexed="true" stored="false"/>
<fieldtype name="textgap" class="solr.TextField" multiValued="true" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.LowerCaseFilterFactory"/>
</analyzer>
</fieldtype>
<fieldType name="uuid" class="solr.UUIDField" />
<fieldType name="ignored" class="solr.StrField" indexed="false" stored="false" />
<fieldType name="random" class="solr.RandomSortField" indexed="true" />
<!-- Poly field -->
<fieldType name="xy" class="solr.PointType" dimension="2" subFieldType="double"/>
<fieldType name="xyd" class="solr.PointType" dimension="2" subFieldSuffix="*_d"/>
<fieldtype name="geohash" class="solr.GeoHashField"/>
<fieldType name="point" class="solr.PointType" dimension="2" subFieldSuffix="_d"/>
<!-- A specialized field for geospatial search. If indexed, this fieldType must not be multivalued. -->
<fieldType name="location" class="solr.LatLonType" subFieldSuffix="_coordinate"/>
</types>
<fields>
<field name="id" type="string" indexed="true" stored="true" multiValued="false" required="true"/>
<field name="signatureField" type="string" indexed="true" stored="false"/>
<field name="uuid" type="uuid" stored="true" />
<field name="name" type="nametext" indexed="true" stored="true"/>
<field name="text" type="text" indexed="true" stored="false" multiValued="true" />
<field name="subject" type="text" indexed="true" stored="true"/>
<field name="title" type="nametext" indexed="true" stored="true"/>
<field name="weight" type="float" indexed="true" stored="true"/>
<field name="bday" type="date" indexed="true" stored="true"/>
<field name="text_np" type="text_np" indexed="true" stored="false"/>
<field name="title_stemmed" type="text" indexed="true" stored="false"/>
<field name="title_lettertok" type="lettertok" indexed="true" stored="false"/>
<field name="syn" type="syn" indexed="true" stored="true"/>
<!-- to test property inheritance and overriding -->
<field name="shouldbeunstored" type="unstored" />
<field name="shouldbestored" type="unstored" stored="true"/>
<field name="shouldbeunindexed" type="unstored" indexed="false" stored="true"/>
<!-- test different combinations of indexed and stored -->
<field name="bind" type="boolean" indexed="true" stored="false"/>
<field name="bsto" type="boolean" indexed="false" stored="true"/>
<field name="bindsto" type="boolean" indexed="true" stored="true"/>
<field name="isto" type="int" indexed="false" stored="true"/>
<field name="iind" type="int" indexed="true" stored="false"/>
<field name="ssto" type="string" indexed="false" stored="true"/>
<field name="sind" type="string" indexed="true" stored="false"/>
<field name="sindsto" type="string" indexed="true" stored="true"/>
<!-- test combinations of term vector settings -->
<field name="test_basictv" type="text" termVectors="true"/>
<field name="test_notv" type="text" termVectors="false"/>
<field name="test_postv" type="text" termVectors="true" termPositions="true"/>
<field name="test_offtv" type="text" termVectors="true" termOffsets="true"/>
<field name="test_posofftv" type="text" termVectors="true"
termPositions="true" termOffsets="true"/>
<!-- test highlit field settings -->
<field name="test_hlt" type="highlittext" indexed="true" compressed="true"/>
<field name="test_hlt_off" type="highlittext" indexed="true" compressed="false"/>
<!-- fields to test individual tokenizers and tokenfilters -->
<field name="teststop" type="teststop" indexed="true" stored="true"/>
<field name="lowertok" type="lowertok" indexed="true" stored="true"/>
<field name="keywordtok" type="keywordtok" indexed="true" stored="true"/>
<field name="standardtok" type="standardtok" indexed="true" stored="true"/>
<field name="HTMLstandardtok" type="HTMLstandardtok" indexed="true" stored="true"/>
<field name="lettertok" type="lettertok" indexed="true" stored="true"/>
<field name="whitetok" type="whitetok" indexed="true" stored="true"/>
<field name="HTMLwhitetok" type="HTMLwhitetok" indexed="true" stored="true"/>
<field name="standardtokfilt" type="standardtokfilt" indexed="true" stored="true"/>
<field name="standardfilt" type="standardfilt" indexed="true" stored="true"/>
<field name="lowerfilt" type="lowerfilt" indexed="true" stored="true"/>
<field name="patternreplacefilt" type="patternreplacefilt" indexed="true" stored="true"/>
<field name="porterfilt" type="porterfilt" indexed="true" stored="true"/>
<field name="engporterfilt" type="engporterfilt" indexed="true" stored="true"/>
<field name="custengporterfilt" type="custengporterfilt" indexed="true" stored="true"/>
<field name="stopfilt" type="stopfilt" indexed="true" stored="true"/>
<field name="custstopfilt" type="custstopfilt" indexed="true" stored="true"/>
<field name="lengthfilt" type="lengthfilt" indexed="true" stored="true"/>
<field name="dedup" type="dedup" indexed="true" stored="true"/>
<field name="wdf_nocase" type="wdf_nocase" indexed="true" stored="true"/>
<field name="wdf_preserve" type="wdf_preserve" indexed="true" stored="true"/>
<field name="numberpartfail" type="failtype1" indexed="true" stored="true"/>
<field name="nullfirst" type="string" indexed="true" stored="true" sortMissingFirst="true"/>
<field name="subword" type="subword" indexed="true" stored="true"/>
<field name="numericsubword" type="numericsubword" indexed="true" stored="true"/>
<field name="protectedsubword" type="protectedsubword" indexed="true" stored="true"/>
<field name="sku1" type="skutype1" indexed="true" stored="true"/>
<field name="sku2" type="skutype2" indexed="true" stored="true"/>
<field name="textgap" type="textgap" indexed="true" stored="true"/>
<!--
<field name="timestamp" type="date" indexed="true" stored="true" default="NOW"/>
-->
<field name="timestamp" type="date" indexed="true" stored="true"/>
<!-- Test a point field for distances -->
<field name="point" type="xy" indexed="true" stored="true" multiValued="false"/>
<field name="pointD" type="xyd" indexed="true" stored="true" multiValued="false"/>
<field name="point_hash" type="geohash" indexed="true" stored="true" multiValued="false"/>
<field name="store" type="location" indexed="true" stored="true"/>
<!-- to test uniq fields -->
<field name="uniq" type="string" indexed="true" stored="true" multiValued="true"/>
<field name="uniq2" type="string" indexed="true" stored="true" multiValued="true"/>
<field name="uniq3" type="string" indexed="true" stored="true"/>
<field name="nouniq" type="string" indexed="true" stored="true" multiValued="true"/>
<!-- for versioning -->
<field name="_version_" type="long" indexed="true" stored="true"/>
<field name="copyfield_source" type="string" indexed="true" stored="true" multiValued="true"/>
<dynamicField name="*_coordinate" type="tdouble" indexed="true" stored="false"/>
<dynamicField name="*_si" type="sint" indexed="true" stored="true"/>
<dynamicField name="*_sl" type="slong" indexed="true" stored="true"/>
<dynamicField name="*_sf" type="sfloat" indexed="true" stored="true"/>
<dynamicField name="*_sd" type="sdouble" indexed="true" stored="true"/>
<dynamicField name="*_sI" type="string" indexed="true" stored="false"/>
<dynamicField name="*_sS" type="string" indexed="false" stored="true"/>
<dynamicField name="t_*" type="text" indexed="true" stored="true"/>
<dynamicField name="tv_*" type="text" indexed="true" stored="true"
termVectors="true" termPositions="true" termOffsets="true"/>
<dynamicField name="tv_mv_*" type="text" indexed="true" stored="true" multiValued="true"
termVectors="true" termPositions="true" termOffsets="true"/>
<dynamicField name="*_mfacet" type="string" indexed="true" stored="false" multiValued="true" />
<dynamicField name="*_sw" type="text_sw" indexed="true" stored="true" multiValued="true"/>
<dynamicField name="*_i" type="int" indexed="true" stored="true"/>
<dynamicField name="*_is" type="int" indexed="true" stored="true" multiValued="true"/>
<dynamicField name="*_s1" type="string" indexed="true" stored="true" multiValued="false"/>
<!-- :TODO: why are these identical?!?!?! -->
<dynamicField name="*_s" type="string" indexed="true" stored="true" multiValued="true"/>
<dynamicField name="*_ss" type="string" indexed="true" stored="true" multiValued="true"/>
<dynamicField name="*_l" type="long" indexed="true" stored="true"/>
<dynamicField name="*_t" type="text" indexed="true" stored="true"/>
<dynamicField name="*_tt" type="text" indexed="true" stored="true"/>
<dynamicField name="*_b" type="boolean" indexed="true" stored="true"/>
<dynamicField name="*_f" type="float" indexed="true" stored="true"/>
<dynamicField name="*_d" type="double" indexed="true" stored="true"/>
<dynamicField name="*_dt" type="date" indexed="true" stored="true"/>
<!-- some trie-coded dynamic fields for faster range queries -->
<dynamicField name="*_ti" type="tint" indexed="true" stored="true"/>
<dynamicField name="*_tl" type="tlong" indexed="true" stored="true"/>
<dynamicField name="*_tf" type="tfloat" indexed="true" stored="true"/>
<dynamicField name="*_td" type="tdouble" indexed="true" stored="true"/>
<dynamicField name="*_tdt" type="tdate" indexed="true" stored="true"/>
<dynamicField name="*_pi" type="pint" indexed="true" stored="true"/>
<dynamicField name="*_pf" type="pfloat" indexed="true" stored="true"/>
<dynamicField name="*_pl" type="plong" indexed="true" stored="true"/>
<dynamicField name="*_pd" type="pdouble" indexed="true" stored="true"/>
<dynamicField name="*_pdt" type="pdate" indexed="true" stored="true"/>
<dynamicField name="ignored_*" type="ignored" multiValued="true"/>
<dynamicField name="attr_*" type="text" indexed="true" stored="true" multiValued="true"/>
<dynamicField name="random_*" type="random" />
</fields>
<defaultSearchField>text</defaultSearchField>
<uniqueKey>id</uniqueKey>
<copyField source="title" dest="title_stemmed" maxChars="200"/>
<copyField source="title" dest="title_lettertok"/>
<copyField source="title" dest="text"/>
<copyField source="subject" dest="text"/>
<copyField source="copyfield_source" dest="text"/>
<copyField source="copyfield_source" dest="copyfield_dest_ss"/> <!-- copyField into another stored copyField - not best practice -->
<copyField source="title" dest="dest_sub_no_ast_s"/>
<copyField source="*_i" dest="title"/>
<copyField source="*_i" dest="*_s"/>
<copyField source="*_i" dest="*_dest_sub_s"/>
<copyField source="*_i" dest="dest_sub_no_ast_s"/>
<copyField source="*_src_sub_i" dest="title"/>
<copyField source="*_src_sub_i" dest="*_s"/>
<copyField source="*_src_sub_i" dest="*_dest_sub_s"/>
<copyField source="*_src_sub_i" dest="dest_sub_no_ast_s"/>
<copyField source="src_sub_no_ast_i" dest="title"/>
<copyField source="src_sub_no_ast_i" dest="*_s"/>
<copyField source="src_sub_no_ast_i" dest="*_dest_sub_s"/>
<copyField source="src_sub_no_ast_i" dest="dest_sub_no_ast_s"/>
</schema>

View File

@ -191,8 +191,7 @@ public class LukeRequestHandlerTest extends AbstractSolrTestCase {
field("text") + "/arr[@name='copySources']/str[.='subject']",
field("title") + "/arr[@name='copyDests']/str[.='text']",
field("title") + "/arr[@name='copyDests']/str[.='title_stemmed']",
// :TODO: SOLR-3798
//dynfield("bar_copydest_*") + "/arr[@name='copySource']/str[.='foo_copysource_*']",
dynfield("bar_copydest_*") + "/arr[@name='copySources']/str[.='foo_copysource_*']",
dynfield("foo_copysource_*") + "/arr[@name='copyDests']/str[.='bar_copydest_*']");
assertEquals(xml, null, r);
}

View File

@ -0,0 +1,37 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.util.RestTestBase;
import org.eclipse.jetty.servlet.ServletHolder;
import org.junit.BeforeClass;
import org.restlet.ext.servlet.ServerServlet;
import java.util.SortedMap;
import java.util.TreeMap;
abstract public class SchemaRestletTestBase extends RestTestBase {
@BeforeClass
public static void init() throws Exception {
final SortedMap<ServletHolder,String> extraServlets = new TreeMap<ServletHolder,String>();
final ServletHolder schemaRestApi = new ServletHolder("SchemaRestApi", ServerServlet.class);
schemaRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SchemaRestApi");
extraServlets.put(schemaRestApi, "/schema/*");
createJettyAndHarness(TEST_HOME(), "solrconfig.xml", "schema-rest.xml", "/solr", true, extraServlets);
}
}
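
This base class registers Restlet's ServerServlet under /schema/* with org.apache.solr.rest.SchemaRestApi as the Restlet application, so the endpoints exercised by the following tests are plain HTTP GET resources. The sketch below is a rough smoke test that hits them with nothing but the JDK; the host, port, and /solr context path are assumptions (the test harness binds Jetty to its own port), while the resource paths and wt parameter mirror the tests.

import java.net.HttpURLConnection;
import java.net.URL;

/**
 * Rough smoke test for the new schema REST endpoints; assumes a Solr instance
 * reachable at http://localhost:8983 with the /solr context path used by this harness.
 */
public class SchemaRestApiSmokeTest {
  public static void main(String[] args) throws Exception {
    String[] paths = {
        "/solr/schema/fields?wt=json",
        "/solr/schema/dynamicfields?wt=json",
        "/solr/schema/fieldtypes?wt=json",
        "/solr/schema/copyfields?wt=json"
    };
    for (String path : paths) {
      HttpURLConnection conn =
          (HttpURLConnection) new URL("http://localhost:8983" + path).openConnection();
      conn.setRequestMethod("GET");
      // A 200 with a JSON body is expected for each collection resource.
      System.out.println(path + " -> HTTP " + conn.getResponseCode());
      conn.disconnect();
    }
  }
}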

View File

@ -0,0 +1,140 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.junit.Test;
public class TestCopyFieldCollectionResource extends SchemaRestletTestBase {
@Test
public void testGetAllCopyFields() throws Exception {
assertQ("/schema/copyfields?indent=on&wt=xml",
"/response/arr[@name='copyfields']/lst[ str[@name='source'][.='title']"
+" and str[@name='dest'][.='title_stemmed']"
+" and int[@name='maxChars'][.='200']]",
"/response/arr[@name='copyfields']/lst[ str[@name='source'][.='title']"
+" and str[@name='dest'][.='dest_sub_no_ast_s']"
+" and str[@name='destDynamicBase'][.='*_s']]",
"/response/arr[@name='copyfields']/lst[ str[@name='source'][.='*_i']"
+" and str[@name='dest'][.='title']]",
"/response/arr[@name='copyfields']/lst[ str[@name='source'][.='*_i']"
+" and str[@name='dest'][.='*_s']]",
"/response/arr[@name='copyfields']/lst[ str[@name='source'][.='*_i']"
+" and str[@name='dest'][.='*_dest_sub_s']"
+" and str[@name='destDynamicBase'][.='*_s']]",
"/response/arr[@name='copyfields']/lst[ str[@name='source'][.='*_i']"
+" and str[@name='dest'][.='dest_sub_no_ast_s']"
+" and str[@name='destDynamicBase'][.='*_s']]",
"/response/arr[@name='copyfields']/lst[ str[@name='source'][.='*_src_sub_i']"
+" and str[@name='sourceDynamicBase'][.='*_i']"
+" and str[@name='dest'][.='title']]",
"/response/arr[@name='copyfields']/lst[ str[@name='source'][.='*_src_sub_i']"
+" and str[@name='sourceDynamicBase'][.='*_i']"
+" and str[@name='dest'][.='*_s']]",
"/response/arr[@name='copyfields']/lst[ str[@name='source'][.='*_src_sub_i']"
+" and str[@name='sourceDynamicBase'][.='*_i']"
+" and str[@name='dest'][.='*_dest_sub_s']"
+" and str[@name='destDynamicBase'][.='*_s']]",
"/response/arr[@name='copyfields']/lst[ str[@name='source'][.='*_src_sub_i']"
+" and str[@name='sourceDynamicBase'][.='*_i']"
+" and str[@name='dest'][.='dest_sub_no_ast_s']"
+" and str[@name='destDynamicBase'][.='*_s']]",
"/response/arr[@name='copyfields']/lst[ str[@name='source'][.='src_sub_no_ast_i']"
+" and str[@name='sourceDynamicBase'][.='*_i']"
+" and str[@name='dest'][.='title']]",
"/response/arr[@name='copyfields']/lst[ str[@name='source'][.='src_sub_no_ast_i']"
+" and str[@name='sourceDynamicBase'][.='*_i']"
+" and str[@name='dest'][.='*_s']]",
"/response/arr[@name='copyfields']/lst[ str[@name='source'][.='src_sub_no_ast_i']"
+" and str[@name='sourceDynamicBase'][.='*_i']"
+" and str[@name='dest'][.='*_dest_sub_s']"
+" and str[@name='destDynamicBase'][.='*_s']]",
"/response/arr[@name='copyfields']/lst[ str[@name='source'][.='src_sub_no_ast_i']"
+" and str[@name='sourceDynamicBase'][.='*_i']"
+" and str[@name='dest'][.='dest_sub_no_ast_s']"
+" and str[@name='destDynamicBase'][.='*_s']]");
}
@Test
public void testJsonGetAllCopyFields() throws Exception {
assertJQ("/schema/copyfields?indent=on&wt=json",
"/copyfields/[6]=={'source':'title','dest':'dest_sub_no_ast_s','destDynamicBase':'*_s'}",
"/copyfields/[7]=={'source':'*_i','dest':'title'}",
"/copyfields/[8]=={'source':'*_i','dest':'*_s'}",
"/copyfields/[9]=={'source':'*_i','dest':'*_dest_sub_s','destDynamicBase':'*_s'}",
"/copyfields/[10]=={'source':'*_i','dest':'dest_sub_no_ast_s','destDynamicBase':'*_s'}",
"/copyfields/[11]=={'source':'*_src_sub_i','sourceDynamicBase':'*_i','dest':'title'}",
"/copyfields/[12]=={'source':'*_src_sub_i','sourceDynamicBase':'*_i','dest':'*_s'}",
"/copyfields/[13]=={'source':'*_src_sub_i','sourceDynamicBase':'*_i','dest':'*_dest_sub_s','destDynamicBase':'*_s'}",
"/copyfields/[14]=={'source':'*_src_sub_i','sourceDynamicBase':'*_i','dest':'dest_sub_no_ast_s','destDynamicBase':'*_s'}",
"/copyfields/[15]=={'source':'src_sub_no_ast_i','sourceDynamicBase':'*_i','dest':'title'}",
"/copyfields/[16]=={'source':'src_sub_no_ast_i','sourceDynamicBase':'*_i','dest':'*_s'}",
"/copyfields/[17]=={'source':'src_sub_no_ast_i','sourceDynamicBase':'*_i','dest':'*_dest_sub_s','destDynamicBase':'*_s'}",
"/copyfields/[18]=={'source':'src_sub_no_ast_i','sourceDynamicBase':'*_i','dest':'dest_sub_no_ast_s','destDynamicBase':'*_s'}");
}
@Test
public void testRestrictSource() throws Exception {
assertQ("/schema/copyfields/?indent=on&wt=xml&source.fl=title,*_i,*_src_sub_i,src_sub_no_ast_i",
"count(/response/arr[@name='copyfields']/lst) = 16", // 4 + 4 + 4 + 4
"count(/response/arr[@name='copyfields']/lst/str[@name='source'][.='title']) = 4",
"count(/response/arr[@name='copyfields']/lst/str[@name='source'][.='*_i']) = 4",
"count(/response/arr[@name='copyfields']/lst/str[@name='source'][.='*_src_sub_i']) = 4",
"count(/response/arr[@name='copyfields']/lst/str[@name='source'][.='src_sub_no_ast_i']) = 4");
}
@Test
public void testRestrictDest() throws Exception {
assertQ("/schema/copyfields/?indent=on&wt=xml&dest.fl=title,*_s,*_dest_sub_s,dest_sub_no_ast_s",
"count(/response/arr[@name='copyfields']/lst) = 13", // 3 + 3 + 3 + 4
"count(/response/arr[@name='copyfields']/lst/str[@name='dest'][.='title']) = 3",
"count(/response/arr[@name='copyfields']/lst/str[@name='dest'][.='*_s']) = 3",
"count(/response/arr[@name='copyfields']/lst/str[@name='dest'][.='*_dest_sub_s']) = 3",
"count(/response/arr[@name='copyfields']/lst/str[@name='dest'][.='dest_sub_no_ast_s']) = 4");
}
@Test
public void testRestrictSourceAndDest() throws Exception {
assertQ("/schema/copyfields/?indent=on&wt=xml&source.fl=title,*_i&dest.fl=title,dest_sub_no_ast_s",
"count(/response/arr[@name='copyfields']/lst) = 3",
"/response/arr[@name='copyfields']/lst[ str[@name='source'][.='title']"
+" and str[@name='dest'][.='dest_sub_no_ast_s']]",
"/response/arr[@name='copyfields']/lst[ str[@name='source'][.='*_i']"
+" and str[@name='dest'][.='title']]",
"/response/arr[@name='copyfields']/lst[ str[@name='source'][.='*_i']"
+" and str[@name='dest'][.='dest_sub_no_ast_s']]");
}
}

View File

@ -0,0 +1,62 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.junit.Test;
import java.io.IOException;
public class TestDynamicFieldCollectionResource extends SchemaRestletTestBase {
@Test
public void testGetAllDynamicFields() throws Exception {
assertQ("/schema/dynamicfields?indent=on&wt=xml",
"(/response/arr[@name='dynamicfields']/lst/str[@name='name'])[1] = '*_coordinate'",
"(/response/arr[@name='dynamicfields']/lst/str[@name='name'])[2] = 'ignored_*'",
"(/response/arr[@name='dynamicfields']/lst/str[@name='name'])[3] = '*_mfacet'",
"count(//copySources/str)=count(//copyDests/str)");
}
@Test
public void testGetTwoDynamicFields() throws IOException {
assertQ("/schema/dynamicfields?indent=on&wt=xml&fl=*_i,*_s",
"count(/response/arr[@name='dynamicfields']/lst/str[@name='name']) = 2",
"(/response/arr[@name='dynamicfields']/lst/str[@name='name'])[1] = '*_i'",
"(/response/arr[@name='dynamicfields']/lst/str[@name='name'])[2] = '*_s'");
}
@Test
public void testNotFoundDynamicFields() throws IOException {
assertQ("/schema/dynamicfields?indent=on&wt=xml&fl=*_not_in_there,this_one_isnt_either_*",
"count(/response/arr[@name='dynamicfields']) = 1",
"count(/response/arr[@name='dynamicfields']/lst/str[@name='name']) = 0");
}
@Test
public void testJsonGetAllDynamicFields() throws Exception {
assertJQ("/schema/dynamicfields?indent=on",
"/dynamicfields/[0]/name=='*_coordinate'",
"/dynamicfields/[1]/name=='ignored_*'",
"/dynamicfields/[2]/name=='*_mfacet'");
}
@Test
public void testJsonGetTwoDynamicFields() throws Exception {
assertJQ("/schema/dynamicfields?indent=on&fl=*_i,*_s&wt=xml", // assertJQ will fix the wt param to be json
"/dynamicfields/[0]/name=='*_i'",
"/dynamicfields/[1]/name=='*_s'");
}
}

View File

@ -0,0 +1,70 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.junit.Test;
public class TestDynamicFieldResource extends SchemaRestletTestBase {
@Test
public void testGetDynamicField() throws Exception {
assertQ("/schema/dynamicfields/*_i?indent=on&wt=xml&showDefaults=on",
"count(/response/lst[@name='dynamicfield']) = 1",
"/response/lst[@name='dynamicfield']/str[@name='name'] = '*_i'",
"/response/lst[@name='dynamicfield']/str[@name='type'] = 'int'",
"/response/lst[@name='dynamicfield']/bool[@name='indexed'] = 'true'",
"/response/lst[@name='dynamicfield']/bool[@name='stored'] = 'true'",
"/response/lst[@name='dynamicfield']/bool[@name='docValues'] = 'false'",
"/response/lst[@name='dynamicfield']/bool[@name='termVectors'] = 'false'",
"/response/lst[@name='dynamicfield']/bool[@name='termPositions'] = 'false'",
"/response/lst[@name='dynamicfield']/bool[@name='termOffsets'] = 'false'",
"/response/lst[@name='dynamicfield']/bool[@name='omitNorms'] = 'true'",
"/response/lst[@name='dynamicfield']/bool[@name='omitTermFreqAndPositions'] = 'true'",
"/response/lst[@name='dynamicfield']/bool[@name='omitPositions'] = 'false'",
"/response/lst[@name='dynamicfield']/bool[@name='storeOffsetsWithPositions'] = 'false'",
"/response/lst[@name='dynamicfield']/bool[@name='multiValued'] = 'false'",
"/response/lst[@name='dynamicfield']/bool[@name='required'] = 'false'",
"/response/lst[@name='dynamicfield']/bool[@name='tokenized'] = 'false'");
}
@Test
public void testGetNotFoundDynamicField() throws Exception {
assertQ("/schema/dynamicfields/*not_in_there?indent=on&wt=xml",
"count(/response/lst[@name='dynamicfield']) = 0",
"/response/lst[@name='responseHeader']/int[@name='status'] = '404'",
"/response/lst[@name='error']/int[@name='code'] = '404'");
}
@Test
public void testJsonGetDynamicField() throws Exception {
assertJQ("/schema/dynamicfields/*_i?indent=on&showDefaults=on",
"/dynamicfield/name=='*_i'",
"/dynamicfield/type=='int'",
"/dynamicfield/indexed==true",
"/dynamicfield/stored==true",
"/dynamicfield/docValues==false",
"/dynamicfield/termVectors==false",
"/dynamicfield/termPositions==false",
"/dynamicfield/termOffsets==false",
"/dynamicfield/omitNorms==true",
"/dynamicfield/omitTermFreqAndPositions==true",
"/dynamicfield/omitPositions==false",
"/dynamicfield/storeOffsetsWithPositions==false",
"/dynamicfield/multiValued==false",
"/dynamicfield/required==false",
"/dynamicfield/tokenized==false");
}
}

View File

@ -0,0 +1,62 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.junit.Test;
import java.io.IOException;
public class TestFieldCollectionResource extends SchemaRestletTestBase {
@Test
public void testGetAllFields() throws Exception {
assertQ("/schema/fields?indent=on&wt=xml",
"(/response/arr[@name='fields']/lst/str[@name='name'])[1] = 'custstopfilt'",
"(/response/arr[@name='fields']/lst/str[@name='name'])[2] = 'lowerfilt'",
"(/response/arr[@name='fields']/lst/str[@name='name'])[3] = 'test_basictv'",
"count(//copySources/str) = count(//copyDests/str)");
}
@Test
public void testGetTwoFields() throws IOException {
assertQ("/schema/fields?indent=on&wt=xml&fl=id,_version_",
"count(/response/arr[@name='fields']/lst/str[@name='name']) = 2",
"(/response/arr[@name='fields']/lst/str[@name='name'])[1] = 'id'",
"(/response/arr[@name='fields']/lst/str[@name='name'])[2] = '_version_'");
}
@Test
public void testNotFoundFields() throws IOException {
assertQ("/schema/fields?indent=on&wt=xml&fl=not_in_there,this_one_either",
"count(/response/arr[@name='fields']) = 1",
"count(/response/arr[@name='fields']/lst/str[@name='name']) = 0");
}
@Test
public void testJsonGetAllFields() throws Exception {
assertJQ("/schema/fields?indent=on",
"/fields/[0]/name=='custstopfilt'",
"/fields/[1]/name=='lowerfilt'",
"/fields/[2]/name=='test_basictv'");
}
@Test
public void testJsonGetTwoFields() throws Exception {
assertJQ("/schema/fields?indent=on&fl=id,_version_&wt=xml", // assertJQ should fix the wt param to be json
"/fields/[0]/name=='id'",
"/fields/[1]/name=='_version_'");
}
}

View File

@ -0,0 +1,94 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.junit.Test;
public class TestFieldResource extends SchemaRestletTestBase {
@Test
public void testGetField() throws Exception {
assertQ("/schema/fields/test_postv?indent=on&wt=xml&showDefaults=true",
"count(/response/lst[@name='field']) = 1",
"count(/response/lst[@name='field']/*) = 15",
"/response/lst[@name='field']/str[@name='name'] = 'test_postv'",
"/response/lst[@name='field']/str[@name='type'] = 'text'",
"/response/lst[@name='field']/bool[@name='indexed'] = 'true'",
"/response/lst[@name='field']/bool[@name='stored'] = 'true'",
"/response/lst[@name='field']/bool[@name='docValues'] = 'false'",
"/response/lst[@name='field']/bool[@name='termVectors'] = 'true'",
"/response/lst[@name='field']/bool[@name='termPositions'] = 'true'",
"/response/lst[@name='field']/bool[@name='termOffsets'] = 'false'",
"/response/lst[@name='field']/bool[@name='omitNorms'] = 'false'",
"/response/lst[@name='field']/bool[@name='omitTermFreqAndPositions'] = 'false'",
"/response/lst[@name='field']/bool[@name='omitPositions'] = 'false'",
"/response/lst[@name='field']/bool[@name='storeOffsetsWithPositions'] = 'false'",
"/response/lst[@name='field']/bool[@name='multiValued'] = 'false'",
"/response/lst[@name='field']/bool[@name='required'] = 'false'",
"/response/lst[@name='field']/bool[@name='tokenized'] = 'true'");
}
@Test
public void testGetNotFoundField() throws Exception {
assertQ("/schema/fields/not_in_there?indent=on&wt=xml",
"count(/response/lst[@name='field']) = 0",
"/response/lst[@name='responseHeader']/int[@name='status'] = '404'",
"/response/lst[@name='error']/int[@name='code'] = '404'");
}
@Test
public void testJsonGetField() throws Exception {
assertJQ("/schema/fields/test_postv?indent=on&showDefaults=true",
"/field/name=='test_postv'",
"/field/type=='text'",
"/field/indexed==true",
"/field/stored==true",
"/field/docValues==false",
"/field/termVectors==true",
"/field/termPositions==true",
"/field/termOffsets==false",
"/field/omitNorms==false",
"/field/omitTermFreqAndPositions==false",
"/field/omitPositions==false",
"/field/storeOffsetsWithPositions==false",
"/field/multiValued==false",
"/field/required==false",
"/field/tokenized==true");
}
@Test
public void testGetFieldIncludeDynamic() throws Exception {
assertQ("/schema/fields/some_crazy_name_i?indent=on&wt=xml&includeDynamic=true",
"/response/lst[@name='field']/str[@name='name'] = 'some_crazy_name_i'",
"/response/lst[@name='field']/str[@name='dynamicBase'] = '*_i'");
}
@Test
public void testGetFieldDontShowDefaults() throws Exception {
String[] tests = {
"count(/response/lst[@name='field']) = 1",
"count(/response/lst[@name='field']/*) = 7",
"/response/lst[@name='field']/str[@name='name'] = 'id'",
"/response/lst[@name='field']/str[@name='type'] = 'string'",
"/response/lst[@name='field']/bool[@name='indexed'] = 'true'",
"/response/lst[@name='field']/bool[@name='stored'] = 'true'",
"/response/lst[@name='field']/bool[@name='multiValued'] = 'false'",
"/response/lst[@name='field']/bool[@name='required'] = 'true'"
};
assertQ("/schema/fields/id?indent=on&wt=xml", tests);
assertQ("/schema/fields/id?indent=on&wt=xml&showDefaults=false", tests);
}
}

View File

@ -0,0 +1,37 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.junit.Test;
public class TestFieldTypeCollectionResource extends SchemaRestletTestBase {
@Test
public void testGetAllFieldTypes() throws Exception {
assertQ("/schema/fieldtypes?indent=on&wt=xml",
"(/response/arr[@name='fieldTypes']/lst/str[@name='name'])[1] = 'HTMLstandardtok'",
"(/response/arr[@name='fieldTypes']/lst/str[@name='name'])[2] = 'HTMLwhitetok'",
"(/response/arr[@name='fieldTypes']/lst/str[@name='name'])[3] = 'boolean'");
}
@Test
public void testJsonGetAllFieldTypes() throws Exception {
assertJQ("/schema/fieldtypes?indent=on",
"/fieldTypes/[0]/name=='HTMLstandardtok'",
"/fieldTypes/[1]/name=='HTMLwhitetok'",
"/fieldTypes/[2]/name=='boolean'");
}
}

View File

@ -0,0 +1,89 @@
package org.apache.solr.rest;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.junit.Test;
public class TestFieldTypeResource extends SchemaRestletTestBase {
@Test
public void testGetFieldType() throws Exception {
assertQ("/schema/fieldtypes/float?indent=on&wt=xml&showDefaults=true",
"count(/response/lst[@name='fieldType']) = 1",
"count(/response/lst[@name='fieldType']/*) = 18",
"/response/lst[@name='fieldType']/str[@name='name'] = 'float'",
"/response/lst[@name='fieldType']/str[@name='class'] = 'solr.TrieFloatField'",
"/response/lst[@name='fieldType']/str[@name='precisionStep'] ='0'",
"/response/lst[@name='fieldType']/bool[@name='indexed'] = 'true'",
"/response/lst[@name='fieldType']/bool[@name='stored'] = 'true'",
"/response/lst[@name='fieldType']/bool[@name='docValues'] = 'false'",
"/response/lst[@name='fieldType']/bool[@name='termVectors'] = 'false'",
"/response/lst[@name='fieldType']/bool[@name='termPositions'] = 'false'",
"/response/lst[@name='fieldType']/bool[@name='termOffsets'] = 'false'",
"/response/lst[@name='fieldType']/bool[@name='omitNorms'] = 'true'",
"/response/lst[@name='fieldType']/bool[@name='omitTermFreqAndPositions'] = 'true'",
"/response/lst[@name='fieldType']/bool[@name='omitPositions'] = 'false'",
"/response/lst[@name='fieldType']/bool[@name='storeOffsetsWithPositions'] = 'false'",
"/response/lst[@name='fieldType']/bool[@name='multiValued'] = 'false'",
"/response/lst[@name='fieldType']/bool[@name='tokenized'] = 'true'",
"/response/lst[@name='fieldType']/arr[@name='fields']/str = 'weight'",
"/response/lst[@name='fieldType']/arr[@name='dynamicFields']/str = '*_f'");
}
@Test
public void testGetNotFoundFieldType() throws Exception {
assertQ("/schema/fieldtypes/not_in_there?indent=on&wt=xml",
"count(/response/lst[@name='fieldtypes']) = 0",
"/response/lst[@name='responseHeader']/int[@name='status'] = '404'",
"/response/lst[@name='error']/int[@name='code'] = '404'");
}
@Test
public void testJsonGetFieldType() throws Exception {
assertJQ("/schema/fieldtypes/float?indent=on&showDefaults=on", // assertJQ will add "&wt=json"
"/fieldType/name=='float'",
"/fieldType/class=='solr.TrieFloatField'",
"/fieldType/precisionStep=='0'",
"/fieldType/indexed==true",
"/fieldType/stored==true",
"/fieldType/docValues==false",
"/fieldType/termVectors==false",
"/fieldType/termPositions==false",
"/fieldType/termOffsets==false",
"/fieldType/omitNorms==true",
"/fieldType/omitTermFreqAndPositions==true",
"/fieldType/omitPositions==false",
"/fieldType/storeOffsetsWithPositions==false",
"/fieldType/multiValued==false",
"/fieldType/tokenized==true",
"/fieldType/fields==['weight']",
"/fieldType/dynamicFields==['*_f']");
}
@Test
public void testGetFieldTypeDontShowDefaults() throws Exception {
assertQ("/schema/fieldtypes/teststop?wt=xml&indent=on",
"count(/response/lst[@name='fieldType']/*) = 5",
"/response/lst[@name='fieldType']/str[@name='name'] = 'teststop'",
"/response/lst[@name='fieldType']/str[@name='class'] = 'solr.TextField'",
"/response/lst[@name='fieldType']/lst[@name='analyzer']/lst[@name='tokenizer']/str[@name='class'] = 'solr.LowerCaseTokenizerFactory'",
"/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst/str[@name='class'][.='solr.StandardFilterFactory']",
"/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst/str[@name='class'][.='solr.StopFilterFactory']",
"/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst/str[@name='words'][.='stopwords.txt']",
"/response/lst[@name='fieldType']/arr[@name='fields']/str[.='teststop']",
"/response/lst[@name='fieldType']/arr[@name='dynamicFields']");
}
}

View File

@ -0,0 +1 @@
e12c23b962c925f2681729afa1e40066a350ad27

View File

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -0,0 +1,2 @@
This product includes software developed by
the Restlet project (http://www.restlet.org).

View File

@ -0,0 +1 @@
72baf27dc19d98f43c362ded582db408433373ee

View File

@ -0,0 +1,201 @@
[Apache License, Version 2.0: full text identical to the license reproduced above]

View File

@ -0,0 +1,2 @@
This product includes software developed by
the SimpleXML project (http://simple.sourceforge.net).

View File

@ -24,11 +24,18 @@ import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.util.ExternalPaths;
import java.io.File;
import java.util.Collections;
import java.util.Map;
import java.util.HashMap;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.solr.util.RESTfulServerProvider;
import org.apache.solr.util.RestTestHarness;
import org.eclipse.jetty.servlet.ServletHolder;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.restlet.ext.servlet.ServerServlet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -92,7 +99,9 @@ abstract public class SolrJettyTestBase extends SolrTestCaseJ4
public static SolrServer server = null;
public static String context;
public static JettySolrRunner createJetty(String solrHome, String configFile, String context) throws Exception {
public static JettySolrRunner createJetty(String solrHome, String configFile, String schemaFile, String context,
boolean stopAtShutdown, SortedMap<ServletHolder,String> extraServlets)
throws Exception {
// creates the data dir
initCore(null, null, solrHome);
@ -103,7 +112,7 @@ abstract public class SolrJettyTestBase extends SolrTestCaseJ4
context = context==null ? "/solr" : context;
SolrJettyTestBase.context = context;
jetty = new JettySolrRunner(solrHome, context, 0, configFile, null);
jetty = new JettySolrRunner(solrHome, context, 0, configFile, schemaFile, stopAtShutdown, extraServlets);
jetty.start();
port = jetty.getLocalPort();
@ -111,6 +120,10 @@ abstract public class SolrJettyTestBase extends SolrTestCaseJ4
return jetty;
}
public static JettySolrRunner createJetty(String solrHome, String configFile, String context) throws Exception {
return createJetty(solrHome, configFile, null, context, true, null);
}
@AfterClass
public static void afterSolrJettyTestBase() throws Exception {

View File

@ -0,0 +1,269 @@
package org.apache.solr.util;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.XML;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
abstract public class BaseTestHarness {
private final ThreadLocal<DocumentBuilder> builderTL = new ThreadLocal<DocumentBuilder>();
private final ThreadLocal<XPath> xpathTL = new ThreadLocal<XPath>();
public DocumentBuilder getXmlDocumentBuilder() {
try {
DocumentBuilder builder = builderTL.get();
if (builder == null) {
builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
builderTL.set(builder);
}
return builder;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public XPath getXpath() {
try {
XPath xpath = xpathTL.get();
if (xpath == null) {
xpath = XPathFactory.newInstance().newXPath();
xpathTL.set(xpath);
}
return xpath;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
* A helper method which validates a String against an array of XPath test
* strings.
*
* @param xml The xml String to validate
* @param tests Array of XPath strings to test (in boolean mode) on the xml
* @return null if all good, otherwise the first test that fails.
*/
public String validateXPath(String xml, String... tests)
throws XPathExpressionException, SAXException {
if (tests==null || tests.length == 0) return null;
Document document = null;
try {
document = getXmlDocumentBuilder().parse(new ByteArrayInputStream
(xml.getBytes("UTF-8")));
} catch (UnsupportedEncodingException e1) {
throw new RuntimeException("Totally weird UTF-8 exception", e1);
} catch (IOException e2) {
throw new RuntimeException("Totally weird io exception", e2);
}
for (String xp : tests) {
xp=xp.trim();
Boolean bool = (Boolean) getXpath().evaluate(xp, document, XPathConstants.BOOLEAN);
if (!bool) {
return xp;
}
}
return null;
}
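// Illustrative usage (not part of the original patch): validateXPath() returns null when every
// boolean XPath test passes, otherwise the first failing expression, e.g.
//   validateXPath(response, "//int[@name='status']=0");
// returns null for a response whose status is 0, and the failing expression otherwise.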
/**
* A helper that creates an xml &lt;doc&gt; containing all of the
* fields and values specified
*
* @param fieldsAndValues even-numbered args (0, 2, ...) are field names; odd-numbered args are field values.
*/
public static StringBuffer makeSimpleDoc(String... fieldsAndValues) {
try {
StringWriter w = new StringWriter();
w.append("<doc>");
for (int i = 0; i < fieldsAndValues.length; i+=2) {
XML.writeXML(w, "field", fieldsAndValues[i + 1], "name",
fieldsAndValues[i]);
}
w.append("</doc>");
return w.getBuffer();
} catch (IOException e) {
throw new RuntimeException
("this should never happen with a StringWriter", e);
}
}
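// Illustrative example (not part of the original patch): with the even/odd convention above,
//   makeSimpleDoc("id", "1", "name", "Apache Solr")
// produces: <doc><field name="id">1</field><field name="name">Apache Solr</field></doc>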
/**
* Generates a delete by query xml string
* @param q Query that has not already been xml escaped
* @param args The attributes of the delete tag
*/
public static String deleteByQuery(String q, String... args) {
try {
StringWriter r = new StringWriter();
XML.writeXML(r, "query", q);
return delete(r.getBuffer().toString(), args);
} catch(IOException e) {
throw new RuntimeException
("this should never happen with a StringWriter", e);
}
}
/**
* Generates a delete by id xml string
* @param id ID that has not already been xml escaped
* @param args The attributes of the delete tag
*/
public static String deleteById(String id, String... args) {
try {
StringWriter r = new StringWriter();
XML.writeXML(r, "id", id);
return delete(r.getBuffer().toString(), args);
} catch(IOException e) {
throw new RuntimeException
("this should never happen with a StringWriter", e);
}
}
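// Illustrative examples (not part of the original patch):
//   deleteById("1", "commitWithin", "1000")  ->  <delete commitWithin="1000"><id>1</id></delete>
//   deleteByQuery("*:*")                     ->  <delete><query>*:*</query></delete>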
/**
* Generates a delete xml string
* @param val text that has not already been xml escaped
* @param args even-numbered args (0, 2, ...) are attribute names of the delete tag; odd-numbered args are XML-escaped attribute values.
*/
private static String delete(String val, String... args) {
try {
StringWriter r = new StringWriter();
XML.writeUnescapedXML(r, "delete", val, (Object[]) args);
return r.getBuffer().toString();
} catch(IOException e) {
throw new RuntimeException
("this should never happen with a StringWriter", e);
}
}
/**
* Helper that returns an &lt;optimize&gt; String with
* optional key/val pairs.
*
* @param args even-numbered args (0, 2, ...) are param names; odd-numbered args are values.
*/
public static String optimize(String... args) {
return simpleTag("optimize", args);
}
private static String simpleTag(String tag, String... args) {
try {
StringWriter r = new StringWriter();
// this is annoying
if (null == args || 0 == args.length) {
XML.writeXML(r, tag, null);
} else {
XML.writeXML(r, tag, null, (Object[])args);
}
return r.getBuffer().toString();
} catch (IOException e) {
throw new RuntimeException
("this should never happen with a StringWriter", e);
}
}
/**
* Helper that returns a &lt;commit&gt; String with
* optional key/val pairs.
*
* @param args even-numbered args (0, 2, ...) are param names; odd-numbered args are values.
*/
public static String commit(String... args) {
return simpleTag("commit", args);
}
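// Illustrative examples (not part of the original patch); the exact serialization is whatever
// XML.writeXML produces for an empty element with attributes:
//   commit("softCommit", "true")  ->  <commit softCommit="true"/>
//   optimize("maxSegments", "1")  ->  <optimize maxSegments="1"/>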
/** Reloads the core */
abstract public void reload() throws Exception;
/**
* Processes an "update" (add, commit or optimize) and
* returns the response as a String.
*
* This method does NOT commit after the request.
*
* @param xml The XML of the update
* @return The XML response to the update
*/
abstract public String update(String xml);
/**
* Validates that an "update" (add, commit or optimize) results in success.
*
* :TODO: currently only deals with one add/doc at a time, this will need to be changed if/when SOLR-2 is resolved
*
* @param xml The XML of the update
* @return null if successful, otherwise the XML response to the update
*/
public String validateUpdate(String xml) throws SAXException {
return checkUpdateStatus(xml, "0");
}
/**
* Validates that an "update" (add, commit or optimize) fails with an error.
*
* :TODO: currently only deals with one add/doc at a time, this will need to be changed if/when SOLR-2 is resolved
*
* @param xml The XML of the update
* @return null if the update failed as expected, otherwise the XML response to the update
*/
public String validateErrorUpdate(String xml) throws SAXException {
try {
return checkUpdateStatus(xml, "1");
} catch (SolrException e) {
// return ((SolrException)e).getMessage();
return null; // success
}
}
/**
* Checks that an "update" (add, commit or optimize) returns the expected status code.
*
* :TODO: currently only deals with one add/doc at a time, this will need to be changed if/when SOLR-2 is resolved
*
* @param xml The XML of the update
* @param code The expected value of the response's status field, e.g. "0" for success
* @return null if the status matches, otherwise the XML response to the update
*/
public String checkUpdateStatus(String xml, String code) throws SAXException {
try {
String res = update(xml);
String valid = validateXPath(res, "//int[@name='status']="+code );
return (null == valid) ? null : res;
} catch (XPathExpressionException e) {
throw new RuntimeException
("?!? static xpath has bug?", e);
}
}
}

View File

@ -0,0 +1,21 @@
package org.apache.solr.util;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
public interface RESTfulServerProvider {
public String getBaseURL();
}

View File

@ -0,0 +1,327 @@
package org.apache.solr.util;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.JSONTestUtil;
import org.apache.solr.SolrJettyTestBase;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.MultiMapSolrParams;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.servlet.SolrRequestParsers;
import org.eclipse.jetty.servlet.ServletHolder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
import javax.xml.xpath.XPathExpressionException;
import java.io.IOException;
import java.util.Map;
import java.util.SortedMap;
abstract public class RestTestBase extends SolrJettyTestBase {
private static final Logger log = LoggerFactory.getLogger(RestTestBase.class);
protected static RestTestHarness restTestHarness;
public static void createJettyAndHarness
(String solrHome, String configFile, String schemaFile, String context,
boolean stopAtShutdown, SortedMap<ServletHolder,String> extraServlets) throws Exception {
createJetty(solrHome, configFile, schemaFile, context, stopAtShutdown, extraServlets);
restTestHarness = new RestTestHarness(new RESTfulServerProvider() {
@Override
public String getBaseURL() {
return jetty.getBaseUrl().toString();
}
});
}
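// Illustrative setup (not part of the original patch; the servlet name, application class and
// /schema/* mapping mirror the web.xml changes in this patch, the config/schema file names are
// assumptions):
//   SortedMap<ServletHolder,String> extraServlets = new TreeMap<ServletHolder,String>();
//   ServletHolder schemaRestApi = new ServletHolder("SchemaRestApi", ServerServlet.class);
//   schemaRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SchemaRestApi");
//   extraServlets.put(schemaRestApi, "/schema/*");
//   createJettyAndHarness(TEST_HOME(), "solrconfig.xml", "schema.xml", "/solr", true, extraServlets);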
/** Validates an update XML String is successful
*/
public static void assertU(String update) {
assertU(null, update);
}
/** Validates an update XML String is successful
*/
public static void assertU(String message, String update) {
checkUpdateU(message, update, true);
}
/** Validates an update XML String failed
*/
public static void assertFailedU(String update) {
assertFailedU(null, update);
}
/** Validates an update XML String failed
*/
public static void assertFailedU(String message, String update) {
checkUpdateU(message, update, false);
}
/** Checks the success or failure of an update message
*/
private static void checkUpdateU(String message, String update, boolean shouldSucceed) {
try {
String m = (null == message) ? "" : message + " ";
if (shouldSucceed) {
String response = restTestHarness.validateUpdate(update);
if (response != null) fail(m + "update was not successful: " + response);
} else {
String response = restTestHarness.validateErrorUpdate(update);
if (response != null) fail(m + "update succeeded, but should have failed: " + response);
}
} catch (SAXException e) {
throw new RuntimeException("Invalid XML", e);
}
}
/**
* Validates a query matches some XPath test expressions
*
* @param request a URL path with optional query params, e.g. "/schema/fields?fl=id,_version_"
*/
public static void assertQ(String request, String... tests) {
try {
int queryStartPos = request.indexOf('?');
String query;
String path;
if (-1 == queryStartPos) {
query = "";
path = request;
} else {
query = request.substring(queryStartPos + 1);
path = request.substring(0, queryStartPos);
}
query = setParam(query, "wt", "xml");
request = path + '?' + setParam(query, "indent", "on");
String response = restTestHarness.query(request);
// TODO: should the facet handling below be converted to parse the URL?
/*
if (req.getParams().getBool("facet", false)) {
// add a test to ensure that faceting did not throw an exception
// internally, where it would be added to facet_counts/exception
String[] allTests = new String[tests.length+1];
System.arraycopy(tests,0,allTests,1,tests.length);
allTests[0] = "*[count(//lst[@name='facet_counts']/*[@name='exception'])=0]";
tests = allTests;
}
*/
String results = restTestHarness.validateXPath(response, tests);
if (null != results) {
String msg = "REQUEST FAILED: xpath=" + results
+ "\n\txml response was: " + response
+ "\n\trequest was:" + request;
log.error(msg);
throw new RuntimeException(msg);
}
} catch (XPathExpressionException e1) {
throw new RuntimeException("XPath is invalid", e1);
} catch (Exception e2) {
SolrException.log(log, "REQUEST FAILED: " + request, e2);
throw new RuntimeException("Exception during query", e2);
}
}
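// Illustrative usage (not part of the original patch), mirroring the XML tests earlier in this
// patch; assertQ forces wt=xml and indent=on before evaluating the XPath tests:
//   assertQ("/schema/fieldtypes/teststop",
//           "/response/lst[@name='fieldType']/str[@name='name'] = 'teststop'");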
/**
* Makes a query request and returns the JSON string response
*
* @param request a URL path with optional query params, e.g. "/schema/fields?fl=id,_version_"
*/
public static String JQ(String request) throws Exception {
int queryStartPos = request.indexOf('?');
String query;
String path;
if (-1 == queryStartPos) {
query = "";
path = request;
} else {
query = request.substring(queryStartPos + 1);
path = request.substring(0, queryStartPos);
}
query = setParam(query, "wt", "json");
request = path + '?' + setParam(query, "indent", "on");
String response;
boolean failed=true;
try {
response = restTestHarness.query(request);
failed = false;
} finally {
if (failed) {
log.error("REQUEST FAILED: " + request);
}
}
return response;
}
/**
* Validates a query matches some JSON test expressions using the default double delta tolerance.
* @see org.apache.solr.JSONTestUtil#DEFAULT_DELTA
* @see #assertJQ(String,double,String...)
*/
public static void assertJQ(String request, String... tests) throws Exception {
assertJQ(request, JSONTestUtil.DEFAULT_DELTA, tests);
}
/**
* Validates a query matches some JSON test expressions. Each test
* expression is of the form path==JSON. To facilitate
* easy embedding in Java strings, the JSON can have double quotes
* replaced with single quotes.
* <p>
* Please use this with care: this makes it easy to match complete
* structures, but doing so can result in fragile tests if you are
* matching more than what you want to test.
* </p>
* @param request a URL path with optional query params, e.g. "/schema/fields?fl=id,_version_"
* @param delta tolerance allowed in comparing float/double values
* @param tests JSON path expression + '==' + expected value
*/
public static void assertJQ(String request, double delta, String... tests) throws Exception {
int queryStartPos = request.indexOf('?');
String query;
String path;
if (-1 == queryStartPos) {
query = "";
path = request;
} else {
query = request.substring(queryStartPos + 1);
path = request.substring(0, queryStartPos);
}
query = setParam(query, "wt", "json");
request = path + '?' + setParam(query, "indent", "on");
String response;
boolean failed = true;
try {
response = restTestHarness.query(request);
failed = false;
} finally {
if (failed) {
log.error("REQUEST FAILED: " + request);
}
}
for (String test : tests) {
if (null == test || 0 == test.length()) continue;
String testJSON = test.replace('\'', '"');
try {
failed = true;
String err = JSONTestUtil.match(response, testJSON, delta);
failed = false;
if (err != null) {
log.error("query failed JSON validation. error=" + err +
"\n expected =" + testJSON +
"\n response = " + response +
"\n request = " + request + "\n"
);
throw new RuntimeException(err);
}
} finally {
if (failed) {
log.error("JSON query validation threw an exception." +
"\n expected =" + testJSON +
"\n response = " + response +
"\n request = " + request + "\n"
);
}
}
}
}
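// Illustrative usage (not part of the original patch), mirroring the JSON tests earlier in this
// patch; single quotes in each test expression are converted to double quotes before matching:
//   assertJQ("/schema/fieldtypes/float?showDefaults=on",
//            "/fieldType/name=='float'",
//            "/fieldType/class=='solr.TrieFloatField'");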
/**
* Ensures that the given param is included in the query with the given value.
*
* <ol>
* <li>If the param is already included with the given value, the request is returned unchanged.</li>
* <li>If the param is not already included, it is added with the given value.</li>
* <li>If the param is already included, but with a different value, the value is replaced with the given value.</li>
* <li>If the param is already included multiple times, they are replaced with a single param with given value.</li>
* </ol>
*
* The passed-in valueToSet should NOT be URL encoded, as it will be URL encoded by this method.
*
* @param query The query portion of a request URL, e.g. "wt=json&indent=on&fl=id,_version_"
* @param paramToSet The parameter name to ensure is present in the returned request
* @param valueToSet The parameter value to ensure is present in the returned request
* @return The query with the given param set to the given value
*/
private static String setParam(String query, String paramToSet, String valueToSet) {
if (null == valueToSet) {
valueToSet = "";
}
try {
StringBuilder builder = new StringBuilder();
if (null == query || query.trim().isEmpty()) {
// empty query -> return "paramToSet=valueToSet"
builder.append(paramToSet);
builder.append('=');
StrUtils.partialURLEncodeVal(builder, valueToSet);
return builder.toString();
}
MultiMapSolrParams requestParams = SolrRequestParsers.parseQueryString(query);
String[] values = requestParams.getParams(paramToSet);
if (null == values) {
// paramToSet isn't present in the request -> append "&paramToSet=valueToSet"
builder.append(query);
builder.append('&');
builder.append(paramToSet);
builder.append('=');
StrUtils.partialURLEncodeVal(builder, valueToSet);
return builder.toString();
}
if (1 == values.length && valueToSet.equals(values[0])) {
// paramToSet=valueToSet is already in the query - just return the query as-is.
return query;
}
// More than one value for paramToSet on the request, or paramToSet's value is not valueToSet
// -> rebuild the query
boolean isFirst = true;
for (Map.Entry<String,String[]> entry : requestParams.getMap().entrySet()) {
String key = entry.getKey();
String[] valarr = entry.getValue();
if ( ! key.equals(paramToSet)) {
for (String val : valarr) {
builder.append(isFirst ? "" : '&');
isFirst = false;
builder.append(key);
builder.append('=');
StrUtils.partialURLEncodeVal(builder, null == val ? "" : val);
}
}
}
builder.append(isFirst ? "" : '&');
builder.append(paramToSet);
builder.append('=');
StrUtils.partialURLEncodeVal(builder, valueToSet);
return builder.toString();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
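// Illustrative behavior of setParam() (not part of the original patch); parameter order in a
// rebuilt query depends on map iteration order and is not significant:
//   setParam("", "wt", "json")                   ->  "wt=json"
//   setParam("indent=on", "wt", "json")          ->  "indent=on&wt=json"
//   setParam("wt=json&indent=on", "wt", "json")  ->  returned unchanged
//   setParam("wt=xml&wt=csv", "wt", "json")      ->  both wt params collapsed into wt=json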
}

View File

@ -0,0 +1,120 @@
package org.apache.solr.util;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.commons.io.IOUtils;
import org.eclipse.jetty.util.IO;
import javax.xml.xpath.XPathExpressionException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringWriter;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
/**
* Facilitates testing Solr's REST API via a provided embedded Jetty
*/
public class RestTestHarness extends BaseTestHarness {
private RESTfulServerProvider serverProvider;
public RestTestHarness(RESTfulServerProvider serverProvider) {
this.serverProvider = serverProvider;
}
public String getBaseURL() {
return serverProvider.getBaseURL();
}
/**
* Validates a "query" response against an array of XPath test strings
*
* @param request the URL path + optional query params to process
* @return null if all good, otherwise the first test that fails.
* @exception Exception if an error occurs making the request or evaluating the XPath tests
*/
public String validateQuery(String request, String... tests)
throws Exception {
String res = query(request);
return validateXPath(res, tests);
}
/**
* Processes a "query" using a URL path (with no context path) + optional query params,
* e.g. "/schema/fields?indent=on"
*
* @param request the URL path and optional query params
* @return The response to the query
* @exception Exception if an error occurs making the request
*/
public String query(String request) throws Exception {
URL url = new URL(getBaseURL() + request);
HttpURLConnection connection = (HttpURLConnection)url.openConnection();
InputStream inputStream = null;
StringWriter strWriter;
try {
try {
inputStream = connection.getInputStream();
} catch (IOException e) {
inputStream = connection.getErrorStream();
}
strWriter = new StringWriter();
IOUtils.copy(new InputStreamReader(inputStream, "UTF-8"), strWriter);
} finally {
IOUtils.closeQuietly(inputStream);
}
return strWriter.toString();
}
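// Illustrative usage (not part of the original patch):
//   String json = query("/schema/fields?wt=json&indent=on");
// performs GET <baseURL>/schema/fields?wt=json&indent=on and returns the raw response body;
// on an HTTP error the error stream is read instead, so the caller still sees Solr's response.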
public String checkQueryStatus(String xml, String code) throws Exception {
try {
String response = query(xml);
String valid = validateXPath(response, "//int[@name='status']="+code );
return (null == valid) ? null : response;
} catch (XPathExpressionException e) {
throw new RuntimeException("?!? static xpath has bug?", e);
}
}
@Override
public void reload() throws Exception {
String xml = checkQueryStatus("/admin/cores?action=RELOAD", "0");
if (null != xml) {
throw new RuntimeException("RELOAD failed:\n" + xml);
}
}
/**
* Processes an "update" (add, commit or optimize) and
* returns the response as a String.
*
* @param xml The XML of the update
* @return The XML response to the update
*/
@Override
public String update(String xml) {
try {
return query("/update?stream.base=" + URLEncoder.encode(xml, "UTF-8"));
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}

View File

@ -20,7 +20,6 @@ package org.apache.solr.util;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.XML;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.CoreContainer;
@ -38,22 +37,11 @@ import org.apache.solr.response.QueryResponseWriter;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.servlet.DirectSolrConnection;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
import org.apache.solr.common.util.NamedList.NamedListEntry;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.Map;
@ -70,11 +58,9 @@ import java.util.Map;
*
*
*/
public class TestHarness {
public class TestHarness extends BaseTestHarness {
String coreName;
protected volatile CoreContainer container;
private final ThreadLocal<DocumentBuilder> builderTL = new ThreadLocal<DocumentBuilder>();
private final ThreadLocal<XPath> xpathTL = new ThreadLocal<XPath>();
public UpdateRequestHandler updater;
/**
@ -138,32 +124,6 @@ public class TestHarness {
}
}
private DocumentBuilder getXmlDocumentBuilder() {
try {
DocumentBuilder builder = builderTL.get();
if (builder == null) {
builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
builderTL.set(builder);
}
return builder;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
private XPath getXpath() {
try {
XPath xpath = xpathTL.get();
if (xpath == null) {
xpath = XPathFactory.newInstance().newXPath();
xpathTL.set(xpath);
}
return xpath;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
// Creates a container based on infos needed to create one core
static class Initializer extends CoreContainer.Initializer {
String coreName;
@ -270,55 +230,6 @@ public class TestHarness {
}
/**
* Validates that an "update" (add, commit or optimize) results in success.
*
* :TODO: currently only deals with one add/doc at a time, this will need changed if/when SOLR-2 is resolved
*
* @param xml The XML of the update
* @return null if successful, otherwise the XML response to the update
*/
public String validateUpdate(String xml) throws SAXException {
return checkUpdateStatus(xml, "0");
}
/**
* Validates that an "update" (add, commit or optimize) results in success.
*
* :TODO: currently only deals with one add/doc at a time, this will need changed if/when SOLR-2 is resolved
*
* @param xml The XML of the update
* @return null if successful, otherwise the XML response to the update
*/
public String validateErrorUpdate(String xml) throws SAXException {
try {
return checkUpdateStatus(xml, "1");
} catch (SolrException e) {
// return ((SolrException)e).getMessage();
return null; // success
}
}
/**
* Validates that an "update" (add, commit or optimize) results in success.
*
* :TODO: currently only deals with one add/doc at a time, this will need changed if/when SOLR-2 is resolved
*
* @param xml The XML of the update
* @return null if successful, otherwise the XML response to the update
*/
public String checkUpdateStatus(String xml, String code) throws SAXException {
try {
String res = update(xml);
String valid = validateXPath(res, "//int[@name='status']="+code );
return (null == valid) ? null : res;
} catch (XPathExpressionException e) {
throw new RuntimeException
("?!? static xpath has bug?", e);
}
}
/**
* Validates a "query" response against an array of XPath test strings
*
@ -397,43 +308,6 @@ public class TestHarness {
}
}
/**
* A helper method which valides a String against an array of XPath test
* strings.
*
* @param xml The xml String to validate
* @param tests Array of XPath strings to test (in boolean mode) on the xml
* @return null if all good, otherwise the first test that fails.
*/
public String validateXPath(String xml, String... tests)
throws XPathExpressionException, SAXException {
if (tests==null || tests.length == 0) return null;
Document document=null;
try {
document = getXmlDocumentBuilder().parse(new ByteArrayInputStream
(xml.getBytes("UTF-8")));
} catch (UnsupportedEncodingException e1) {
throw new RuntimeException("Totally weird UTF-8 exception", e1);
} catch (IOException e2) {
throw new RuntimeException("Totally weird io exception", e2);
}
for (String xp : tests) {
xp=xp.trim();
Boolean bool = (Boolean) getXpath().evaluate(xp, document,
XPathConstants.BOOLEAN);
if (!bool) {
return xp;
}
}
return null;
}
/**
* Shuts down and frees any resources
*/
@ -451,114 +325,6 @@ public class TestHarness {
}
}
/**
* A helper that creates an xml &lt;doc&gt; containing all of the
* fields and values specified
*
* @param fieldsAndValues 0 and Even numbered args are fields names odds are field values.
*/
public static StringBuffer makeSimpleDoc(String... fieldsAndValues) {
try {
StringWriter w = new StringWriter();
w.append("<doc>");
for (int i = 0; i < fieldsAndValues.length; i+=2) {
XML.writeXML(w, "field", fieldsAndValues[i+1], "name",
fieldsAndValues[i]);
}
w.append("</doc>");
return w.getBuffer();
} catch (IOException e) {
throw new RuntimeException
("this should never happen with a StringWriter", e);
}
}
/**
* Generates a delete by query xml string
* @param q Query that has not already been xml escaped
* @param args The attributes of the delete tag
*/
public static String deleteByQuery(String q, String... args) {
try {
StringWriter r = new StringWriter();
XML.writeXML(r, "query", q);
return delete(r.getBuffer().toString(), args);
} catch(IOException e) {
throw new RuntimeException
("this should never happen with a StringWriter", e);
}
}
/**
* Generates a delete by id xml string
* @param id ID that has not already been xml escaped
* @param args The attributes of the delete tag
*/
public static String deleteById(String id, String... args) {
try {
StringWriter r = new StringWriter();
XML.writeXML(r, "id", id);
return delete(r.getBuffer().toString(), args);
} catch(IOException e) {
throw new RuntimeException
("this should never happen with a StringWriter", e);
}
}
/**
* Generates a delete xml string
* @param val text that has not already been xml escaped
* @param args 0 and Even numbered args are params, Odd numbered args are XML escaped values.
*/
private static String delete(String val, String... args) {
try {
StringWriter r = new StringWriter();
XML.writeUnescapedXML(r, "delete", val, (Object[])args);
return r.getBuffer().toString();
} catch(IOException e) {
throw new RuntimeException
("this should never happen with a StringWriter", e);
}
}
/**
* Helper that returns an &lt;optimize&gt; String with
* optional key/val pairs.
*
* @param args 0 and Even numbered args are params, Odd numbered args are values.
*/
public static String optimize(String... args) {
return simpleTag("optimize", args);
}
private static String simpleTag(String tag, String... args) {
try {
StringWriter r = new StringWriter();
// this is annoying
if (null == args || 0 == args.length) {
XML.writeXML(r, tag, null);
} else {
XML.writeXML(r, tag, null, (Object[])args);
}
return r.getBuffer().toString();
} catch (IOException e) {
throw new RuntimeException
("this should never happen with a StringWriter", e);
}
}
/**
* Helper that returns an &lt;commit&gt; String with
* optional key/val pairs.
*
* @param args 0 and Even numbered args are params, Odd numbered args are values.
*/
public static String commit(String... args) {
return simpleTag("commit", args);
}
public LocalRequestFactory getRequestFactory(String qtype,
int start,
int limit) {

View File

@ -126,6 +126,15 @@
</init-param>
</servlet>
<servlet>
<servlet-name>SchemaRestApi</servlet-name>
<servlet-class>org.restlet.ext.servlet.ServerServlet</servlet-class>
<init-param>
<param-name>org.restlet.application</param-name>
<param-value>org.apache.solr.rest.SchemaRestApi</param-value>
</init-param>
</servlet>
<servlet-mapping>
<servlet-name>RedirectOldAdminUI</servlet-name>
<url-pattern>/admin/</url-pattern>
@ -154,6 +163,11 @@
<url-pattern>/admin.html</url-pattern>
</servlet-mapping>
<servlet-mapping>
<servlet-name>SchemaRestApi</servlet-name>
<url-pattern>/schema/*</url-pattern>
</servlet-mapping>
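<!-- Illustrative note (not part of the original patch): with the servlet and mapping above,
     schema metadata can be fetched over HTTP, e.g. GET /solr/schema/fields or
     GET /solr/schema/fieldtypes/float, and is served by org.apache.solr.rest.SchemaRestApi
     through the Restlet ServerServlet. -->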
<mime-mapping>
<extension>.xsl</extension>
<!-- per http://www.w3.org/TR/2006/PR-xslt20-20061121/ -->