mirror of https://github.com/apache/lucene.git
SOLR-5098: Support adding field types to managed schema
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1622724 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent
566dc2e73b
commit
f15d6e2b21
|
@ -141,6 +141,9 @@ New Features
|
|||
* SOLR-5097: Schema API: Add REST support for adding dynamic fields to the schema.
|
||||
(Steve Rowe)
|
||||
|
||||
* SOLR-5098: Schema API: Add REST support for adding field types to the schema.
|
||||
(Timothy Potter)
|
||||
|
||||
Bug Fixes
|
||||
----------------------
|
||||
|
||||
|
|
|
@ -109,7 +109,9 @@ public class RestManager {
|
|||
public static class Registry {
|
||||
|
||||
private Map<String,ManagedResourceRegistration> registered = new TreeMap<>();
|
||||
|
||||
|
||||
// maybe null until there is a restManager
|
||||
private RestManager initializedRestManager = null;
|
||||
|
||||
// REST API endpoints that need to be protected against dynamic endpoint creation
|
||||
private final Set<String> reservedEndpoints = new HashSet<>();
|
||||
|
@ -226,6 +228,11 @@ public class RestManager {
|
|||
log.info("Registered ManagedResource impl {} for path {}",
|
||||
implClass.getName(), resourceId);
|
||||
}
|
||||
|
||||
// there may be a RestManager, in which case, we want to add this new ManagedResource immediately
|
||||
if (initializedRestManager != null) {
|
||||
initializedRestManager.addRegisteredResource(registered.get(resourceId));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -605,6 +612,10 @@ public class RestManager {
|
|||
// keep track of this for lookups during request processing
|
||||
managed.put(reg.resourceId, createManagedResource(reg));
|
||||
}
|
||||
|
||||
// this is for any new registrations that don't come through the API
|
||||
// such as from adding a new fieldType to a managed schema that uses a ManagedResource
|
||||
registry.initializedRestManager = this;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -617,26 +628,35 @@ public class RestManager {
|
|||
ManagedResourceRegistration existingReg = registry.registered.get(resourceId);
|
||||
if (existingReg == null) {
|
||||
registry.registerManagedResource(resourceId, clazz, null);
|
||||
res = createManagedResource(registry.registered.get(resourceId));
|
||||
managed.put(resourceId, res);
|
||||
log.info("Registered new managed resource {}", resourceId);
|
||||
|
||||
// attach this new resource to the Restlet router
|
||||
Matcher resourceIdValidator = resourceIdRegex.matcher(resourceId);
|
||||
boolean validated = resourceIdValidator.matches();
|
||||
assert validated : "managed resourceId '" + resourceId
|
||||
+ "' should already be validated by registerManagedResource()";
|
||||
String routerPath = resourceIdValidator.group(1);
|
||||
String path = resourceIdValidator.group(2);
|
||||
Router router = SCHEMA_BASE_PATH.equals(routerPath) ? schemaRouter : configRouter;
|
||||
if (router != null) {
|
||||
attachManagedResource(res, path, router);
|
||||
}
|
||||
addRegisteredResource(registry.registered.get(resourceId));
|
||||
} else {
|
||||
res = getManagedResource(resourceId);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
// used internally to create and attach a ManagedResource to the Restlet router
|
||||
// the registry also uses this method directly, which is slightly hacky but necessary
|
||||
// in order to support dynamic adding of new fieldTypes using the managed-schema API
|
||||
private synchronized ManagedResource addRegisteredResource(ManagedResourceRegistration reg) {
|
||||
String resourceId = reg.resourceId;
|
||||
ManagedResource res = createManagedResource(reg);
|
||||
managed.put(resourceId, res);
|
||||
log.info("Registered new managed resource {}", resourceId);
|
||||
|
||||
// attach this new resource to the Restlet router
|
||||
Matcher resourceIdValidator = resourceIdRegex.matcher(resourceId);
|
||||
boolean validated = resourceIdValidator.matches();
|
||||
assert validated : "managed resourceId '" + resourceId
|
||||
+ "' should already be validated by registerManagedResource()";
|
||||
String routerPath = resourceIdValidator.group(1);
|
||||
String path = resourceIdValidator.group(2);
|
||||
Router router = SCHEMA_BASE_PATH.equals(routerPath) ? schemaRouter : configRouter;
|
||||
if (router != null) {
|
||||
attachManagedResource(res, path, router);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
|
|
|
@ -16,11 +16,18 @@ package org.apache.solr.rest.schema;
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import org.apache.solr.common.SolrException;
|
||||
import org.apache.solr.common.SolrException.ErrorCode;
|
||||
import org.apache.solr.common.util.SimpleOrderedMap;
|
||||
import org.apache.solr.core.SolrCore;
|
||||
import org.apache.solr.rest.GETable;
|
||||
import org.apache.solr.rest.POSTable;
|
||||
import org.apache.solr.schema.FieldType;
|
||||
import org.apache.solr.schema.IndexSchema;
|
||||
import org.apache.solr.schema.ManagedIndexSchema;
|
||||
import org.apache.solr.schema.SchemaField;
|
||||
import org.noggit.ObjectBuilder;
|
||||
import org.restlet.data.MediaType;
|
||||
import org.restlet.representation.Representation;
|
||||
import org.restlet.resource.ResourceException;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -38,7 +45,7 @@ import java.util.TreeMap;
|
|||
*
|
||||
* The GET method returns properties for all field types defined in the schema.
|
||||
*/
|
||||
public class FieldTypeCollectionResource extends BaseFieldTypeResource implements GETable {
|
||||
public class FieldTypeCollectionResource extends BaseFieldTypeResource implements GETable, POSTable {
|
||||
private static final Logger log = LoggerFactory.getLogger(FieldTypeCollectionResource.class);
|
||||
|
||||
private Map<String,List<String>> fieldsByFieldType;
|
||||
|
@ -132,4 +139,79 @@ public class FieldTypeCollectionResource extends BaseFieldTypeResource implement
|
|||
}
|
||||
return dynamicFieldsByFieldType;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public Representation post(Representation entity) {
|
||||
try {
|
||||
if (!getSchema().isMutable()) {
|
||||
final String message = "This IndexSchema is not mutable.";
|
||||
throw new SolrException(ErrorCode.BAD_REQUEST, message);
|
||||
}
|
||||
|
||||
if (null == entity.getMediaType())
|
||||
entity.setMediaType(MediaType.APPLICATION_JSON);
|
||||
|
||||
if (!entity.getMediaType().equals(MediaType.APPLICATION_JSON, true)) {
|
||||
String message = "Only media type " + MediaType.APPLICATION_JSON.toString() + " is accepted."
|
||||
+ " Request has media type " + entity.getMediaType().toString() + ".";
|
||||
log.error(message);
|
||||
throw new SolrException(ErrorCode.BAD_REQUEST, message);
|
||||
}
|
||||
|
||||
Object object = ObjectBuilder.fromJSON(entity.getText());
|
||||
if (!(object instanceof List)) {
|
||||
String message = "Invalid JSON type " + object.getClass().getName()
|
||||
+ ", expected List of field type definitions in the form of"
|
||||
+ " (ignore the backslashes): [{\"name\":\"text_general\",\"class\":\"solr.TextField\", ...}, {...}, ...]";
|
||||
log.error(message);
|
||||
throw new SolrException(ErrorCode.BAD_REQUEST, message);
|
||||
}
|
||||
|
||||
List<Map<String, Object>> fieldTypeList = (List<Map<String, Object>>) object;
|
||||
if (fieldTypeList.size() > 0)
|
||||
addOrUpdateFieldTypes(fieldTypeList);
|
||||
} catch (Exception e) {
|
||||
getSolrResponse().setException(e);
|
||||
}
|
||||
handlePostExecution(log);
|
||||
|
||||
return new SolrOutputRepresentation();
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
protected void addOrUpdateFieldTypes(List<Map<String, Object>> fieldTypeList) throws Exception {
|
||||
List<FieldType> newFieldTypes = new ArrayList<>(fieldTypeList.size());
|
||||
ManagedIndexSchema oldSchema = (ManagedIndexSchema) getSchema();
|
||||
for (Map<String,Object> fieldTypeJson : fieldTypeList) {
|
||||
if (1 == fieldTypeJson.size() && fieldTypeJson.containsKey(IndexSchema.FIELD_TYPE)) {
|
||||
fieldTypeJson = (Map<String, Object>) fieldTypeJson.get(IndexSchema.FIELD_TYPE);
|
||||
}
|
||||
FieldType newFieldType =
|
||||
FieldTypeResource.buildFieldTypeFromJson(oldSchema,
|
||||
(String)fieldTypeJson.get(IndexSchema.NAME), fieldTypeJson);
|
||||
newFieldTypes.add(newFieldType);
|
||||
}
|
||||
// now deploy the added types (all or nothing)
|
||||
addNewFieldTypes(newFieldTypes, oldSchema);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds one or more new FieldType definitions to the managed schema for the given core.
|
||||
*/
|
||||
protected void addNewFieldTypes(List<FieldType> newFieldTypes, ManagedIndexSchema oldSchema) {
|
||||
boolean success = false;
|
||||
while (!success) {
|
||||
try {
|
||||
synchronized (oldSchema.getSchemaUpdateLock()) {
|
||||
IndexSchema newSchema = oldSchema.addFieldTypes(newFieldTypes);
|
||||
getSolrCore().setLatestSchema(newSchema);
|
||||
success = true;
|
||||
}
|
||||
} catch (ManagedIndexSchema.SchemaChangedInZkException e) {
|
||||
log.debug("Schema changed while processing request, retrying");
|
||||
oldSchema = (ManagedIndexSchema)getSolrCore().getLatestSchema();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,9 +19,13 @@ package org.apache.solr.rest.schema;
|
|||
import org.apache.solr.common.SolrException;
|
||||
import org.apache.solr.common.SolrException.ErrorCode;
|
||||
import org.apache.solr.rest.GETable;
|
||||
import org.apache.solr.rest.PUTable;
|
||||
import org.apache.solr.schema.FieldType;
|
||||
import org.apache.solr.schema.IndexSchema;
|
||||
import org.apache.solr.schema.ManagedIndexSchema;
|
||||
import org.apache.solr.schema.SchemaField;
|
||||
import org.noggit.ObjectBuilder;
|
||||
import org.restlet.data.MediaType;
|
||||
import org.restlet.representation.Representation;
|
||||
import org.restlet.resource.ResourceException;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -31,6 +35,7 @@ import java.io.UnsupportedEncodingException;
|
|||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* This class responds to requests at /solr/(corename)/schema/fieldtype/(typename)
|
||||
|
@ -38,7 +43,7 @@ import java.util.List;
|
|||
*
|
||||
* The GET method returns properties for the named field type.
|
||||
*/
|
||||
public class FieldTypeResource extends BaseFieldTypeResource implements GETable {
|
||||
public class FieldTypeResource extends BaseFieldTypeResource implements GETable, PUTable {
|
||||
private static final Logger log = LoggerFactory.getLogger(FieldTypeResource.class);
|
||||
|
||||
private String typeName;
|
||||
|
@ -112,4 +117,108 @@ public class FieldTypeResource extends BaseFieldTypeResource implements GETable
|
|||
}
|
||||
return dynamicFields; // Don't sort these - they're matched in order
|
||||
}
|
||||
|
||||
/**
|
||||
* Accepts JSON add fieldtype request, to URL
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public Representation put(Representation entity) {
|
||||
try {
|
||||
if (!getSchema().isMutable()) {
|
||||
final String message = "This IndexSchema is not mutable.";
|
||||
throw new SolrException(ErrorCode.BAD_REQUEST, message);
|
||||
}
|
||||
|
||||
if (null == entity.getMediaType())
|
||||
entity.setMediaType(MediaType.APPLICATION_JSON);
|
||||
|
||||
if (!entity.getMediaType().equals(MediaType.APPLICATION_JSON, true)) {
|
||||
String message = "Only media type " + MediaType.APPLICATION_JSON.toString() + " is accepted."
|
||||
+ " Request has media type " + entity.getMediaType().toString() + ".";
|
||||
log.error(message);
|
||||
throw new SolrException(ErrorCode.BAD_REQUEST, message);
|
||||
}
|
||||
|
||||
Object object = ObjectBuilder.fromJSON(entity.getText());
|
||||
if (!(object instanceof Map)) {
|
||||
String message = "Invalid JSON type " + object.getClass().getName() + ", expected Map of the form"
|
||||
+ " (ignore the backslashes): {\"name\":\"text_general\", \"class\":\"solr.TextField\" ...},"
|
||||
+ " either with or without a \"name\" mapping. If the \"name\" is specified, it must match the"
|
||||
+ " name given in the request URL: /schema/fieldtypes/(name)";
|
||||
log.error(message);
|
||||
throw new SolrException(ErrorCode.BAD_REQUEST, message);
|
||||
}
|
||||
|
||||
// basic validation passed, let's try to create it!
|
||||
addOrUpdateFieldType((Map<String, Object>)object);
|
||||
|
||||
} catch (Exception e) {
|
||||
getSolrResponse().setException(e);
|
||||
}
|
||||
handlePostExecution(log);
|
||||
|
||||
return new SolrOutputRepresentation();
|
||||
}
|
||||
|
||||
protected void addOrUpdateFieldType(Map<String,Object> fieldTypeJson) {
|
||||
ManagedIndexSchema oldSchema = (ManagedIndexSchema) getSchema();
|
||||
FieldType newFieldType = buildFieldTypeFromJson(oldSchema, typeName, fieldTypeJson);
|
||||
addNewFieldType(newFieldType, oldSchema);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a new FieldType definitions to the managed schema for the given core.
|
||||
*/
|
||||
protected void addNewFieldType(FieldType newFieldType, ManagedIndexSchema oldSchema) {
|
||||
boolean success = false;
|
||||
while (!success) {
|
||||
try {
|
||||
Object updateLock = oldSchema.getSchemaUpdateLock();
|
||||
synchronized (updateLock) {
|
||||
IndexSchema newSchema = oldSchema.addFieldTypes(Collections.singletonList(newFieldType));
|
||||
getSolrCore().setLatestSchema(newSchema);
|
||||
|
||||
success = true;
|
||||
}
|
||||
} catch (ManagedIndexSchema.SchemaChangedInZkException e) {
|
||||
log.info("Schema changed while processing request, retrying");
|
||||
oldSchema = (ManagedIndexSchema)getSolrCore().getLatestSchema();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds a FieldType definition from a JSON object.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
static FieldType buildFieldTypeFromJson(ManagedIndexSchema oldSchema, String fieldTypeName, Map<String,Object> fieldTypeJson) {
|
||||
if (1 == fieldTypeJson.size() && fieldTypeJson.containsKey(IndexSchema.FIELD_TYPE)) {
|
||||
fieldTypeJson = (Map<String, Object>)fieldTypeJson.get(IndexSchema.FIELD_TYPE);
|
||||
}
|
||||
|
||||
String bodyTypeName = (String) fieldTypeJson.get(IndexSchema.NAME);
|
||||
if (bodyTypeName == null) {
|
||||
// must provide the name in the JSON for converting to the XML format needed
|
||||
// to create FieldType objects using the FieldTypePluginLoader
|
||||
fieldTypeJson.put(IndexSchema.NAME, fieldTypeName);
|
||||
} else {
|
||||
// if they provide it in the JSON, then it must match the value from the path
|
||||
if (!fieldTypeName.equals(bodyTypeName)) {
|
||||
String message = "Field type name in the request body '" + bodyTypeName
|
||||
+ "' doesn't match field type name in the request URL '" + fieldTypeName + "'";
|
||||
log.error(message);
|
||||
throw new SolrException(ErrorCode.BAD_REQUEST, message);
|
||||
}
|
||||
}
|
||||
|
||||
String className = (String)fieldTypeJson.get(FieldType.CLASS_NAME);
|
||||
if (className == null) {
|
||||
String message = "Missing required '" + FieldType.CLASS_NAME + "' property!";
|
||||
log.error(message);
|
||||
throw new SolrException(ErrorCode.BAD_REQUEST, message);
|
||||
}
|
||||
|
||||
return oldSchema.newFieldType(fieldTypeName, className, fieldTypeJson);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,133 @@
|
|||
package org.apache.solr.rest.schema;
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
|
||||
import org.apache.solr.common.SolrException;
|
||||
import org.apache.solr.common.SolrException.ErrorCode;
|
||||
import org.apache.solr.schema.IndexSchema;
|
||||
import org.w3c.dom.Document;
|
||||
import org.w3c.dom.Element;
|
||||
import org.w3c.dom.Node;
|
||||
|
||||
/**
|
||||
* Utility class for converting a JSON definition of a FieldType into the
|
||||
* XML format expected by the FieldTypePluginLoader.
|
||||
*/
|
||||
public class FieldTypeXmlAdapter {
|
||||
|
||||
public static Node toNode(Map<String,?> json) {
|
||||
DocumentBuilder docBuilder;
|
||||
try {
|
||||
docBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
|
||||
} catch (ParserConfigurationException e) {
|
||||
throw new SolrException(ErrorCode.SERVER_ERROR, e);
|
||||
}
|
||||
|
||||
Document doc = docBuilder.newDocument();
|
||||
Element fieldType = doc.createElement(IndexSchema.FIELD_TYPE);
|
||||
appendAttrs(fieldType, json);
|
||||
|
||||
// transform the analyzer definitions into XML elements
|
||||
Element analyzer = transformAnalyzer(doc, json, "analyzer", null);
|
||||
if (analyzer != null)
|
||||
fieldType.appendChild(analyzer);
|
||||
|
||||
analyzer = transformAnalyzer(doc, json, "indexAnalyzer", "index");
|
||||
if (analyzer != null)
|
||||
fieldType.appendChild(analyzer);
|
||||
|
||||
analyzer = transformAnalyzer(doc, json, "queryAnalyzer", "query");
|
||||
if (analyzer != null)
|
||||
fieldType.appendChild(analyzer);
|
||||
|
||||
analyzer = transformAnalyzer(doc, json, "multiTermAnalyzer", "multiterm");
|
||||
if (analyzer != null)
|
||||
fieldType.appendChild(analyzer);
|
||||
|
||||
return fieldType;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
protected static Element transformAnalyzer(Document doc, Map<String,?> json, String jsonFieldName, String analyzerType) {
|
||||
Object jsonField = json.get(jsonFieldName);
|
||||
if (jsonField == null)
|
||||
return null; // it's ok for this field to not exist in the JSON map
|
||||
|
||||
if (!(jsonField instanceof Map))
|
||||
throw new SolrException(ErrorCode.BAD_REQUEST, "Invalid fieldType definition! Expected JSON object for "+
|
||||
jsonFieldName+" not a "+jsonField.getClass().getName());
|
||||
|
||||
return createAnalyzerElement(doc, analyzerType, (Map<String,?>)jsonField);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
protected static Element createAnalyzerElement(Document doc, String type, Map<String,?> json) {
|
||||
Element analyzer = doc.createElement("analyzer");
|
||||
if (type != null)
|
||||
analyzer.setAttribute("type", type);
|
||||
|
||||
// charFilter(s)
|
||||
List<Map<String,?>> charFilters = (List<Map<String,?>>)json.get("charFilters");
|
||||
if (charFilters != null)
|
||||
appendFilterElements(doc, analyzer, "charFilter", charFilters);
|
||||
|
||||
// tokenizer
|
||||
Map<String,?> tokenizerJson = (Map<String,?>)json.get("tokenizer");
|
||||
if (tokenizerJson == null)
|
||||
throw new SolrException(ErrorCode.BAD_REQUEST, "Analyzer must define a tokenizer!");
|
||||
|
||||
String tokClass = (String)tokenizerJson.get("class");
|
||||
if (tokClass == null)
|
||||
throw new SolrException(ErrorCode.BAD_REQUEST, "Every tokenizer must define a class property!");
|
||||
|
||||
analyzer.appendChild(appendAttrs(doc.createElement("tokenizer"), tokenizerJson));
|
||||
|
||||
// filter(s)
|
||||
List<Map<String,?>> filters = (List<Map<String,?>>)json.get("filters");
|
||||
if (filters != null)
|
||||
appendFilterElements(doc, analyzer, "filter", filters);
|
||||
|
||||
return analyzer;
|
||||
}
|
||||
|
||||
protected static void appendFilterElements(Document doc, Element analyzer, String filterName, List<Map<String,?>> filters) {
|
||||
for (Map<String,?> next : filters) {
|
||||
String filterClass = (String)next.get("class");
|
||||
if (filterClass == null)
|
||||
throw new SolrException(ErrorCode.BAD_REQUEST,
|
||||
"Every "+filterName+" must define a class property!");
|
||||
analyzer.appendChild(appendAttrs(doc.createElement(filterName), next));
|
||||
}
|
||||
}
|
||||
|
||||
protected static Element appendAttrs(Element elm, Map<String,?> json) {
|
||||
for (Map.Entry<String,?> entry : json.entrySet()) {
|
||||
Object val = entry.getValue();
|
||||
if (val != null && !(val instanceof Map))
|
||||
elm.setAttribute(entry.getKey(), val.toString());
|
||||
}
|
||||
return elm;
|
||||
}
|
||||
}
|
|
@ -845,10 +845,11 @@ public abstract class FieldType extends FieldProperties {
|
|||
for (String propertyName : FieldProperties.propertyNames) {
|
||||
fieldProperties.add(propertyName);
|
||||
}
|
||||
|
||||
for (String key : args.keySet()) {
|
||||
if (fieldProperties.contains(key)) {
|
||||
namedPropertyValues.add(key, StrUtils.parseBool(args.get(key)));
|
||||
} else if ( ! CLASS_NAME.equals(key) && ! TYPE_NAME.equals(key)) {
|
||||
} else if (!CLASS_NAME.equals(key) && !TYPE_NAME.equals(key)) {
|
||||
namedPropertyValues.add(key, args.get(key));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -477,16 +477,9 @@ public class IndexSchema {
|
|||
expression = stepsToPath(SCHEMA, AT + VERSION);
|
||||
version = schemaConf.getFloat(expression, 1.0f);
|
||||
|
||||
|
||||
// load the Field Types
|
||||
|
||||
final FieldTypePluginLoader typeLoader = new FieldTypePluginLoader(this, fieldTypes, schemaAware);
|
||||
|
||||
// /schema/fieldtype | /schema/fieldType | /schema/types/fieldtype | /schema/types/fieldType
|
||||
expression = stepsToPath(SCHEMA, FIELD_TYPE.toLowerCase(Locale.ROOT)) // backcompat(?)
|
||||
+ XPATH_OR + stepsToPath(SCHEMA, FIELD_TYPE)
|
||||
+ XPATH_OR + stepsToPath(SCHEMA, TYPES, FIELD_TYPE.toLowerCase(Locale.ROOT))
|
||||
+ XPATH_OR + stepsToPath(SCHEMA, TYPES, FIELD_TYPE);
|
||||
expression = getFieldTypeXPathExpressions();
|
||||
NodeList nodes = (NodeList) xpath.evaluate(expression, document, XPathConstants.NODESET);
|
||||
typeLoader.load(loader, nodes);
|
||||
|
||||
|
@ -1665,4 +1658,60 @@ public class IndexSchema {
|
|||
log.error(msg);
|
||||
throw new SolrException(ErrorCode.SERVER_ERROR, msg);
|
||||
}
|
||||
|
||||
/**
|
||||
* Copies this schema, adds the given field type to the copy, then persists the
|
||||
* new schema. Requires synchronizing on the object returned by
|
||||
* {@link #getSchemaUpdateLock()}.
|
||||
*
|
||||
* @param fieldType the FieldType to add
|
||||
* @return a new IndexSchema based on this schema with the new FieldType added
|
||||
* @see #newFieldType(String, String, Map)
|
||||
*/
|
||||
public IndexSchema addFieldType(FieldType fieldType) {
|
||||
String msg = "This IndexSchema is not mutable.";
|
||||
log.error(msg);
|
||||
throw new SolrException(ErrorCode.SERVER_ERROR, msg);
|
||||
}
|
||||
|
||||
/**
|
||||
* Copies this schema, adds the given field type to the copy, then persists the
|
||||
* new schema. Requires synchronizing on the object returned by
|
||||
* {@link #getSchemaUpdateLock()}.
|
||||
*
|
||||
* @param fieldTypeList a list of FieldTypes to add
|
||||
* @return a new IndexSchema based on this schema with the new types added
|
||||
* @see #newFieldType(String, String, Map)
|
||||
*/
|
||||
public IndexSchema addFieldTypes(List<FieldType> fieldTypeList) {
|
||||
String msg = "This IndexSchema is not mutable.";
|
||||
log.error(msg);
|
||||
throw new SolrException(ErrorCode.SERVER_ERROR, msg);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a FieldType if the given typeName does not already
|
||||
* exist in this schema. The resulting FieldType can be used in a call
|
||||
* to {@link #addFieldType(FieldType)}.
|
||||
*
|
||||
* @param typeName the name of the type to add
|
||||
* @param className the name of the FieldType class
|
||||
* @param options the options to use when creating the FieldType
|
||||
* @return The created FieldType
|
||||
* @see #addFieldType(FieldType)
|
||||
*/
|
||||
public FieldType newFieldType(String typeName, String className, Map<String,?> options) {
|
||||
String msg = "This IndexSchema is not mutable.";
|
||||
log.error(msg);
|
||||
throw new SolrException(ErrorCode.SERVER_ERROR, msg);
|
||||
}
|
||||
|
||||
protected String getFieldTypeXPathExpressions() {
|
||||
// /schema/fieldtype | /schema/fieldType | /schema/types/fieldtype | /schema/types/fieldType
|
||||
String expression = stepsToPath(SCHEMA, FIELD_TYPE.toLowerCase(Locale.ROOT)) // backcompat(?)
|
||||
+ XPATH_OR + stepsToPath(SCHEMA, FIELD_TYPE)
|
||||
+ XPATH_OR + stepsToPath(SCHEMA, TYPES, FIELD_TYPE.toLowerCase(Locale.ROOT))
|
||||
+ XPATH_OR + stepsToPath(SCHEMA, TYPES, FIELD_TYPE);
|
||||
return expression;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -17,6 +17,11 @@ package org.apache.solr.schema;
|
|||
*/
|
||||
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.lucene.analysis.Analyzer;
|
||||
import org.apache.lucene.analysis.util.CharFilterFactory;
|
||||
import org.apache.lucene.analysis.util.TokenFilterFactory;
|
||||
import org.apache.lucene.analysis.util.TokenizerFactory;
|
||||
import org.apache.solr.analysis.TokenizerChain;
|
||||
import org.apache.solr.cloud.ZkController;
|
||||
import org.apache.solr.cloud.ZkSolrResourceLoader;
|
||||
import org.apache.solr.common.SolrException;
|
||||
|
@ -25,19 +30,26 @@ import org.apache.solr.common.cloud.SolrZkClient;
|
|||
import org.apache.solr.core.Config;
|
||||
import org.apache.solr.core.SolrConfig;
|
||||
import org.apache.solr.core.SolrResourceLoader;
|
||||
import org.apache.solr.rest.schema.FieldTypeXmlAdapter;
|
||||
import org.apache.solr.util.FileUtils;
|
||||
import org.apache.lucene.analysis.util.ResourceLoaderAware;
|
||||
import org.apache.zookeeper.CreateMode;
|
||||
import org.apache.zookeeper.KeeperException;
|
||||
import org.apache.zookeeper.data.Stat;
|
||||
import org.w3c.dom.Document;
|
||||
import org.w3c.dom.Node;
|
||||
import org.w3c.dom.NodeList;
|
||||
import org.xml.sax.InputSource;
|
||||
|
||||
import javax.xml.xpath.XPath;
|
||||
import javax.xml.xpath.XPathConstants;
|
||||
import javax.xml.xpath.XPathExpressionException;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStreamWriter;
|
||||
import java.io.PrintWriter;
|
||||
import java.io.StringWriter;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
|
@ -46,6 +58,7 @@ import java.util.Collection;
|
|||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
|
||||
/** Solr-managed schema - non-user-editable, but can be mutable via internal and external REST API requests. */
|
||||
|
@ -153,8 +166,11 @@ public final class ManagedIndexSchema extends IndexSchema {
|
|||
// Assumption: the path exists
|
||||
Stat stat = zkClient.setData(managedSchemaPath, data, schemaZkVersion, true);
|
||||
schemaZkVersion = stat.getVersion();
|
||||
log.info("Persisted managed schema at " + managedSchemaPath);
|
||||
log.info("Persisted managed schema version "+schemaZkVersion+" at " + managedSchemaPath);
|
||||
} catch (KeeperException.BadVersionException e) {
|
||||
|
||||
log.error("Bad version when trying to persist schema using "+schemaZkVersion+" due to: "+e);
|
||||
|
||||
success = false;
|
||||
schemaChangedInZk = true;
|
||||
}
|
||||
|
@ -347,7 +363,93 @@ public final class ManagedIndexSchema extends IndexSchema {
|
|||
}
|
||||
return newSchema;
|
||||
}
|
||||
|
||||
public ManagedIndexSchema addFieldType(FieldType fieldType) {
|
||||
return addFieldTypes(Collections.singletonList(fieldType));
|
||||
}
|
||||
|
||||
public ManagedIndexSchema addFieldTypes(List<FieldType> fieldTypeList) {
|
||||
if (!isMutable) {
|
||||
String msg = "This ManagedIndexSchema is not mutable.";
|
||||
log.error(msg);
|
||||
throw new SolrException(ErrorCode.SERVER_ERROR, msg);
|
||||
}
|
||||
|
||||
ManagedIndexSchema newSchema = shallowCopy(true);
|
||||
|
||||
// we shallow copied fieldTypes, but since we're changing them, we need to do a true
|
||||
// deep copy before adding the new field types
|
||||
HashMap<String,FieldType> clone =
|
||||
(HashMap<String,FieldType>)((HashMap<String,FieldType>)newSchema.fieldTypes).clone();
|
||||
newSchema.fieldTypes = clone;
|
||||
|
||||
// do a first pass to validate the field types don't exist already
|
||||
for (FieldType fieldType : fieldTypeList) {
|
||||
String typeName = fieldType.getTypeName();
|
||||
if (newSchema.getFieldTypeByName(typeName) != null) {
|
||||
throw new FieldExistsException(ErrorCode.BAD_REQUEST,
|
||||
"Field type '" + typeName + "' already exists!");
|
||||
}
|
||||
|
||||
newSchema.fieldTypes.put(typeName, fieldType);
|
||||
}
|
||||
|
||||
// Run the callbacks on SchemaAware now that everything else is done
|
||||
for (SchemaAware aware : newSchema.schemaAware)
|
||||
aware.inform(newSchema);
|
||||
|
||||
// looks good for the add, notify ResoureLoaderAware objects
|
||||
for (FieldType fieldType : fieldTypeList) {
|
||||
|
||||
// must inform any sub-components used in the
|
||||
// tokenizer chain if they are ResourceLoaderAware
|
||||
if (fieldType.supportsAnalyzers()) {
|
||||
Analyzer indexAnalyzer = fieldType.getIndexAnalyzer();
|
||||
if (indexAnalyzer != null && indexAnalyzer instanceof TokenizerChain)
|
||||
informResourceLoaderAwareObjectsInChain((TokenizerChain)indexAnalyzer);
|
||||
|
||||
Analyzer queryAnalyzer = fieldType.getQueryAnalyzer();
|
||||
// ref comparison is correct here (vs. equals) as they may be the same
|
||||
// object in which case, we don't need to inform twice ... however, it's
|
||||
// actually safe to call inform multiple times on an object anyway
|
||||
if (queryAnalyzer != null &&
|
||||
queryAnalyzer != indexAnalyzer &&
|
||||
queryAnalyzer instanceof TokenizerChain)
|
||||
informResourceLoaderAwareObjectsInChain((TokenizerChain)queryAnalyzer);
|
||||
|
||||
// if fieldType is a TextField, it might have a multi-term analyzer
|
||||
if (fieldType instanceof TextField) {
|
||||
TextField textFieldType = (TextField)fieldType;
|
||||
Analyzer multiTermAnalyzer = textFieldType.getMultiTermAnalyzer();
|
||||
if (multiTermAnalyzer != null && multiTermAnalyzer != indexAnalyzer &&
|
||||
multiTermAnalyzer != queryAnalyzer && multiTermAnalyzer instanceof TokenizerChain)
|
||||
informResourceLoaderAwareObjectsInChain((TokenizerChain)multiTermAnalyzer);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
newSchema.refreshAnalyzers();
|
||||
|
||||
boolean success = newSchema.persistManagedSchema(false);
|
||||
if (success) {
|
||||
if (log.isDebugEnabled()) {
|
||||
StringBuilder fieldTypeNames = new StringBuilder();
|
||||
for (int i=0; i < fieldTypeList.size(); i++) {
|
||||
if (i > 0) fieldTypeNames.append(", ");
|
||||
fieldTypeNames.append(fieldTypeList.get(i).typeName);
|
||||
}
|
||||
log.debug("Added field types: {}", fieldTypeNames.toString());
|
||||
}
|
||||
} else {
|
||||
// this is unlikely to happen as most errors are handled as exceptions in the persist code
|
||||
log.error("Failed to add field types: {}", fieldTypeList);
|
||||
throw new SolrException(ErrorCode.SERVER_ERROR,
|
||||
"Failed to persist updated schema due to underlying storage issue; check log for more details!");
|
||||
}
|
||||
|
||||
return newSchema;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SchemaField newField(String fieldName, String fieldType, Map<String,?> options) {
|
||||
SchemaField sf;
|
||||
|
@ -412,6 +514,77 @@ public final class ManagedIndexSchema extends IndexSchema {
|
|||
return sf;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldType newFieldType(String typeName, String className, Map<String, ?> options) {
|
||||
if (!isMutable) {
|
||||
String msg = "This ManagedIndexSchema is not mutable.";
|
||||
log.error(msg);
|
||||
throw new SolrException(ErrorCode.SERVER_ERROR, msg);
|
||||
}
|
||||
|
||||
if (getFieldTypeByName(typeName) != null) {
|
||||
String msg = "Field type '" + typeName + "' already exists.";
|
||||
log.error(msg);
|
||||
throw new SolrException(ErrorCode.BAD_REQUEST, msg);
|
||||
}
|
||||
|
||||
// build the new FieldType using the existing FieldTypePluginLoader framework
|
||||
// which expects XML, so we use a JSON to XML adapter to transform the JSON object
|
||||
// provided in the request into the XML format supported by the plugin loader
|
||||
Map<String, FieldType> newFieldTypes = new HashMap<String, FieldType>();
|
||||
List<SchemaAware> schemaAwareList = new ArrayList<SchemaAware>();
|
||||
FieldTypePluginLoader typeLoader = new FieldTypePluginLoader(this, newFieldTypes, schemaAwareList);
|
||||
typeLoader.loadSingle(loader, FieldTypeXmlAdapter.toNode(options));
|
||||
FieldType ft = newFieldTypes.get(typeName);
|
||||
if (!schemaAwareList.isEmpty())
|
||||
schemaAware.addAll(schemaAwareList);
|
||||
|
||||
return ft;
|
||||
}
|
||||
|
||||
/**
|
||||
* After creating a new FieldType, it may contain components that implement
|
||||
* the ResourceLoaderAware interface, which need to be informed after they
|
||||
* are loaded (as they depend on this callback to complete initialization work)
|
||||
*/
|
||||
protected void informResourceLoaderAwareObjectsInChain(TokenizerChain chain) {
|
||||
CharFilterFactory[] charFilters = chain.getCharFilterFactories();
|
||||
if (charFilters != null) {
|
||||
for (CharFilterFactory next : charFilters) {
|
||||
if (next instanceof ResourceLoaderAware) {
|
||||
try {
|
||||
((ResourceLoaderAware) next).inform(loader);
|
||||
} catch (IOException e) {
|
||||
throw new SolrException(ErrorCode.SERVER_ERROR, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
TokenizerFactory tokenizerFactory = chain.getTokenizerFactory();
|
||||
if (tokenizerFactory != null && tokenizerFactory instanceof ResourceLoaderAware) {
|
||||
try {
|
||||
((ResourceLoaderAware) tokenizerFactory).inform(loader);
|
||||
} catch (IOException e) {
|
||||
throw new SolrException(ErrorCode.SERVER_ERROR, e);
|
||||
}
|
||||
}
|
||||
|
||||
TokenFilterFactory[] filters = chain.getTokenFilterFactories();
|
||||
if (filters != null) {
|
||||
for (TokenFilterFactory next : filters) {
|
||||
if (next instanceof ResourceLoaderAware) {
|
||||
try {
|
||||
((ResourceLoaderAware) next).inform(loader);
|
||||
} catch (IOException e) {
|
||||
throw new SolrException(ErrorCode.SERVER_ERROR, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Called from ZkIndexSchemaReader to merge the fields from the serialized managed schema
|
||||
* on ZooKeeper with the local managed schema.
|
||||
|
@ -427,6 +600,10 @@ public final class ManagedIndexSchema extends IndexSchema {
|
|||
Config schemaConf = new Config(loader, SCHEMA, inputSource, SLASH+SCHEMA+SLASH);
|
||||
Document document = schemaConf.getDocument();
|
||||
final XPath xpath = schemaConf.getXPath();
|
||||
|
||||
// create a unified collection of field types from zk and in the local
|
||||
newSchema.mergeFieldTypesFromZk(document, xpath);
|
||||
|
||||
newSchema.loadFields(document, xpath);
|
||||
// let's completely rebuild the copy fields from the schema in ZK.
|
||||
// create new copyField-related objects so we don't affect the
|
||||
|
@ -525,4 +702,23 @@ public final class ManagedIndexSchema extends IndexSchema {
|
|||
/** Returns the monitor object callers must synchronize on while applying schema updates. */
public Object getSchemaUpdateLock() {
  return schemaUpdateLock;
}
|
||||
|
||||
/**
|
||||
* Loads FieldType objects defined in the schema.xml document.
|
||||
*
|
||||
* @param document Schema XML document where field types are defined.
|
||||
* @param xpath Used for evaluating xpath expressions to find field types defined in the schema.xml.
|
||||
* @throws javax.xml.xpath.XPathExpressionException if an error occurs when finding field type elements in the document.
|
||||
*/
|
||||
protected synchronized void mergeFieldTypesFromZk(Document document, XPath xpath)
|
||||
throws XPathExpressionException
|
||||
{
|
||||
Map<String, FieldType> newFieldTypes = new HashMap<String, FieldType>();
|
||||
FieldTypePluginLoader typeLoader = new FieldTypePluginLoader(this, newFieldTypes, schemaAware);
|
||||
String expression = getFieldTypeXPathExpressions();
|
||||
NodeList nodes = (NodeList) xpath.evaluate(expression, document, XPathConstants.NODESET);
|
||||
typeLoader.load(loader, nodes);
|
||||
for (String newTypeName : newFieldTypes.keySet())
|
||||
fieldTypes.put(newTypeName, newFieldTypes.get(newTypeName));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,346 @@
|
|||
package org.apache.solr.rest.schema;
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.SortedMap;
|
||||
import java.util.TreeMap;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.solr.util.RestTestBase;
|
||||
import org.eclipse.jetty.servlet.ServletHolder;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.noggit.JSONUtil;
|
||||
import org.restlet.ext.servlet.ServerServlet;
|
||||
|
||||
public class TestManagedSchemaFieldTypeResource extends RestTestBase {
|
||||
|
||||
private static File tmpSolrHome;
|
||||
private static File tmpConfDir;
|
||||
|
||||
private static final String collection = "collection1";
|
||||
private static final String confDir = collection + "/conf";
|
||||
|
||||
@Before
|
||||
public void before() throws Exception {
|
||||
tmpSolrHome = createTempDir();
|
||||
tmpConfDir = new File(tmpSolrHome, confDir);
|
||||
FileUtils.copyDirectory(new File(TEST_HOME()), tmpSolrHome.getAbsoluteFile());
|
||||
|
||||
final SortedMap<ServletHolder,String> extraServlets = new TreeMap<>();
|
||||
final ServletHolder solrRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class);
|
||||
solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi");
|
||||
extraServlets.put(solrRestApi, "/schema/*"); // '/schema/*' matches '/schema', '/schema/', and '/schema/whatever...'
|
||||
|
||||
System.setProperty("managed.schema.mutable", "true");
|
||||
System.setProperty("enable.update.log", "false");
|
||||
|
||||
createJettyAndHarness(tmpSolrHome.getAbsolutePath(), "solrconfig-managed-schema.xml", "schema-rest.xml",
|
||||
"/solr", true, extraServlets);
|
||||
}
|
||||
|
||||
@After
|
||||
private void after() throws Exception {
|
||||
jetty.stop();
|
||||
jetty = null;
|
||||
System.clearProperty("managed.schema.mutable");
|
||||
System.clearProperty("enable.update.log");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAddFieldTypes() throws Exception {
|
||||
|
||||
// name mismatch
|
||||
assertJPut("/schema/fieldtypes/myIntFieldType",
|
||||
json("{'name':'badNameEh','class':'solr.TrieIntField','stored':false}"),
|
||||
"/responseHeader/status==400");
|
||||
|
||||
// no class
|
||||
assertJPut("/schema/fieldtypes/myIntFieldType",
|
||||
json("{'stored':false}"),
|
||||
"/responseHeader/status==400");
|
||||
|
||||
// invalid attribute
|
||||
assertJPut("/schema/fieldtypes/myIntFieldType",
|
||||
json("{'foo':'bar'}"),
|
||||
"/responseHeader/status==400");
|
||||
|
||||
// empty analyzer
|
||||
String ftdef = "";
|
||||
ftdef += "{";
|
||||
ftdef += " 'class':'solr.TextField','positionIncrementGap':'100',";
|
||||
ftdef += " 'analyzer':''";
|
||||
ftdef += "}";
|
||||
assertJPut("/schema/fieldtypes/emptyAnalyzerFieldType",
|
||||
json(ftdef),
|
||||
"/responseHeader/status==400");
|
||||
|
||||
// basic field types
|
||||
assertJPut("/schema/fieldtypes/myIntFieldType",
|
||||
json("{'name':'myIntFieldType','class':'solr.TrieIntField','stored':false}"),
|
||||
"/responseHeader/status==0");
|
||||
checkFieldTypeProps(getExpectedProps("myIntFieldType", "solr.TrieIntField", true, false), 16);
|
||||
|
||||
assertJPut("/schema/fieldtypes/myDoubleFieldType",
|
||||
json("{'class':'solr.TrieDoubleField','precisionStep':'0','positionIncrementGap':'0'}"),
|
||||
"/responseHeader/status==0");
|
||||
Map<String,Object> expProps =
|
||||
getExpectedProps("myDoubleFieldType", "solr.TrieDoubleField", true, true);
|
||||
// add some additional expected props for this type
|
||||
expProps.put("precisionStep", "0");
|
||||
expProps.put("positionIncrementGap", "0");
|
||||
checkFieldTypeProps(expProps, 18);
|
||||
|
||||
assertJPut("/schema/fieldtypes/myBoolFieldType",
|
||||
json("{'class':'solr.BoolField','sortMissingLast':true}"),
|
||||
"/responseHeader/status==0");
|
||||
expProps = getExpectedProps("myBoolFieldType", "solr.BoolField", true, true);
|
||||
expProps.put("sortMissingLast", true);
|
||||
checkFieldTypeProps(expProps, 17);
|
||||
|
||||
// a text analyzing field type
|
||||
ftdef = "{";
|
||||
ftdef += " 'class':'solr.TextField','positionIncrementGap':'100',";
|
||||
ftdef += " 'analyzer':{";
|
||||
ftdef += " 'charFilters':[";
|
||||
ftdef += " {'class':'solr.PatternReplaceCharFilterFactory','replacement':'$1$1','pattern':'([a-zA-Z])\\\\1+'}";
|
||||
ftdef += " ],";
|
||||
ftdef += " 'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'},";
|
||||
ftdef += " 'filters':[";
|
||||
ftdef += " {'class':'solr.WordDelimiterFilterFactory','preserveOriginal':'0'},";
|
||||
ftdef += " {'class':'solr.StopFilterFactory','words':'stopwords.txt','ignoreCase':'true'},";
|
||||
ftdef += " {'class':'solr.LowerCaseFilterFactory'},";
|
||||
ftdef += " {'class':'solr.ASCIIFoldingFilterFactory'},";
|
||||
ftdef += " {'class':'solr.KStemFilterFactory'}";
|
||||
ftdef += " ]";
|
||||
ftdef += " }";
|
||||
ftdef += "}";
|
||||
|
||||
assertJPut("/schema/fieldtypes/myTextFieldType", json(ftdef), "/responseHeader/status==0");
|
||||
|
||||
expProps = getExpectedProps("myTextFieldType", "solr.TextField", true, true);
|
||||
expProps.put("autoGeneratePhraseQueries", false);
|
||||
expProps.put("omitNorms", false);
|
||||
expProps.put("omitTermFreqAndPositions", false);
|
||||
expProps.put("omitPositions", false);
|
||||
expProps.put("storeOffsetsWithPositions", false);
|
||||
expProps.put("tokenized", true);
|
||||
|
||||
List<String> analyzerTests = new ArrayList<>();
|
||||
analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='charFilters']/lst[1]/str[@name='class'] = 'solr.PatternReplaceCharFilterFactory'");
|
||||
analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/lst[@name='tokenizer']/str[@name='class'] = 'solr.WhitespaceTokenizerFactory'");
|
||||
analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[1]/str[@name='class'] = 'solr.WordDelimiterFilterFactory'");
|
||||
analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[2]/str[@name='class'] = 'solr.StopFilterFactory'");
|
||||
analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[3]/str[@name='class'] = 'solr.LowerCaseFilterFactory'");
|
||||
analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[4]/str[@name='class'] = 'solr.ASCIIFoldingFilterFactory'");
|
||||
analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[5]/str[@name='class'] = 'solr.KStemFilterFactory'");
|
||||
checkFieldTypeProps(expProps, 19, analyzerTests);
|
||||
|
||||
// now add a field type that uses managed resources and a field that uses that type
|
||||
|
||||
String piglatinStopWordEndpoint = "/schema/analysis/stopwords/piglatin";
|
||||
String piglatinSynonymEndpoint = "/schema/analysis/synonyms/piglatin";
|
||||
|
||||
// now define a new FieldType that uses the managed piglatin endpoints
|
||||
// the managed endpoints will be autovivified as needed
|
||||
ftdef = "{";
|
||||
ftdef += " 'class':'solr.TextField',";
|
||||
ftdef += " 'analyzer':{";
|
||||
ftdef += " 'tokenizer':{'class':'solr.StandardTokenizerFactory'},";
|
||||
ftdef += " 'filters':[";
|
||||
ftdef += " {'class':'solr.ManagedStopFilterFactory','managed':'piglatin'},";
|
||||
ftdef += " {'class':'solr.ManagedSynonymFilterFactory','managed':'piglatin'}";
|
||||
ftdef += " ]";
|
||||
ftdef += " }";
|
||||
ftdef += "}";
|
||||
assertJPut("/schema/fieldtypes/piglatinFieldType", json(ftdef), "/responseHeader/status==0");
|
||||
|
||||
expProps = getExpectedProps("piglatinFieldType", "solr.TextField", true, true);
|
||||
expProps.put("autoGeneratePhraseQueries", false);
|
||||
expProps.put("omitNorms", false);
|
||||
expProps.put("omitTermFreqAndPositions", false);
|
||||
expProps.put("omitPositions", false);
|
||||
expProps.put("storeOffsetsWithPositions", false);
|
||||
expProps.put("tokenized", true);
|
||||
|
||||
analyzerTests = new ArrayList<>();
|
||||
analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/lst[@name='tokenizer']/str[@name='class'] = 'solr.StandardTokenizerFactory'");
|
||||
analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[1]/str[@name='class'] = 'solr.ManagedStopFilterFactory'");
|
||||
analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[2]/str[@name='class'] = 'solr.ManagedSynonymFilterFactory'");
|
||||
checkFieldTypeProps(expProps, 18, analyzerTests);
|
||||
|
||||
assertJQ(piglatinSynonymEndpoint,
|
||||
"/synonymMappings/initArgs/ignoreCase==false",
|
||||
"/synonymMappings/managedMap=={}");
|
||||
|
||||
// add some piglatin synonyms
|
||||
Map<String,List<String>> syns = new HashMap<>();
|
||||
syns.put("appyhay", Arrays.asList("ladgay","oyfuljay"));
|
||||
assertJPut(piglatinSynonymEndpoint,
|
||||
JSONUtil.toJSON(syns),
|
||||
"/responseHeader/status==0");
|
||||
assertJQ(piglatinSynonymEndpoint,
|
||||
"/synonymMappings/managedMap/appyhay==['ladgay','oyfuljay']");
|
||||
|
||||
// add some piglatin stopwords
|
||||
assertJPut(piglatinStopWordEndpoint,
|
||||
JSONUtil.toJSON(Arrays.asList("hetay")),
|
||||
"/responseHeader/status==0");
|
||||
|
||||
assertJQ(piglatinStopWordEndpoint + "/hetay", "/hetay=='hetay'");
|
||||
|
||||
// add a field that uses our new type
|
||||
assertJPut("/schema/fields/newManagedField",
|
||||
json("{'type':'piglatinFieldType','stored':false}"),
|
||||
"/responseHeader/status==0");
|
||||
|
||||
assertQ("/schema/fields/newManagedField?indent=on&wt=xml",
|
||||
"count(/response/lst[@name='field']) = 1",
|
||||
"/response/lst[@name='responseHeader']/int[@name='status'] = '0'");
|
||||
|
||||
// try to delete the managed synonyms endpoint, which should fail because it is being used
|
||||
assertJDelete(piglatinSynonymEndpoint, "/responseHeader/status==403");
|
||||
|
||||
// test adding multiple field types at once
|
||||
ftdef = "[";
|
||||
ftdef += "{";
|
||||
ftdef += " 'name':'textFieldType1',";
|
||||
ftdef += " 'class':'solr.TextField','positionIncrementGap':'100',";
|
||||
ftdef += " 'analyzer':{";
|
||||
ftdef += " 'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'},";
|
||||
ftdef += " 'filters':[";
|
||||
ftdef += " {'class':'solr.WordDelimiterFilterFactory','preserveOriginal':'0'},";
|
||||
ftdef += " {'class':'solr.StopFilterFactory','words':'stopwords.txt','ignoreCase':'true'},";
|
||||
ftdef += " {'class':'solr.LowerCaseFilterFactory'}";
|
||||
ftdef += " ]";
|
||||
ftdef += " }";
|
||||
ftdef += "},{";
|
||||
ftdef += " 'name':'textFieldType2',";
|
||||
ftdef += " 'class':'solr.TextField','positionIncrementGap':'100',";
|
||||
ftdef += " 'analyzer':{";
|
||||
ftdef += " 'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'},";
|
||||
ftdef += " 'filters':[";
|
||||
ftdef += " {'class':'solr.WordDelimiterFilterFactory','preserveOriginal':'0'},";
|
||||
ftdef += " {'class':'solr.StopFilterFactory','words':'stopwords.txt','ignoreCase':'true'},";
|
||||
ftdef += " {'class':'solr.LowerCaseFilterFactory'},";
|
||||
ftdef += " {'class':'solr.ASCIIFoldingFilterFactory'}";
|
||||
ftdef += " ]";
|
||||
ftdef += " }";
|
||||
ftdef += "}";
|
||||
ftdef += "]";
|
||||
|
||||
assertJPost("/schema/fieldtypes", json(ftdef), "/responseHeader/status==0");
|
||||
|
||||
expProps = getExpectedProps("textFieldType1", "solr.TextField", true, true);
|
||||
expProps.put("autoGeneratePhraseQueries", false);
|
||||
expProps.put("omitNorms", false);
|
||||
expProps.put("omitTermFreqAndPositions", false);
|
||||
expProps.put("omitPositions", false);
|
||||
expProps.put("storeOffsetsWithPositions", false);
|
||||
expProps.put("tokenized", true);
|
||||
|
||||
analyzerTests = new ArrayList<>();
|
||||
analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/lst[@name='tokenizer']/str[@name='class'] = 'solr.WhitespaceTokenizerFactory'");
|
||||
analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[1]/str[@name='class'] = 'solr.WordDelimiterFilterFactory'");
|
||||
analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[2]/str[@name='class'] = 'solr.StopFilterFactory'");
|
||||
analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[3]/str[@name='class'] = 'solr.LowerCaseFilterFactory'");
|
||||
checkFieldTypeProps(expProps, 19, analyzerTests);
|
||||
|
||||
expProps = getExpectedProps("textFieldType2", "solr.TextField", true, true);
|
||||
expProps.put("autoGeneratePhraseQueries", false);
|
||||
expProps.put("omitNorms", false);
|
||||
expProps.put("omitTermFreqAndPositions", false);
|
||||
expProps.put("omitPositions", false);
|
||||
expProps.put("storeOffsetsWithPositions", false);
|
||||
expProps.put("tokenized", true);
|
||||
|
||||
analyzerTests = new ArrayList<>();
|
||||
analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/lst[@name='tokenizer']/str[@name='class'] = 'solr.WhitespaceTokenizerFactory'");
|
||||
analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[1]/str[@name='class'] = 'solr.WordDelimiterFilterFactory'");
|
||||
analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[2]/str[@name='class'] = 'solr.StopFilterFactory'");
|
||||
analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[3]/str[@name='class'] = 'solr.LowerCaseFilterFactory'");
|
||||
analyzerTests.add("/response/lst[@name='fieldType']/lst[@name='analyzer']/arr[@name='filters']/lst[4]/str[@name='class'] = 'solr.ASCIIFoldingFilterFactory'");
|
||||
checkFieldTypeProps(expProps, 19, analyzerTests);
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function to check fieldType settings against a set of expected values.
|
||||
*/
|
||||
protected void checkFieldTypeProps(Map<String,Object> expected, int expectedChildCount) {
|
||||
checkFieldTypeProps(expected, expectedChildCount, null);
|
||||
}
|
||||
|
||||
protected void checkFieldTypeProps(Map<String,Object> expected, int expectedChildCount, List<String> addlTests) {
|
||||
String fieldTypeName = (String)expected.get("name");
|
||||
|
||||
List<String> tests = new ArrayList<>();
|
||||
tests.add("count(/response/lst[@name='fieldType']) = 1");
|
||||
tests.add("count(/response/lst[@name='fieldType']/*) = "+expectedChildCount);
|
||||
tests.add("count(/response/lst[@name='fieldType']/arr[@name='fields']/*) = 0");
|
||||
tests.add("count(/response/lst[@name='fieldType']/arr[@name='dynamicFields']/*) = 0");
|
||||
for (Map.Entry<String,Object> next : expected.entrySet()) {
|
||||
Object val = next.getValue();
|
||||
String pathType = null;
|
||||
if (val instanceof Boolean)
|
||||
pathType = "bool";
|
||||
else if (val instanceof String)
|
||||
pathType = "str";
|
||||
else
|
||||
fail("Unexpected value type "+val.getClass().getName());
|
||||
// NOTE: it seems like the fieldtypes endpoint only returns strings or booleans
|
||||
|
||||
String xpath =
|
||||
"/response/lst[@name='fieldType']/"+pathType+"[@name='"+next.getKey()+"']";
|
||||
tests.add(xpath+" = '"+val+"'");
|
||||
}
|
||||
|
||||
if (addlTests != null)
|
||||
tests.addAll(addlTests);
|
||||
|
||||
assertQ("/schema/fieldtypes/"+fieldTypeName+"?indent=on&wt=xml&showDefaults=true",
|
||||
tests.toArray(new String[0]));
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds a map containing expected values for a field type created by this test.
|
||||
*/
|
||||
protected Map<String,Object> getExpectedProps(String name, String className, boolean indexed, boolean stored) {
|
||||
Map<String,Object> map = new HashMap<>();
|
||||
map.put("name", name);
|
||||
map.put("class", className);
|
||||
map.put("indexed", indexed);
|
||||
map.put("stored", stored);
|
||||
map.put("docValues", false);
|
||||
map.put("termVectors", false);
|
||||
map.put("termPositions", false);
|
||||
map.put("termOffsets", false);
|
||||
map.put("omitNorms", true);
|
||||
map.put("omitTermFreqAndPositions", true);
|
||||
map.put("omitPositions", false);
|
||||
map.put("storeOffsetsWithPositions", false);
|
||||
map.put("multiValued", false);
|
||||
map.put("tokenized", false);
|
||||
return map;
|
||||
}
|
||||
}
|
|
@ -42,6 +42,8 @@ public class TestCloudManagedSchemaConcurrent extends AbstractFullDistribZkTestB
|
|||
private static final String POST_DYNAMIC_FIELDNAME = "newdynamicfieldPost";
|
||||
private static final String PUT_FIELDNAME = "newfieldPut";
|
||||
private static final String POST_FIELDNAME = "newfieldPost";
|
||||
private static final String PUT_FIELDTYPE = "newfieldtypePut";
|
||||
private static final String POST_FIELDTYPE = "newfieldtypePost";
|
||||
|
||||
public TestCloudManagedSchemaConcurrent() {
|
||||
super();
|
||||
|
@ -129,6 +131,20 @@ public class TestCloudManagedSchemaConcurrent extends AbstractFullDistribZkTestB
|
|||
verifySuccess(request, response);
|
||||
}
|
||||
|
||||
private static void addFieldTypePut(RestTestHarness publisher, String typeName) throws Exception {
|
||||
final String content = "{\"class\":\"solr.TrieIntField\"}";
|
||||
String request = "/schema/fieldtypes/" + typeName + "?wt=xml";
|
||||
String response = publisher.put(request, content);
|
||||
verifySuccess(request, response);
|
||||
}
|
||||
|
||||
private static void addFieldTypePost(RestTestHarness publisher, String typeName) throws Exception {
|
||||
final String content = "[{\"name\":\""+typeName+"\",\"class\":\"solr.TrieIntField\"}]";
|
||||
String request = "/schema/fieldtypes/?wt=xml";
|
||||
String response = publisher.post(request, content);
|
||||
verifySuccess(request, response);
|
||||
}
|
||||
|
||||
private String[] getExpectedFieldResponses(Info info) {
|
||||
String[] expectedAddFields = new String[1 + info.numAddFieldPuts + info.numAddFieldPosts];
|
||||
expectedAddFields[0] = SUCCESS_XPATH;
|
||||
|
@ -182,6 +198,26 @@ public class TestCloudManagedSchemaConcurrent extends AbstractFullDistribZkTestB
|
|||
return expectedCopyFields.toArray(new String[expectedCopyFields.size()]);
|
||||
}
|
||||
|
||||
private String[] getExpectedFieldTypeResponses(Info info) {
|
||||
String[] expectedAddFieldTypes = new String[1 + info.numAddFieldTypePuts + info.numAddFieldTypePosts];
|
||||
expectedAddFieldTypes[0] = SUCCESS_XPATH;
|
||||
|
||||
for (int i = 0; i < info.numAddFieldTypePuts; ++i) {
|
||||
String newFieldTypeName = PUT_FIELDTYPE + info.fieldNameSuffix + i;
|
||||
expectedAddFieldTypes[1 + i]
|
||||
= "/response/arr[@name='fieldTypes']/lst/str[@name='name'][.='" + newFieldTypeName + "']";
|
||||
}
|
||||
|
||||
for (int i = 0; i < info.numAddFieldTypePosts; ++i) {
|
||||
String newFieldTypeName = POST_FIELDTYPE + info.fieldNameSuffix + i;
|
||||
expectedAddFieldTypes[1 + info.numAddFieldTypePuts + i]
|
||||
= "/response/arr[@name='fieldTypes']/lst/str[@name='name'][.='" + newFieldTypeName + "']";
|
||||
}
|
||||
|
||||
return expectedAddFieldTypes;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void doTest() throws Exception {
|
||||
setupHarnesses();
|
||||
|
@ -194,6 +230,8 @@ public class TestCloudManagedSchemaConcurrent extends AbstractFullDistribZkTestB
|
|||
int numAddFieldPosts = 0;
|
||||
int numAddDynamicFieldPuts = 0;
|
||||
int numAddDynamicFieldPosts = 0;
|
||||
int numAddFieldTypePuts = 0;
|
||||
int numAddFieldTypePosts = 0;
|
||||
public String fieldNameSuffix;
|
||||
List<CopyFieldInfo> copyFields = new ArrayList<>();
|
||||
|
||||
|
@ -254,8 +292,21 @@ public class TestCloudManagedSchemaConcurrent extends AbstractFullDistribZkTestB
|
|||
copyField(publisher, sourceField, destField);
|
||||
info.copyFields.add(new CopyFieldInfo(sourceField, destField));
|
||||
}
|
||||
},
|
||||
PUT_AddFieldType {
|
||||
@Override public void execute(RestTestHarness publisher, int fieldNum, Info info) throws Exception {
|
||||
String typeName = PUT_FIELDTYPE + info.numAddFieldTypePuts++;
|
||||
addFieldTypePut(publisher, typeName);
|
||||
}
|
||||
},
|
||||
POST_AddFieldType {
|
||||
@Override public void execute(RestTestHarness publisher, int fieldNum, Info info) throws Exception {
|
||||
String typeName = POST_FIELDTYPE + info.numAddFieldTypePosts++;
|
||||
addFieldTypePost(publisher, typeName);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
|
||||
public abstract void execute(RestTestHarness publisher, int fieldNum, Info info) throws Exception;
|
||||
|
||||
private static final Operation[] VALUES = values();
|
||||
|
@ -279,6 +330,7 @@ public class TestCloudManagedSchemaConcurrent extends AbstractFullDistribZkTestB
|
|||
String[] expectedAddFields = getExpectedFieldResponses(info);
|
||||
String[] expectedAddDynamicFields = getExpectedDynamicFieldResponses(info);
|
||||
String[] expectedCopyFields = getExpectedCopyFieldResponses(info);
|
||||
String[] expectedAddFieldTypes = getExpectedFieldTypeResponses(info);
|
||||
|
||||
boolean success = false;
|
||||
long maxTimeoutMillis = 100000;
|
||||
|
@ -292,6 +344,14 @@ public class TestCloudManagedSchemaConcurrent extends AbstractFullDistribZkTestB
|
|||
Thread.sleep(100);
|
||||
|
||||
for (RestTestHarness client : restTestHarnesses) {
|
||||
// verify addFieldTypePuts and addFieldTypePosts
|
||||
request = "/schema/fieldtypes?wt=xml";
|
||||
response = client.query(request);
|
||||
result = BaseTestHarness.validateXPath(response, expectedAddFieldTypes);
|
||||
if (result != null) {
|
||||
break;
|
||||
}
|
||||
|
||||
// verify addFieldPuts and addFieldPosts
|
||||
request = "/schema/fields?wt=xml";
|
||||
response = client.query(request);
|
||||
|
@ -368,6 +428,36 @@ public class TestCloudManagedSchemaConcurrent extends AbstractFullDistribZkTestB
|
|||
}
|
||||
}
|
||||
|
||||
/**
 * Worker thread that PUTs one uniquely-named field type through its harness.
 * The name is reserved (and the shared PUT counter bumped) in the constructor,
 * before the thread starts, so expected-response bookkeeping stays consistent.
 */
private class PutFieldTypeThread extends PutPostThread {
  public PutFieldTypeThread(RestTestHarness harness, Info info) {
    super(harness, info);
    fieldName = PUT_FIELDTYPE + "Thread" + info.numAddFieldTypePuts++;
  }
  public void run() {
    try {
      addFieldTypePut(harness, fieldName);
    } catch (Exception e) {
      // log.error("###ACTUAL FAILURE!");
      // surface the failure to the test runner; threads can't throw checked exceptions
      throw new RuntimeException(e);
    }
  }
}
|
||||
|
||||
/**
 * Worker thread that POSTs one uniquely-named field type through its harness.
 * The name is reserved (and the shared POST counter bumped) in the constructor,
 * before the thread starts, so expected-response bookkeeping stays consistent.
 */
private class PostFieldTypeThread extends PutPostThread {
  public PostFieldTypeThread(RestTestHarness harness, Info info) {
    super(harness, info);
    fieldName = POST_FIELDTYPE + "Thread" + info.numAddFieldTypePosts++;
  }
  public void run() {
    try {
      addFieldTypePost(harness, fieldName);
    } catch (Exception e) {
      // log.error("###ACTUAL FAILURE!");
      // surface the failure to the test runner; threads can't throw checked exceptions
      throw new RuntimeException(e);
    }
  }
}
|
||||
|
||||
private class PutDynamicFieldThread extends PutPostThread {
|
||||
public PutDynamicFieldThread(RestTestHarness harness, Info info) {
|
||||
super(harness, info);
|
||||
|
@ -402,7 +492,7 @@ public class TestCloudManagedSchemaConcurrent extends AbstractFullDistribZkTestB
|
|||
|
||||
// First, add a bunch of fields via PUT and POST, as well as copyFields,
|
||||
// but do it fast enough and verify shards' schemas after all of them are added
|
||||
int numFields = 25;
|
||||
int numFields = 5;
|
||||
Info info = new Info("Thread");
|
||||
|
||||
for (int i = 0; i <= numFields ; ++i) {
|
||||
|
@ -423,12 +513,23 @@ public class TestCloudManagedSchemaConcurrent extends AbstractFullDistribZkTestB
|
|||
PutDynamicFieldThread putDynamicFieldThread = new PutDynamicFieldThread(publisher, info);
|
||||
putDynamicFieldThread.start();
|
||||
|
||||
publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size()));
|
||||
PostFieldTypeThread postFieldTypeThread = new PostFieldTypeThread(publisher, info);
|
||||
postFieldTypeThread.start();
|
||||
|
||||
publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size()));
|
||||
PutFieldTypeThread putFieldTypeThread = new PutFieldTypeThread(publisher, info);
|
||||
putFieldTypeThread.start();
|
||||
|
||||
postFieldThread.join();
|
||||
putFieldThread.join();
|
||||
postDynamicFieldThread.join();
|
||||
putDynamicFieldThread.join();
|
||||
postFieldTypeThread.join();
|
||||
putFieldTypeThread.join();
|
||||
|
||||
String[] expectedAddFields = getExpectedFieldResponses(info);
|
||||
String[] expectedAddFieldTypes = getExpectedFieldTypeResponses(info);
|
||||
String[] expectedAddDynamicFields = getExpectedDynamicFieldResponses(info);
|
||||
|
||||
boolean success = false;
|
||||
|
@ -467,6 +568,17 @@ public class TestCloudManagedSchemaConcurrent extends AbstractFullDistribZkTestB
|
|||
// System.err.println("###FAILURE!");
|
||||
break;
|
||||
}
|
||||
|
||||
request = "/schema/fieldtypes?wt=xml";
|
||||
response = client.query(request);
|
||||
//System.err.println("###RESPONSE: " + response);
|
||||
result = BaseTestHarness.validateXPath(response, expectedAddFieldTypes);
|
||||
|
||||
if (result != null) {
|
||||
// System.err.println("###FAILURE!");
|
||||
break;
|
||||
}
|
||||
|
||||
}
|
||||
success = (result == null);
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue