mirror of https://github.com/apache/lucene.git
SOLR-742 -- More code cleanup
git-svn-id: https://svn.apache.org/repos/asf/lucene/solr/trunk@709088 13f79535-47bb-0310-9956-ffa450edef68
parent fbf5c2fc46
commit 68add68a6f
@@ -20,6 +20,7 @@ import org.w3c.dom.Element;
 import org.w3c.dom.NamedNodeMap;
 import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
+import org.apache.solr.schema.SchemaField;

 import java.util.*;

@@ -52,6 +53,8 @@ public class DataConfig {
   public Map<String, Properties> dataSources = new HashMap<String, Properties>();

+  public Map<String, SchemaField> lowerNameVsSchemaField = new HashMap<String, SchemaField>();
+
   public Document getDocumentByName(String name) {
     if (documentCache == null) {
       documentCache = new HashMap<String, Document>();

@@ -127,14 +130,8 @@ public class DataConfig {
     public Script script;

-    public List<Field> implicitFields;
-
     public Map<String, Field> colNameVsField;

     public Map<String, Field> lowercaseColNameVsField;

-    public Entity rootEntity;
-
-    public Entity() {
-    }
-

@@ -148,12 +145,10 @@ public class DataConfig {
       List<Element> n = getChildNodes(element, "field");
       fields = new ArrayList<Field>();
       colNameVsField = new HashMap<String, Field>();
       lowercaseColNameVsField = new HashMap<String, Field>();
       for (Element elem : n) {
         Field field = new Field(elem);
         fields.add(field);
         colNameVsField.put(field.column, field);
         lowercaseColNameVsField.put(field.column.toLowerCase(), field);
       }
       n = getChildNodes(element, "entity");
       if (!n.isEmpty())

@@ -163,14 +158,6 @@ public class DataConfig {
     }

-    public void setParentEntity(Entity parentEntity) {
-      this.parentEntity = parentEntity;
-      while (parentEntity.parentEntity != null) {
-        parentEntity = parentEntity.parentEntity;
-      }
-      this.rootEntity = parentEntity;
-    }
-
     public void clearCache() {
       if (entities != null) {
         for (Entity entity : entities)

@@ -95,14 +95,15 @@ public class DataImporter {
     dataSourceProps = ds;
     loadDataConfig(dataConfig);

+    for (Map.Entry<String, SchemaField> entry : schema.getFields().entrySet()) {
+      config.lowerNameVsSchemaField.put(entry.getKey().toLowerCase(), entry.getValue());
+    }
+
     for (DataConfig.Document document : config.documents) {
       for (DataConfig.Entity e : document.entities) {
         Map<String, DataConfig.Field> fields = new HashMap<String, DataConfig.Field>();
         initEntity(e, fields, false);
-        e.implicitFields = new ArrayList<DataConfig.Field>();
-        String errs = verifyWithSchema(e, fields);
-        if (e.implicitFields.isEmpty())
-          e.implicitFields = null;
+        String errs = verifyWithSchema(fields);
         if (errs != null) {
           throw new DataImportHandlerException(
                   DataImportHandlerException.SEVERE, errs);

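Note on the hunk above: DataImporter now fills config.lowerNameVsSchemaField once, right after the data-config is loaded, so later lookups can match column names to schema fields regardless of case. A minimal standalone sketch of that indexing step follows; LowercaseIndexSketch and its names are illustrative only, and plain strings stand in for SchemaField.

import java.util.HashMap;
import java.util.Map;

// Sketch: build a lowercase-keyed index over an existing name -> value map,
// mirroring how DataImporter fills config.lowerNameVsSchemaField from schema.getFields().
public class LowercaseIndexSketch {

  static <V> Map<String, V> lowercaseIndex(Map<String, V> byName) {
    Map<String, V> byLowerName = new HashMap<String, V>();
    for (Map.Entry<String, V> entry : byName.entrySet()) {
      byLowerName.put(entry.getKey().toLowerCase(), entry.getValue());
    }
    return byLowerName;
  }

  public static void main(String[] args) {
    Map<String, String> schemaFields = new HashMap<String, String>();
    schemaFields.put("lastModified", "date");

    Map<String, String> lower = lowercaseIndex(schemaFields);
    // A column reported as LASTMODIFIED by a database still resolves to the schema field.
    System.out.println(lower.get("LASTMODIFIED".toLowerCase())); // prints "date"
  }
}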
@@ -111,7 +112,7 @@ public class DataImporter {
       }
     }

-  private String verifyWithSchema(DataConfig.Entity e, Map<String, DataConfig.Field> fields) {
+  private String verifyWithSchema(Map<String, DataConfig.Field> fields) {
     List<String> errors = new ArrayList<String>();
     Map<String, SchemaField> schemaFields = schema.getFields();
     for (Map.Entry<String, SchemaField> entry : schemaFields.entrySet()) {

@@ -122,10 +123,6 @@ public class DataImporter {
             .info(sf.getName()
                 + " is a required field in SolrSchema . But not found in DataConfig");
       }
-      DataConfig.Field field = new DataConfig.Field(sf.getName(), sf.multiValued());
-      e.implicitFields.add(field);
-      e.colNameVsField.put(field.column, field);
-      e.lowercaseColNameVsField.put(field.column.toLowerCase(), field);
       }
     }
     for (Map.Entry<String, DataConfig.Field> entry : fields.entrySet()) {

@@ -197,7 +194,7 @@ public class DataImporter {
     if (e.fields != null) {
       for (DataConfig.Field f : e.fields) {
         f.nameOrColName = f.getName();
-        SchemaField schemaField = schema.getFields().get(f.getName());
+        SchemaField schemaField = schema.getFieldOrNull(f.getName());
         if (schemaField != null) {
           f.multiValued = schemaField.multiValued();
           f.allAttributes.put(MULTI_VALUED, Boolean.toString(schemaField

@@ -235,7 +232,7 @@ public class DataImporter {
     if (e.entities == null)
       return;
     for (DataConfig.Entity e1 : e.entities) {
-      e1.setParentEntity(e);
+      e1.parentEntity = e;
       initEntity(e1, fields, e.isDocRoot || docRootFound);
     }

@@ -17,12 +17,11 @@
 package org.apache.solr.handler.dataimport;

-import org.apache.solr.core.SolrCore;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.SolrInputField;
+import org.apache.solr.core.SolrCore;
 import org.apache.solr.schema.SchemaField;
-import org.slf4j.LoggerFactory;
 import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 import java.util.*;
 import java.util.concurrent.atomic.AtomicBoolean;

@@ -332,7 +331,7 @@ public class DocBuilder {
         if (e.getErrCode() == DataImportHandlerException.SKIP) {
           importStatistics.skipDocCount.getAndIncrement();
         } else {
-          LOG.error( "Exception while processing: "
+          LOG.error("Exception while processing: "
                   + entity.name + " document : " + doc, e);
         }
         if (e.getErrCode() == DataImportHandlerException.SEVERE)

@@ -369,24 +368,23 @@ public class DocBuilder {
   private void addFields(DataConfig.Entity entity, SolrInputDocument doc, Map<String, Object> arow) {
     for (Map.Entry<String, Object> entry : arow.entrySet()) {
       String key = entry.getKey();
       if (key.startsWith("$")) {
         // All fields starting with $ are special values and don't need to be added
         continue;
       }
       DataConfig.Field field = entity.colNameVsField.get(key);
       if (field == null) {
         field = entity.lowercaseColNameVsField.get(key.toLowerCase());
       }
-      if (field == null && entity.rootEntity != null) {
-        field = entity.rootEntity.colNameVsField.get(key);
-      }
-      if (field == null && entity.rootEntity != null) {
-        field = entity.rootEntity.lowercaseColNameVsField.get(key.toLowerCase());
-      }
       if (field == null) {
-        // This can be a dynamic field
+        // This can be a dynamic field or a field which does not have an entry in data-config ( an implicit field)
         SchemaField sf = dataImporter.getSchema().getFieldOrNull(key);
+        if (sf == null) {
+          sf = dataImporter.getConfig().lowerNameVsSchemaField.get(key.toLowerCase());
+        }
         if (sf != null) {
           addFieldToDoc(entry.getValue(), key, 1.0f, sf.multiValued(), doc);
         }
+        //else do nothing. if we add it it may fail
       } else {
         if (field.toWrite) {
           addFieldToDoc(entry.getValue(), key, field.boost, field.multiValued, doc);
         }
       }

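Note on the addFields change above: the per-entity rootEntity fallback is gone, and the lookup order now appears to be the entity's own column map, then its lowercased column map, then the schema (exact name via getFieldOrNull, then the lowercased schema index added in this commit). A hedged, self-contained sketch of that fallback chain follows; plain string maps stand in for the Solr types and all names are illustrative.

import java.util.HashMap;
import java.util.Map;

// Sketch of the field-resolution fallback used when adding a row's columns to a document:
// explicit <field> mapping first, then case-insensitive mapping, then the schema itself.
// The maps below stand in for entity.colNameVsField, entity.lowercaseColNameVsField,
// the schema, and config.lowerNameVsSchemaField.
public class FieldResolutionSketch {

  static String resolve(String key,
                        Map<String, String> colNameVsField,
                        Map<String, String> lowercaseColNameVsField,
                        Map<String, String> schemaFields,
                        Map<String, String> lowerNameVsSchemaField) {
    String field = colNameVsField.get(key);
    if (field == null) {
      field = lowercaseColNameVsField.get(key.toLowerCase());
    }
    if (field == null) {
      // Could be a dynamic field or an implicit field with no data-config entry.
      field = schemaFields.get(key);
      if (field == null) {
        field = lowerNameVsSchemaField.get(key.toLowerCase());
      }
    }
    return field; // null means the column is silently skipped rather than risking a failed add
  }

  public static void main(String[] args) {
    Map<String, String> col = new HashMap<String, String>();
    col.put("id", "field:id");
    Map<String, String> lowerCol = new HashMap<String, String>();
    lowerCol.put("id", "field:id");
    Map<String, String> schema = new HashMap<String, String>();
    schema.put("name", "schema:name");
    Map<String, String> lowerSchema = new HashMap<String, String>();
    lowerSchema.put("name", "schema:name");

    System.out.println(resolve("ID", col, lowerCol, schema, lowerSchema));   // field:id
    System.out.println(resolve("NAME", col, lowerCol, schema, lowerSchema)); // schema:name
  }
}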
@@ -404,7 +402,7 @@ public class DocBuilder {
       } else {
         if (doc.getField(name) == null)
           for (Object o : collection) {
-            doc.addField(name, o,boost);
+            doc.addField(name, o, boost);
             break;
           }
       }

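Note on the hunk above (only part of addFieldToDoc is visible): the surrounding block appears to guard single-valued fields, adding just the first element of a collection and only when the document does not already carry the field. A small sketch of that guard follows; SimpleDoc is a stand-in for SolrInputDocument, not the Solr class, and the boost argument is omitted.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Sketch of the single-valued guard around doc.addField(name, o, boost).
public class SingleValuedGuardSketch {

  static class SimpleDoc {
    final Map<String, List<Object>> fields = new HashMap<String, List<Object>>();

    List<Object> getField(String name) {
      return fields.get(name);
    }

    void addField(String name, Object value) {
      List<Object> values = fields.get(name);
      if (values == null) {
        values = new ArrayList<Object>();
        fields.put(name, values);
      }
      values.add(value);
    }
  }

  static void addCollection(SimpleDoc doc, String name, Collection<Object> collection, boolean multiValued) {
    if (multiValued) {
      for (Object o : collection) {
        doc.addField(name, o);
      }
    } else {
      if (doc.getField(name) == null)
        for (Object o : collection) {
          doc.addField(name, o);
          break; // single-valued field: take the first element only
        }
    }
  }

  public static void main(String[] args) {
    SimpleDoc doc = new SimpleDoc();
    addCollection(doc, "category", Arrays.<Object>asList("a", "b", "c"), false);
    System.out.println(doc.getField("category")); // prints [a]
  }
}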
@@ -581,7 +579,7 @@ public class DocBuilder {
       String n = DocBuilder.class.getPackage().getName() + "." + name;
       return core != null ?
               core.getResourceLoader().findClass(n) :
-              Class.forName(n);
+          Class.forName(n);
     } catch (Exception e1) {
       throw new ClassNotFoundException("Unable to load " + name + " or " + DocBuilder.class.getPackage().getName() + "." + name, e);
     }

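Note on the last hunk: loadClass tries the class name as given (via the core's resource loader when a core is present) and falls back to the DocBuilder package-qualified name, reporting both names on failure. A rough standalone sketch of that two-step lookup follows; the SolrCore resource-loader branch is omitted and plain Class.forName stands in for it.

// Sketch of the two-step class lookup: try the name as given, then retry with the
// handler package prepended, and report both names if neither resolves.
public class LoadClassSketch {

  // Package that short names are resolved against, as in DocBuilder.
  static final String PKG = "org.apache.solr.handler.dataimport";

  static Class<?> loadClass(String name) throws ClassNotFoundException {
    try {
      return Class.forName(name);
    } catch (Exception e) {
      try {
        return Class.forName(PKG + "." + name);
      } catch (Exception e1) {
        throw new ClassNotFoundException("Unable to load " + name + " or " + PKG + "." + name, e);
      }
    }
  }

  public static void main(String[] args) throws Exception {
    // A fully qualified name resolves directly; a short name would be retried as PKG + "." + name.
    System.out.println(loadClass("java.lang.String"));
  }
}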