SOLR-5527: DIH logs spurious warning for special commands

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1547394 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Shalin Shekhar Mangar 2013-12-03 14:13:13 +00:00
parent 8c04dea24c
commit fcf3a10704
6 changed files with 30 additions and 16 deletions

View File

@ -187,6 +187,8 @@ Bug Fixes
* SOLR-5204: StatsComponent and SpellCheckComponent do not support the * SOLR-5204: StatsComponent and SpellCheckComponent do not support the
shards.tolerant=true parameter. (Anca Kopetz, shalin) shards.tolerant=true parameter. (Anca Kopetz, shalin)
* SOLR-5527: DIH logs spurious warning for special commands. (shalin)
Optimizations Optimizations
---------------------- ----------------------

View File

@ -51,6 +51,11 @@ public class DocBuilder {
private static final Logger LOG = LoggerFactory.getLogger(DocBuilder.class); private static final Logger LOG = LoggerFactory.getLogger(DocBuilder.class);
private static final Date EPOCH = new Date(0); private static final Date EPOCH = new Date(0);
public static final String DELETE_DOC_BY_ID = "$deleteDocById";
public static final String DELETE_DOC_BY_QUERY = "$deleteDocByQuery";
public static final String DOC_BOOST = "$docBoost";
public static final String SKIP_DOC = "$skipDoc";
public static final String SKIP_ROW = "$skipRow";
DataImporter dataImporter; DataImporter dataImporter;
@ -568,7 +573,7 @@ public class DocBuilder {
} }
private void handleSpecialCommands(Map<String, Object> arow, DocWrapper doc) { private void handleSpecialCommands(Map<String, Object> arow, DocWrapper doc) {
Object value = arow.get("$deleteDocById"); Object value = arow.get(DELETE_DOC_BY_ID);
if (value != null) { if (value != null) {
if (value instanceof Collection) { if (value instanceof Collection) {
Collection collection = (Collection) value; Collection collection = (Collection) value;
@ -581,7 +586,7 @@ public class DocBuilder {
importStatistics.deletedDocCount.incrementAndGet(); importStatistics.deletedDocCount.incrementAndGet();
} }
} }
value = arow.get("$deleteDocByQuery"); value = arow.get(DELETE_DOC_BY_QUERY);
if (value != null) { if (value != null) {
if (value instanceof Collection) { if (value instanceof Collection) {
Collection collection = (Collection) value; Collection collection = (Collection) value;
@ -594,7 +599,7 @@ public class DocBuilder {
importStatistics.deletedDocCount.incrementAndGet(); importStatistics.deletedDocCount.incrementAndGet();
} }
} }
value = arow.get("$docBoost"); value = arow.get(DOC_BOOST);
if (value != null) { if (value != null) {
float value1 = 1.0f; float value1 = 1.0f;
if (value instanceof Number) { if (value instanceof Number) {
@ -605,7 +610,7 @@ public class DocBuilder {
doc.setDocumentBoost(value1); doc.setDocumentBoost(value1);
} }
value = arow.get("$skipDoc"); value = arow.get(SKIP_DOC);
if (value != null) { if (value != null) {
if (Boolean.parseBoolean(value.toString())) { if (Boolean.parseBoolean(value.toString())) {
throw new DataImportHandlerException(DataImportHandlerException.SKIP, throw new DataImportHandlerException(DataImportHandlerException.SKIP,
@ -613,7 +618,7 @@ public class DocBuilder {
} }
} }
value = arow.get("$skipRow"); value = arow.get(SKIP_ROW);
if (value != null) { if (value != null) {
if (Boolean.parseBoolean(value.toString())) { if (Boolean.parseBoolean(value.toString())) {
throw new DataImportHandlerException(DataImportHandlerException.SKIP_ROW); throw new DataImportHandlerException(DataImportHandlerException.SKIP_ROW);

View File

@ -153,7 +153,4 @@ public class EntityProcessorBase extends EntityProcessor {
public static final String CONTINUE = "continue"; public static final String CONTINUE = "continue";
public static final String SKIP = "skip"; public static final String SKIP = "skip";
public static final String SKIP_DOC = "$skipDoc";
} }

View File

@ -330,7 +330,7 @@ public class XPathEntityProcessor extends EntityProcessorBase {
} else if (SKIP.equals(onError)) { } else if (SKIP.equals(onError)) {
LOG.warn(msg, e); LOG.warn(msg, e);
Map<String, Object> map = new HashMap<String, Object>(); Map<String, Object> map = new HashMap<String, Object>();
map.put(SKIP_DOC, Boolean.TRUE); map.put(DocBuilder.SKIP_DOC, Boolean.TRUE);
rows.add(map); rows.add(map);
} else if (CONTINUE.equals(onError)) { } else if (CONTINUE.equals(onError)) {
LOG.warn(msg, e); LOG.warn(msg, e);

View File

@ -8,6 +8,7 @@ import java.util.Locale;
import java.util.Map; import java.util.Map;
import org.apache.solr.handler.dataimport.DataImporter; import org.apache.solr.handler.dataimport.DataImporter;
import org.apache.solr.handler.dataimport.DocBuilder;
import org.apache.solr.schema.IndexSchema; import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField; import org.apache.solr.schema.SchemaField;
import org.slf4j.Logger; import org.slf4j.Logger;
@ -111,7 +112,7 @@ public class DIHConfiguration {
for (Map.Entry<String,EntityField> entry : fields.entrySet()) { for (Map.Entry<String,EntityField> entry : fields.entrySet()) {
EntityField fld = entry.getValue(); EntityField fld = entry.getValue();
SchemaField field = getSchemaField(fld.getName()); SchemaField field = getSchemaField(fld.getName());
if (field == null) { if (field == null && !isSpecialCommand(fld.getName())) {
LOG.info("The field :" + fld.getName() + " present in DataConfig does not have a counterpart in Solr Schema"); LOG.info("The field :" + fld.getName() + " present in DataConfig does not have a counterpart in Solr Schema");
} }
} }
@ -178,4 +179,13 @@ public class DIHConfiguration {
public IndexSchema getSchema() { public IndexSchema getSchema() {
return schema; return schema;
} }
/**
 * Returns true if the given field name is one of the DIH special commands
 * (e.g. $deleteDocById, $skipDoc) defined as constants on {@link DocBuilder},
 * so callers can avoid warning that such fields have no schema counterpart.
 */
public static boolean isSpecialCommand(String fld) {
return DocBuilder.DELETE_DOC_BY_ID.equals(fld) ||
DocBuilder.DELETE_DOC_BY_QUERY.equals(fld) ||
DocBuilder.DOC_BOOST.equals(fld) ||
DocBuilder.SKIP_DOC.equals(fld) ||
DocBuilder.SKIP_ROW.equals(fld);
}
} }

View File

@ -132,7 +132,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
public void testSkipDoc() throws Exception { public void testSkipDoc() throws Exception {
List rows = new ArrayList(); List rows = new ArrayList();
rows.add(createMap("id", "1", "desc", "one")); rows.add(createMap("id", "1", "desc", "one"));
rows.add(createMap("id", "2", "desc", "two", "$skipDoc", "true")); rows.add(createMap("id", "2", "desc", "two", DocBuilder.SKIP_DOC, "true"));
MockDataSource.setIterator("select * from x", rows.iterator()); MockDataSource.setIterator("select * from x", rows.iterator());
runFullImport(dataConfigWithDynamicTransformer); runFullImport(dataConfigWithDynamicTransformer);
@ -146,7 +146,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
public void testSkipRow() throws Exception { public void testSkipRow() throws Exception {
List rows = new ArrayList(); List rows = new ArrayList();
rows.add(createMap("id", "1", "desc", "one")); rows.add(createMap("id", "1", "desc", "one"));
rows.add(createMap("id", "2", "desc", "two", "$skipRow", "true")); rows.add(createMap("id", "2", "desc", "two", DocBuilder.SKIP_ROW, "true"));
MockDataSource.setIterator("select * from x", rows.iterator()); MockDataSource.setIterator("select * from x", rows.iterator());
runFullImport(dataConfigWithDynamicTransformer); runFullImport(dataConfigWithDynamicTransformer);
@ -166,7 +166,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
MockDataSource.setIterator("3", rows.iterator()); MockDataSource.setIterator("3", rows.iterator());
rows = new ArrayList(); rows = new ArrayList();
rows.add(createMap("name_s", "xyz", "$skipRow", "true")); rows.add(createMap("name_s", "xyz", DocBuilder.SKIP_ROW, "true"));
MockDataSource.setIterator("4", rows.iterator()); MockDataSource.setIterator("4", rows.iterator());
runFullImport(dataConfigWithTwoEntities); runFullImport(dataConfigWithTwoEntities);
@ -197,7 +197,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
List rows = new ArrayList(); List rows = new ArrayList();
rows.add(createMap("id", "1", "desc", "one")); rows.add(createMap("id", "1", "desc", "one"));
rows.add(createMap("id", "2", "desc", "two")); rows.add(createMap("id", "2", "desc", "two"));
rows.add(createMap("id", "3", "desc", "two", "$deleteDocById", "2")); rows.add(createMap("id", "3", "desc", "two", DocBuilder.DELETE_DOC_BY_ID, "2"));
MockDataSource.setIterator("select * from x", rows.iterator()); MockDataSource.setIterator("select * from x", rows.iterator());
runFullImport(dataConfigForSkipTransform); runFullImport(dataConfigForSkipTransform);
@ -213,7 +213,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
rows = new ArrayList(); rows = new ArrayList();
rows.add(createMap("id", "1", "desc", "one")); rows.add(createMap("id", "1", "desc", "one"));
rows.add(createMap("id", "2", "desc", "one")); rows.add(createMap("id", "2", "desc", "one"));
rows.add(createMap("id", "3", "desc", "two", "$deleteDocByQuery", "desc:one")); rows.add(createMap("id", "3", "desc", "two", DocBuilder.DELETE_DOC_BY_QUERY, "desc:one"));
MockDataSource.setIterator("select * from x", rows.iterator()); MockDataSource.setIterator("select * from x", rows.iterator());
runFullImport(dataConfigForSkipTransform); runFullImport(dataConfigForSkipTransform);
@ -227,7 +227,7 @@ public class TestDocBuilder2 extends AbstractDataImportHandlerTestCase {
MockDataSource.clearCache(); MockDataSource.clearCache();
rows = new ArrayList(); rows = new ArrayList();
rows.add(createMap("$deleteDocById", "3")); rows.add(createMap(DocBuilder.DELETE_DOC_BY_ID, "3"));
MockDataSource.setIterator("select * from x", rows.iterator()); MockDataSource.setIterator("select * from x", rows.iterator());
runFullImport(dataConfigForSkipTransform, createMap("clean","false")); runFullImport(dataConfigForSkipTransform, createMap("clean","false"));
assertQ(req("id:3"), "//*[@numFound='0']"); assertQ(req("id:3"), "//*[@numFound='0']");