mirror of https://github.com/apache/lucene.git
SOLR-1229: fixes for deletedPkQuery
git-svn-id: https://svn.apache.org/repos/asf/lucene/solr/trunk@792963 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent
6457b52936
commit
a7c25f8813
|
@ -246,6 +246,8 @@ Bug Fixes
|
|||
26.SOLR-1146: ConcurrentModificationException in DataImporter.getStatusMessages
|
||||
(Walter Ferrara, Noble Paul via shalin)
|
||||
|
||||
27.SOLR-1229: Fixes for deletedPkQuery, particularly when using transformed Solr unique id's
|
||||
(Lance Norskog, Noble Paul via ehatcher)
|
||||
|
||||
|
||||
Documentation
|
||||
|
|
|
@ -68,7 +68,7 @@ public abstract class AbstractDataImportHandlerTest extends
|
|||
|
||||
protected void runDeltaImport(String dataConfig) throws Exception {
|
||||
LocalSolrQueryRequest request = lrf.makeRequest("command", "delta-import",
|
||||
"debug", "on", "clean", "true", "commit", "true", "dataConfig",
|
||||
"debug", "on", "clean", "false", "commit", "true", "dataConfig",
|
||||
dataConfig);
|
||||
h.query("/dataimport", request);
|
||||
}
|
||||
|
|
|
@ -78,6 +78,8 @@ public class DataConfig {
|
|||
|
||||
public String pk;
|
||||
|
||||
public String pkMappingFromSchema;
|
||||
|
||||
public String dataSource;
|
||||
|
||||
public Map<String, String> allAttributes;
|
||||
|
|
|
@ -106,6 +106,7 @@ public class DataImporter {
|
|||
Map<String, DataConfig.Field> fields = new HashMap<String, DataConfig.Field>();
|
||||
initEntity(e, fields, false);
|
||||
verifyWithSchema(fields);
|
||||
identifyPk(e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -145,6 +146,22 @@ public class DataImporter {
|
|||
}
|
||||
}
|
||||
|
||||
private void identifyPk(DataConfig.Entity entity) {
|
||||
String schemaPk = schema.getUniqueKeyField().getName();
|
||||
//if no fields are mentioned, the Solr uniqueKey is the same as the DIH 'pk'
|
||||
entity.pkMappingFromSchema = schemaPk;
|
||||
for (DataConfig.Field field : entity.fields) {
|
||||
if(field.getName().equals(schemaPk)) {
|
||||
entity.pkMappingFromSchema = field.column;
|
||||
//get the corresponding column mapping for the solr uniqueKey
|
||||
// But if there are multiple columns mapping to the solr uniqueKey, it will fail
|
||||
// so, in one-off cases we may still need 'pk'
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
void loadDataConfig(String configFile) {
|
||||
|
||||
try {
|
||||
|
|
|
@ -273,16 +273,11 @@ public class DocBuilder {
|
|||
while (iter.hasNext()) {
|
||||
Map<String, Object> map = iter.next();
|
||||
Object key = null;
|
||||
if(root.pk != null){
|
||||
if(root.pkMappingFromSchema != null){
|
||||
key = map.get(root.pkMappingFromSchema);
|
||||
} else if(root.pk != null){
|
||||
key = map.get(root.pk);
|
||||
}
|
||||
if(key == null && map.size() ==1){
|
||||
//iterating through the map just to get the first and only item
|
||||
for (Map.Entry<String, Object> e : map.entrySet()) {
|
||||
key = e.getValue();
|
||||
break;
|
||||
}
|
||||
}
|
||||
if(key == null) {
|
||||
LOG.warn("no key was available for deleteted pk query");
|
||||
continue;
|
||||
|
|
|
@ -0,0 +1,295 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.solr.handler.dataimport;
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Test for SqlEntityProcessor which checks variations in primary key names and deleted ids
|
||||
* </p>
|
||||
*
|
||||
*
|
||||
* @version $Id: TestSqlEntityProcessor2.java 723824 2008-12-05 19:14:11Z shalin $
|
||||
* @since solr 1.3
|
||||
*/
|
||||
public class TestSqlEntityProcessorDelta extends AbstractDataImportHandlerTest {
|
||||
private static final String FULLIMPORT_QUERY = "select * from x";
|
||||
|
||||
private static final String DELTA_QUERY = "select id from x where last_modified > NOW";
|
||||
|
||||
private static final String DELETED_PK_QUERY = "select id from x where last_modified > NOW AND deleted='true'";
|
||||
|
||||
@Override
|
||||
public String getSchemaFile() {
|
||||
return "dataimport-schema.xml";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getSolrConfigFile() {
|
||||
return "dataimport-solrconfig.xml";
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setUp() throws Exception {
|
||||
super.setUp();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void tearDown() throws Exception {
|
||||
super.tearDown();
|
||||
}
|
||||
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private void add1document() throws Exception {
|
||||
List parentRow = new ArrayList();
|
||||
parentRow.add(createMap("id", "1"));
|
||||
MockDataSource.setIterator(FULLIMPORT_QUERY, parentRow.iterator());
|
||||
|
||||
List childRow = new ArrayList();
|
||||
childRow.add(createMap("desc", "hello"));
|
||||
MockDataSource.setIterator("select * from y where y.A='1'", childRow
|
||||
.iterator());
|
||||
|
||||
super.runFullImport(dataConfig_delta);
|
||||
|
||||
assertQ(req("*:* OR add1document"), "//*[@numFound='1']");
|
||||
assertQ(req("id:1"), "//*[@numFound='1']");
|
||||
assertQ(req("desc:hello"), "//*[@numFound='1']");
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testCompositePk_FullImport() throws Exception {
|
||||
add1document();
|
||||
}
|
||||
|
||||
// WORKS
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testCompositePk_DeltaImport_delete() throws Exception {
|
||||
add1document();
|
||||
List deletedRow = new ArrayList();
|
||||
deletedRow.add(createMap("id", "1"));
|
||||
MockDataSource.setIterator(DELETED_PK_QUERY, deletedRow.iterator());
|
||||
|
||||
MockDataSource.setIterator(DELTA_QUERY, Collections
|
||||
.EMPTY_LIST.iterator());
|
||||
|
||||
List childRow = new ArrayList();
|
||||
childRow.add(createMap("desc", "hello"));
|
||||
MockDataSource.setIterator("select * from y where y.A='1'", childRow
|
||||
.iterator());
|
||||
|
||||
super.runDeltaImport(dataConfig_delta);
|
||||
assertQ(req("*:* OR testCompositePk_DeltaImport_delete"), "//*[@numFound='0']");
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testCompositePk_DeltaImport_empty() throws Exception {
|
||||
List deltaRow = new ArrayList();
|
||||
deltaRow.add(createMap("id", "1"));
|
||||
MockDataSource.setIterator(DELTA_QUERY, deltaRow.iterator());
|
||||
|
||||
MockDataSource.setIterator(DELETED_PK_QUERY, Collections
|
||||
.EMPTY_LIST.iterator());
|
||||
|
||||
List parentRow = new ArrayList();
|
||||
parentRow.add(createMap("id", "1"));
|
||||
MockDataSource.setIterator("select * from x where id='1'", parentRow
|
||||
.iterator());
|
||||
|
||||
List childRow = new ArrayList();
|
||||
childRow.add(createMap("desc", "hello"));
|
||||
MockDataSource.setIterator("select * from y where y.A='1'", childRow
|
||||
.iterator());
|
||||
|
||||
super.runDeltaImport(dataConfig_delta);
|
||||
|
||||
assertQ(req("*:* OR testCompositePk_DeltaImport_empty"), "//*[@numFound='1']");
|
||||
assertQ(req("id:1"), "//*[@numFound='1']");
|
||||
assertQ(req("desc:hello"), "//*[@numFound='1']");
|
||||
}
|
||||
|
||||
// WORKS
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void XtestCompositePk_DeltaImport_replace_delete() throws Exception {
|
||||
add1document();
|
||||
MockDataSource.clearCache();
|
||||
|
||||
List deltaRow = new ArrayList();
|
||||
deltaRow.add(createMap("id", "1"));
|
||||
MockDataSource.setIterator(DELTA_QUERY,
|
||||
deltaRow.iterator());
|
||||
|
||||
List deletedRow = new ArrayList();
|
||||
deletedRow.add(createMap("id", "1"));
|
||||
MockDataSource.setIterator(DELETED_PK_QUERY,
|
||||
deletedRow.iterator());
|
||||
|
||||
List parentRow = new ArrayList();
|
||||
parentRow.add(createMap("id", "1"));
|
||||
MockDataSource.setIterator("select * from x where id='1'", parentRow
|
||||
.iterator());
|
||||
|
||||
List childRow = new ArrayList();
|
||||
childRow.add(createMap("desc", "goodbye"));
|
||||
MockDataSource.setIterator("select * from y where y.A='1'", childRow
|
||||
.iterator());
|
||||
|
||||
super.runDeltaImport(dataConfig_delta);
|
||||
|
||||
assertQ(req("*:* OR testCompositePk_DeltaImport_replace_delete"), "//*[@numFound='0']");
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testCompositePk_DeltaImport_replace_nodelete() throws Exception {
|
||||
add1document();
|
||||
MockDataSource.clearCache();
|
||||
|
||||
List deltaRow = new ArrayList();
|
||||
deltaRow.add(createMap("id", "1"));
|
||||
MockDataSource.setIterator(DELTA_QUERY,
|
||||
deltaRow.iterator());
|
||||
|
||||
MockDataSource.setIterator(DELETED_PK_QUERY, Collections
|
||||
.EMPTY_LIST.iterator());
|
||||
|
||||
List parentRow = new ArrayList();
|
||||
parentRow.add(createMap("id", "1"));
|
||||
MockDataSource.setIterator("select * from x where id='1'", parentRow
|
||||
.iterator());
|
||||
|
||||
List childRow = new ArrayList();
|
||||
childRow.add(createMap("desc", "goodbye"));
|
||||
MockDataSource.setIterator("select * from y where y.A='1'", childRow
|
||||
.iterator());
|
||||
|
||||
super.runDeltaImport(dataConfig_delta);
|
||||
|
||||
assertQ(req("*:* OR XtestCompositePk_DeltaImport_replace_nodelete"), "//*[@numFound='1']");
|
||||
assertQ(req("id:1"), "//*[@numFound='1']");
|
||||
assertQ(req("desc:hello OR XtestCompositePk_DeltaImport_replace_nodelete"), "//*[@numFound='0']");
|
||||
assertQ(req("desc:goodbye"), "//*[@numFound='1']");
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testCompositePk_DeltaImport_add() throws Exception {
|
||||
add1document();
|
||||
MockDataSource.clearCache();
|
||||
|
||||
List deltaRow = new ArrayList();
|
||||
deltaRow.add(createMap("id", "2"));
|
||||
MockDataSource.setIterator(DELTA_QUERY,
|
||||
deltaRow.iterator());
|
||||
|
||||
List parentRow = new ArrayList();
|
||||
parentRow.add(createMap("id", "2"));
|
||||
MockDataSource.setIterator("select * from x where id='2'", parentRow
|
||||
.iterator());
|
||||
|
||||
List childRow = new ArrayList();
|
||||
childRow.add(createMap("desc", "goodbye"));
|
||||
MockDataSource.setIterator("select * from y where y.A='2'", childRow
|
||||
.iterator());
|
||||
|
||||
super.runDeltaImport(dataConfig_delta);
|
||||
|
||||
assertQ(req("*:* OR testCompositePk_DeltaImport_add"), "//*[@numFound='2']");
|
||||
assertQ(req("id:1"), "//*[@numFound='1']");
|
||||
assertQ(req("id:2"), "//*[@numFound='1']");
|
||||
assertQ(req("desc:hello"), "//*[@numFound='1']");
|
||||
assertQ(req("desc:goodbye"), "//*[@numFound='1']");
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testCompositePk_DeltaImport_nodelta() throws Exception {
|
||||
add1document();
|
||||
MockDataSource.clearCache();
|
||||
|
||||
MockDataSource.setIterator(DELTA_QUERY,
|
||||
Collections.EMPTY_LIST.iterator());
|
||||
|
||||
super.runDeltaImport(dataConfig_delta);
|
||||
|
||||
assertQ(req("*:* OR testCompositePk_DeltaImport_nodelta"), "//*[@numFound='1']");
|
||||
assertQ(req("id:1 OR testCompositePk_DeltaImport_nodelta"), "//*[@numFound='1']");
|
||||
assertQ(req("desc:hello OR testCompositePk_DeltaImport_nodelta"), "//*[@numFound='1']");
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testCompositePk_DeltaImport_add_delete() throws Exception {
|
||||
add1document();
|
||||
MockDataSource.clearCache();
|
||||
|
||||
List deltaRow = new ArrayList();
|
||||
deltaRow.add(createMap("id", "2"));
|
||||
MockDataSource.setIterator(DELTA_QUERY,
|
||||
deltaRow.iterator());
|
||||
|
||||
List deletedRow = new ArrayList();
|
||||
deletedRow.add(createMap("id", "1"));
|
||||
MockDataSource.setIterator(DELETED_PK_QUERY,
|
||||
deletedRow.iterator());
|
||||
|
||||
List parentRow = new ArrayList();
|
||||
parentRow.add(createMap("id", "2"));
|
||||
MockDataSource.setIterator("select * from x where id='2'", parentRow
|
||||
.iterator());
|
||||
|
||||
List childRow = new ArrayList();
|
||||
childRow.add(createMap("desc", "goodbye"));
|
||||
MockDataSource.setIterator("select * from y where y.A='2'", childRow
|
||||
.iterator());
|
||||
|
||||
super.runDeltaImport(dataConfig_delta);
|
||||
|
||||
assertQ(req("*:* OR XtestCompositePk_DeltaImport_add_delete"), "//*[@numFound='1']");
|
||||
assertQ(req("id:2"), "//*[@numFound='1']");
|
||||
assertQ(req("desc:hello"), "//*[@numFound='0']");
|
||||
assertQ(req("desc:goodbye"), "//*[@numFound='1']");
|
||||
}
|
||||
|
||||
private static String dataConfig_delta = "<dataConfig>\n"
|
||||
+ " <document>\n"
|
||||
+ " <entity name=\"x\" pk=\"id\" transformer=\"TemplateTransformer\""
|
||||
+ " query=\"" + FULLIMPORT_QUERY + "\""
|
||||
+ " deletedPkQuery=\"" + DELETED_PK_QUERY + "\""
|
||||
+ " deltaImportQuery=\"select * from x where id='${dataimporter.delta.id}'\""
|
||||
+ " deltaQuery=\"" + DELTA_QUERY + "\">\n"
|
||||
+ " <field column=\"id\" name=\"id\"/>\n"
|
||||
+ " <entity name=\"y\" query=\"select * from y where y.A='${x.id}'\">\n"
|
||||
+ " <field column=\"desc\" />\n"
|
||||
+ " </entity>\n" + " </entity>\n"
|
||||
+ " </document>\n" + "</dataConfig>\n";
|
||||
|
||||
}
|
|
@ -0,0 +1,291 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.solr.handler.dataimport;
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Test for SqlEntityProcessor which checks variations in primary key names and deleted ids
|
||||
* </p>
|
||||
*
|
||||
*
|
||||
* @version $Id: TestSqlEntityProcessor2.java 723824 2008-12-05 19:14:11Z shalin $
|
||||
* @since solr 1.3
|
||||
*/
|
||||
public class TestSqlEntityProcessorDelta2 extends AbstractDataImportHandlerTest {
|
||||
private static final String FULLIMPORT_QUERY = "select * from x";
|
||||
|
||||
private static final String DELTA_QUERY = "select id from x where last_modified > NOW";
|
||||
|
||||
private static final String DELETED_PK_QUERY = "select id from x where last_modified > NOW AND deleted='true'";
|
||||
|
||||
@Override
|
||||
public String getSchemaFile() {
|
||||
return "dataimport-solr_id-schema.xml";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getSolrConfigFile() {
|
||||
return "dataimport-solrconfig.xml";
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setUp() throws Exception {
|
||||
super.setUp();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void tearDown() throws Exception {
|
||||
super.tearDown();
|
||||
}
|
||||
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private void add1document() throws Exception {
|
||||
List parentRow = new ArrayList();
|
||||
parentRow.add(createMap("id", "1"));
|
||||
MockDataSource.setIterator(FULLIMPORT_QUERY, parentRow.iterator());
|
||||
|
||||
List childRow = new ArrayList();
|
||||
childRow.add(createMap("desc", "hello"));
|
||||
MockDataSource.setIterator("select * from y where y.A='1'", childRow
|
||||
.iterator());
|
||||
|
||||
super.runFullImport(dataConfig_delta2);
|
||||
|
||||
assertQ(req("*:* OR add1document"), "//*[@numFound='1']");
|
||||
assertQ(req("solr_id:prefix-1"), "//*[@numFound='1']");
|
||||
assertQ(req("desc:hello"), "//*[@numFound='1']");
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testCompositePk_FullImport() throws Exception {
|
||||
add1document();
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testCompositePk_DeltaImport_delete() throws Exception {
|
||||
add1document();
|
||||
List deletedRow = new ArrayList();
|
||||
deletedRow.add(createMap("id", "1"));
|
||||
MockDataSource.setIterator(DELETED_PK_QUERY, deletedRow.iterator());
|
||||
|
||||
MockDataSource.setIterator(DELTA_QUERY, Collections
|
||||
.EMPTY_LIST.iterator());
|
||||
|
||||
List childRow = new ArrayList();
|
||||
childRow.add(createMap("desc", "hello"));
|
||||
MockDataSource.setIterator("select * from y where y.A='1'", childRow
|
||||
.iterator());
|
||||
|
||||
super.runDeltaImport(dataConfig_delta2);
|
||||
assertQ(req("*:* OR testCompositePk_DeltaImport_delete"), "//*[@numFound='0']");
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testCompositePk_DeltaImport_empty() throws Exception {
|
||||
List deltaRow = new ArrayList();
|
||||
deltaRow.add(createMap("id", "1"));
|
||||
MockDataSource.setIterator(DELTA_QUERY, deltaRow.iterator());
|
||||
|
||||
MockDataSource.setIterator(DELETED_PK_QUERY, Collections
|
||||
.EMPTY_LIST.iterator());
|
||||
|
||||
List parentRow = new ArrayList();
|
||||
parentRow.add(createMap("id", "1"));
|
||||
MockDataSource.setIterator("select * from x where id='1'", parentRow
|
||||
.iterator());
|
||||
|
||||
List childRow = new ArrayList();
|
||||
childRow.add(createMap("desc", "hello"));
|
||||
MockDataSource.setIterator("select * from y where y.A='1'", childRow
|
||||
.iterator());
|
||||
|
||||
super.runDeltaImport(dataConfig_delta2);
|
||||
|
||||
assertQ(req("*:* OR testCompositePk_DeltaImport_empty"), "//*[@numFound='1']");
|
||||
assertQ(req("solr_id:prefix-1"), "//*[@numFound='1']");
|
||||
assertQ(req("desc:hello"), "//*[@numFound='1']");
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void XtestCompositePk_DeltaImport_replace_delete() throws Exception {
|
||||
add1document();
|
||||
MockDataSource.clearCache();
|
||||
|
||||
List deltaRow = new ArrayList();
|
||||
deltaRow.add(createMap("id", "1"));
|
||||
MockDataSource.setIterator(DELTA_QUERY,
|
||||
deltaRow.iterator());
|
||||
|
||||
List deletedRow = new ArrayList();
|
||||
deletedRow.add(createMap("id", "1"));
|
||||
MockDataSource.setIterator(DELETED_PK_QUERY,
|
||||
deletedRow.iterator());
|
||||
|
||||
List parentRow = new ArrayList();
|
||||
parentRow.add(createMap("id", "1"));
|
||||
MockDataSource.setIterator("select * from x where id='1'", parentRow
|
||||
.iterator());
|
||||
|
||||
List childRow = new ArrayList();
|
||||
childRow.add(createMap("desc", "goodbye"));
|
||||
MockDataSource.setIterator("select * from y where y.A='1'", childRow
|
||||
.iterator());
|
||||
|
||||
super.runDeltaImport(dataConfig_delta2);
|
||||
|
||||
assertQ(req("*:* OR testCompositePk_DeltaImport_replace_delete"), "//*[@numFound='0']");
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testCompositePk_DeltaImport_replace_nodelete() throws Exception {
|
||||
add1document();
|
||||
MockDataSource.clearCache();
|
||||
|
||||
List deltaRow = new ArrayList();
|
||||
deltaRow.add(createMap("id", "1"));
|
||||
MockDataSource.setIterator(DELTA_QUERY,
|
||||
deltaRow.iterator());
|
||||
|
||||
MockDataSource.setIterator(DELETED_PK_QUERY, Collections
|
||||
.EMPTY_LIST.iterator());
|
||||
|
||||
List parentRow = new ArrayList();
|
||||
parentRow.add(createMap("id", "1"));
|
||||
MockDataSource.setIterator("select * from x where id='1'", parentRow
|
||||
.iterator());
|
||||
|
||||
List childRow = new ArrayList();
|
||||
childRow.add(createMap("desc", "goodbye"));
|
||||
MockDataSource.setIterator("select * from y where y.A='1'", childRow
|
||||
.iterator());
|
||||
|
||||
super.runDeltaImport(dataConfig_delta2);
|
||||
|
||||
assertQ(req("*:* OR XtestCompositePk_DeltaImport_replace_nodelete"), "//*[@numFound='1']");
|
||||
assertQ(req("solr_id:prefix-1"), "//*[@numFound='1']");
|
||||
assertQ(req("desc:hello OR XtestCompositePk_DeltaImport_replace_nodelete"), "//*[@numFound='0']");
|
||||
assertQ(req("desc:goodbye"), "//*[@numFound='1']");
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testCompositePk_DeltaImport_add() throws Exception {
|
||||
add1document();
|
||||
MockDataSource.clearCache();
|
||||
|
||||
List deltaRow = new ArrayList();
|
||||
deltaRow.add(createMap("id", "2"));
|
||||
MockDataSource.setIterator(DELTA_QUERY,
|
||||
deltaRow.iterator());
|
||||
|
||||
List parentRow = new ArrayList();
|
||||
parentRow.add(createMap("id", "2"));
|
||||
MockDataSource.setIterator("select * from x where id='2'", parentRow
|
||||
.iterator());
|
||||
|
||||
List childRow = new ArrayList();
|
||||
childRow.add(createMap("desc", "goodbye"));
|
||||
MockDataSource.setIterator("select * from y where y.A='2'", childRow
|
||||
.iterator());
|
||||
|
||||
super.runDeltaImport(dataConfig_delta2);
|
||||
|
||||
assertQ(req("*:* OR testCompositePk_DeltaImport_add"), "//*[@numFound='2']");
|
||||
assertQ(req("solr_id:prefix-1"), "//*[@numFound='1']");
|
||||
assertQ(req("solr_id:prefix-2"), "//*[@numFound='1']");
|
||||
assertQ(req("desc:hello"), "//*[@numFound='1']");
|
||||
assertQ(req("desc:goodbye"), "//*[@numFound='1']");
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testCompositePk_DeltaImport_nodelta() throws Exception {
|
||||
add1document();
|
||||
MockDataSource.clearCache();
|
||||
|
||||
MockDataSource.setIterator(DELTA_QUERY,
|
||||
Collections.EMPTY_LIST.iterator());
|
||||
|
||||
super.runDeltaImport(dataConfig_delta2);
|
||||
|
||||
assertQ(req("*:* OR testCompositePk_DeltaImport_nodelta"), "//*[@numFound='1']");
|
||||
assertQ(req("solr_id:prefix-1 OR testCompositePk_DeltaImport_nodelta"), "//*[@numFound='1']");
|
||||
assertQ(req("desc:hello OR testCompositePk_DeltaImport_nodelta"), "//*[@numFound='1']");
|
||||
}
|
||||
|
||||
@Test
|
||||
@SuppressWarnings("unchecked")
|
||||
public void testCompositePk_DeltaImport_add_delete() throws Exception {
|
||||
add1document();
|
||||
MockDataSource.clearCache();
|
||||
|
||||
List deltaRow = new ArrayList();
|
||||
deltaRow.add(createMap("id", "2"));
|
||||
MockDataSource.setIterator(DELTA_QUERY,
|
||||
deltaRow.iterator());
|
||||
|
||||
List deletedRow = new ArrayList();
|
||||
deletedRow.add(createMap("id", "1"));
|
||||
MockDataSource.setIterator(DELETED_PK_QUERY,
|
||||
deletedRow.iterator());
|
||||
|
||||
List parentRow = new ArrayList();
|
||||
parentRow.add(createMap("id", "2"));
|
||||
MockDataSource.setIterator("select * from x where id='2'", parentRow
|
||||
.iterator());
|
||||
|
||||
List childRow = new ArrayList();
|
||||
childRow.add(createMap("desc", "goodbye"));
|
||||
MockDataSource.setIterator("select * from y where y.A='2'", childRow
|
||||
.iterator());
|
||||
|
||||
super.runDeltaImport(dataConfig_delta2);
|
||||
|
||||
assertQ(req("*:* OR XtestCompositePk_DeltaImport_add_delete"), "//*[@numFound='1']");
|
||||
assertQ(req("solr_id:prefix-2"), "//*[@numFound='1']");
|
||||
assertQ(req("desc:hello"), "//*[@numFound='0']");
|
||||
assertQ(req("desc:goodbye"), "//*[@numFound='1']");
|
||||
}
|
||||
|
||||
private static String dataConfig_delta2 = "<dataConfig>\n"
|
||||
+ " <document>\n"
|
||||
+ " <entity name=\"x\" pk=\"id\" transformer=\"TemplateTransformer\""
|
||||
+ " query=\"" + FULLIMPORT_QUERY + "\""
|
||||
+ " deletedPkQuery=\"" + DELETED_PK_QUERY + "\""
|
||||
+ " deltaImportQuery=\"select * from x where id='${dataimporter.delta.id}'\""
|
||||
+ " deltaQuery=\"" + DELTA_QUERY + "\">\n"
|
||||
+ " <field column=\"solr_id\" template=\"prefix-${x.id}\"/>\n"
|
||||
+ " <entity name=\"y\" query=\"select * from y where y.A='${x.id}'\">\n"
|
||||
+ " <field column=\"desc\" />\n"
|
||||
+ " </entity>\n" + " </entity>\n"
|
||||
+ " </document>\n" + "</dataConfig>\n";
|
||||
|
||||
}
|
|
@ -0,0 +1,304 @@
|
|||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
|
||||
<!--
|
||||
This is the Solr schema file. This file should be named "schema.xml" and
|
||||
should be in the conf directory under the solr home
|
||||
(i.e. ./solr/conf/schema.xml by default)
|
||||
or located where the classloader for the Solr webapp can find it.
|
||||
|
||||
This example schema is the recommended starting point for users.
|
||||
It should be kept correct and concise, usable out-of-the-box.
|
||||
|
||||
For more information, on how to customize this file, please see
|
||||
http://wiki.apache.org/solr/SchemaXml
|
||||
-->
|
||||
|
||||
<schema name="test" version="1.1">
|
||||
<!-- attribute "name" is the name of this schema and is only used for display purposes.
|
||||
Applications should change this to reflect the nature of the search collection.
|
||||
version="1.1" is Solr's version number for the schema syntax and semantics. It should
|
||||
not normally be changed by applications.
|
||||
1.0: multiValued attribute did not exist, all fields are multiValued by nature
|
||||
1.1: multiValued attribute introduced, false by default -->
|
||||
|
||||
<types>
|
||||
<!-- field type definitions. The "name" attribute is
|
||||
just a label to be used by field definitions. The "class"
|
||||
attribute and any other attributes determine the real
|
||||
behavior of the fieldType.
|
||||
Class names starting with "solr" refer to java classes in the
|
||||
org.apache.solr.analysis package.
|
||||
-->
|
||||
|
||||
<!-- The StrField type is not analyzed, but indexed/stored verbatim.
|
||||
- StrField and TextField support an optional compressThreshold which
|
||||
limits compression (if enabled in the derived fields) to values which
|
||||
exceed a certain size (in characters).
|
||||
-->
|
||||
<fieldType name="string" class="solr.StrField" sortMissingLast="true" omitNorms="true"/>
|
||||
|
||||
<!-- boolean type: "true" or "false" -->
|
||||
<fieldType name="boolean" class="solr.BoolField" sortMissingLast="true" omitNorms="true"/>
|
||||
|
||||
<!-- The optional sortMissingLast and sortMissingFirst attributes are
|
||||
currently supported on types that are sorted internally as strings.
|
||||
- If sortMissingLast="true", then a sort on this field will cause documents
|
||||
without the field to come after documents with the field,
|
||||
regardless of the requested sort order (asc or desc).
|
||||
- If sortMissingFirst="true", then a sort on this field will cause documents
|
||||
without the field to come before documents with the field,
|
||||
regardless of the requested sort order.
|
||||
- If sortMissingLast="false" and sortMissingFirst="false" (the default),
|
||||
then default lucene sorting will be used which places docs without the
|
||||
field first in an ascending sort and last in a descending sort.
|
||||
-->
|
||||
|
||||
|
||||
<!-- numeric field types that store and index the text
|
||||
value verbatim (and hence don't support range queries, since the
|
||||
lexicographic ordering isn't equal to the numeric ordering) -->
|
||||
<fieldType name="integer" class="solr.IntField" omitNorms="true"/>
|
||||
<fieldType name="long" class="solr.LongField" omitNorms="true"/>
|
||||
<fieldType name="float" class="solr.FloatField" omitNorms="true"/>
|
||||
<fieldType name="double" class="solr.DoubleField" omitNorms="true"/>
|
||||
|
||||
|
||||
<!-- Numeric field types that manipulate the value into
|
||||
a string value that isn't human-readable in its internal form,
|
||||
but with a lexicographic ordering the same as the numeric ordering,
|
||||
so that range queries work correctly. -->
|
||||
<fieldType name="sint" class="solr.SortableIntField" sortMissingLast="true" omitNorms="true"/>
|
||||
<fieldType name="slong" class="solr.SortableLongField" sortMissingLast="true" omitNorms="true"/>
|
||||
<fieldType name="sfloat" class="solr.SortableFloatField" sortMissingLast="true" omitNorms="true"/>
|
||||
<fieldType name="sdouble" class="solr.SortableDoubleField" sortMissingLast="true" omitNorms="true"/>
|
||||
|
||||
|
||||
<!-- The format for this date field is of the form 1995-12-31T23:59:59Z, and
|
||||
is a more restricted form of the canonical representation of dateTime
|
||||
http://www.w3.org/TR/xmlschema-2/#dateTime
|
||||
The trailing "Z" designates UTC time and is mandatory.
|
||||
Optional fractional seconds are allowed: 1995-12-31T23:59:59.999Z
|
||||
All other components are mandatory.
|
||||
|
||||
Expressions can also be used to denote calculations that should be
|
||||
performed relative to "NOW" to determine the value, ie...
|
||||
|
||||
NOW/HOUR
|
||||
... Round to the start of the current hour
|
||||
NOW-1DAY
|
||||
... Exactly 1 day prior to now
|
||||
NOW/DAY+6MONTHS+3DAYS
|
||||
... 6 months and 3 days in the future from the start of
|
||||
the current day
|
||||
|
||||
Consult the DateField javadocs for more information.
|
||||
-->
|
||||
<fieldType name="date" class="solr.DateField" sortMissingLast="true" omitNorms="true"/>
|
||||
|
||||
|
||||
<!-- The "RandomSortField" is not used to store or search any
|
||||
data. You can declare fields of this type it in your schema
|
||||
       to generate pseudo-random orderings of your docs for sorting
|
||||
purposes. The ordering is generated based on the field name
|
||||
and the version of the index, As long as the index version
|
||||
remains unchanged, and the same field name is reused,
|
||||
the ordering of the docs will be consistent.
|
||||
       If you want different pseudo-random orderings of documents,
|
||||
for the same version of the index, use a dynamicField and
|
||||
change the name
|
||||
-->
|
||||
<fieldType name="random" class="solr.RandomSortField" indexed="true" />
|
||||
|
||||
<!-- solr.TextField allows the specification of custom text analyzers
|
||||
specified as a tokenizer and a list of token filters. Different
|
||||
analyzers may be specified for indexing and querying.
|
||||
|
||||
The optional positionIncrementGap puts space between multiple fields of
|
||||
this type on the same document, with the purpose of preventing false phrase
|
||||
matching across fields.
|
||||
|
||||
For more info on customizing your analyzer chain, please see
|
||||
http://wiki.apache.org/solr/AnalyzersTokenizersTokenFilters
|
||||
-->
|
||||
|
||||
<!-- One can also specify an existing Analyzer class that has a
|
||||
default constructor via the class attribute on the analyzer element
|
||||
<fieldType name="text_greek" class="solr.TextField">
|
||||
<analyzer class="org.apache.lucene.analysis.el.GreekAnalyzer"/>
|
||||
</fieldType>
|
||||
-->
|
||||
|
||||
<!-- A text field that only splits on whitespace for exact matching of words -->
|
||||
<fieldType name="text_ws" class="solr.TextField" positionIncrementGap="100">
|
||||
<analyzer>
|
||||
<tokenizer class="solr.WhitespaceTokenizerFactory"/>
|
||||
</analyzer>
|
||||
</fieldType>
|
||||
|
||||
<!-- A text field that uses WordDelimiterFilter to enable splitting and matching of
|
||||
words on case-change, alpha numeric boundaries, and non-alphanumeric chars,
|
||||
so that a query of "wifi" or "wi fi" could match a document containing "Wi-Fi".
|
||||
Synonyms and stopwords are customized by external files, and stemming is enabled.
|
||||
Duplicate tokens at the same position (which may result from Stemmed Synonyms or
|
||||
WordDelim parts) are removed.
|
||||
-->
|
||||
<fieldType name="text" class="solr.TextField" positionIncrementGap="100">
|
||||
<analyzer type="index">
|
||||
<tokenizer class="solr.WhitespaceTokenizerFactory"/>
|
||||
<!-- in this example, we will only use synonyms at query time
|
||||
<filter class="solr.SynonymFilterFactory" synonyms="index_synonyms.txt" ignoreCase="true" expand="false"/>
|
||||
-->
|
||||
<!--<filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"/>-->
|
||||
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="1" catenateNumbers="1" catenateAll="0" splitOnCaseChange="1"/>
|
||||
<filter class="solr.LowerCaseFilterFactory"/>
|
||||
<!--<filter class="solr.EnglishPorterFilterFactory" protected="protwords.txt"/>-->
|
||||
<filter class="solr.RemoveDuplicatesTokenFilterFactory"/>
|
||||
</analyzer>
|
||||
<analyzer type="query">
|
||||
<tokenizer class="solr.WhitespaceTokenizerFactory"/>
|
||||
<!--<filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>-->
|
||||
<!--<filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"/>-->
|
||||
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="0" catenateNumbers="0" catenateAll="0" splitOnCaseChange="1"/>
|
||||
<filter class="solr.LowerCaseFilterFactory"/>
|
||||
<!--<filter class="solr.EnglishPorterFilterFactory" protected="protwords.txt"/>-->
|
||||
<filter class="solr.RemoveDuplicatesTokenFilterFactory"/>
|
||||
</analyzer>
|
||||
</fieldType>
|
||||
|
||||
|
||||
<!-- Less flexible matching, but less false matches. Probably not ideal for product names,
|
||||
but may be good for SKUs. Can insert dashes in the wrong place and still match. -->
|
||||
<fieldType name="textTight" class="solr.TextField" positionIncrementGap="100" >
|
||||
<analyzer>
|
||||
<tokenizer class="solr.WhitespaceTokenizerFactory"/>
|
||||
<!--<filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="false"/>-->
|
||||
<!--<filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"/>-->
|
||||
<filter class="solr.WordDelimiterFilterFactory" generateWordParts="0" generateNumberParts="0" catenateWords="1" catenateNumbers="1" catenateAll="0"/>
|
||||
<filter class="solr.LowerCaseFilterFactory"/>
|
||||
<!--<filter class="solr.EnglishPorterFilterFactory" protected="protwords.txt"/>-->
|
||||
<filter class="solr.RemoveDuplicatesTokenFilterFactory"/>
|
||||
</analyzer>
|
||||
</fieldType>
|
||||
|
||||
<!-- This is an example of using the KeywordTokenizer along
|
||||
         with various TokenFilterFactories to produce a sortable field
|
||||
that does not include some properties of the source text
|
||||
-->
|
||||
<fieldType name="alphaOnlySort" class="solr.TextField" sortMissingLast="true" omitNorms="true">
|
||||
<analyzer>
|
||||
<!-- KeywordTokenizer does no actual tokenizing, so the entire
|
||||
input string is preserved as a single token
|
||||
-->
|
||||
<tokenizer class="solr.KeywordTokenizerFactory"/>
|
||||
        <!-- The LowerCase TokenFilter does what you expect, which is handy
|
||||
when you want your sorting to be case insensitive
|
||||
-->
|
||||
<filter class="solr.LowerCaseFilterFactory" />
|
||||
<!-- The TrimFilter removes any leading or trailing whitespace -->
|
||||
<filter class="solr.TrimFilterFactory" />
|
||||
<!-- The PatternReplaceFilter gives you the flexibility to use
|
||||
Java Regular expression to replace any sequence of characters
|
||||
matching a pattern with an arbitrary replacement string,
|
||||
             which may include back references to portions of the original
|
||||
string matched by the pattern.
|
||||
|
||||
See the Java Regular Expression documentation for more
|
||||
             information on pattern and replacement string syntax.
|
||||
|
||||
http://java.sun.com/j2se/1.5.0/docs/api/java/util/regex/package-summary.html
|
||||
-->
|
||||
<filter class="solr.PatternReplaceFilterFactory"
|
||||
pattern="([^a-z])" replacement="" replace="all"
|
||||
/>
|
||||
</analyzer>
|
||||
</fieldType>
|
||||
|
||||
<!-- since fields of this type are by default not stored or indexed, any data added to
|
||||
them will be ignored outright
|
||||
-->
|
||||
<fieldtype name="ignored" stored="false" indexed="false" class="solr.StrField" />
|
||||
|
||||
</types>
|
||||
|
||||
|
||||
<fields>
|
||||
<!-- Valid attributes for fields:
|
||||
name: mandatory - the name for the field
|
||||
type: mandatory - the name of a previously defined type from the <types> section
|
||||
indexed: true if this field should be indexed (searchable or sortable)
|
||||
stored: true if this field should be retrievable
|
||||
compressed: [false] if this field should be stored using gzip compression
|
||||
(this will only apply if the field type is compressable; among
|
||||
the standard field types, only TextField and StrField are)
|
||||
multiValued: true if this field may contain multiple values per document
|
||||
omitNorms: (expert) set to true to omit the norms associated with
|
||||
this field (this disables length normalization and index-time
|
||||
boosting for the field, and saves some memory). Only full-text
|
||||
fields or fields that need an index-time boost need norms.
|
||||
termVectors: [false] set to true to store the term vector for a given field.
|
||||
When using MoreLikeThis, fields used for similarity should be stored for
|
||||
best performance.
|
||||
-->
|
||||
|
||||
<field name="solr_id" type="string" indexed="true" stored="true" required="true" />
|
||||
<field name="desc" type="string" indexed="true" stored="true" multiValued="true" />
|
||||
|
||||
<field name="date" type="date" indexed="true" stored="true" />
|
||||
|
||||
<field name="timestamp" type="date" indexed="true" stored="true" default="NOW" multiValued="false"/>
|
||||
|
||||
|
||||
<!-- Dynamic field definitions. If a field name is not found, dynamicFields
|
||||
will be used if the name matches any of the patterns.
|
||||
RESTRICTION: the glob-like pattern in the name attribute must have
|
||||
a "*" only at the start or the end.
|
||||
EXAMPLE: name="*_i" will match any field ending in _i (like myid_i, z_i)
|
||||
Longer patterns will be matched first. if equal size patterns
|
||||
both match, the first appearing in the schema will be used. -->
|
||||
<dynamicField name="*_i" type="sint" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_s" type="string" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_l" type="slong" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_t" type="text" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_b" type="boolean" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_f" type="sfloat" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_d" type="sdouble" indexed="true" stored="true"/>
|
||||
<dynamicField name="*_dt" type="date" indexed="true" stored="true"/>
|
||||
|
||||
<dynamicField name="random*" type="random" />
|
||||
|
||||
<!-- uncomment the following to ignore any fields that don't already match an existing
|
||||
field name or dynamic field, rather than reporting them as an error.
|
||||
alternately, change the type="ignored" to some other type e.g. "text" if you want
|
||||
unknown fields indexed and/or stored by default -->
|
||||
<!--dynamicField name="*" type="ignored" /-->
|
||||
|
||||
</fields>
|
||||
|
||||
<!-- Field to use to determine and enforce document uniqueness.
|
||||
Unless this field is marked with required="false", it will be a required field
|
||||
-->
|
||||
<uniqueKey>solr_id</uniqueKey>
|
||||
|
||||
<!-- field for the QueryParser to use when an explicit fieldname is absent -->
|
||||
<defaultSearchField>desc</defaultSearchField>
|
||||
|
||||
<!-- SolrQueryParser configuration: defaultOperator="AND|OR" -->
|
||||
<solrQueryParser defaultOperator="OR"/>
|
||||
|
||||
</schema>
|
Loading…
Reference in New Issue