SOLR-2947: fix multi-threaded DIH bug (introduced w/SOLR-2382)

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1245014 13f79535-47bb-0310-9956-ffa450edef68
James Dyer 2012-02-16 15:09:13 +00:00
parent c2eb461acc
commit 37bb87e70d
4 changed files with 145 additions and 73 deletions
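Note on the fix: the DocBuilder change below makes the multi-threaded full dump hold on to its EntityRunner and, in a finally block, walk the tree of runners so that every entity processor is destroyed exactly once after the import finishes, rather than being destroyed mid-run. A minimal, self-contained sketch of that pattern, using hypothetical Runner/Processor stand-ins instead of the real DIH classes:

import java.util.ArrayList;
import java.util.List;

// Hypothetical stand-ins, only to illustrate the "destroy everything in finally" pattern.
class Processor {
  void process() { /* do per-entity work */ }
  void destroy() { System.out.println("destroyed " + this); }
}

class Runner {
  final Processor processor = new Processor();
  final List<Runner> children = new ArrayList<Runner>();
  void run() {
    processor.process();
    for (Runner child : children) {
      child.run();
    }
  }
}

public class DestroyInFinallySketch {
  public static void main(String[] args) {
    Runner root = null;
    try {
      root = new Runner();
      root.children.add(new Runner());
      root.run();
    } finally {
      if (root != null) {
        // Breadth-first walk over the runner tree, mirroring the closure loop
        // added to DocBuilder.doFullDump(); each processor is destroyed once,
        // even if run() threw.
        List<Runner> closure = new ArrayList<Runner>();
        closure.add(root);
        for (int i = 0; i < closure.size(); i++) {
          closure.addAll(closure.get(i).children);
        }
        for (Runner r : closure) {
          r.processor.destroy();
        }
      }
    }
  }
}

Because the walk happens in finally, the processors are released even when the import throws, which is what the patch relies on.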

DocBuilder.java

@@ -297,20 +297,32 @@ public class DocBuilder {
       addStatusMessage("Rolledback");
     }
   }
 
   @SuppressWarnings("unchecked")
   private void doFullDump() {
     addStatusMessage("Full Dump Started");
-    if(dataImporter.getConfig().isMultiThreaded && !verboseDebug){
+    if (dataImporter.getConfig().isMultiThreaded && !verboseDebug) {
+      EntityRunner entityRunner = null;
       try {
         LOG.info("running multithreaded full-import");
-        new EntityRunner(root,null).run(null,Context.FULL_DUMP,null);
+        entityRunner = new EntityRunner(root, null);
+        entityRunner.run(null, Context.FULL_DUMP, null);
       } catch (Exception e) {
         throw new RuntimeException("Error in multi-threaded import", e);
+      } finally {
+        if (entityRunner != null) {
+          List<EntityRunner> closure = new ArrayList<EntityRunner>();
+          closure.add(entityRunner);
+          for (int i = 0; i < closure.size(); i++) {
+            assert(!closure.get(i).entityProcessorWrapper.isEmpty());
+            closure.addAll(closure.get(i).entityProcessorWrapper.iterator().next().children.values());
+          }
+          for (EntityRunner er : closure) {
+            er.entityProcessor.destroy();
+          }
+        }
       }
     } else {
       buildDocument(getVariableResolver(), null, null, root, true, null);
     }
   }
 
   @SuppressWarnings("unchecked")
@@ -470,7 +482,6 @@ public class DocBuilder {
         }
       }
     } finally {
-      entityProcessor.destroy();
     }
@@ -572,7 +583,7 @@ public class DocBuilder {
           }
         }
       }
     }
   }
 
   /**A reverse linked list .

dataimport-cache-ephemeral.xml (deleted)

@@ -1,32 +0,0 @@
<dataConfig>
<dataSource type="MockDataSource" />
<document>
<entity
name="PARENT"
processor="SqlEntityProcessor"
cacheName="PARENT"
cachePk="id"
query="SELECT * FROM PARENT"
>
<entity
name="CHILD_1"
processor="SqlEntityProcessor"
cacheImpl="SortedMapBackedCache"
cacheName="CHILD"
cachePk="id"
cacheLookup="PARENT.id"
fieldNames="id, child1a_mult_s, child1b_s"
fieldTypes="BIGDECIMAL, STRING, STRING"
query="SELECT * FROM CHILD_1"
/>
<entity
name="CHILD_2"
processor="SqlEntityProcessor"
cacheImpl="SortedMapBackedCache"
cachePk="id"
cacheLookup="PARENT.id"
query="SELECT * FROM CHILD_2"
/>
</entity>
</document>
</dataConfig>

DestroyCountCache.java (new)

@@ -0,0 +1,21 @@
package org.apache.solr.handler.dataimport;
import static org.hamcrest.CoreMatchers.nullValue;
import java.util.IdentityHashMap;
import java.util.Map;
import org.junit.Assert;
public class DestroyCountCache extends SortedMapBackedCache {
static Map<DIHCache,DIHCache> destroyed = new IdentityHashMap<DIHCache,DIHCache>();
@Override
public void destroy() {
super.destroy();
Assert.assertThat(destroyed.put(this, this), nullValue());
}
public DestroyCountCache() {}
}
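DestroyCountCache above records each destroyed instance in an identity-keyed map: assertThat(destroyed.put(this, this), nullValue()) fails if destroy() is ever called twice on the same cache, and the test below counts the distinct destroyed instances. A stand-alone sketch of the same idiom, with illustrative names (TrackedResource, close()) and, like the original, no synchronization:

import java.util.IdentityHashMap;
import java.util.Map;

// Identity-keyed map records each instance the first time close() runs;
// a second call on the same instance is detected immediately.
class TrackedResource {
  static final Map<TrackedResource, TrackedResource> closed =
      new IdentityHashMap<TrackedResource, TrackedResource>();

  void close() {
    if (closed.put(this, this) != null) {
      throw new IllegalStateException("close() called twice on the same instance");
    }
  }
}

public class OncePerInstanceSketch {
  public static void main(String[] args) {
    TrackedResource a = new TrackedResource();
    TrackedResource b = new TrackedResource();
    a.close();
    b.close();
    // Two distinct instances closed exactly once each.
    System.out.println("closed instances: " + TrackedResource.closed.size());
  }
}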

TestEphemeralCache.java

@@ -4,24 +4,55 @@ import java.math.BigDecimal;
 import java.util.ArrayList;
 import java.util.List;
 
+import static org.hamcrest.CoreMatchers.*;
+
+import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
 
 public class TestEphemeralCache extends AbstractDataImportHandlerTestCase {
 
   @BeforeClass
   public static void beforeClass() throws Exception {
     initCore("dataimport-solrconfig.xml", "dataimport-schema.xml");
   }
 
-  public void testEphemeralCache() throws Exception {
-    List parentRows = new ArrayList();
-    parentRows.add(createMap("id", new BigDecimal("1"), "parent_s", "one"));
-    parentRows.add(createMap("id", new BigDecimal("2"), "parent_s", "two"));
-    parentRows.add(createMap("id", new BigDecimal("3"), "parent_s", "three"));
-    parentRows.add(createMap("id", new BigDecimal("4"), "parent_s", "four"));
-    parentRows.add(createMap("id", new BigDecimal("5"), "parent_s", "five"));
-
-    List child1Rows = new ArrayList();
+  @Before
+  public void reset() {
+    DestroyCountCache.destroyed.clear();
+    setupMockData();
+  }
+
+  @Test
+  public void testSingleThreaded() throws Exception {
+    assertFullImport(getDataConfigDotXml(0));
+  }
+
+  @Test
+  public void testWithThreadedParamEqualOne() throws Exception {
+    assertFullImport(getDataConfigDotXml(1));
+  }
+
+  @Ignore("TODO: fix included in SOLR-3011")
+  @Test
+  public void testMultiThreaded() throws Exception {
+    // Try between 2 and 6 threads
+    int numThreads = random.nextInt(4) + 2;
+    System.out.println("TRYING " + numThreads);
+    assertFullImport(getDataConfigDotXml(numThreads));
+  }
+
+  @SuppressWarnings("unchecked")
+  private void setupMockData() {
+    List parentRows = new ArrayList();
+    parentRows.add(createMap("id", new BigDecimal("1"), "parent_s", "one"));
+    parentRows.add(createMap("id", new BigDecimal("2"), "parent_s", "two"));
+    parentRows.add(createMap("id", new BigDecimal("3"), "parent_s", "three"));
+    parentRows.add(createMap("id", new BigDecimal("4"), "parent_s", "four"));
+    parentRows.add(createMap("id", new BigDecimal("5"), "parent_s", "five"));
+
+    List child1Rows = new ArrayList();
     child1Rows.add(createMap("id", new BigDecimal("6"), "child1a_mult_s", "this is the number six."));
     child1Rows.add(createMap("id", new BigDecimal("5"), "child1a_mult_s", "this is the number five."));
     child1Rows.add(createMap("id", new BigDecimal("6"), "child1a_mult_s", "let's sing a song of six."));
@@ -32,7 +63,7 @@ public class TestEphemeralCache extends AbstractDataImportHandlerTestCase {
     child1Rows.add(createMap("id", new BigDecimal("1"), "child1a_mult_s", "one"));
     child1Rows.add(createMap("id", new BigDecimal("1"), "child1a_mult_s", "uno"));
     child1Rows.add(createMap("id", new BigDecimal("2"), "child1b_s", "CHILD1B", "child1a_mult_s", "this is the number two."));
 
     List child2Rows = new ArrayList();
     child2Rows.add(createMap("id", new BigDecimal("6"), "child2a_mult_s", "Child 2 says, 'this is the number six.'"));
     child2Rows.add(createMap("id", new BigDecimal("5"), "child2a_mult_s", "Child 2 says, 'this is the number five.'"));
@@ -44,25 +75,66 @@ public class TestEphemeralCache extends AbstractDataImportHandlerTestCase {
     child2Rows.add(createMap("id", new BigDecimal("1"), "child2a_mult_s", "Child 2 says, 'one'"));
     child2Rows.add(createMap("id", new BigDecimal("1"), "child2a_mult_s", "Child 2 says, 'uno'"));
     child2Rows.add(createMap("id", new BigDecimal("2"), "child2a_mult_s", "Child 2 says, 'this is the number two.'"));
 
     MockDataSource.setIterator("SELECT * FROM PARENT", parentRows.iterator());
     MockDataSource.setIterator("SELECT * FROM CHILD_1", child1Rows.iterator());
     MockDataSource.setIterator("SELECT * FROM CHILD_2", child2Rows.iterator());
-
-    runFullImport(loadDataConfig("dataimport-cache-ephemeral.xml"));
-
-    assertQ(req("*:*"), "//*[@numFound='5']");
-    assertQ(req("id:1"), "//*[@numFound='1']");
-    assertQ(req("id:6"), "//*[@numFound='0']");
-    assertQ(req("parent_s:four"), "//*[@numFound='1']");
-    assertQ(req("child1a_mult_s:this\\ is\\ the\\ numbe*"), "//*[@numFound='2']");
-    assertQ(req("child2a_mult_s:Child\\ 2\\ say*"), "//*[@numFound='4']");
-    assertQ(req("child1b_s:CHILD1B"), "//*[@numFound='1']");
-    assertQ(req("child2b_s:CHILD2B"), "//*[@numFound='1']");
-    assertQ(req("child1a_mult_s:one"), "//*[@numFound='1']");
-    assertQ(req("child1a_mult_s:uno"), "//*[@numFound='1']");
-    assertQ(req("child1a_mult_s:(uno OR one)"), "//*[@numFound='1']");
   }
+
+  private String getDataConfigDotXml(int numThreads) {
+    return
+      "<dataConfig>" +
+      " <dataSource type=\"MockDataSource\" />" +
+      " <document>" +
+      "  <entity " +
+      "   name=\"PARENT\"" +
+      "   processor=\"SqlEntityProcessor\"" +
+      "   cacheImpl=\"org.apache.solr.handler.dataimport.DestroyCountCache\"" +
+      "   cacheName=\"PARENT\"" +
+      "   query=\"SELECT * FROM PARENT\" " +
+      (numThreads==0 ? "" : "threads=\"" + numThreads + "\" ") +
+      "  >" +
+      "   <entity" +
+      "    name=\"CHILD_1\"" +
+      "    processor=\"SqlEntityProcessor\"" +
+      "    cacheImpl=\"org.apache.solr.handler.dataimport.DestroyCountCache\"" +
+      "    cacheName=\"CHILD\"" +
+      "    cachePk=\"id\"" +
+      "    cacheLookup=\"PARENT.id\"" +
+      "    fieldNames=\"id, child1a_mult_s, child1b_s\"" +
+      "    fieldTypes=\"BIGDECIMAL, STRING, STRING\"" +
+      "    query=\"SELECT * FROM CHILD_1\" " +
+      "   />" +
+      "   <entity" +
+      "    name=\"CHILD_2\"" +
+      "    processor=\"SqlEntityProcessor\"" +
+      "    cacheImpl=\"org.apache.solr.handler.dataimport.DestroyCountCache\"" +
+      "    cachePk=\"id\"" +
+      "    cacheLookup=\"PARENT.id\"" +
+      "    query=\"SELECT * FROM CHILD_2\" " +
+      "   />" +
+      "  </entity>" +
+      " </document>" +
+      "</dataConfig>"
+      ;
+  }
+
+  private void assertFullImport(String dataConfig) throws Exception {
+    runFullImport(dataConfig);
+
+    assertQ(req("*:*"), "//*[@numFound='5']");
+    assertQ(req("id:1"), "//*[@numFound='1']");
+    assertQ(req("id:6"), "//*[@numFound='0']");
+    assertQ(req("parent_s:four"), "//*[@numFound='1']");
+    assertQ(req("child1a_mult_s:this\\ is\\ the\\ numbe*"), "//*[@numFound='2']");
+    assertQ(req("child2a_mult_s:Child\\ 2\\ say*"), "//*[@numFound='4']");
+    assertQ(req("child1b_s:CHILD1B"), "//*[@numFound='1']");
+    assertQ(req("child2b_s:CHILD2B"), "//*[@numFound='1']");
+    assertQ(req("child1a_mult_s:one"), "//*[@numFound='1']");
+    assertQ(req("child1a_mult_s:uno"), "//*[@numFound='1']");
+    assertQ(req("child1a_mult_s:(uno OR one)"), "//*[@numFound='1']");
+
+    assertThat(DestroyCountCache.destroyed.size(), is(3));
+  }
 }