SOLR-8762: return child docs in DIH debug

This commit is contained in:
Mikhail Khludnev 2017-06-03 13:41:51 +03:00
parent 2c9f8604c2
commit db2b19f6ba
4 changed files with 56 additions and 13 deletions

View File

@ -315,6 +315,8 @@ Other Changes
* SOLR-8437: Improve RAMDirectory details in sample solrconfig files (Mark Miller, Varun Thacker)
* SOLR-8762: return child docs in DIH debug (Gopikannan Venugopalsamy via Mikhail Khludnev)
================== 6.6.0 ==================
Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

View File

@ -16,6 +16,7 @@
*/
package org.apache.solr.handler.dataimport;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@ -25,7 +26,35 @@ import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.StrUtils;
public class DebugInfo {
public List<SolrInputDocument> debugDocuments = new ArrayList<>(0);
/**
 * List of debug documents that, on add, deep-copies each document and rolls its
 * child documents up into a {@code "_childDocuments_"} field so nested documents
 * appear in the DIH debug response (SOLR-8762). Read operations delegate to an
 * internal {@link ArrayList}.
 */
private static final class ChildRollupDocs extends AbstractList<SolrInputDocument> {

  // Backing store for the transformed documents; never reassigned.
  private final List<SolrInputDocument> delegate = new ArrayList<>();

  @Override
  public SolrInputDocument get(int index) {
    return delegate.get(index);
  }

  @Override
  public int size() {
    return delegate.size();
  }

  /**
   * Adds a deep copy of {@code e}. If the copy has child documents, they are
   * wrapped (recursively, via {@code addAll} calling back into this method) in a
   * nested {@code ChildRollupDocs}, attached under the {@code "_childDocuments_"}
   * field, and removed from the copy's own child-document list. The original
   * document {@code e} is never mutated.
   *
   * @param e document to copy and store
   * @return the result of adding the transformed copy to the backing list
   */
  @Override // was missing: this overrides AbstractList.add(E)
  public boolean add(SolrInputDocument e) {
    SolrInputDocument transformed = e.deepCopy();
    if (transformed.hasChildDocuments()) {
      ChildRollupDocs childList = new ChildRollupDocs();
      childList.addAll(transformed.getChildDocuments());
      transformed.addField("_childDocuments_", childList);
      transformed.getChildDocuments().clear();
    }
    return delegate.add(transformed);
  }
}
public List<SolrInputDocument> debugDocuments = new ChildRollupDocs();
public NamedList<String> debugVerboseOutput = null;
public boolean verbose;
@ -34,3 +63,4 @@ public class DebugInfo {
debugVerboseOutput = new NamedList<>();
}
}

View File

@ -80,11 +80,11 @@ public abstract class AbstractDataImportHandlerTestCase extends
}
}
protected void runFullImport(String dataConfig) throws Exception {
/**
 * Runs a debug-enabled, clean, committing full-import with the given data
 * config and returns the raw response produced by the /dataimport handler.
 *
 * @param dataConfig the DIH data-config XML to import with
 * @return the handler's response body
 * @throws Exception if the request fails
 */
protected String runFullImport(String dataConfig) throws Exception {
  LocalSolrQueryRequest req =
      lrf.makeRequest(
          "command", "full-import",
          "debug", "on",
          "clean", "true",
          "commit", "true",
          "dataConfig", dataConfig);
  return h.query("/dataimport", req);
}
protected void runDeltaImport(String dataConfig) throws Exception {

View File

@ -39,6 +39,7 @@ import org.apache.solr.common.util.StrUtils;
import org.apache.solr.handler.dataimport.config.ConfigNameConstants;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.util.TestHarness;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
@ -118,7 +119,7 @@ public class TestHierarchicalDocBuilder extends AbstractDataImportHandlerTestCas
List<String> parentIds = createDataIterator("select * from PARENT", parentType, parentType, parentsNum);
Collections.shuffle(parentIds, random());
String parentId1 = parentIds.get(0);
final String parentId1 = parentIds.get(0);
String parentId2 = parentIds.get(1);
//parent 1 children
@ -129,20 +130,23 @@ public class TestHierarchicalDocBuilder extends AbstractDataImportHandlerTestCas
childrenNum += childrenIds.size();
// grand children of first parent first child
String childId = childrenIds.get(0);
final String childId = childrenIds.get(0);
String description = "grandchild of first parent, child of " + childId + " child";
select = "select * from GRANDCHILD where parent_id='" + childId + "'";
List<String> grandChildrenIds = createDataIterator(select, grandChildType, description, atLeast(2));
grandChildrenNum += grandChildrenIds.size();
// grand children of first parent second child
childId = childrenIds.get(1);
description = "grandchild of first parent, child of " + childId + " child";
select = "select * from GRANDCHILD where parent_id='" + childId + "'";
List<String> grandChildrenIds2 = createDataIterator(select, grandChildType, description, atLeast(2));
{
String childId2 = childrenIds.get(1);
description = "grandchild of first parent, child of " + childId2 + " child";
select = "select * from GRANDCHILD where parent_id='" + childId2 + "'";
}
final List<String> grandChildrenIds2 = createDataIterator(select, grandChildType, description, atLeast(2));
grandChildrenNum += grandChildrenIds2.size();
grandChildrenIds.addAll(grandChildrenIds2);
List<String> allGrandChildrenIds = new ArrayList<>(grandChildrenIds);
allGrandChildrenIds.addAll(grandChildrenIds2);
// third children of first parent has no grand children
@ -155,7 +159,14 @@ public class TestHierarchicalDocBuilder extends AbstractDataImportHandlerTestCas
int totalDocsNum = parentsNum + childrenNum + grandChildrenNum;
runFullImport(THREE_LEVEL_HIERARCHY_CONFIG);
String resp = runFullImport(THREE_LEVEL_HIERARCHY_CONFIG);
String xpath = "//arr[@name='documents']/lst/arr[@name='id' and .='"+parentId1+"']/../"+
"arr[@name='_childDocuments_']/lst/arr[@name='id' and .='"+childId+"']/../"+
"arr[@name='_childDocuments_']/lst/arr[@name='id' and .='"+grandChildrenIds.get(0)+"']";
String results = TestHarness.validateXPath(resp,
xpath);
assertTrue("Debug documents does not contain child documents\n"+resp+"\n"+ xpath+
"\n"+results, results == null);
assertTrue("Update request processor processAdd was not called", TestUpdateRequestProcessor.processAddCalled);
assertTrue("Update request processor processCommit was not callled", TestUpdateRequestProcessor.processCommitCalled);
@ -169,7 +180,7 @@ public class TestHierarchicalDocBuilder extends AbstractDataImportHandlerTestCas
// let's check BlockJoin
// get first parent by any grand children
String randomGrandChildId = grandChildrenIds.get(random().nextInt(grandChildrenIds.size()));
String randomGrandChildId = allGrandChildrenIds.get(random().nextInt(allGrandChildrenIds.size()));
Query query = createToParentQuery(parentType, FIELD_ID, randomGrandChildId);
assertSearch(query, FIELD_ID, parentId1);