From db2b19f6ba7766cf42bde668f8ec4e7b237e6007 Mon Sep 17 00:00:00 2001
From: Mikhail Khludnev
Date: Sat, 3 Jun 2017 13:41:51 +0300
Subject: [PATCH] SOLR-8762: respond child docs in DIH debug

---
 solr/CHANGES.txt                                  |  2 ++
 .../solr/handler/dataimport/DebugInfo.java        | 32 ++++++++++++++++++-
 .../AbstractDataImportHandlerTestCase.java        |  4 +--
 .../TestHierarchicalDocBuilder.java               | 31 ++++++++++++------
 4 files changed, 56 insertions(+), 13 deletions(-)

diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 3c887be0ea1..ff6ff46357a 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -315,6 +315,8 @@ Other Changes
 * SOLR-8437: Improve RAMDirectory details in sample solrconfig files (Mark Miller, Varun Thacker)
 
+* SOLR-8762: return child docs in DIH debug (Gopikannan Venugopalsamy via Mikhail Khludnev)
+
 ================== 6.6.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
 
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DebugInfo.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DebugInfo.java
index 9bd217aae43..623832fc9c4 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DebugInfo.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DebugInfo.java
@@ -16,6 +16,7 @@
  */
 package org.apache.solr.handler.dataimport;
 
+import java.util.AbstractList;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -25,7 +26,35 @@ import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.StrUtils;
 
 public class DebugInfo {
-  public List<SolrInputDocument> debugDocuments = new ArrayList<>(0);
+
+  private static final class ChildRollupDocs extends AbstractList<SolrInputDocument> {
+
+    private List<SolrInputDocument> delegate = new ArrayList<>();
+
+    @Override
+    public SolrInputDocument get(int index) {
+      return delegate.get(index);
+    }
+
+    @Override
+    public int size() {
+      return delegate.size();
+    }
+
+    public boolean add(SolrInputDocument e) {
+      SolrInputDocument transformed = e.deepCopy();
+      if (transformed.hasChildDocuments()) {
+        ChildRollupDocs childList = new ChildRollupDocs();
+        childList.addAll(transformed.getChildDocuments());
+        transformed.addField("_childDocuments_", childList);
+        transformed.getChildDocuments().clear();
+      }
+      return delegate.add(transformed);
+    }
+  }
+
+  public List<SolrInputDocument> debugDocuments = new ChildRollupDocs();
+
   public NamedList<String> debugVerboseOutput = null;
   public boolean verbose;
 
@@ -34,3 +63,4 @@ public class DebugInfo {
     debugVerboseOutput = new NamedList<>();
   }
 }
+
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java
index db02993558d..7b8ff8849e9 100644
--- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractDataImportHandlerTestCase.java
@@ -80,11 +80,11 @@ public abstract class AbstractDataImportHandlerTestCase extends
     }
   }
 
-  protected void runFullImport(String dataConfig) throws Exception {
+  protected String runFullImport(String dataConfig) throws Exception {
     LocalSolrQueryRequest request = lrf.makeRequest("command", "full-import",
             "debug", "on", "clean", "true", "commit", "true", "dataConfig",
             dataConfig);
-    h.query("/dataimport", request);
+    return h.query("/dataimport", request);
   }
 
   protected void runDeltaImport(String dataConfig) throws Exception {
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestHierarchicalDocBuilder.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestHierarchicalDocBuilder.java
index 603980ad0ca..086d7beed58 100644
--- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestHierarchicalDocBuilder.java
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestHierarchicalDocBuilder.java
@@ -39,6 +39,7 @@ import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.handler.dataimport.config.ConfigNameConstants;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.search.SolrIndexSearcher;
+import org.apache.solr.util.TestHarness;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -118,7 +119,7 @@ public class TestHierarchicalDocBuilder extends AbstractDataImportHandlerTestCas
     List<String> parentIds = createDataIterator("select * from PARENT", parentType, parentType, parentsNum);
     Collections.shuffle(parentIds, random());
-    String parentId1 = parentIds.get(0);
+    final String parentId1 = parentIds.get(0);
     String parentId2 = parentIds.get(1);
 
     //parent 1 children
@@ -129,21 +130,24 @@ public class TestHierarchicalDocBuilder extends AbstractDataImportHandlerTestCas
     childrenNum += childrenIds.size();
 
     // grand children of first parent first child
-    String childId = childrenIds.get(0);
+    final String childId = childrenIds.get(0);
     String description = "grandchild of first parent, child of " + childId + " child";
     select = "select * from GRANDCHILD where parent_id='" + childId + "'";
     List<String> grandChildrenIds = createDataIterator(select, grandChildType, description, atLeast(2));
     grandChildrenNum += grandChildrenIds.size();
 
     // grand children of first parent second child
-    childId = childrenIds.get(1);
-    description = "grandchild of first parent, child of " + childId + " child";
-    select = "select * from GRANDCHILD where parent_id='" + childId + "'";
-    List<String> grandChildrenIds2 = createDataIterator(select, grandChildType, description, atLeast(2));
+    {
+      String childId2 = childrenIds.get(1);
+      description = "grandchild of first parent, child of " + childId2 + " child";
+      select = "select * from GRANDCHILD where parent_id='" + childId2 + "'";
+    }
+    final List<String> grandChildrenIds2 = createDataIterator(select, grandChildType, description, atLeast(2));
     grandChildrenNum += grandChildrenIds2.size();
-    grandChildrenIds.addAll(grandChildrenIds2);
-
+    List<String> allGrandChildrenIds = new ArrayList<>(grandChildrenIds);
+    allGrandChildrenIds.addAll(grandChildrenIds2);
+
     // third children of first parent has no grand children
 
     // parent 2 children (no grand children)
 
@@ -155,7 +159,14 @@
 
     int totalDocsNum = parentsNum + childrenNum + grandChildrenNum;
 
-    runFullImport(THREE_LEVEL_HIERARCHY_CONFIG);
+    String resp = runFullImport(THREE_LEVEL_HIERARCHY_CONFIG);
+    String xpath = "//arr[@name='documents']/lst/arr[@name='id' and .='"+parentId1+"']/../"+
+        "arr[@name='_childDocuments_']/lst/arr[@name='id' and .='"+childId+"']/../"+
+        "arr[@name='_childDocuments_']/lst/arr[@name='id' and .='"+grandChildrenIds.get(0)+"']";
+    String results = TestHarness.validateXPath(resp,
+        xpath);
+    assertTrue("Debug documents does not contain child documents\n"+resp+"\n"+ xpath+
+        "\n"+results, results == null);
 
     assertTrue("Update request processor processAdd was not called", TestUpdateRequestProcessor.processAddCalled);
     assertTrue("Update request processor processCommit was not callled", TestUpdateRequestProcessor.processCommitCalled);
@@ -169,7 +180,7 @@
 
     // let's check BlockJoin
     // get first parent by any grand children
-    String randomGrandChildId = grandChildrenIds.get(random().nextInt(grandChildrenIds.size()));
+    String randomGrandChildId = allGrandChildrenIds.get(random().nextInt(allGrandChildrenIds.size()));
     Query query = createToParentQuery(parentType, FIELD_ID, randomGrandChildId);
     assertSearch(query, FIELD_ID, parentId1);