SOLR-6170: Merge CoreContainer preRegisterInZk and register methods into create method

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1606891 13f79535-47bb-0310-9956-ffa450edef68
Alan Woodward 2014-06-30 19:41:07 +00:00
parent b7290e523b
commit 3fb008e7f0
12 changed files with 122 additions and 219 deletions

File: CHANGES.txt

@@ -107,6 +107,10 @@ Upgrading from Solr 4.9
schema version is 1.5 or greater (SOLR-3140), but TrieDateField's default was mistakenly
not changed. As of Solr 4.10, TrieDateField omits norms by default (see SOLR-6211).
* Creating a SolrCore via CoreContainer.create() no longer requires an
additional call to CoreContainer.register() to make it available to clients
(see SOLR-6170).
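
As a rough illustration of the upgrade note above (this is a sketch, not part of the patch; coreContainer and coreDescriptor stand for an existing CoreContainer and CoreDescriptor):

    // Solr 4.9 and earlier: create the core, then register it in a separate step
    SolrCore core = coreContainer.create(coreDescriptor);
    coreContainer.register(core, false);

    // Solr 4.10: create() registers the core itself (and, in SolrCloud mode,
    // publishes its state), so the register() call goes away
    SolrCore core = coreContainer.create(coreDescriptor);
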
Detailed Change List
----------------------
@@ -154,6 +158,9 @@ Other Changes
* SOLR-6194: Allow access to DataImporter and DIHConfiguration from DataImportHandler.
(Aaron LaBella via shalin)
* SOLR-6170: CoreContainer.preRegisterInZk() and CoreContainer.register() commands
are merged into CoreContainer.create(). (Alan Woodward)
================== 4.9.0 ==================
Versions of Major Components

File: SolrRecordWriter.java

@@ -16,6 +16,25 @@
*/
package org.apache.solr.hadoop;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.CoreDescriptor;
import org.apache.solr.core.HdfsDirectoryFactory;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrResourceLoader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
@@ -29,26 +48,6 @@ import java.util.Properties;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.solr.hadoop.SolrOutputFormat;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.CoreDescriptor;
import org.apache.solr.core.HdfsDirectoryFactory;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrResourceLoader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
class SolrRecordWriter<K, V> extends RecordWriter<K, V> {
private static final Logger LOG = LoggerFactory.getLogger(SolrRecordWriter.class);
@@ -176,8 +175,6 @@ class SolrRecordWriter<K, V> extends RecordWriter<K, V> {
+ HdfsDirectoryFactory.class.getSimpleName());
}
container.register(core, false);
EmbeddedSolrServer solr = new EmbeddedSolrServer(container, "core1");
return solr;
}

File: CoreContainer.java

@@ -253,33 +253,12 @@ public class CoreContainer {
Callable<SolrCore> task = new Callable<SolrCore>() {
@Override
public SolrCore call() {
SolrCore c = null;
try {
if (zkSys.getZkController() != null) {
preRegisterInZk(cd);
}
c = create(cd);
registerCore(cd.isTransient(), name, c, false, false);
return create(cd, false);
} catch (Exception e) {
SolrException.log(log, null, e);
try {
/* if (isZooKeeperAware()) {
try {
zkSys.zkController.unregister(name, cd);
} catch (InterruptedException e2) {
Thread.currentThread().interrupt();
SolrException.log(log, null, e2);
} catch (KeeperException e3) {
SolrException.log(log, null, e3);
}
}*/
} finally {
if (c != null) {
c.close();
}
}
return null;
}
return c;
}
};
pending.add(completionService.submit(task));
@@ -450,12 +429,8 @@ public class CoreContainer {
public CoresLocator getCoresLocator() {
return coresLocator;
}
protected SolrCore registerCore(boolean isTransientCore, String name, SolrCore core, boolean returnPrevNotClosed) {
return registerCore(isTransientCore, name, core, returnPrevNotClosed, true);
}
protected SolrCore registerCore(boolean isTransientCore, String name, SolrCore core, boolean returnPrevNotClosed, boolean registerInZk) {
protected SolrCore registerCore(String name, SolrCore core, boolean registerInZk) {
if( core == null ) {
throw new RuntimeException( "Can not register a null core." );
}
@@ -480,7 +455,7 @@
core.close();
throw new IllegalStateException("This CoreContainer has been shutdown");
}
if (isTransientCore) {
if (cd.isTransient()) {
old = solrCores.putTransientCore(cfg, name, core, loader);
} else {
old = solrCores.putCore(name, core);
@@ -505,9 +480,7 @@
}
else {
log.info( "replacing core: "+name );
if (!returnPrevNotClosed) {
old.close();
}
old.close();
if (registerInZk) {
zkSys.registerInZk(core, false);
}
@@ -516,34 +489,23 @@
}
/**
* Registers a SolrCore descriptor in the registry using the core's name.
* If returnPrev==false, the old core, if different, is closed.
* @return a previous core having the same name if it existed and returnPrev==true
* Creates a new core based on a CoreDescriptor, publishing the core state to the cluster
* @param cd the CoreDescriptor
* @return the newly created core
*/
public SolrCore register(SolrCore core, boolean returnPrev) {
return registerCore(core.getCoreDescriptor().isTransient(), core.getName(), core, returnPrev);
}
public SolrCore register(String name, SolrCore core, boolean returnPrev) {
return registerCore(core.getCoreDescriptor().isTransient(), name, core, returnPrev);
}
public SolrCore create(String name, String instanceDir, String... properties) {
Properties props = new Properties();
assert properties.length % 2 == 0;
for (int i = 0; i < properties.length; i += 2) {
props.setProperty(properties[i], properties[i+1]);
}
return create(new CoreDescriptor(this, name, instanceDir, props));
public SolrCore create(CoreDescriptor cd) {
return create(cd, true);
}
/**
* Creates a new core based on a descriptor but does not register it.
* Creates a new core based on a CoreDescriptor.
*
* @param dcore a core descriptor
* @param publishState publish core state to the cluster if true
*
* @param dcore a core descriptor
* @return the newly created core
*/
public SolrCore create(CoreDescriptor dcore) {
public SolrCore create(CoreDescriptor dcore, boolean publishState) {
if (isShutDown) {
throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, "Solr has shutdown.");
@@ -551,6 +513,10 @@
try {
if (zkSys.getZkController() != null) {
zkSys.getZkController().preRegister(dcore);
}
ConfigSet coreConfig = coreConfigService.getConfig(dcore);
log.info("Creating SolrCore '{}' using configuration from {}", dcore.getName(), coreConfig.getName());
SolrCore core = new SolrCore(dcore, coreConfig);
@@ -561,6 +527,8 @@
core.getUpdateHandler().getUpdateLog().recoverFromLog();
}
registerCore(dcore.getName(), core, publishState);
return core;
}
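
Read together with the Javadoc above, the two create() overloads now relate roughly as follows (an illustrative sketch; coreContainer and cd are placeholder names, not part of the patch):

    // create(cd) is shorthand for create(cd, true): the core is created, registered
    // with the container, and registerCore(name, core, true) also registers it in
    // ZooKeeper when running in SolrCloud mode.
    SolrCore core = coreContainer.create(cd);

    // With publishState=false the core is still registered with the container, but
    // the registerInZk step inside registerCore() is skipped; the container uses
    // this form internally when loading cores at startup.
    SolrCore loaded = coreContainer.create(cd, false);
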
@@ -650,7 +618,7 @@
SolrCore newCore = core.reload(coreConfig, core);
// keep core to orig name link
solrCores.removeCoreToOrigName(newCore, core);
registerCore(false, name, newCore, false, false);
registerCore(name, newCore, false);
} finally {
solrCores.removeFromPendingOps(name);
}
@@ -694,7 +662,7 @@
public void rename(String name, String toName) {
try (SolrCore core = getCore(name)) {
if (core != null) {
registerCore(false, toName, core, false);
registerCore(toName, core, true);
name = checkDefault(name);
SolrCore old = solrCores.remove(name, false);
coresLocator.rename(this, old.getCoreDescriptor(), core.getCoreDescriptor());
@@ -765,29 +733,12 @@
// the wait as a consequence of shutting down.
try {
if (core == null) {
if (zkSys.getZkController() != null) {
preRegisterInZk(desc);
}
core = create(desc); // This should throw an error if it fails.
core.open();
registerCore(desc.isTransient(), name, core, false);
} else {
core.open();
}
} catch(Exception ex){
// remains to be seen how transient cores and such
// will work in SolrCloud mode, but just to be future
// proof...
/*if (isZooKeeperAware()) {
try {
getZkController().unregister(name, desc);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
SolrException.log(log, null, e);
} catch (KeeperException e) {
SolrException.log(log, null, e);
}
}*/
throw recordAndThrow(name, "Unable to create core: " + name, ex);
} finally {
solrCores.removeFromPendingOps(name);
@@ -876,10 +827,6 @@
return solrCores.getUnloadedCoreDescriptor(cname);
}
public void preRegisterInZk(final CoreDescriptor p) {
zkSys.getZkController().preRegister(p);
}
public String getSolrHome() {
return solrHome;
}

File: CoreDescriptor.java

@@ -138,6 +138,19 @@ public class CoreDescriptor {
Properties coreProps) {
this(container, name, instanceDir, coreProps, null);
}
public CoreDescriptor(CoreContainer container, String name, String instanceDir, String... properties) {
this(container, name, instanceDir, toProperties(properties));
}
private static Properties toProperties(String... properties) {
Properties props = new Properties();
assert properties.length % 2 == 0;
for (int i = 0; i < properties.length; i += 2) {
props.setProperty(properties[i], properties[i+1]);
}
return props;
}
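
A usage sketch for the new varargs constructor, which expects alternating property names and values (container and testDirectory here are assumed to be a CoreContainer and an instance-dir prefix, as in the updated TestConfigSets test below):

    // equivalent to building a Properties object with configSet=configset-2
    CoreDescriptor cd = new CoreDescriptor(container, "core1",
        testDirectory + "core1", "configSet", "configset-2");
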
/**
* Create a new CoreDescriptor.

File: CoreAdminHandler.java

@@ -565,7 +565,6 @@ public class CoreAdminHandler extends RequestHandlerBase {
}
}
coreContainer.preRegisterInZk(dcore);
}
// make sure we can write out the descriptor first
@@ -573,8 +572,6 @@
SolrCore core = coreContainer.create(dcore);
coreContainer.register(core, false);
if (coreContainer.getCoresLocator() instanceof SolrXMLCoresLocator) {
// hack - in this case we persist once more because a core create race might
// have dropped entries.

File: ClusterStateUpdateTest.java

@@ -17,8 +17,6 @@ package org.apache.solr.cloud;
* limitations under the License.
*/
import org.apache.commons.io.FileUtils;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.cloud.ClusterState;
@@ -161,14 +159,8 @@ public class ClusterStateUpdateTest extends SolrTestCaseJ4 {
CoreDescriptor dcore = buildCoreDescriptor(container1, "testcore", "testcore")
.withDataDir(dataDir4.getAbsolutePath()).build();
if (container1.getZkController() != null) {
container1.preRegisterInZk(dcore);
}
SolrCore core = container1.create(dcore);
container1.register(core, false);
ZkController zkController2 = container2.getZkController();
String host = zkController2.getHostName();

File: CoreContainerCoreInitFailuresTest.java

@@ -17,11 +17,6 @@
package org.apache.solr.core;
import java.io.File;
import java.util.Collection;
import java.util.Map;
import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils;
import org.apache.lucene.util.IOUtils;
import org.apache.solr.SolrTestCaseJ4;
@@ -29,6 +24,11 @@ import org.apache.solr.common.SolrException;
import org.junit.After;
import org.xml.sax.SAXParseException;
import java.io.File;
import java.util.Collection;
import java.util.Map;
import java.util.regex.Pattern;
public class CoreContainerCoreInitFailuresTest extends SolrTestCaseJ4 {
File solrHome = null;
@@ -188,7 +188,7 @@ public class CoreContainerCoreInitFailuresTest extends SolrTestCaseJ4 {
FileUtils.copyFile(getFile("solr/collection1/conf/solrconfig-defaults.xml"),
FileUtils.getFile(solrHome, "col_bad", "conf", "solrconfig.xml"));
final CoreDescriptor fixed = new CoreDescriptor(cc, "col_bad", "col_bad");
cc.register("col_bad", cc.create(fixed), false);
cc.create(fixed);
// check that we have the cores we expect
cores = cc.getCoreNames();
@@ -246,24 +246,6 @@
0 < cause.indexOf("bogus_path"));
}
// -----
// register bogus as an alias for col_ok and confirm failure goes away
cc.register("bogus", cc.getCore("col_ok"), false);
// check that we have the cores we expect
cores = cc.getCoreNames();
assertNotNull("core names is null", cores);
assertEquals("wrong number of cores", 3, cores.size());
assertTrue("col_ok not found", cores.contains("col_ok"));
assertTrue("col_bad not found", cores.contains("col_bad"));
assertTrue("bogus not found", cores.contains("bogus"));
// check that we have the failures we expect
failures = cc.getCoreInitFailures();
assertNotNull("core failures is a null map", failures);
assertEquals("wrong number of core failures", 0, failures.size());
// -----
// break col_bad's config and try to RELOAD to add failure
@@ -294,15 +276,14 @@
// check that we have the cores we expect
cores = cc.getCoreNames();
assertNotNull("core names is null", cores);
assertEquals("wrong number of cores", 3, cores.size());
assertEquals("wrong number of cores", 2, cores.size());
assertTrue("col_ok not found", cores.contains("col_ok"));
assertTrue("col_bad not found", cores.contains("col_bad"));
assertTrue("bogus not found", cores.contains("bogus"));
// check that we have the failures we expect
failures = cc.getCoreInitFailures();
assertNotNull("core failures is a null map", failures);
assertEquals("wrong number of core failures", 1, failures.size());
assertEquals("wrong number of core failures", 2, failures.size());
Throwable ex = getWrappedException(failures.get("col_bad"));
assertNotNull("null failure for test core", ex);
assertTrue("init failure isn't SAXParseException",
@@ -323,15 +304,14 @@
// check that we have the cores we expect
cores = cc.getCoreNames();
assertNotNull("core names is null", cores);
assertEquals("wrong number of cores", 3, cores.size());
assertEquals("wrong number of cores", 2, cores.size());
assertTrue("col_ok not found", cores.contains("col_ok"));
assertTrue("col_bad not found", cores.contains("col_bad"));
assertTrue("bogus not found", cores.contains("bogus"));
// check that we have the failures we expect
failures = cc.getCoreInitFailures();
assertNotNull("core failures is a null map", failures);
assertEquals("wrong number of core failures", 0, failures.size());
assertEquals("wrong number of core failures", 1, failures.size());
}

File: SolrCoreTest.java

@@ -17,6 +17,17 @@
package org.apache.solr.core;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.handler.RequestHandlerBase;
import org.apache.solr.handler.component.QueryComponent;
import org.apache.solr.handler.component.SpellCheckComponent;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.util.DefaultSolrThreadFactory;
import org.apache.solr.util.plugin.SolrCoreAware;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -26,18 +37,6 @@ import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.handler.RequestHandlerBase;
import org.apache.solr.handler.component.QueryComponent;
import org.apache.solr.handler.component.SpellCheckComponent;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.util.DefaultSolrThreadFactory;
import org.apache.solr.util.plugin.SolrCoreAware;
import org.junit.Test;
public class SolrCoreTest extends SolrTestCaseJ4 {
private static final String COLLECTION1 = "collection1";
@@ -58,18 +57,16 @@ public class SolrCoreTest extends SolrTestCaseJ4 {
final CoreContainer cores = h.getCoreContainer();
SolrCore core = cores.getCore("");
IndexSchema schema = h.getCore().getLatestSchema();
assertEquals(COLLECTION1, cores.getDefaultCoreName());
cores.remove("");
core.close();
core.close();
CoreDescriptor cd = new CoreDescriptor(cores, COLLECTION1, "collection1",
CoreDescriptor.CORE_DATADIR, createTempDir("dataDir2").getAbsolutePath());
SolrCore newCore = new SolrCore(COLLECTION1, createTempDir("dataDir2").getAbsolutePath(), new SolrConfig("solr/collection1", "solrconfig.xml", null), schema,
new CoreDescriptor(cores, COLLECTION1, "solr/collection1"));
cores.register(newCore, false);
cores.create(cd);
assertEquals(COLLECTION1, cores.getDefaultCoreName());

File: TestConfigSets.java

@@ -69,19 +69,15 @@ public class TestConfigSets extends SolrTestCaseJ4 {
@Test
public void testConfigSetServiceFindsConfigSets() {
CoreContainer container = null;
SolrCore core1 = null;
try {
container = setupContainer(getFile("solr/configsets").getAbsolutePath());
String testDirectory = new File(container.getResourceLoader().getInstanceDir()).getAbsolutePath();
core1 = container.create("core1", testDirectory + "core1", "configSet", "configset-2");
SolrCore core1 = container.create(new CoreDescriptor(container, "core1", testDirectory + "core1", "configSet", "configset-2"));
assertThat(core1.getCoreDescriptor().getName(), is("core1"));
assertThat(core1.getDataDir(), is(testDirectory + "core1" + File.separator + "data" + File.separator));
}
finally {
if (core1 != null) {
core1.close();
}
if (container != null)
container.shutdown();
}
@@ -94,7 +90,7 @@
container = setupContainer(getFile("solr/configsets").getAbsolutePath());
String testDirectory = container.getResourceLoader().getInstanceDir();
container.create("core1", testDirectory + "/core1", "configSet", "nonexistent");
container.create(new CoreDescriptor(container, "core1", testDirectory + "/core1", "configSet", "nonexistent"));
fail("Expected core creation to fail");
}
catch (Exception e) {
@@ -123,8 +119,7 @@
container.load();
// We initially don't have a /get handler defined
SolrCore core = container.create("core1", testDirectory + "/core", "configSet", "configset-2");
container.register(core, false);
SolrCore core = container.create(new CoreDescriptor(container, "core1", testDirectory + "/core", "configSet", "configset-2"));
assertThat("No /get handler should be defined in the initial configuration",
core.getRequestHandler("/get"), is(nullValue()));

File: TestCoreContainer.java

@@ -17,9 +17,17 @@
package org.apache.solr.core;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsInstanceOf.instanceOf;
import org.apache.commons.io.FileUtils;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.handler.admin.CollectionsHandler;
import org.apache.solr.handler.admin.CoreAdminHandler;
import org.apache.solr.handler.admin.InfoHandler;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.xml.sax.SAXException;
import javax.xml.parsers.ParserConfigurationException;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
@@ -31,18 +39,8 @@ import java.util.List;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.commons.io.FileUtils;
import org.apache.lucene.util.TestUtil;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.handler.admin.CollectionsHandler;
import org.apache.solr.handler.admin.CoreAdminHandler;
import org.apache.solr.handler.admin.InfoHandler;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.xml.sax.SAXException;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsInstanceOf.instanceOf;
public class TestCoreContainer extends SolrTestCaseJ4 {
@@ -91,9 +89,7 @@ public class TestCoreContainer extends SolrTestCaseJ4 {
SolrCore core2 = cores.create(descriptor2);
assertSame(core1.getLatestSchema(), core2.getLatestSchema());
core1.close();
core2.close();
} finally {
cores.shutdown();
System.clearProperty("shareSchema");
@@ -163,8 +159,7 @@
//add a new core
CoreDescriptor coreDescriptor = new CoreDescriptor(cores, "core1", solrHomeDirectory.getAbsolutePath());
SolrCore newCore = cores.create(coreDescriptor);
cores.register(newCore, false);
//assert one registered core
assertEquals("There core registered", 1, cores.getCores().size());
@@ -225,7 +220,6 @@
ClassLoader coreLoader = core1.getResourceLoader().getClassLoader();
assertSame(sharedLoader, coreLoader.getParent());
core1.close();
} finally {
cc.shutdown();
}

File: TestSolrXmlPersistence.java

@@ -17,26 +17,11 @@
package org.apache.solr.core;
import static org.hamcrest.core.Is.is;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.params.CoreAdminParams;
import org.apache.solr.handler.admin.CoreAdminHandler;
@@ -53,8 +38,19 @@ import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import com.google.common.base.Charsets;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import static org.hamcrest.core.Is.is;
public class TestSolrXmlPersistence extends SolrTestCaseJ4 {
@@ -446,8 +442,6 @@
try {
x = cores.create(xd);
y = cores.create(yd);
cores.register(x, false);
cores.register(y, false);
assertEquals("cores not added?", 3, cores.getCoreNames().size());

File: SolrIndexSplitterTest.java

@@ -17,11 +17,7 @@ package org.apache.solr.update;
* limitations under the License.
*/
import java.io.File;
import java.io.UnsupportedEncodingException;
import java.util.List;
import com.google.common.collect.Lists;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
@@ -40,8 +36,10 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import com.google.common.collect.Lists;
import java.io.File;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.List;
public class SolrIndexSplitterTest extends SolrTestCaseJ4 {
File indexDir1 = null, indexDir2 = null, indexDir3 = null;
@@ -165,19 +163,11 @@
CoreDescriptor dcore1 = buildCoreDescriptor(h.getCoreContainer(), "split1", instanceDir)
.withDataDir(indexDir1.getAbsolutePath()).withSchema("schema12.xml").build();
if (h.getCoreContainer().getZkController() != null) {
h.getCoreContainer().preRegisterInZk(dcore1);
}
core1 = h.getCoreContainer().create(dcore1);
h.getCoreContainer().register(core1, false);
CoreDescriptor dcore2 = buildCoreDescriptor(h.getCoreContainer(), "split2", instanceDir)
.withDataDir(indexDir2.getAbsolutePath()).withSchema("schema12.xml").build();
if (h.getCoreContainer().getZkController() != null) {
h.getCoreContainer().preRegisterInZk(dcore2);
}
core2 = h.getCoreContainer().create(dcore2);
h.getCoreContainer().register(core2, false);
LocalSolrQueryRequest request = null;
try {