SOLR-14247 Remove unneeded sleeps (#1244)

Mike authored 2020-02-10 21:13:56 -06:00, committed by GitHub
parent b21312f411
commit 71b869381e
2 changed files with 10 additions and 35 deletions
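
The pattern this commit leans on, waiting on a CountDownLatch that the trigger listener counts down instead of sleeping for a fixed interval, can be sketched in isolation. This is an illustrative stand-alone example, not code from the commit; the class name LatchWaitSketch and the worker thread are assumptions made for the demo.

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

public class LatchWaitSketch {
  public static void main(String[] args) throws InterruptedException {
    // Stand-in for the test's "finished" latch; in the test, the trigger
    // listener counts it down once event processing completes.
    CountDownLatch finished = new CountDownLatch(1);

    // Simulated background work that signals completion when done.
    Thread worker = new Thread(() -> {
      try {
        Thread.sleep(500);
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
      }
      finished.countDown();
    });
    worker.start();

    // Instead of sleeping for a fixed interval and hoping the work has
    // finished, block until the signal arrives or a generous timeout expires.
    boolean done = finished.await(90, TimeUnit.SECONDS);
    if (!done) {
      throw new AssertionError("did not finish processing in time");
    }
    System.out.println("processing finished without a fixed sleep");
  }
}

The same timeout-plus-assert shape appears in the test body below as finished.await(90000 / SPEED, TimeUnit.MILLISECONDS) followed by assertTrue.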

IndexSizeTriggerMixedBoundsTest.java

@@ -50,12 +50,10 @@ import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.UpdateParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.Pair;
import org.apache.solr.common.util.TimeSource;
import org.apache.solr.common.util.Utils;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.util.LogLevel;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
@@ -71,40 +69,31 @@ public class IndexSizeTriggerMixedBoundsTest extends SolrCloudTestCase {
private static SolrCloudManager cloudManager;
private static SolrClient solrClient;
private static TimeSource timeSource;
private static int SPEED = 1;
static Map<String, List<CapturedEvent>> listenerEvents = new ConcurrentHashMap<>();
static CountDownLatch listenerCreated = new CountDownLatch(1);
static CountDownLatch finished = new CountDownLatch(1);
static CountDownLatch listenerCreated;
static CountDownLatch finished;
@BeforeClass
public static void setupCluster() throws Exception {
configureCluster(2)
.addConfig("conf", configset("cloud-minimal"))
.configure();
.addConfig("conf", configset("cloud-minimal"))
.configure();
cloudManager = cluster.getJettySolrRunner(0).getCoreContainer().getZkController().getSolrCloudManager();
solrClient = cluster.getSolrClient();
timeSource = cloudManager.getTimeSource();
}
@After
public void restoreDefaults() throws Exception {
@Before
public void setDefaults() throws Exception {
cluster.deleteAllCollections();
cloudManager.getDistribStateManager().setData(SOLR_AUTOSCALING_CONF_PATH, Utils.toJSON(new ZkNodeProps()), -1);
cloudManager.getTimeSource().sleep(5000);
listenerEvents.clear();
listenerCreated = new CountDownLatch(1);
finished = new CountDownLatch(1);
}
@AfterClass
public static void teardown() throws Exception {
solrClient = null;
cloudManager = null;
}
public static class CapturingTriggerListener extends TriggerListenerBase {
@Override
public void configure(SolrResourceLoader loader, SolrCloudManager cloudManager, AutoScalingConfig.TriggerListenerConfig config) throws TriggerValidationException {
@@ -116,7 +105,7 @@ public class IndexSizeTriggerMixedBoundsTest extends SolrCloudTestCase {
public synchronized void onEvent(TriggerEvent event, TriggerEventProcessorStage stage, String actionName,
ActionContext context, Throwable error, String message) {
List<CapturedEvent> lst = listenerEvents.computeIfAbsent(config.name, s -> new ArrayList<>());
CapturedEvent ev = new CapturedEvent(timeSource.getTimeNs(), context, config, stage, actionName, event, message);
CapturedEvent ev = new CapturedEvent(cloudManager.getTimeSource().getTimeNs(), context, config, stage, actionName, event, message);
log.info("=======> " + ev);
lst.add(ev);
}
@@ -235,8 +224,6 @@ public class IndexSizeTriggerMixedBoundsTest extends SolrCloudTestCase {
response = solrClient.request(req);
assertEquals(response.get("result").toString(), "success");
timeSource.sleep(TimeUnit.MILLISECONDS.convert(waitForSeconds + 1, TimeUnit.SECONDS));
boolean await = finished.await(90000 / SPEED, TimeUnit.MILLISECONDS);
assertTrue("did not finish processing in time", await);
log.info("-- suspending trigger");
@@ -301,7 +288,7 @@ public class IndexSizeTriggerMixedBoundsTest extends SolrCloudTestCase {
"}";
req = AutoScalingRequest.create(SolrRequest.METHOD.POST, suspendTriggerCommand);
response = solrClient.request(req);
assertEquals(response.get("result").toString(), "success");
assertEquals("success", response.get("result").toString());
log.info("-- deleting documents");
for (int j = 0; j < 10; j++) {
@@ -312,7 +299,6 @@ public class IndexSizeTriggerMixedBoundsTest extends SolrCloudTestCase {
}
solrClient.request(ureq);
}
cloudManager.getTimeSource().sleep(5000);
// make sure the actual index size is reduced by deletions, otherwise we may still violate aboveBytes
UpdateRequest ur = new UpdateRequest();
ur.setParam(UpdateParams.COMMIT, "true");
@@ -324,9 +310,6 @@ public class IndexSizeTriggerMixedBoundsTest extends SolrCloudTestCase {
log.info("-- requesting optimize / expungeDeletes / commit");
solrClient.request(ur, collectionName);
// wait for the segments to merge to reduce the index size
cloudManager.getTimeSource().sleep(50000);
// add some docs so that every shard gets an update
// we can reduce the number of docs here but this also works
for (int j = 0; j < 1; j++) {
@@ -348,8 +331,6 @@ public class IndexSizeTriggerMixedBoundsTest extends SolrCloudTestCase {
response = solrClient.request(req);
assertEquals(response.get("result").toString(), "success");
timeSource.sleep(TimeUnit.MILLISECONDS.convert(waitForSeconds + 1, TimeUnit.SECONDS));
await = finished.await(90000 / SPEED, TimeUnit.MILLISECONDS);
assertTrue("did not finish processing in time", await);
log.info("-- suspending trigger");

SolrTestCase.java

@@ -17,15 +17,12 @@
package org.apache.solr;
import java.lang.invoke.MethodHandles;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.solr.util.StartupLoggingUtils;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static com.carrotsearch.randomizedtesting.RandomizedTest.systemPropertyAsBoolean;
@@ -42,10 +39,7 @@ import static com.carrotsearch.randomizedtesting.RandomizedTest.systemPropertyAsBoolean
*/
public class SolrTestCase extends LuceneTestCase {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
/**
* Special hook for sanity checking if any tests trigger failures when an
Assumption failure occurs in a {@link BeforeClass} method
* @lucene.internal