diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java
index 3a1ae917488..2f5e9b0bd67 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java
@@ -455,12 +455,7 @@ public class DataImporter {
   }
 
   public void runAsync(final RequestInfo reqParams, final DIHWriter sw) {
-    new Thread() {
-      @Override
-      public void run() {
-        runCmd(reqParams, sw);
-      }
-    }.start();
+    new Thread(() -> runCmd(reqParams, sw)).start();
   }
 
   void runCmd(RequestInfo reqParams, DIHWriter sw) {
diff --git a/solr/core/src/java/org/apache/solr/cloud/Overseer.java b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
index cf73b62cb08..d76c240495f 100644
--- a/solr/core/src/java/org/apache/solr/cloud/Overseer.java
+++ b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
@@ -261,13 +261,8 @@ public class Overseer implements Closeable {
         }
       } finally {
         log.info("Overseer Loop exiting : {}", LeaderElector.getNodeName(myId));
-        new Thread("OverseerExitThread"){
-          //do this in a separate thread because any wait is interrupted in this main thread
-          @Override
-          public void run() {
-            checkIfIamStillLeader();
-          }
-        }.start();
+        //do this in a separate thread because any wait is interrupted in this main thread
+        new Thread(this::checkIfIamStillLeader, "OverseerExitThread").start();
       }
     }
 
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
index 27a2824336e..4e7e429bb75 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
@@ -578,17 +578,15 @@ public class OverseerCollectionMessageHandler implements OverseerMessageHandler
     }
     //if there are too many nodes this command may time out. And most likely dedicated
     // overseers are created when there are too many nodes . So , do this operation in a separate thread
-    new Thread(){
-      @Override
-      public void run() {
-        try {
-          overseerPrioritizer.prioritizeOverseerNodes(myId);
-        } catch (Exception e) {
-          log.error("Error in prioritizing Overseer",e);
-        }
-
+    new Thread(() -> {
+      try {
+        overseerPrioritizer.prioritizeOverseerNodes(myId);
+      } catch (Exception e) {
+        log.error("Error in prioritizing Overseer", e);
       }
-    }.start();
+
+    }).start();
+
   }
 
   @SuppressWarnings("unchecked")
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
index a6a15082acf..3e4cbe50b44 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
@@ -2435,20 +2435,18 @@ public final class ZkController {
       final Set<Runnable> listeners = confDirectoryListeners.get(zkDir);
       if (listeners != null && !listeners.isEmpty()) {
         final Set<Runnable> listenersCopy = new HashSet<>(listeners);
-        new Thread() {
-          // run these in a separate thread because this can be long running
-          @Override
-          public void run() {
-            log.info("Running listeners for {}", zkDir);
-            for (final Runnable listener : listenersCopy) {
-              try {
-                listener.run();
-              } catch (Exception e) {
-                log.warn("listener throws error", e);
-              }
+        // run these in a separate thread because this can be long running
+        new Thread(() -> {
+          log.info("Running listeners for {}", zkDir);
+          for (final Runnable listener : listenersCopy) {
+            try {
+              listener.run();
+            } catch (Exception e) {
+              log.warn("listener throws error", e);
             }
           }
-        }.start();
+        }).start();
+
       }
     }
     return true;
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index ff3843b7d69..cd05bbd3e05 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -533,24 +533,22 @@ public class CoreContainer {
 
     } finally {
       if (asyncSolrCoreLoad && futures != null) {
-        Thread shutdownThread = new Thread() {
-          public void run() {
-            try {
-              for (Future<SolrCore> future : futures) {
-                try {
-                  future.get();
-                } catch (InterruptedException e) {
-                  Thread.currentThread().interrupt();
-                } catch (ExecutionException e) {
-                  log.error("Error waiting for SolrCore to be created", e);
-                }
+
+        coreContainerWorkExecutor.submit((Runnable) () -> {
+          try {
+            for (Future<SolrCore> future : futures) {
+              try {
+                future.get();
+              } catch (InterruptedException e) {
+                Thread.currentThread().interrupt();
+              } catch (ExecutionException e) {
+                log.error("Error waiting for SolrCore to be created", e);
               }
-            } finally {
-              ExecutorUtil.shutdownAndAwaitTermination(coreLoadExecutor);
             }
+          } finally {
+            ExecutorUtil.shutdownAndAwaitTermination(coreLoadExecutor);
           }
-        };
-        coreContainerWorkExecutor.submit(shutdownThread);
+        });
       } else {
         ExecutorUtil.shutdownAndAwaitTermination(coreLoadExecutor);
       }
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index 14a4e0ff1e8..faef1c8a4fd 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -2595,18 +2595,14 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
     final String myIndexDir = getIndexDir();
     final String coreName = getName();
     if (myDirFactory != null && myDataDir != null && myIndexDir != null) {
-      Thread cleanupThread = new Thread() {
-        @Override
-        public void run() {
-          log.info("Looking for old index directories to cleanup for core {} in {}", coreName, myDataDir);
-          try {
-            myDirFactory.cleanupOldIndexDirectories(myDataDir, myIndexDir);
-          } catch (Exception exc) {
-            log.error("Failed to cleanup old index directories for core "+coreName, exc);
-          }
+      Thread cleanupThread = new Thread(() -> {
+        log.info("Looking for old index directories to cleanup for core {} in {}", coreName, myDataDir);
+        try {
+          myDirFactory.cleanupOldIndexDirectories(myDataDir, myIndexDir);
+        } catch (Exception exc) {
+          log.error("Failed to cleanup old index directories for core "+coreName, exc);
         }
-      };
-      cleanupThread.setName("OldIndexDirectoryCleanupThreadForCore-"+coreName);
+      }, "OldIndexDirectoryCleanupThreadForCore-"+coreName);
       cleanupThread.setDaemon(true);
       cleanupThread.start();
     }
diff --git a/solr/core/src/java/org/apache/solr/core/ZkContainer.java b/solr/core/src/java/org/apache/solr/core/ZkContainer.java
index 0daafd6b9ac..22afe998fa7 100644
--- a/solr/core/src/java/org/apache/solr/core/ZkContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/ZkContainer.java
@@ -174,42 +174,38 @@ public class ZkContainer {
   }
 
   public void registerInZk(final SolrCore core, boolean background) {
-    Thread thread = new Thread() {
-      @Override
-      public void run() {
-        MDCLoggingContext.setCore(core);
+    Runnable r = () -> {
+      MDCLoggingContext.setCore(core);
+      try {
         try {
+          zkController.register(core.getName(), core.getCoreDescriptor());
+        } catch (InterruptedException e) {
+          // Restore the interrupted status
+          Thread.currentThread().interrupt();
+          SolrException.log(log, "", e);
+        } catch (Exception e) {
           try {
-            zkController.register(core.getName(), core.getCoreDescriptor());
-          } catch (InterruptedException e) {
-            // Restore the interrupted status
+            zkController.publish(core.getCoreDescriptor(), Replica.State.DOWN);
+          } catch (InterruptedException e1) {
             Thread.currentThread().interrupt();
-            SolrException.log(log, "", e);
-          } catch (Exception e) {
-            try {
-              zkController.publish(core.getCoreDescriptor(), Replica.State.DOWN);
-            } catch (InterruptedException e1) {
-              Thread.currentThread().interrupt();
-              log.error("", e1);
-            } catch (Exception e1) {
-              log.error("", e1);
-            }
-            SolrException.log(log, "", e);
+            log.error("", e1);
+          } catch (Exception e1) {
+            log.error("", e1);
           }
-        } finally {
-          MDCLoggingContext.clear();
+          SolrException.log(log, "", e);
         }
+      } finally {
+        MDCLoggingContext.clear();
       }
-    };
-
+
     if (zkController != null) {
       if (background) {
-        coreZkRegister.execute(thread);
+        coreZkRegister.execute(r);
       } else {
         MDCLoggingContext.setCore(core);
         try {
-          thread.run();
+          r.run();
         } finally {
           MDCLoggingContext.clear();
         }
 
diff --git a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
index 9c1cbb60b97..714b8004d5e 100644
--- a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
+++ b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
@@ -768,18 +768,15 @@ public class IndexFetcher {
 
   private void reloadCore() {
     final CountDownLatch latch = new CountDownLatch(1);
-    new Thread() {
-      @Override
-      public void run() {
-        try {
-          solrCore.getCoreDescriptor().getCoreContainer().reload(solrCore.getName());
-        } catch (Exception e) {
-          LOG.error("Could not reload core ", e);
-        } finally {
-          latch.countDown();
-        }
+    new Thread(() -> {
+      try {
+        solrCore.getCoreDescriptor().getCoreContainer().reload(solrCore.getName());
+      } catch (Exception e) {
+        LOG.error("Could not reload core ", e);
+      } finally {
+        latch.countDown();
       }
-    }.start();
+    }).start();
     try {
       latch.await();
     } catch (InterruptedException e) {
diff --git a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
index d1bd7d23d22..ff93c42d18b 100644
--- a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
@@ -275,12 +275,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAware
       return;
     }
     final SolrParams paramsCopy = new ModifiableSolrParams(solrParams);
-    Thread fetchThread = new Thread("explicit-fetchindex-cmd") {
-      @Override
-      public void run() {
-        doFetch(paramsCopy, false);
-      }
-    };
+    Thread fetchThread = new Thread(() -> doFetch(paramsCopy, false), "explicit-fetchindex-cmd") ;
     fetchThread.setDaemon(false);
     fetchThread.start();
     if (solrParams.getBool(WAIT, false)) {
diff --git a/solr/core/src/java/org/apache/solr/handler/SnapShooter.java b/solr/core/src/java/org/apache/solr/handler/SnapShooter.java
index cc3f69efc0b..4b39097770e 100644
--- a/solr/core/src/java/org/apache/solr/handler/SnapShooter.java
+++ b/solr/core/src/java/org/apache/solr/handler/SnapShooter.java
@@ -128,12 +128,7 @@ public class SnapShooter {
   }
 
   protected void deleteSnapAsync(final ReplicationHandler replicationHandler) {
-    new Thread() {
-      @Override
-      public void run() {
-        deleteNamedSnapshot(replicationHandler);
-      }
-    }.start();
+    new Thread(() -> deleteNamedSnapshot(replicationHandler)).start();
   }
 
   public void validateCreateSnapshot() throws IOException {
@@ -170,28 +165,27 @@ public class SnapShooter {
 
   public void createSnapAsync(final IndexCommit indexCommit, final int numberToKeep, Consumer<NamedList> result) {
     solrCore.getDeletionPolicy().saveCommitPoint(indexCommit.getGeneration());
-    new Thread() { //TODO should use Solr's ExecutorUtil
-      @Override
-      public void run() {
+    //TODO should use Solr's ExecutorUtil
+    new Thread(() -> {
+      try {
+        result.accept(createSnapshot(indexCommit));
+      } catch (Exception e) {
+        LOG.error("Exception while creating snapshot", e);
+        NamedList<Object> snapShootDetails = new NamedList<>();
+        snapShootDetails.add("snapShootException", e.getMessage());
+        result.accept(snapShootDetails);
+      } finally {
+        solrCore.getDeletionPolicy().releaseCommitPoint(indexCommit.getGeneration());
+      }
+      if (snapshotName == null) {
        try {
-          result.accept(createSnapshot(indexCommit));
-        } catch (Exception e) {
-          LOG.error("Exception while creating snapshot", e);
-          NamedList<Object> snapShootDetails = new NamedList<>();
-          snapShootDetails.add("snapShootException", e.getMessage());
-          result.accept(snapShootDetails);
-        } finally {
-          solrCore.getDeletionPolicy().releaseCommitPoint(indexCommit.getGeneration());
-        }
-        if (snapshotName == null) {
-          try {
-            deleteOldBackups(numberToKeep);
-          } catch (IOException e) {
-            LOG.warn("Unable to delete old snapshots ", e);
-          }
+          deleteOldBackups(numberToKeep);
+        } catch (IOException e) {
+          LOG.warn("Unable to delete old snapshots ", e);
        }
      }
-    }.start();
+    }).start();
+
  }
 
  // note: remember to reserve the indexCommit first so it won't get deleted concurrently
diff --git a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
index 972859705c9..20418f5431e 100644
--- a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
@@ -206,23 +206,20 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAware
         log.info("I already have the expected version {} of params", expectedVersion);
       }
       if (checkStale && req.getCore().getResourceLoader() instanceof ZkSolrResourceLoader) {
-        new Thread(SolrConfigHandler.class.getSimpleName() + "-refreshconf") {
-          @Override
-          public void run() {
-            if (!reloadLock.tryLock()) {
-              log.info("Another reload is in progress . Not doing anything");
-              return;
-            }
-            try {
-              log.info("Trying to update my configs");
-              SolrCore.getConfListener(req.getCore(), (ZkSolrResourceLoader) req.getCore().getResourceLoader()).run();
-            } catch (Exception e) {
-              log.error("Unable to refresh conf ", e);
-            } finally {
-              reloadLock.unlock();
-            }
+        new Thread(() -> {
+          if (!reloadLock.tryLock()) {
+            log.info("Another reload is in progress . Not doing anything");
+            return;
           }
-        }.start();
+          try {
+            log.info("Trying to update my configs");
+            SolrCore.getConfListener(req.getCore(), (ZkSolrResourceLoader) req.getCore().getResourceLoader()).run();
+          } catch (Exception e) {
+            log.error("Unable to refresh conf ", e);
+          } finally {
+            reloadLock.unlock();
+          }
+        }, SolrConfigHandler.class.getSimpleName() + "-refreshconf").start();
       } else {
         log.info("checkStale {} , resourceloader {}", checkStale, req.getCore().getResourceLoader().getClass().getName());
       }
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java
index bf892277d78..fa3beddac6d 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminOperation.java
@@ -582,24 +582,20 @@ enum CoreAdminOperation {
     public void call(final CallInfo callInfo) throws IOException {
       final SolrParams params = callInfo.req.getParams();
       log.info("It has been requested that we recover: core="+params.get(CoreAdminParams.CORE));
-      Thread thread = new Thread() {
-        @Override
-        public void run() {
-          String cname = params.get(CoreAdminParams.CORE);
-          if (cname == null) {
-            cname = "";
-          }
-          try (SolrCore core = callInfo.handler.coreContainer.getCore(cname)) {
-            if (core != null) {
-              core.getUpdateHandler().getSolrCoreState().doRecovery(callInfo.handler.coreContainer, core.getCoreDescriptor());
-            } else {
-              SolrException.log(log, "Could not find core to call recovery:" + cname);
-            }
+      new Thread(() -> {
+        String cname = params.get(CoreAdminParams.CORE);
+        if (cname == null) {
+          cname = "";
+        }
+        try (SolrCore core = callInfo.handler.coreContainer.getCore(cname)) {
+          if (core != null) {
+            core.getUpdateHandler().getSolrCoreState().doRecovery(callInfo.handler.coreContainer, core.getCoreDescriptor());
+          } else {
+            SolrException.log(log, "Could not find core to call recovery:" + cname);
          }
        }
-      };
+      }).start();
 
-      thread.start();
    }
  },
  REQUESTSYNCSHARD_OP(REQUESTSYNCSHARD) {
diff --git a/solr/core/src/java/org/apache/solr/util/ConcurrentLFUCache.java b/solr/core/src/java/org/apache/solr/util/ConcurrentLFUCache.java
index 25b9342a7eb..6f2ff2d09ba 100644
--- a/solr/core/src/java/org/apache/solr/util/ConcurrentLFUCache.java
+++ b/solr/core/src/java/org/apache/solr/util/ConcurrentLFUCache.java
@@ -16,10 +16,6 @@
  */
 package org.apache.solr.util;
 
-import org.apache.solr.common.util.Cache;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.lang.invoke.MethodHandles;
 import java.lang.ref.WeakReference;
 import java.util.LinkedHashMap;
@@ -30,6 +26,10 @@ import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.ReentrantLock;
 
+import org.apache.solr.common.util.Cache;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 /**
  * A LFU cache implementation based upon ConcurrentHashMap.
  *
@@ -139,12 +139,7 @@ public class ConcurrentLFUCache<K,V> implements Cache<K,V> {
     // in this method.
     if (currentSize > upperWaterMark && !isCleaning) {
       if (newThreadForCleanup) {
-        new Thread() {
-          @Override
-          public void run() {
-            markAndSweep();
-          }
-        }.start();
+        new Thread(this::markAndSweep).start();
       } else if (cleanupThread != null) {
         cleanupThread.wakeThread();
       } else {
diff --git a/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java b/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java
index 3b02ed65418..3b6db53ae92 100644
--- a/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java
+++ b/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java
@@ -136,12 +136,7 @@ public class ConcurrentLRUCache<K,V> implements Cache<K,V> {
     // in this method.
     if (currentSize > upperWaterMark && !isCleaning) {
       if (newThreadForCleanup) {
-        new Thread() {
-          @Override
-          public void run() {
-            markAndSweep();
-          }
-        }.start();
+        new Thread(this::markAndSweep).start();
       } else if (cleanupThread != null){
         cleanupThread.wakeThread();
       } else {