diff --git a/lucene/core/src/java/org/apache/lucene/search/TotalHits.java b/lucene/core/src/java/org/apache/lucene/search/TotalHits.java
index 7a0412f743b..b3a4c83e1de 100644
--- a/lucene/core/src/java/org/apache/lucene/search/TotalHits.java
+++ b/lucene/core/src/java/org/apache/lucene/search/TotalHits.java
@@ -35,7 +35,7 @@ public final class TotalHits {
      */
     EQUAL_TO,
     /**
-     * The total hit count is greater than or eual to {@link TotalHits#value}.
+     * The total hit count is greater than or equal to {@link TotalHits#value}.
      */
     GREATER_THAN_OR_EQUAL_TO
   }
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 04ada3dccac..ca510caead6 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -86,6 +86,8 @@ Improvements

 * SOLR-12121: JWT Token authentication plugin with OpenID Connect implicit flow login through Admin UI (janhoy)

+* SOLR-13227: Optimizing facet.range.other by avoiding expensive exceptions (Nikolay Khitrin via Mikhail Khludnev)
+
 Other Changes
 ----------------------

@@ -105,6 +107,10 @@ Other Changes

 * SOLR-13060: Improve HdfsAutoAddReplicasIntegrationTest and HdfsCollectionsAPIDistributedZkTest (Kevin Risden)

+* SOLR-13074: MoveReplicaHDFSTest leaks threads, falls into an endless loop, logging like crazy (Kevin Risden)
+
+* SOLR-9762: Remove the workaround implemented for HADOOP-13346 (Kevin Risden)
+
 ================== 8.0.0 ==================

 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
diff --git a/solr/core/src/java/org/apache/solr/handler/component/RangeFacetProcessor.java b/solr/core/src/java/org/apache/solr/handler/component/RangeFacetProcessor.java
index 6f2fc266c76..d81bc9edb3e 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/RangeFacetProcessor.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/RangeFacetProcessor.java
@@ -178,35 +178,29 @@ public class RangeFacetProcessor extends SimpleFacets {
     IntervalFacets.FacetInterval after = null;
     for (RangeFacetRequest.FacetRange range : rfr.getFacetRanges()) {
-      try {
-        FacetRangeOther other = FacetRangeOther.get(range.name);
-        if (other != null) {
-          switch (other) {
-            case BEFORE:
-              assert range.lower == null;
-              intervals.set(0, new IntervalFacets.FacetInterval(sf, "*", range.upper, range.includeLower,
-                  range.includeUpper, FacetRangeOther.BEFORE.toString()));
-              break;
-            case AFTER:
-              assert range.upper == null;
-              after = new IntervalFacets.FacetInterval(sf, range.lower, "*",
-                  range.includeLower, range.includeUpper, FacetRangeOther.AFTER.toString());
-              break;
-            case BETWEEN:
-              intervals.set(includeBefore ? 1 : 0, new IntervalFacets.FacetInterval(sf, range.lower, range.upper,
-                  range.includeLower, range.includeUpper, FacetRangeOther.BETWEEN.toString()));
-              break;
-            case ALL:
-            case NONE:
-              break;
-          }
+      if (range.other != null) {
+        switch (range.other) {
+          case BEFORE:
+            assert range.lower == null;
+            intervals.set(0, new IntervalFacets.FacetInterval(sf, "*", range.upper, range.includeLower,
+                range.includeUpper, FacetRangeOther.BEFORE.toString()));
+            break;
+          case AFTER:
+            assert range.upper == null;
+            after = new IntervalFacets.FacetInterval(sf, range.lower, "*",
+                range.includeLower, range.includeUpper, FacetRangeOther.AFTER.toString());
+            break;
+          case BETWEEN:
+            intervals.set(includeBefore ? 1 : 0, new IntervalFacets.FacetInterval(sf, range.lower, range.upper,
+                range.includeLower, range.includeUpper, FacetRangeOther.BETWEEN.toString()));
+            break;
+          case ALL:
+          case NONE:
+            break;
         }
-        continue;
-      } catch (SolrException e) {
-        // safe to ignore
+      } else {
+        intervals.add(new IntervalFacets.FacetInterval(sf, range.lower, range.upper, range.includeLower, range.includeUpper, range.lower));
       }
-
-      intervals.add(new IntervalFacets.FacetInterval(sf, range.lower, range.upper, range.includeLower, range.includeUpper, range.lower));
     }

     if (includeAfter) {
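Note on the RangeFacetProcessor change (SOLR-13227): the removed code resolved every facet range name through FacetRangeOther.get(range.name), which throws a SolrException for each ordinary range bucket, and the catch block then swallowed it. Filling in a stack trace per bucket is expensive on a hot path, so the patch carries the resolved constant on the range itself (range.other, null for plain buckets) and branches on it directly. A minimal sketch of the exception-free lookup pattern; the otherOrNull helper below is hypothetical and only illustrates the idea, it is not part of the patch:

    import org.apache.solr.common.params.FacetParams.FacetRangeOther;

    // Hypothetical helper: resolve a bucket name to its FacetRangeOther
    // constant, returning null for plain range names instead of throwing.
    static FacetRangeOther otherOrNull(String name) {
      for (FacetRangeOther other : FacetRangeOther.values()) {
        if (other.toString().equals(name)) {
          return other;
        }
      }
      return null; // an ordinary range bucket, not before/after/between/all/none
    }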
diff --git a/solr/core/src/java/org/apache/solr/security/HadoopAuthPlugin.java b/solr/core/src/java/org/apache/solr/security/HadoopAuthPlugin.java
index cce4a896246..6881d8a59ce 100644
--- a/solr/core/src/java/org/apache/solr/security/HadoopAuthPlugin.java
+++ b/solr/core/src/java/org/apache/solr/security/HadoopAuthPlugin.java
@@ -20,7 +20,6 @@ import static org.apache.solr.security.RequestContinuesRecorderAuthenticationHan
 import static org.apache.solr.security.HadoopAuthFilter.DELEGATION_TOKEN_ZK_CLIENT;

 import java.io.IOException;
-import java.io.PrintWriter;
 import java.lang.invoke.MethodHandles;
 import java.util.Collection;
 import java.util.Collections;
@@ -37,15 +36,15 @@ import javax.servlet.ServletRequest;
 import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpServletResponseWrapper;

+import com.fasterxml.jackson.core.JsonGenerator;
 import org.apache.commons.collections.iterators.IteratorEnumeration;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticationHandler;
 import org.apache.solr.client.solrj.impl.Krb5HttpClientBuilder;
 import org.apache.solr.cloud.ZkController;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
-import org.apache.solr.common.util.SuppressForbidden;
 import org.apache.solr.core.CoreContainer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -189,6 +188,10 @@ public class HadoopAuthPlugin extends AuthenticationPlugin {
     // Configure proxy user settings.
     params.putAll(proxyUserConfigs);

+    // Needed to work around HADOOP-13346
+    params.put(DelegationTokenAuthenticationHandler.JSON_MAPPER_PREFIX + JsonGenerator.Feature.AUTO_CLOSE_TARGET,
+        "false");
+
     final ServletContext servletContext = new AttributeOnlyServletContext();
     log.info("Params: "+params);
@@ -244,20 +247,7 @@ public class HadoopAuthPlugin extends AuthenticationPlugin {
       log.info("-------------------------------");
     }

-    // Workaround until HADOOP-13346 is fixed.
-    HttpServletResponse rspCloseShield = new HttpServletResponseWrapper(frsp) {
-      @SuppressForbidden(reason = "Hadoop DelegationTokenAuthenticationFilter uses response writer, this" +
-          "is providing a CloseShield on top of that")
-      @Override
-      public PrintWriter getWriter() throws IOException {
-        final PrintWriter pw = new PrintWriterWrapper(frsp.getWriter()) {
-          @Override
-          public void close() {};
-        };
-        return pw;
-      }
-    };
-    authFilter.doFilter(request, rspCloseShield, filterChain);
+    authFilter.doFilter(request, frsp, filterChain);

     switch (frsp.getStatus()) {
       case HttpServletResponse.SC_UNAUTHORIZED:
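Note on the HadoopAuthPlugin change (SOLR-9762): HADOOP-13346 concerns Hadoop's DelegationTokenAuthenticationFilter closing the servlet response stream after writing its JSON, which is why Solr previously wrapped the response in a close-shielding PrintWriter. The replacement configures the problem away instead: the delegation token handler applies configuration entries carrying its JSON_MAPPER_PREFIX to the Jackson mapper it writes JSON with (which is what the params.put above relies on), and AUTO_CLOSE_TARGET=false tells Jackson not to close the Writer it writes to. A standalone sketch of what that Jackson feature does; the class below is illustrative only, not Solr code:

    import java.io.IOException;

    import javax.servlet.http.HttpServletResponse;

    import com.fasterxml.jackson.core.JsonGenerator;
    import com.fasterxml.jackson.databind.ObjectMapper;

    // With AUTO_CLOSE_TARGET disabled, writeValue(...) flushes but no longer
    // closes the Writer it was handed, so the response stays usable for the
    // rest of the filter chain.
    class JsonResponseWriter {
      private static final ObjectMapper MAPPER = new ObjectMapper();

      static {
        MAPPER.getFactory().disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
      }

      static void write(HttpServletResponse response, Object payload) throws IOException {
        MAPPER.writeValue(response.getWriter(), payload);
      }
    }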
diff --git a/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java b/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java
index 87f37a8382f..8bc562518e2 100644
--- a/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java
+++ b/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java
@@ -16,8 +16,6 @@
  */
 package org.apache.solr.security;

-import java.io.IOException;
-import java.io.PrintWriter;
 import java.lang.invoke.MethodHandles;
 import java.util.Enumeration;
 import java.util.HashMap;
@@ -30,11 +28,11 @@ import javax.servlet.ServletContext;
 import javax.servlet.ServletException;
 import javax.servlet.ServletRequest;
 import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpServletResponseWrapper;

+import com.fasterxml.jackson.core.JsonGenerator;
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.collections.iterators.IteratorEnumeration;
+import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticationHandler;
 import org.apache.solr.client.solrj.impl.Http2SolrClient;
 import org.apache.solr.client.solrj.impl.Krb5HttpClientBuilder;
 import org.apache.solr.client.solrj.impl.SolrHttpClientBuilder;
@@ -42,7 +40,6 @@ import org.apache.solr.cloud.ZkController;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.cloud.SecurityAwareZkACLProvider;
-import org.apache.solr.common.util.SuppressForbidden;
 import org.apache.solr.core.CoreContainer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -166,6 +163,11 @@ public class KerberosPlugin extends AuthenticationPlugin implements HttpClientBu
         params.put(key, System.getProperty(key));
       }
     }
+
+    // Needed to work around HADOOP-13346
+    params.put(DelegationTokenAuthenticationHandler.JSON_MAPPER_PREFIX + JsonGenerator.Feature.AUTO_CLOSE_TARGET,
+        "false");
+
     final ServletContext servletContext = new AttributeOnlyServletContext();
     if (controller != null) {
       servletContext.setAttribute(DELEGATION_TOKEN_ZK_CLIENT, controller.getZkClient());
@@ -223,25 +225,7 @@ public class KerberosPlugin extends AuthenticationPlugin implements HttpClientBu
   public boolean doAuthenticate(ServletRequest req, ServletResponse rsp, FilterChain chain)
       throws Exception {
     log.debug("Request to authenticate using kerberos: "+req);
-
-    final HttpServletResponse frsp = (HttpServletResponse)rsp;
-
-    // kerberosFilter may close the stream and write to closed streams,
-    // see HADOOP-13346.  To work around, pass a PrintWriter that ignores
-    // closes
-    HttpServletResponse rspCloseShield = new HttpServletResponseWrapper(frsp) {
-      @SuppressForbidden(reason = "Hadoop DelegationTokenAuthenticationFilter uses response writer, this" +
-          "is providing a CloseShield on top of that")
-      @Override
-      public PrintWriter getWriter() throws IOException {
-        final PrintWriter pw = new PrintWriterWrapper(frsp.getWriter()) {
-          @Override
-          public void close() {};
-        };
-        return pw;
-      }
-    };
-    kerberosFilter.doFilter(req, rspCloseShield, chain);
+    kerberosFilter.doFilter(req, rsp, chain);

     String requestContinuesAttr = (String)req.getAttribute(RequestContinuesRecorderAuthenticationHandler.REQUEST_CONTINUES_ATTR);
     if (requestContinuesAttr == null) {
       log.warn("Could not find " + RequestContinuesRecorderAuthenticationHandler.REQUEST_CONTINUES_ATTR);
diff --git a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaHDFSFailoverTest.java b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaHDFSFailoverTest.java
index e50ee811a52..f6e906890cc 100644
--- a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaHDFSFailoverTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaHDFSFailoverTest.java
@@ -40,8 +40,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;

 @ThreadLeakFilters(defaultFilters = true, filters = {
-    BadHdfsThreadsFilter.class, // hdfs currently leaks thread(s)
-    MoveReplicaHDFSTest.ForkJoinThreadsFilter.class
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
 })
 @Nightly // test is too long for non nightly
 public class MoveReplicaHDFSFailoverTest extends SolrCloudTestCase {
diff --git a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaHDFSTest.java b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaHDFSTest.java
index e05033c9155..b46434177cf 100644
--- a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaHDFSTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaHDFSTest.java
@@ -16,75 +16,51 @@
  */
 package org.apache.solr.cloud;

-import java.io.IOException;
-
-import com.carrotsearch.randomizedtesting.ThreadFilter;
 import com.carrotsearch.randomizedtesting.annotations.Nightly;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
 import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase.AwaitsFix;
+import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.lucene.util.TimeUnits;
 import org.apache.solr.cloud.hdfs.HdfsTestUtil;
-import org.apache.solr.common.cloud.ZkConfigManager;
 import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;

-/**
- *
- */
+@Slow
+@Nightly
 @ThreadLeakFilters(defaultFilters = true, filters = {
-    BadHdfsThreadsFilter.class, // hdfs currently leaks thread(s)
-    MoveReplicaHDFSTest.ForkJoinThreadsFilter.class
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
 })
-@Nightly // test is too long for non nightly
 @TimeoutSuite(millis = TimeUnits.HOUR)
-@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-13060")
 public class MoveReplicaHDFSTest extends MoveReplicaTest {
-
   private static MiniDFSCluster dfsCluster;

   @BeforeClass
   public static void setupClass() throws Exception {
-    System.setProperty("solr.hdfs.blockcache.enabled", "false");
+    System.setProperty("solr.hdfs.blockcache.blocksperbank", "512");
+    System.setProperty("tests.hdfs.numdatanodes", "1");
     dfsCluster = HdfsTestUtil.setupClass(createTempDir().toFile().getAbsolutePath());
-
-    ZkConfigManager configManager = new ZkConfigManager(zkClient());
-    configManager.uploadConfigDir(configset("cloud-hdfs"), "conf1");
-
-    System.setProperty("solr.hdfs.home", HdfsTestUtil.getDataDir(dfsCluster, "data"));
   }

   @AfterClass
   public static void teardownClass() throws Exception {
     try {
-      IOUtils.close(
-          () -> {
-            try {
-              if (cluster != null) cluster.shutdown();
-            } catch (Exception e) {
-              throw new IOException("Could not shut down the cluster.", e);
-            }
-          },
-          () -> {
-            try {
-              if (dfsCluster != null) HdfsTestUtil.teardownClass(dfsCluster);
-            } catch (Exception e) {
-              throw new IOException("Could not shut down dfs cluster.", e);
-            }
-          }
-      );
+      HdfsTestUtil.teardownClass(dfsCluster);
     } finally {
-      cluster = null;
       dfsCluster = null;
+      System.clearProperty("solr.hdfs.blockcache.blocksperbank");
+      System.clearProperty("tests.hdfs.numdatanodes");
     }
   }

+  @Override
+  protected String getConfigSet() {
+    return "cloud-hdfs";
+  }
+
   @Test
-  // 12-Jun-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") //2018-03-10
   public void testNormalMove() throws Exception {
     inPlaceMove = false;
     test();
@@ -108,13 +84,5 @@ public class MoveReplicaHDFSTest extends MoveReplicaTest {
   public void testFailedMove() throws Exception {
     super.testFailedMove();
   }
-
-  public static class ForkJoinThreadsFilter implements ThreadFilter {
-    @Override
-    public boolean reject(Thread t) {
-      String name = t.getName();
-      return name.startsWith("ForkJoinPool.commonPool");
-    }
-  }
 }
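Note on the HDFS test changes (SOLR-13074): setupClass sets suite-scoped system properties and teardownClass clears them again in a finally block; system properties are shared by every suite in the same JVM, so a value left behind would silently reconfigure whatever HDFS test runs next. A generic sketch of that pairing, with an illustrative class name (the property key is taken from the diff above):

    import org.junit.AfterClass;
    import org.junit.BeforeClass;

    // Illustrative only: pair each suite-scoped setProperty with a
    // clearProperty so no state leaks across suites in the same JVM.
    public class ExampleHdfsSuite {
      @BeforeClass
      public static void setupClass() {
        System.setProperty("solr.hdfs.blockcache.blocksperbank", "512");
      }

      @AfterClass
      public static void teardownClass() {
        System.clearProperty("solr.hdfs.blockcache.blocksperbank");
      }
    }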
diff --git a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java
index e0b71c48f0c..843b238ad83 100644
--- a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java
@@ -22,7 +22,6 @@ import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -41,46 +40,38 @@ import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.params.CollectionParams;
-import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.util.IdUtils;
-import org.apache.solr.util.LogLevel;
 import org.junit.After;
 import org.junit.Before;
-import org.junit.BeforeClass;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-@LogLevel("org.apache.solr.cloud=DEBUG;org.apache.solr.cloud.autoscaling=DEBUG;")
 public class MoveReplicaTest extends SolrCloudTestCase {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

   // used by MoveReplicaHDFSTest
   protected boolean inPlaceMove = true;

-  @BeforeClass
-  public static void setupCluster() throws Exception {
-
-  }
-
-  protected String getSolrXml() {
-    return "solr.xml";
+  protected String getConfigSet() {
+    return "cloud-dynamic";
   }

   @Before
   public void beforeTest() throws Exception {
     inPlaceMove = true;
+
     configureCluster(4)
-        .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-dynamic").resolve("conf"))
+        .addConfig("conf1", configset(getConfigSet()))
+        .addConfig("conf2", configset(getConfigSet()))
+        .withSolrXml(TEST_PATH().resolve("solr.xml"))
         .configure();
+
     NamedList<Object> overSeerStatus = cluster.getSolrClient().request(CollectionAdminRequest.getOverseerStatus());
     JettySolrRunner overseerJetty = null;
     String overseerLeader = (String) overSeerStatus.get("leader");
-    for (int i = 0; i < cluster.getJettySolrRunners().size(); i++) {
-      JettySolrRunner jetty = cluster.getJettySolrRunner(i);
+    for (JettySolrRunner jetty : cluster.getJettySolrRunners()) {
       if (jetty.getNodeName().equals(overseerLeader)) {
         overseerJetty = jetty;
         break;
.addConfig("conf2", configset(getConfigSet())) + .withSolrXml(TEST_PATH().resolve("solr.xml")) .configure(); + NamedList overSeerStatus = cluster.getSolrClient().request(CollectionAdminRequest.getOverseerStatus()); JettySolrRunner overseerJetty = null; String overseerLeader = (String) overSeerStatus.get("leader"); - for (int i = 0; i < cluster.getJettySolrRunners().size(); i++) { - JettySolrRunner jetty = cluster.getJettySolrRunner(i); + for (JettySolrRunner jetty : cluster.getJettySolrRunners()) { if (jetty.getNodeName().equals(overseerLeader)) { overseerJetty = jetty; break; @@ -93,7 +84,11 @@ public class MoveReplicaTest extends SolrCloudTestCase { @After public void afterTest() throws Exception { - cluster.shutdown(); + try { + shutdownCluster(); + } finally { + super.tearDown(); + } } @Test @@ -147,7 +142,7 @@ public class MoveReplicaTest extends SolrCloudTestCase { success = true; break; } - assertFalse(rsp.getRequestStatus() == RequestStatusState.FAILED); + assertNotSame(rsp.getRequestStatus(), RequestStatusState.FAILED); Thread.sleep(500); } assertTrue(success); @@ -292,7 +287,7 @@ public class MoveReplicaTest extends SolrCloudTestCase { boolean success = true; for (int i = 0; i < 200; i++) { CollectionAdminRequest.RequestStatusResponse rsp = requestStatus.process(cloudClient); - assertTrue(rsp.getRequestStatus().toString(), rsp.getRequestStatus() != RequestStatusState.COMPLETED); + assertNotSame(rsp.getRequestStatus().toString(), rsp.getRequestStatus(), RequestStatusState.COMPLETED); if (rsp.getRequestStatus() == RequestStatusState.FAILED) { success = false; break; @@ -306,46 +301,11 @@ public class MoveReplicaTest extends SolrCloudTestCase { } private CollectionAdminRequest.MoveReplica createMoveReplicaRequest(String coll, Replica replica, String targetNode, String shardId) { - if (random().nextBoolean()) { - return new CollectionAdminRequest.MoveReplica(coll, shardId, targetNode, replica.getNodeName()); - } else { - // for backcompat testing of SOLR-11068 - // todo remove in solr 8.0 - return new BackCompatMoveReplicaRequest(coll, shardId, targetNode, replica.getNodeName()); - } + return new CollectionAdminRequest.MoveReplica(coll, shardId, targetNode, replica.getNodeName()); } private CollectionAdminRequest.MoveReplica createMoveReplicaRequest(String coll, Replica replica, String targetNode) { - if (random().nextBoolean()) { - return new CollectionAdminRequest.MoveReplica(coll, replica.getName(), targetNode); - } else { - // for backcompat testing of SOLR-11068 - // todo remove in solr 8.0 - return new BackCompatMoveReplicaRequest(coll, replica.getName(), targetNode); - } - } - - /** - * Added for backcompat testing - * todo remove in solr 8.0 - */ - static class BackCompatMoveReplicaRequest extends CollectionAdminRequest.MoveReplica { - public BackCompatMoveReplicaRequest(String collection, String replica, String targetNode) { - super(collection, replica, targetNode); - } - - public BackCompatMoveReplicaRequest(String collection, String shard, String sourceNode, String targetNode) { - super(collection, shard, sourceNode, targetNode); - } - - @Override - public SolrParams getParams() { - ModifiableSolrParams params = (ModifiableSolrParams) super.getParams(); - if (randomlyMoveReplica) { - params.set(CollectionParams.FROM_NODE, sourceNode); - } - return params; - } + return new CollectionAdminRequest.MoveReplica(coll, replica.getName(), targetNode); } private Replica getRandomReplica(String coll, CloudSolrClient cloudClient) { @@ -369,9 +329,8 @@ public class MoveReplicaTest 
diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/HDFSCollectionsAPITest.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/HDFSCollectionsAPITest.java
index 48417a21ff5..425f4a97dde 100644
--- a/solr/core/src/test/org/apache/solr/cloud/hdfs/HDFSCollectionsAPITest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/HDFSCollectionsAPITest.java
@@ -17,14 +17,12 @@
 package org.apache.solr.cloud.hdfs;

-
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.cloud.MoveReplicaHDFSTest;
 import org.apache.solr.cloud.SolrCloudTestCase;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.cloud.Replica;
@@ -34,8 +32,7 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;

 @ThreadLeakFilters(defaultFilters = true, filters = {
-    BadHdfsThreadsFilter.class, // hdfs currently leaks thread(s)
-    MoveReplicaHDFSTest.ForkJoinThreadsFilter.class
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
 })
 public class HDFSCollectionsAPITest extends SolrCloudTestCase {