From c40689875a8dd7b5e48a99c5cbd7d79585610559 Mon Sep 17 00:00:00 2001
From: Dawid Weiss
Date: Thu, 12 Dec 2019 19:25:22 +0100
Subject: [PATCH 1/5] Correct regexes normalizing jar filenames so that the
 resulting licenses are consistent with gradle (which will use proper
 artifact name, not a chain of regexes).

---
 lucene/build.xml                                                 | 2 +-
 lucene/tools/custom-tasks.xml                                    | 2 +-
 solr/build.xml                                                   | 2 +-
 .../{log4j-LICENSE-ASL.txt => log4j-1.2-api-LICENSE-ASL.txt}     | 0
 solr/licenses/{log4j-NOTICE.txt => log4j-1.2-api-NOTICE.txt}     | 0
 ...ics-jetty-LICENSE-ASL.txt => metrics-jetty9-LICENSE-ASL.txt}  | 0
 .../{metrics-jetty-NOTICE.txt => metrics-jetty9-NOTICE.txt}      | 0
 7 files changed, 3 insertions(+), 3 deletions(-)
 rename solr/licenses/{log4j-LICENSE-ASL.txt => log4j-1.2-api-LICENSE-ASL.txt} (100%)
 rename solr/licenses/{log4j-NOTICE.txt => log4j-1.2-api-NOTICE.txt} (100%)
 rename solr/licenses/{metrics-jetty-LICENSE-ASL.txt => metrics-jetty9-LICENSE-ASL.txt} (100%)
 rename solr/licenses/{metrics-jetty-NOTICE.txt => metrics-jetty9-NOTICE.txt} (100%)

diff --git a/lucene/build.xml b/lucene/build.xml
index d4eb59e6bde..322d2d27eba 100644
--- a/lucene/build.xml
+++ b/lucene/build.xml
@@ -89,7 +89,7 @@
-
+

diff --git a/lucene/tools/custom-tasks.xml b/lucene/tools/custom-tasks.xml
index 11bb4530331..4b5c3ea8ce2 100644
--- a/lucene/tools/custom-tasks.xml
+++ b/lucene/tools/custom-tasks.xml
@@ -43,7 +43,7 @@
-
+

diff --git a/solr/build.xml b/solr/build.xml
index ef597603faa..b57fe998ef2 100644
--- a/solr/build.xml
+++ b/solr/build.xml
@@ -333,7 +333,7 @@
-
+

diff --git a/solr/licenses/log4j-LICENSE-ASL.txt b/solr/licenses/log4j-1.2-api-LICENSE-ASL.txt
similarity index 100%
rename from solr/licenses/log4j-LICENSE-ASL.txt
rename to solr/licenses/log4j-1.2-api-LICENSE-ASL.txt
diff --git a/solr/licenses/log4j-NOTICE.txt b/solr/licenses/log4j-1.2-api-NOTICE.txt
similarity index 100%
rename from solr/licenses/log4j-NOTICE.txt
rename to solr/licenses/log4j-1.2-api-NOTICE.txt
diff --git a/solr/licenses/metrics-jetty-LICENSE-ASL.txt b/solr/licenses/metrics-jetty9-LICENSE-ASL.txt
similarity index 100%
rename from solr/licenses/metrics-jetty-LICENSE-ASL.txt
rename to solr/licenses/metrics-jetty9-LICENSE-ASL.txt
diff --git a/solr/licenses/metrics-jetty-NOTICE.txt b/solr/licenses/metrics-jetty9-NOTICE.txt
similarity index 100%
rename from solr/licenses/metrics-jetty-NOTICE.txt
rename to solr/licenses/metrics-jetty9-NOTICE.txt

From e155649026a62e684667e657175d0f722601c05b Mon Sep 17 00:00:00 2001
From: Andrzej Bialecki
Date: Thu, 12 Dec 2019 19:04:23 +0100
Subject: [PATCH 2/5] SOLR-13975: Make sure the stall time is adjusted up when
 an unusually long poll time is configured.
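
The client treats an update run that makes no progress for more than
stallTime milliseconds as stalled. With a very long pollQueueTime the runner
can legitimately sit in queue.poll() for most of that window, so the stall
check could fire on a healthy client. The constructor now rejects a stall
time smaller than twice the poll queue time, and setPollQueueTime() raises
the stall time to at least twice the new poll time instead of deriving it as
pollQueueTime * 3 / 2.

A minimal sketch of the adjusted behaviour, assuming the standard SolrJ
Builder API (hypothetical caller code and values, not part of this patch):

    // stallTime defaults to Integer.getInteger("solr.cloud.client.stallTime", 15000)
    ConcurrentUpdateSolrClient client = new ConcurrentUpdateSolrClient.Builder(baseUrl)
        .withQueueSize(10)
        .withThreadCount(2)
        .build();
    // a 12 s poll would leave only 3 s of headroom against the 15 s default,
    // so the stall time is raised to 2 * 12 s = 24 s rather than left at 15 s
    client.setPollQueueTime(12000);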
---
 .../solrj/impl/ConcurrentUpdateHttp2SolrClient.java     | 11 +++++++++--
 .../client/solrj/impl/ConcurrentUpdateSolrClient.java   | 10 +++++++++-
 2 files changed, 18 insertions(+), 3 deletions(-)

diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java
index bb0c5822f6e..0e224a1b85b 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java
@@ -152,6 +152,9 @@ public class ConcurrentUpdateHttp2SolrClient extends SolrClient {
     this.streamDeletes = builder.streamDeletes;
     this.basePath = builder.baseSolrUrl;
     this.stallTime = Integer.getInteger("solr.cloud.client.stallTime", 15000);
+    if (stallTime < pollQueueTime * 2) {
+      throw new RuntimeException("Invalid stallTime: " + stallTime + "ms, must be 2x > pollQueueTime " + pollQueueTime);
+    }
 
     if (builder.executorService != null) {
       this.scheduler = builder.executorService;
@@ -214,7 +217,6 @@ public class ConcurrentUpdateHttp2SolrClient extends SolrClient {
       try {
         Update update;
         notifyQueueAndRunnersIfEmptyQueue();
-        //log.info("-- polling 1");
         update = queue.poll(pollQueueTime, TimeUnit.MILLISECONDS);
 
         if (update == null) {
@@ -662,7 +664,12 @@ public class ConcurrentUpdateHttp2SolrClient extends SolrClient {
    */
  public void setPollQueueTime(int pollQueueTime) {
    this.pollQueueTime = pollQueueTime;
-    this.stallTime = this.pollQueueTime * 3 / 2;
+    // make sure the stall time is larger than the polling time
+    // to give a chance for the queue to change
+    int minimalStallTime = pollQueueTime * 2;
+    if (minimalStallTime > this.stallTime) {
+      this.stallTime = minimalStallTime;
+    }
  }
 
  /**
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClient.java
index d921cb2114a..edc8270cfc3 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClient.java
@@ -134,7 +134,9 @@ public class ConcurrentUpdateSolrClient extends SolrClient {
     this.connectionTimeout = builder.connectionTimeoutMillis;
     this.soTimeout = builder.socketTimeoutMillis;
     this.stallTime = Integer.getInteger("solr.cloud.client.stallTime", 15000);
-
+    if (stallTime < pollQueueTime * 2) {
+      throw new RuntimeException("Invalid stallTime: " + stallTime + "ms, must be 2x > pollQueueTime " + pollQueueTime);
+    }
 
     if (builder.executorService != null) {
       this.scheduler = builder.executorService;
@@ -827,6 +829,12 @@ public class ConcurrentUpdateSolrClient extends SolrClient {
    */
  public void setPollQueueTime(int pollQueueTime) {
    this.pollQueueTime = pollQueueTime;
+    // make sure the stall time is larger than the polling time
+    // to give a chance for the queue to change
+    int minimalStallTime = pollQueueTime * 2;
+    if (minimalStallTime > this.stallTime) {
+      this.stallTime = minimalStallTime;
+    }
  }
 
  public void setRequestWriter(RequestWriter requestWriter) {

From 3ba005465a5dff3975b85f9c44d365bd3cd36346 Mon Sep 17 00:00:00 2001
From: Kevin Risden
Date: Wed, 11 Dec 2019 22:57:50 -0500
Subject: [PATCH 3/5] SOLR-14048: Improve Hadoop test sanity checks

Signed-off-by: Kevin Risden
---
 .../test/org/apache/hadoop/fs/FileUtil.java   | 1 +
 .../test/org/apache/hadoop/fs/HardLink.java   | 1 +
 .../apache/hadoop/fs/RawLocalFileSystem.java  | 2 +
 .../fsdataset/impl/BlockPoolSlice.java        | 4 +-
 .../namenode/NameNodeResourceChecker.java     | 2 +
 .../org/apache/hadoop/http/HttpServer2.java   | 2 +
 .../test/org/apache/hadoop/package-info.java  | 39 +++++++++++++++++++
 .../org/apache/hadoop/util/DiskChecker.java   | 2 +
 .../apache/solr/cloud/hdfs/HdfsTestUtil.java  | 29 ++++++++++++++
 9 files changed, 81 insertions(+), 1 deletion(-)
 create mode 100644 solr/core/src/test/org/apache/hadoop/package-info.java

diff --git a/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java b/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java
index 98e281dc821..f49604f683a 100644
--- a/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java
+++ b/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java
@@ -74,6 +74,7 @@ import org.slf4j.LoggerFactory;
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class FileUtil {
+  public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
 
   private static final Logger LOG = LoggerFactory.getLogger(FileUtil.class);
 
diff --git a/solr/core/src/test/org/apache/hadoop/fs/HardLink.java b/solr/core/src/test/org/apache/hadoop/fs/HardLink.java
index f3a173e9336..3b546cedbb7 100644
--- a/solr/core/src/test/org/apache/hadoop/fs/HardLink.java
+++ b/solr/core/src/test/org/apache/hadoop/fs/HardLink.java
@@ -38,6 +38,7 @@ import static java.nio.file.Files.createLink;
  * efficient - and minimizes the impact of the extra buffer creations.
  */
 public class HardLink {
+  public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
 
   public final LinkStats linkStats; //not static
 
diff --git a/solr/core/src/test/org/apache/hadoop/fs/RawLocalFileSystem.java b/solr/core/src/test/org/apache/hadoop/fs/RawLocalFileSystem.java
index 4ee69b9cdbc..5373c03bcc0 100644
--- a/solr/core/src/test/org/apache/hadoop/fs/RawLocalFileSystem.java
+++ b/solr/core/src/test/org/apache/hadoop/fs/RawLocalFileSystem.java
@@ -62,6 +62,8 @@ import org.apache.hadoop.util.StringUtils;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class RawLocalFileSystem extends FileSystem {
+  public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
+
   static final URI NAME = URI.create("file:///");
   private Path workingDir;
   // Temporary workaround for HADOOP-9652.
diff --git a/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java b/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
index ff19c72a45a..bfc18a3073d 100644
--- a/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
+++ b/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
@@ -78,7 +78,9 @@ import com.google.common.annotations.VisibleForTesting;
  *
  * This class is synchronized by {@link FsVolumeImpl}.
*/ -class BlockPoolSlice { +public class BlockPoolSlice { + public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object(); + static final Logger LOG = LoggerFactory.getLogger(BlockPoolSlice.class); private final String bpid; diff --git a/solr/core/src/test/org/apache/hadoop/hdfs/server/namenode/NameNodeResourceChecker.java b/solr/core/src/test/org/apache/hadoop/hdfs/server/namenode/NameNodeResourceChecker.java index bf18facd2fb..0f500e1ef69 100644 --- a/solr/core/src/test/org/apache/hadoop/hdfs/server/namenode/NameNodeResourceChecker.java +++ b/solr/core/src/test/org/apache/hadoop/hdfs/server/namenode/NameNodeResourceChecker.java @@ -32,6 +32,8 @@ import org.apache.hadoop.conf.Configuration; */ @InterfaceAudience.Private public class NameNodeResourceChecker { + public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object(); + /** * Create a NameNodeResourceChecker, which will check the edits dirs and any * additional dirs to check set in conf. diff --git a/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java b/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java index 757b211d159..e7ae95c0456 100644 --- a/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java +++ b/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java @@ -116,6 +116,8 @@ import org.slf4j.LoggerFactory; @InterfaceAudience.Private @InterfaceStability.Evolving public final class HttpServer2 implements FilterContainer { + public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object(); + public static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class); public static final String HTTP_SCHEME = "http"; diff --git a/solr/core/src/test/org/apache/hadoop/package-info.java b/solr/core/src/test/org/apache/hadoop/package-info.java new file mode 100644 index 00000000000..f10ed779f79 --- /dev/null +++ b/solr/core/src/test/org/apache/hadoop/package-info.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * The classes under this package were copied from Apache Hadoop and modified + * to avoid certain issues during tests. The copied classes override the + * Apache Hadoop dependency versions during tests. + * + * HttpServer2 class was copied to avoid Jetty 9.4 dependency issues. Since + * Solr uses Jetty 9.4, Hadoop integration tests needs to use Jetty 9.4 as + * well. The HttpServer2 class should be removed when Hadoop is upgraded to + * 3.3.0 due to HADOOP-16152 upgrading Hadoop to Jetty 9.4. + * + * The classes BlockPoolSlice (HDFS-14251), DiskChecker, FileUtil, HardLink, + * NameNodeResourceChecker, and RawLocalFileSystem were copied to avoid + * issues with running Hadoop integration tests under the Java security + * manager. 
+ * Many of these classes use org.apache.hadoop.util.Shell,
+ * which shells out to try to do common filesystem checks.
+ *
+ * Over time these classes should be removed as upstream fixes to Apache
+ * Hadoop are made. When the Apache Hadoop dependency is upgraded in
+ * Solr, the classes should be compared against that version.
+ */
+package org.apache.hadoop;
+
diff --git a/solr/core/src/test/org/apache/hadoop/util/DiskChecker.java b/solr/core/src/test/org/apache/hadoop/util/DiskChecker.java
index 3043da9c107..54ba286470e 100644
--- a/solr/core/src/test/org/apache/hadoop/util/DiskChecker.java
+++ b/solr/core/src/test/org/apache/hadoop/util/DiskChecker.java
@@ -43,6 +43,8 @@ import org.slf4j.LoggerFactory;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class DiskChecker {
+  public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
+
   public static final Logger LOG = LoggerFactory.getLogger(DiskChecker.class);
 
   public static class DiskErrorException extends IOException {
diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java
index 2afee3598c6..98f82747a92 100644
--- a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java
+++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java
@@ -19,8 +19,10 @@ package org.apache.solr.cloud.hdfs;
 import java.io.File;
 import java.lang.invoke.MethodHandles;
 import java.net.URI;
+import java.util.Arrays;
 import java.util.Enumeration;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Timer;
@@ -33,12 +35,19 @@ import org.apache.commons.lang3.time.FastDateFormat;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.HardLink;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RawLocalFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.MiniDFSNNTopology;
+import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.BlockPoolSlice;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
+import org.apache.hadoop.hdfs.server.namenode.NameNodeResourceChecker;
 import org.apache.hadoop.hdfs.server.namenode.ha.HATestUtil;
+import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.io.nativeio.NativeIO;
+import org.apache.hadoop.util.DiskChecker;
 import org.apache.lucene.util.Constants;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.SolrTestCaseJ4;
@@ -54,6 +63,8 @@ import static org.apache.lucene.util.LuceneTestCase.random;
 public class HdfsTestUtil {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
+  private static final String SOLR_HACK_FOR_CLASS_VERIFICATION_FIELD = "SOLR_HACK_FOR_CLASS_VERIFICATION";
+
   private static final String LOGICAL_HOSTNAME = "ha-nn-uri-%d";
 
   private static final boolean HA_TESTING_ENABLED = false; // SOLR-XXX
@@ -76,6 +87,7 @@ public class HdfsTestUtil {
   public static void checkAssumptions() {
     ensureHadoopHomeNotSet();
     checkHadoopWindows();
+    checkOverriddenHadoopClasses();
     checkFastDateFormat();
     checkGeneratedIdMatches();
   }
@@ -103,6 +115,23 @@ public class HdfsTestUtil {
         !Constants.WINDOWS || NativeIO.isAvailable());
   }
 
+  /**
+   * Ensure that the tests are picking up the modified Hadoop classes
+   */
+  private static void checkOverriddenHadoopClasses() {
+    List<Class<?>> modifiedHadoopClasses = Arrays.asList(BlockPoolSlice.class, DiskChecker.class,
+        FileUtil.class, HardLink.class, HttpServer2.class, NameNodeResourceChecker.class, RawLocalFileSystem.class);
+    for (Class<?> clazz : modifiedHadoopClasses) {
+      try {
+        LuceneTestCase.assertNotNull("Field on " + clazz.getCanonicalName() + " should not have been null",
+            clazz.getField(SOLR_HACK_FOR_CLASS_VERIFICATION_FIELD));
+      } catch (NoSuchFieldException e) {
+        LuceneTestCase.fail("Expected to load Solr modified Hadoop class " + clazz.getCanonicalName() +
+            " , but it was not found.");
+      }
+    }
+  }
+
   /**
    * Checks that commons-lang3 FastDateFormat works with configured locale
    */

From a6e7c770c27f6467d8aa717d4b07c8682d09ebc8 Mon Sep 17 00:00:00 2001
From: Robert Muir
Date: Fri, 13 Dec 2019 03:50:15 -0500
Subject: [PATCH 4/5] SOLR-14064: remove some hadoop brain damage from build
 environment

Some permissions and build hacks were made on behalf of hadoop: hacks on
top of hacks. Now that the major problems such as classpath pollution and
hadoop test code are fixed, we can remove the hacks built on top of them.
---
 lucene/common-build.xml               | 1 -
 lucene/tools/junit4/solr-tests.policy | 7 -------
 2 files changed, 8 deletions(-)

diff --git a/lucene/common-build.xml b/lucene/common-build.xml
index e077c6b3c8c..4d4e555a7e7 100644
--- a/lucene/common-build.xml
+++ b/lucene/common-build.xml
@@ -1094,7 +1094,6 @@
-
diff --git a/lucene/tools/junit4/solr-tests.policy b/lucene/tools/junit4/solr-tests.policy
index 816c7c382e5..faa9ecb2317 100644
--- a/lucene/tools/junit4/solr-tests.policy
+++ b/lucene/tools/junit4/solr-tests.policy
@@ -34,15 +34,8 @@ grant {
   permission java.io.FilePermission "${clover.db.dir}${/}-", "read,write,delete";
   permission java.io.FilePermission "${tests.linedocsfile}", "read";
   // hadoop
-  permission java.io.FilePermission "${ant.library.dir}${/}-", "read";
-  permission java.io.FilePermission "${user.home}${/}.ant${/}lib${/}-", "read";
   permission java.io.FilePermission "${user.home}${/}hadoop-metrics2.properties", "read";
   permission java.io.FilePermission "${user.home}${/}hadoop-metrics2-namenode.properties", "read";
-  // kerberos
-  permission java.io.FilePermission "${user.home}${/}.java.login.config", "read";
-  // SolrTestCaseJ4 explicitly uses these
-  permission java.io.FilePermission "/dev/./urandom", "read";
-  permission java.io.FilePermission "/dev/random", "read";
   // DirectoryFactoryTest messes with these (wtf?)
permission java.io.FilePermission "/tmp/inst1/conf/solrcore.properties", "read"; permission java.io.FilePermission "/path/to/myinst/conf/solrcore.properties", "read"; From f083f40b28f4474dddab8cc19c65aef8db015cb1 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Fri, 13 Dec 2019 04:06:07 -0500 Subject: [PATCH 5/5] SOLR-14076: clean up static fields leak in nightly-only test --- .../cloud/autoscaling/SearchRateTriggerIntegrationTest.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/SearchRateTriggerIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/SearchRateTriggerIntegrationTest.java index 0c90548ab4f..a5b3cb63dd6 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/SearchRateTriggerIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/SearchRateTriggerIntegrationTest.java @@ -51,6 +51,7 @@ import org.apache.solr.common.util.Utils; import org.apache.solr.core.SolrResourceLoader; import org.apache.solr.util.LogLevel; import org.apache.zookeeper.data.Stat; +import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @@ -93,6 +94,11 @@ public class SearchRateTriggerIntegrationTest extends SolrCloudTestCase { } + @AfterClass + public static void cleanUpAfterClass() throws Exception { + cloudManager = null; + } + @Before public void beforeTest() throws Exception { cluster.deleteAllCollections();