From ed930f4b29f473dd3e7e72d099d086a9c81f71d7 Mon Sep 17 00:00:00 2001 From: Dawid Weiss Date: Thu, 10 Sep 2020 22:25:33 +0200 Subject: [PATCH] SOLR-14417: Gradle build sometimes fails RE BlockPoolSlice (#1854) * SOLR-14417: workaround the compiler scope problem. * Make the list modifiable. --- .../apache/solr/cloud/hdfs/HdfsTestUtil.java | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java index 873fe17d747..3872167d5ca 100644 --- a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java +++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java @@ -19,6 +19,7 @@ package org.apache.solr.cloud.hdfs; import java.io.File; import java.lang.invoke.MethodHandles; import java.net.URI; +import java.util.ArrayList; import java.util.Arrays; import java.util.Enumeration; import java.util.HashMap; @@ -41,7 +42,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.RawLocalFileSystem; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.MiniDFSNNTopology; -import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.BlockPoolSlice; import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter; import org.apache.hadoop.hdfs.server.namenode.NameNodeResourceChecker; import org.apache.hadoop.hdfs.server.namenode.ha.HATestUtil; @@ -122,8 +122,21 @@ public class HdfsTestUtil { * Ensure that the tests are picking up the modified Hadoop classes */ private static void checkOverriddenHadoopClasses() { - List<Class<?>> modifiedHadoopClasses = Arrays.asList(BlockPoolSlice.class, DiskChecker.class, - FileUtil.class, HardLink.class, HttpServer2.class, NameNodeResourceChecker.class, RawLocalFileSystem.class); + List<Class<?>> modifiedHadoopClasses = new ArrayList<>(Arrays.asList( + DiskChecker.class, + FileUtil.class, + HardLink.class, + HttpServer2.class, + NameNodeResourceChecker.class, + 
RawLocalFileSystem.class)); + // Dodge weird scope errors from the compiler (SOLR-14417) + try { + modifiedHadoopClasses.add( + Class.forName("org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.BlockPoolSlice")); + } catch (Exception e) { + throw new RuntimeException(e); + } + for (Class<?> clazz : modifiedHadoopClasses) { try { LuceneTestCase.assertNotNull("Field on " + clazz.getCanonicalName() + " should not have been null",