SOLR-14417: Gradle build sometimes fails RE BlockPoolSlice (#1854)

* SOLR-14417: work around the compiler scope problem.

* Make the list modifiable.
commit ed930f4b29
parent 37e4dbef6d
Author: Dawid Weiss (committed by GitHub)
Date: 2020-09-10 22:25:33 +02:00
1 changed file with 16 additions and 3 deletions
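Why the list must be modifiable: Arrays.asList returns a fixed-size list backed by the argument array, so the add() call in the diff below would throw UnsupportedOperationException without the ArrayList copy. A minimal standalone sketch of the difference (FixedSizeListDemo is an illustrative name, not part of the commit):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class FixedSizeListDemo {
      public static void main(String[] args) {
        // Arrays.asList is a fixed-size view over the array; add() always fails.
        List<String> fixed = Arrays.asList("a", "b");
        try {
          fixed.add("c");
        } catch (UnsupportedOperationException expected) {
          System.out.println("fixed-size list rejects add()");
        }

        // Copying into an ArrayList gives a growable list, which is what the
        // commit does with modifiedHadoopClasses before adding BlockPoolSlice.
        List<String> growable = new ArrayList<>(Arrays.asList("a", "b"));
        growable.add("c");
        System.out.println(growable); // prints [a, b, c]
      }
    }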

@@ -19,6 +19,7 @@ package org.apache.solr.cloud.hdfs;
 import java.io.File;
 import java.lang.invoke.MethodHandles;
 import java.net.URI;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Enumeration;
 import java.util.HashMap;
@@ -41,7 +42,6 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RawLocalFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.MiniDFSNNTopology;
-import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.BlockPoolSlice;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeResourceChecker;
 import org.apache.hadoop.hdfs.server.namenode.ha.HATestUtil;
@@ -122,8 +122,21 @@ public class HdfsTestUtil {
    * Ensure that the tests are picking up the modified Hadoop classes
    */
   private static void checkOverriddenHadoopClasses() {
-    List<Class<?>> modifiedHadoopClasses = Arrays.asList(BlockPoolSlice.class, DiskChecker.class,
-        FileUtil.class, HardLink.class, HttpServer2.class, NameNodeResourceChecker.class, RawLocalFileSystem.class);
+    List<Class<?>> modifiedHadoopClasses = new ArrayList<>(Arrays.asList(
+        DiskChecker.class,
+        FileUtil.class,
+        HardLink.class,
+        HttpServer2.class,
+        NameNodeResourceChecker.class,
+        RawLocalFileSystem.class));
+
+    // Dodge weird scope errors from the compiler (SOLR-14417)
+    try {
+      modifiedHadoopClasses.add(
+          Class.forName("org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.BlockPoolSlice"));
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
     for (Class<?> clazz : modifiedHadoopClasses) {
       try {
         LuceneTestCase.assertNotNull("Field on " + clazz.getCanonicalName() + " should not have been null",
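The other half of the workaround moves the BlockPoolSlice reference from compile time to run time: Class.forName resolves the name only while the test executes, so the compiler never has to see the type in scope. A minimal sketch of that pattern against a JDK class (RuntimeClassLookupDemo and the ConcurrentHashMap lookup are illustrative stand-ins, not from the commit):

    import java.util.ArrayList;
    import java.util.List;

    public class RuntimeClassLookupDemo {
      public static void main(String[] args) {
        List<Class<?>> classes = new ArrayList<>();
        try {
          // The class name is an ordinary string here, so nothing about the
          // type has to be resolvable at compile time; a bad name surfaces
          // as a ClassNotFoundException at run time instead.
          classes.add(Class.forName("java.util.concurrent.ConcurrentHashMap"));
        } catch (ClassNotFoundException e) {
          throw new RuntimeException(e);
        }
        for (Class<?> clazz : classes) {
          System.out.println(clazz.getCanonicalName());
        }
      }
    }

The trade-off is the usual one for reflection: a misspelled class name is no longer a compile error, which is why the commit wraps the lookup in a try/catch that rethrows as RuntimeException so the test run still fails loudly.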