mirror of https://github.com/apache/lucene.git
SOLR-14048: Improve Hadoop test sanity checks
Signed-off-by: Kevin Risden <krisden@apache.org>
This commit is contained in:
parent
e155649026
commit
3ba005465a
|
@ -74,6 +74,7 @@ import org.slf4j.LoggerFactory;
|
|||
@InterfaceAudience.Public
|
||||
@InterfaceStability.Evolving
|
||||
public class FileUtil {
|
||||
public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
|
||||
|
||||
private static final Logger LOG = LoggerFactory.getLogger(FileUtil.class);
|
||||
|
||||
|
|
|
@ -38,6 +38,7 @@ import static java.nio.file.Files.createLink;
|
|||
* efficient - and minimizes the impact of the extra buffer creations.
|
||||
*/
|
||||
public class HardLink {
|
||||
public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
|
||||
|
||||
public final LinkStats linkStats; //not static
|
||||
|
||||
|
|
|
@ -62,6 +62,8 @@ import org.apache.hadoop.util.StringUtils;
|
|||
@InterfaceAudience.Public
|
||||
@InterfaceStability.Stable
|
||||
public class RawLocalFileSystem extends FileSystem {
|
||||
public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
|
||||
|
||||
static final URI NAME = URI.create("file:///");
|
||||
private Path workingDir;
|
||||
// Temporary workaround for HADOOP-9652.
|
||||
|
|
|
@ -78,7 +78,9 @@ import com.google.common.annotations.VisibleForTesting;
|
|||
*
|
||||
* This class is synchronized by {@link FsVolumeImpl}.
|
||||
*/
|
||||
class BlockPoolSlice {
|
||||
public class BlockPoolSlice {
|
||||
public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
|
||||
|
||||
static final Logger LOG = LoggerFactory.getLogger(BlockPoolSlice.class);
|
||||
|
||||
private final String bpid;
|
||||
|
|
|
@ -32,6 +32,8 @@ import org.apache.hadoop.conf.Configuration;
|
|||
*/
|
||||
@InterfaceAudience.Private
|
||||
public class NameNodeResourceChecker {
|
||||
public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
|
||||
|
||||
/**
|
||||
* Create a NameNodeResourceChecker, which will check the edits dirs and any
|
||||
* additional dirs to check set in <code>conf</code>.
|
||||
|
|
|
@ -116,6 +116,8 @@ import org.slf4j.LoggerFactory;
|
|||
@InterfaceAudience.Private
|
||||
@InterfaceStability.Evolving
|
||||
public final class HttpServer2 implements FilterContainer {
|
||||
public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
|
||||
|
||||
public static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class);
|
||||
|
||||
public static final String HTTP_SCHEME = "http";
|
||||
|
|
|
@ -0,0 +1,39 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
/**
|
||||
* The classes under this package were copied from Apache Hadoop and modified
|
||||
* to avoid certain issues during tests. The copied classes override the
|
||||
* Apache Hadoop dependency versions during tests.
|
||||
*
|
||||
* HttpServer2 class was copied to avoid Jetty 9.4 dependency issues. Since
|
||||
* Solr uses Jetty 9.4, Hadoop integration tests need to use Jetty 9.4 as
|
||||
* well. The HttpServer2 class should be removed when Hadoop is upgraded to
|
||||
* 3.3.0 due to HADOOP-16152 upgrading Hadoop to Jetty 9.4.
|
||||
*
|
||||
* The classes BlockPoolSlice (HDFS-14251), DiskChecker, FileUtil, HardLink,
|
||||
* NameNodeResourceChecker, and RawLocalFileSystem were copied to avoid
|
||||
* issues with running Hadoop integration tests under the Java security
|
||||
* manager. Many of these classes use org.apache.hadoop.util.Shell
|
||||
* which shells out to try to do common filesystem checks.
|
||||
*
|
||||
* Over time, these classes should be removed as upstream fixes to Apache
|
||||
* Hadoop are made. When the Apache Hadoop dependency is upgraded in
|
||||
* Solr, the classes should be compared against that version.
|
||||
*/
|
||||
package org.apache.hadoop;
|
||||
|
|
@ -43,6 +43,8 @@ import org.slf4j.LoggerFactory;
|
|||
@InterfaceAudience.Private
|
||||
@InterfaceStability.Unstable
|
||||
public class DiskChecker {
|
||||
public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
|
||||
|
||||
public static final Logger LOG = LoggerFactory.getLogger(DiskChecker.class);
|
||||
|
||||
public static class DiskErrorException extends IOException {
|
||||
|
|
|
@ -19,8 +19,10 @@ package org.apache.solr.cloud.hdfs;
|
|||
import java.io.File;
|
||||
import java.lang.invoke.MethodHandles;
|
||||
import java.net.URI;
|
||||
import java.util.Arrays;
|
||||
import java.util.Enumeration;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Timer;
|
||||
|
@ -33,12 +35,19 @@ import org.apache.commons.lang3.time.FastDateFormat;
|
|||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.FSDataOutputStream;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.FileUtil;
|
||||
import org.apache.hadoop.fs.HardLink;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.fs.RawLocalFileSystem;
|
||||
import org.apache.hadoop.hdfs.MiniDFSCluster;
|
||||
import org.apache.hadoop.hdfs.MiniDFSNNTopology;
|
||||
import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.BlockPoolSlice;
|
||||
import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
|
||||
import org.apache.hadoop.hdfs.server.namenode.NameNodeResourceChecker;
|
||||
import org.apache.hadoop.hdfs.server.namenode.ha.HATestUtil;
|
||||
import org.apache.hadoop.http.HttpServer2;
|
||||
import org.apache.hadoop.io.nativeio.NativeIO;
|
||||
import org.apache.hadoop.util.DiskChecker;
|
||||
import org.apache.lucene.util.Constants;
|
||||
import org.apache.lucene.util.LuceneTestCase;
|
||||
import org.apache.solr.SolrTestCaseJ4;
|
||||
|
@ -54,6 +63,8 @@ import static org.apache.lucene.util.LuceneTestCase.random;
|
|||
public class HdfsTestUtil {
|
||||
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
|
||||
|
||||
private static final String SOLR_HACK_FOR_CLASS_VERIFICATION_FIELD = "SOLR_HACK_FOR_CLASS_VERIFICATION";
|
||||
|
||||
private static final String LOGICAL_HOSTNAME = "ha-nn-uri-%d";
|
||||
|
||||
private static final boolean HA_TESTING_ENABLED = false; // SOLR-XXX
|
||||
|
@ -76,6 +87,7 @@ public class HdfsTestUtil {
|
|||
public static void checkAssumptions() {
|
||||
ensureHadoopHomeNotSet();
|
||||
checkHadoopWindows();
|
||||
checkOverriddenHadoopClasses();
|
||||
checkFastDateFormat();
|
||||
checkGeneratedIdMatches();
|
||||
}
|
||||
|
@ -103,6 +115,23 @@ public class HdfsTestUtil {
|
|||
!Constants.WINDOWS || NativeIO.isAvailable());
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure that the tests are picking up the modified Hadoop classes
|
||||
*/
|
||||
private static void checkOverriddenHadoopClasses() {
|
||||
List<Class<?>> modifiedHadoopClasses = Arrays.asList(BlockPoolSlice.class, DiskChecker.class,
|
||||
FileUtil.class, HardLink.class, HttpServer2.class, NameNodeResourceChecker.class, RawLocalFileSystem.class);
|
||||
for (Class<?> clazz : modifiedHadoopClasses) {
|
||||
try {
|
||||
LuceneTestCase.assertNotNull("Field on " + clazz.getCanonicalName() + " should not have been null",
|
||||
clazz.getField(SOLR_HACK_FOR_CLASS_VERIFICATION_FIELD));
|
||||
} catch (NoSuchFieldException e) {
|
||||
LuceneTestCase.fail("Expected to load Solr modified Hadoop class " + clazz.getCanonicalName() +
|
||||
" , but it was not found.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks that commons-lang3 FastDateFormat works with configured locale
|
||||
*/
|
||||
|
|
Loading…
Reference in New Issue