HBASE-13716 use HdfsConstants instead of deprecated FSConstants.

Conflicts:
	hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
This commit is contained in:
Sean Busbey 2015-05-20 09:09:00 -05:00 committed by Sean Busbey
parent d28091eead
commit d5f57027b8
3 changed files with 8 additions and 7 deletions

View File

@@ -1570,7 +1570,8 @@ public class HMaster extends HRegionServer implements MasterServices, Server {
       LOG.fatal("Failed to become active master", t);
       // HBASE-5680: Likely hadoop23 vs hadoop 20.x/1.x incompatibility
       if (t instanceof NoClassDefFoundError &&
-          t.getMessage().contains("org/apache/hadoop/hdfs/protocol/FSConstants$SafeModeAction")) {
+          t.getMessage()
+              .contains("org/apache/hadoop/hdfs/protocol/HdfsConstants$SafeModeAction")) {
         // improved error message for this special case
         abort("HBase is having a problem with its Hadoop jars.  You may need to "
           + "recompile HBase against Hadoop version "

View File

@@ -505,7 +505,7 @@ public abstract class FSUtils {
   /**
    * We use reflection because {@link DistributedFileSystem#setSafeMode(
-   * FSConstants.SafeModeAction action, boolean isChecked)} is not in hadoop 1.1
+   * HdfsConstants.SafeModeAction action, boolean isChecked)} is not in hadoop 1.1
    *
    * @param dfs
    * @return whether we're in safe mode
@@ -515,15 +515,15 @@ public abstract class FSUtils {
     boolean inSafeMode = false;
     try {
       Method m = DistributedFileSystem.class.getMethod("setSafeMode", new Class<?> []{
-          org.apache.hadoop.hdfs.protocol.FSConstants.SafeModeAction.class, boolean.class});
+          org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.class, boolean.class});
       inSafeMode = (Boolean) m.invoke(dfs,
-        org.apache.hadoop.hdfs.protocol.FSConstants.SafeModeAction.SAFEMODE_GET, true);
+        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET, true);
     } catch (Exception e) {
       if (e instanceof IOException) throw (IOException) e;
       // Check whether dfs is on safemode.
       inSafeMode = dfs.setSafeMode(
-        org.apache.hadoop.hdfs.protocol.FSConstants.SafeModeAction.SAFEMODE_GET);
+        org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction.SAFEMODE_GET);
     }
     return inSafeMode;
   }

View File

@@ -57,7 +57,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.hdfs.protocol.FSConstants;
+import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -383,7 +383,7 @@ public class TestWALFactory {
     // Stop the cluster.  (ensure restart since we're sharing MiniDFSCluster)
     try {
       DistributedFileSystem dfs = (DistributedFileSystem) cluster.getFileSystem();
-      dfs.setSafeMode(FSConstants.SafeModeAction.SAFEMODE_ENTER);
+      dfs.setSafeMode(HdfsConstants.SafeModeAction.SAFEMODE_ENTER);
       TEST_UTIL.shutdownMiniDFSCluster();
       try {
         // wal.writer.close() will throw an exception,