From 3108c9a01449e967b419d4b1b71436bd654b3dbb Mon Sep 17 00:00:00 2001
From: Zhihong Yu
Date: Mon, 21 Apr 2014 20:46:52 +0000
Subject: [PATCH] HBASE-11032 Replace deprecated methods in FileSystem with
 their replacements (Gustavo)

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1588979 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hbase/util/CoprocessorClassLoader.java      |  2 +-
 .../hadoop/hbase/util/DynamicClassLoader.java   |  2 +-
 .../LongTermArchivingHFileCleaner.java          |  2 +-
 .../apache/hadoop/hbase/io/hfile/HFile.java     |  2 +-
 .../hbase/mapreduce/HLogInputFormat.java        |  2 +-
 .../mapreduce/LoadIncrementalHFiles.java        |  4 ++--
 .../hbase/master/cleaner/CleanerChore.java      |  2 +-
 .../hbase/regionserver/CompactionTool.java      |  2 +-
 .../hadoop/hbase/regionserver/StoreFile.java    |  2 +-
 .../hbase/regionserver/StoreFileInfo.java       |  2 +-
 .../hadoop/hbase/regionserver/wal/FSHLog.java   |  2 +-
 .../access/SecureBulkLoadEndpoint.java          |  2 +-
 .../hadoop/hbase/snapshot/ExportSnapshot.java   |  2 +-
 .../hadoop/hbase/util/FSRegionScanner.java      |  2 +-
 .../org/apache/hadoop/hbase/util/FSUtils.java   | 16 ++++++++--------
 .../apache/hadoop/hbase/util/HBaseFsck.java     |  2 +-
 .../hadoop/hbase/HBaseTestingUtility.java       |  2 +-
 .../hbase/backup/TestHFileArchiving.java        |  2 +-
 .../TestZooKeeperTableArchiveClient.java        |  4 ++--
 .../apache/hadoop/hbase/io/TestFileLink.java    | 10 +++++-----
 .../handler/TestTableDeleteFamilyHandler.java   |  8 ++++----
 .../hbase/migration/TestUpgradeTo96.java        |  2 +-
 .../regionserver/HFileReadWriteTest.java        |  4 ++--
 .../hbase/regionserver/TestHRegion.java         |  2 +-
 .../regionserver/wal/TestLogRolling.java        | 22 ++++++++++++++--------
 .../hbase/rest/PerformanceEvaluation.java       |  2 +-
 .../security/access/TestAccessController.java   |  2 +-
 .../hbase/snapshot/SnapshotTestingUtils.java    |  2 +-
 .../hbase/snapshot/TestExportSnapshot.java      |  4 ++--
 .../hbase/util/HFileArchiveTestingUtil.java     |  2 +-
 .../hadoop/hbase/util/TestFSVisitor.java        |  2 +-
 .../hadoop/hbase/util/TestHBaseFsck.java        |  2 +-
 32 files changed, 63 insertions(+), 57 deletions(-)

diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java
index 745896a6ed5..0e47b7830b0 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java
@@ -155,7 +155,7 @@ public class CoprocessorClassLoader extends ClassLoaderBase {
       FileSystem fs = FileSystem.getLocal(conf);
       fs.delete(parentDir, true); // it's ok if the dir doesn't exist now
       parentDirLockSet.add(parentDirStr);
-      if (!fs.mkdirs(parentDir) && !fs.getFileStatus(parentDir).isDir()) {
+      if (!fs.mkdirs(parentDir) && !fs.getFileStatus(parentDir).isDirectory()) {
         throw new RuntimeException("Failed to create local dir " + parentDirStr
           + ", CoprocessorClassLoader failed to init");
       }
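Nearly every hunk in this patch is the same mechanical swap as the one above: FileStatus.isDir() is deprecated in Hadoop's filesystem API in favor of FileStatus.isDirectory(). A minimal standalone sketch of the pattern, for reference (illustrative only; the listed directory is hypothetical and not part of this patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class IsDirectoryExample {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.getLocal(new Configuration());
        // Hypothetical directory, used only to illustrate the API change.
        for (FileStatus status : fs.listStatus(new Path("/tmp"))) {
          // Deprecated: status.isDir()
          // Replacement used throughout this patch:
          if (status.isDirectory()) {
            System.out.println(status.getPath() + " is a directory");
          }
        }
      }
    }

Behavior is unchanged; isDirectory() simply replaces the deprecated name and sits alongside isFile() and isSymlink() on newer Hadoop releases.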
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
index b8a221fea9d..3095ee6b263 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java
@@ -185,7 +185,7 @@ public class DynamicClassLoader extends ClassLoaderBase {
     }

     for (FileStatus status: statuses) {
-      if (status.isDir()) continue; // No recursive lookup
+      if (status.isDirectory()) continue; // No recursive lookup
       Path path = status.getPath();
       String fileName = path.getName();
       if (!fileName.endsWith(".jar")) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java
index 0b9a6ec8369..ec5a48f4ae5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java
@@ -51,7 +51,7 @@ public class LongTermArchivingHFileCleaner extends BaseHFileCleanerDelegate {
   public boolean isFileDeletable(FileStatus fStat) {
     try {
       // if its a directory, then it can be deleted
-      if (fStat.isDir()) return true;
+      if (fStat.isDirectory()) return true;

       Path file = fStat.getPath();
       // check to see if
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
index 7a906f94f24..2b2f9d08e7a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
@@ -879,7 +879,7 @@ public class HFile {
     for(FileStatus dir : familyDirs) {
       FileStatus[] files = fs.listStatus(dir.getPath());
       for (FileStatus file : files) {
-        if (!file.isDir()) {
+        if (!file.isDirectory()) {
           res.add(file.getPath());
         }
       }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HLogInputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HLogInputFormat.java
index 7fa852e89e4..67019ddfb74 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HLogInputFormat.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HLogInputFormat.java
@@ -237,7 +237,7 @@ public class HLogInputFormat extends InputFormat {
     FileStatus[] files = fs.listStatus(dir);
     if (files == null) return Collections.emptyList();
     for (FileStatus file : files) {
-      if (file.isDir()) {
+      if (file.isDirectory()) {
         // recurse into sub directories
         result.addAll(getFiles(fs, file.getPath(), startTime, endTime));
       } else {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
index 4fb84208a01..aabac3fc681 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
@@ -181,7 +181,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
     }

     for (FileStatus stat : familyDirStatuses) {
-      if (!stat.isDir()) {
+      if (!stat.isDirectory()) {
         LOG.warn("Skipping non-directory " + stat.getPath());
         continue;
       }
@@ -823,7 +823,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
     TreeMap map = new TreeMap(Bytes.BYTES_COMPARATOR);

     for (FileStatus stat : familyDirStatuses) {
-      if (!stat.isDir()) {
+      if (!stat.isDirectory()) {
         LOG.warn("Skipping non-directory " + stat.getPath());
         continue;
       }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
index 5282dc2691d..6e2f4fd2076 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
@@ -144,7 +144,7 @@ public abstract class CleanerChore extends Chore
     List files = Lists.newArrayListWithCapacity(entries.length);
     for (FileStatus child : entries) {
       Path path = child.getPath();
-      if (child.isDir()) {
+      if (child.isDirectory()) {
         // for each subdirectory delete it and all entries if possible
         if (!checkAndDeleteDirectory(path)) {
           allEntriesDeleted = false;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
index 93512d6e916..5a4c904b1d1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
@@ -408,7 +408,7 @@ public class CompactionTool extends Configured implements Tool {
       } else if (!opt.startsWith("-")) {
         Path path = new Path(opt);
         FileStatus status = fs.getFileStatus(path);
-        if (!status.isDir()) {
+        if (!status.isDirectory()) {
           printUsage("Specified path is not a directory. path=" + path);
           return 1;
         }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
index ebbc83667b5..c278792e1ea 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
@@ -632,7 +632,7 @@ public class StoreFile {
    */
   public static Path getUniqueFile(final FileSystem fs, final Path dir)
       throws IOException {
-    if (!fs.getFileStatus(dir).isDir()) {
+    if (!fs.getFileStatus(dir).isDirectory()) {
       throw new IOException("Expecting " + dir.toString() + " to be a directory");
     }

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
index 2d8a8ea6a58..49edadbf63b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
@@ -344,7 +344,7 @@ public class StoreFileInfo {
       throws IOException {
     final Path p = fileStatus.getPath();

-    if (fileStatus.isDir())
+    if (fileStatus.isDirectory())
       return false;

    // Check for empty hfile. Should never be the case but can happen
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
index a65841c36f7..b034bbdfa93 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
@@ -1613,7 +1613,7 @@ class FSHLog implements HLog, Syncable {
     if (!fs.exists(p)) {
       throw new FileNotFoundException(p.toString());
     }
-    if (!fs.getFileStatus(p).isDir()) {
+    if (!fs.getFileStatus(p).isDirectory()) {
       throw new IOException(p + " is not a directory");
     }

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
index 7f845cc8ac9..b85aa212329 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
@@ -382,7 +382,7 @@ public class SecureBulkLoadEndpoint extends SecureBulkLoadService
    */
   private boolean isFile(Path p) throws IOException {
     FileStatus status = srcFs.getFileStatus(p);
-    boolean isFile = !status.isDir();
+    boolean isFile = !status.isDirectory();
     try {
       isFile = isFile && !(Boolean)Methods.call(FileStatus.class, status, "isSymlink", null, null);
     } catch (Exception e) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
index 6c7f488375c..3568b112da0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
@@ -145,7 +145,7 @@ public final class ExportSnapshot extends Configured implements Tool {
       }

       // Use the default block size of the outputFs if bigger
-      int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(), BUFFER_SIZE);
+      int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);
       bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);
       LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));
     }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java
index 7506beca94d..497a12ee552 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSRegionScanner.java
@@ -86,7 +86,7 @@ class FSRegionScanner implements Runnable {

       // for each cf, get all the blocks information
       for (FileStatus cfStatus : cfList) {
-        if (!cfStatus.isDir()) {
+        if (!cfStatus.isDirectory()) {
           // skip because this is not a CF directory
           continue;
         }
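The ExportSnapshot hunk above and the first two FSUtils hunks below make the same move for filesystem defaults: the zero-argument getDefaultBlockSize() and getDefaultReplication() are deprecated in favor of overloads that take a Path, so the default can vary by path (as in viewfs-style federated namespaces). A minimal sketch of the replacement calls (illustrative only; the path is hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class DefaultsByPath {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Path p = new Path("/hbase");  // hypothetical path
        // Deprecated: fs.getDefaultBlockSize() / fs.getDefaultReplication()
        long blockSize = fs.getDefaultBlockSize(p);      // default for this path
        short replication = fs.getDefaultReplication(p); // ditto
        System.out.println("blockSize=" + blockSize + " replication=" + replication);
      }
    }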
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
index ae22c12210e..aec5b7de45a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
@@ -204,7 +204,7 @@ public abstract class FSUtils {
       m = null; // could happen on setAccessible()
     }
     if (m == null) {
-      return fs.getDefaultBlockSize();
+      return fs.getDefaultBlockSize(path);
     } else {
       try {
         Object ret = m.invoke(fs, path);
@@ -238,7 +238,7 @@ public abstract class FSUtils {
       m = null; // could happen on setAccessible()
     }
     if (m == null) {
-      return fs.getDefaultReplication();
+      return fs.getDefaultReplication(path);
     } else {
       try {
         Object ret = m.invoke(fs, path);
@@ -1220,7 +1220,7 @@ public abstract class FSUtils {
         if (blacklist.contains(p.getName().toString())) {
           isValid = false;
         } else {
-          isValid = fs.getFileStatus(p).isDir();
+          isValid = fs.getFileStatus(p).isDirectory();
         }
       } catch (IOException e) {
         LOG.warn("An error occurred while verifying if [" + p.toString()
@@ -1364,7 +1364,7 @@ public abstract class FSUtils {
       }

       try {
-        return fs.getFileStatus(rd).isDir();
+        return fs.getFileStatus(rd).isDirectory();
       } catch (IOException ioe) {
         // Maybe the file was moved or the fs was disconnected.
         LOG.warn("Skipping file " + rd +" due to IOException", ioe);
@@ -1414,7 +1414,7 @@ public abstract class FSUtils {
       }

       try {
-        return fs.getFileStatus(rd).isDir();
+        return fs.getFileStatus(rd).isDirectory();
       } catch (IOException ioe) {
         // Maybe the file was moved or the fs was disconnected.
         LOG.warn("Skipping file " + rd +" due to IOException", ioe);
@@ -1463,7 +1463,7 @@ public abstract class FSUtils {

     try {
       // only files
-      return !fs.getFileStatus(rd).isDir();
+      return !fs.getFileStatus(rd).isDirectory();
     } catch (IOException ioe) {
       // Maybe the file was moved or the fs was disconnected.
       LOG.warn("Skipping file " + rd +" due to IOException", ioe);
@@ -1686,7 +1686,7 @@ public abstract class FSUtils {
     if (files == null) return;

     for (FileStatus file : files) {
-      if (file.isDir()) {
+      if (file.isDirectory()) {
         LOG.debug(prefix + file.getPath().getName() + "/");
         logFSTree(LOG, fs, file.getPath(), prefix + "---");
       } else {
@@ -1836,7 +1836,7 @@ public abstract class FSUtils {
         continue;
       }

-      if (!regionStatus.isDir()) {
+      if (!regionStatus.isDirectory()) {
         continue;
       }

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index c73d8a3ba82..1fcbc5c873a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -4109,7 +4109,7 @@ public class HBaseFsck extends Configured {
       return;
     }

-    if (fs.getFileStatus(p).isDir()) {
+    if (fs.getFileStatus(p).isDirectory()) {
       FileStatus[] fss= fs.listStatus(p);
       for (FileStatus status : fss) {
         debugLsr(conf, status.getPath(), errors);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index cc50cd2d51c..12c86bfd6f1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -1972,7 +1972,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
     Path tableDir = new Path(getDefaultRootDirPath().toString() +
         System.getProperty("file.separator") + htd.getTableName() +
         System.getProperty("file.separator") + regionToDeleteInFS);
-    FileSystem.get(c).delete(tableDir);
+    FileSystem.get(c).delete(tableDir, true);
     // flush cache of regions
     HConnection conn = table.getConnection();
     conn.clearRegionCache();
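The HBaseTestingUtility hunk above is the first of several delete fixes in this patch: the single-argument FileSystem.delete(Path) is deprecated in favor of delete(Path, boolean recursive), which makes callers state explicitly whether a non-empty directory may be removed. A minimal sketch (illustrative only; the path is hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class ExplicitDelete {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.getLocal(new Configuration());
        Path dir = new Path("/tmp/scratch-dir");  // hypothetical path
        // Deprecated: fs.delete(dir)
        // Replacement: pass recursive explicitly; true also removes non-empty directories.
        boolean deleted = fs.delete(dir, true);
        System.out.println("deleted=" + deleted);
      }
    }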
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
index b5cfa9af2b6..1694d095072 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
@@ -422,7 +422,7 @@ public class TestHFileArchiving {
     if (files == null || files.length == 0) return fileNames;

     for (FileStatus file : files) {
-      if (file.isDir()) {
+      if (file.isDirectory()) {
         recurseOnFiles(fs, FSUtils.listStatus(fs, file.getPath(), null), fileNames);
       } else fileNames.add(file.getPath().getName());
     }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
index 8745e638f87..cbe47b60159 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/example/TestZooKeeperTableArchiveClient.java
@@ -362,7 +362,7 @@ public class TestZooKeeperTableArchiveClient {

     List allFiles = new ArrayList();
     for (FileStatus file : files) {
-      if (file.isDir()) {
+      if (file.isDirectory()) {
         List subFiles = getAllFiles(fs, file.getPath());
         if (subFiles != null) allFiles.addAll(subFiles);
         continue;
@@ -414,4 +414,4 @@ public class TestZooKeeperTableArchiveClient {
     // stop the cleaner
     stop.stop("");
   }
-}
\ No newline at end of file
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java
index b40a849160f..570727a5e7a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java
@@ -128,8 +128,8 @@ public class TestFileLink {
       assertEquals(256 << 20, size);
     } finally {
       in.close();
-      if (fs.exists(originalPath)) fs.delete(originalPath);
-      if (fs.exists(archivedPath)) fs.delete(archivedPath);
+      if (fs.exists(originalPath)) fs.delete(originalPath, true);
+      if (fs.exists(archivedPath)) fs.delete(archivedPath, true);
     }
   }

@@ -173,19 +173,19 @@ public class TestFileLink {
       // Switch to file 1
       n = in.read(data);
       dataVerify(data, n, (byte)0);
-      fs.delete(files.get(0));
+      fs.delete(files.get(0), true);
       skipBuffer(in, (byte)0);

       // Switch to file 2
       n = in.read(data);
       dataVerify(data, n, (byte)1);
-      fs.delete(files.get(1));
+      fs.delete(files.get(1), true);
       skipBuffer(in, (byte)1);

       // Switch to file 3
       n = in.read(data);
       dataVerify(data, n, (byte)2);
-      fs.delete(files.get(2));
+      fs.delete(files.get(2), true);
       skipBuffer(in, (byte)2);

       // No more files available
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/handler/TestTableDeleteFamilyHandler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/handler/TestTableDeleteFamilyHandler.java
index 9d828b3cb1d..8f90fa60d89 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/handler/TestTableDeleteFamilyHandler.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/handler/TestTableDeleteFamilyHandler.java
@@ -120,11 +120,11 @@ public class TestTableDeleteFamilyHandler {
     // 4 - Check if all the 3 column families exist in FS
     FileStatus[] fileStatus = fs.listStatus(tableDir);
     for (int i = 0; i < fileStatus.length; i++) {
-      if (fileStatus[i].isDir() == true) {
+      if (fileStatus[i].isDirectory() == true) {
         FileStatus[] cf = fs.listStatus(fileStatus[i].getPath());
         int k = 1;
         for (int j = 0; j < cf.length; j++) {
-          if (cf[j].isDir() == true
+          if (cf[j].isDirectory() == true
               && cf[j].getPath().getName().startsWith(".") == false) {
             assertEquals(cf[j].getPath().getName(), "cf" + k);
             k++;
@@ -147,10 +147,10 @@ public class TestTableDeleteFamilyHandler {
     // 6 - Check if the second column family is gone from the FS
     fileStatus = fs.listStatus(tableDir);
     for (int i = 0; i < fileStatus.length; i++) {
-      if (fileStatus[i].isDir() == true) {
+      if (fileStatus[i].isDirectory() == true) {
         FileStatus[] cf = fs.listStatus(fileStatus[i].getPath());
         for (int j = 0; j < cf.length; j++) {
-          if (cf[j].isDir() == true) {
+          if (cf[j].isDirectory() == true) {
             assertFalse(cf[j].getPath().getName().equals("cf2"));
           }
         }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/migration/TestUpgradeTo96.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/migration/TestUpgradeTo96.java
index e0277068538..ab293f21e31 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/migration/TestUpgradeTo96.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/migration/TestUpgradeTo96.java
@@ -174,7 +174,7 @@ public class TestUpgradeTo96 {
     }
     FileStatus[] cfs = fs.listStatus(targetRegion);
     for (FileStatus f : cfs) {
-      if (f.isDir()) {
+      if (f.isDirectory()) {
         columnFamilyDir = f.getPath();
         break;
       }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java
index dd16e5e1348..fe1fa551b9d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java
@@ -295,14 +295,14 @@ public class HFileReadWriteTest {
         return false;
       }

-      if (fs.getFileStatus(path).isDir()) {
+      if (fs.getFileStatus(path).isDirectory()) {
         LOG.error(inputFileName + " is a directory");
         return false;
       }
     }

     if (outputDir != null &&
-        (!fs.exists(outputDir) || !fs.getFileStatus(outputDir).isDir())) {
+        (!fs.exists(outputDir) || !fs.getFileStatus(outputDir).isDirectory())) {
       LOG.error(outputDir.toString() + " does not exist or is not a " +
           "directory");
       return false;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 1edcba40601..195834f347c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -3858,7 +3858,7 @@ public class TestHRegion {
         fs.exists(regionInfoFile));

     // Remove the .regioninfo file and verify is recreated on region open
-    fs.delete(regionInfoFile);
+    fs.delete(regionInfoFile, true);
     assertFalse(HRegionFileSystem.REGION_INFO_FILE + " should be removed from the region dir",
         fs.exists(regionInfoFile));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
index d218c5020a9..487ac63680e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
@@ -323,8 +323,9 @@ public class TestLogRolling {
   public void testLogRollOnDatanodeDeath() throws Exception {
     TEST_UTIL.ensureSomeRegionServersAvailable(2);
     assertTrue("This test requires HLog file replication set to 2.",
-      fs.getDefaultReplication() == 2);
-    LOG.info("Replication=" + fs.getDefaultReplication());
+      fs.getDefaultReplication(TEST_UTIL.getDataTestDirOnTestFS()) == 2);
+    LOG.info("Replication=" +
+      fs.getDefaultReplication(TEST_UTIL.getDataTestDirOnTestFS()));

     this.server = cluster.getRegionServer(0);
     this.log = server.getWAL();
@@ -362,8 +363,10 @@ public class TestLogRolling {
     }

     assertTrue("DataNodes " + dfsCluster.getDataNodes().size() +
-      " default replication " + fs.getDefaultReplication(),
-      dfsCluster.getDataNodes().size() >= fs.getDefaultReplication() + 1);
+      " default replication " +
+      fs.getDefaultReplication(TEST_UTIL.getDataTestDirOnTestFS()),
+      dfsCluster.getDataNodes().size() >=
+      fs.getDefaultReplication(TEST_UTIL.getDataTestDirOnTestFS()) + 1);

     writeData(table, 2);

@@ -375,7 +378,8 @@ public class TestLogRolling {
     assertTrue("The log shouldn't have rolled yet",
       oldFilenum == ((FSHLog) log).getFilenum());
     final DatanodeInfo[] pipeline = getPipeline(log);
-    assertTrue(pipeline.length == fs.getDefaultReplication());
+    assertTrue(pipeline.length ==
+      fs.getDefaultReplication(TEST_UTIL.getDataTestDirOnTestFS()));

     // kill a datanode in the pipeline to force a log roll on the next sync()
     // This function is synchronous, when it returns the node is killed.
@@ -410,7 +414,8 @@ public class TestLogRolling {
     batchWriteAndWait(table, 13, true, 10000);
     assertTrue("New log file should have the default replication instead of " +
       ((FSHLog) log).getLogReplication(),
-      ((FSHLog) log).getLogReplication() == fs.getDefaultReplication());
+      ((FSHLog) log).getLogReplication() ==
+      fs.getDefaultReplication(TEST_UTIL.getDataTestDirOnTestFS()));
     assertTrue("LowReplication Roller should've been enabled",
       log.isLowReplicationRollEnabled());
   }
@@ -424,8 +429,9 @@ public class TestLogRolling {
   public void testLogRollOnPipelineRestart() throws Exception {
     LOG.info("Starting testLogRollOnPipelineRestart");
     assertTrue("This test requires HLog file replication.",
-      fs.getDefaultReplication() > 1);
-    LOG.info("Replication=" + fs.getDefaultReplication());
+      fs.getDefaultReplication(TEST_UTIL.getDataTestDirOnTestFS()) > 1);
+    LOG.info("Replication=" +
+      fs.getDefaultReplication(TEST_UTIL.getDataTestDirOnTestFS()));

     // When the hbase:meta table can be opened, the region servers are running
     new HTable(TEST_UTIL.getConfiguration(), TableName.META_TABLE_NAME);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
index 61f3dbceea3..f1bae8c5fca 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
@@ -338,7 +338,7 @@ public class PerformanceEvaluation extends Configured implements Tool {

       List splitList = new ArrayList();
       for (FileStatus file: listStatus(job)) {
-        if (file.isDir()) {
+        if (file.isDirectory()) {
           continue;
         }
         Path path = file.getPath();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
index dc7a5477cd1..03142c55a6f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
@@ -900,7 +900,7 @@ public class TestAccessController extends SecureTestUtil {
     }

     public void setPermission(Path dir, FsPermission perm) throws IOException {
-      if(!fs.getFileStatus(dir).isDir()) {
+      if(!fs.getFileStatus(dir).isDirectory()) {
         fs.setPermission(dir,perm);
       }
       else {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
index ac9efff1e64..19cc6bfae12 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
@@ -419,7 +419,7 @@ public class SnapshotTestingUtils {
         throws IOException {
       HFileLink link = HFileLink.create(util.getConfiguration(), table, region, family, hfile);
       if (corruptedFiles.size() % 2 == 0) {
-        fs.delete(link.getAvailablePath(fs));
+        fs.delete(link.getAvailablePath(fs), true);
         corruptedFiles.add(hfile);
       }
     }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
index 9b21f36da40..94b26aaba8e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
@@ -274,7 +274,7 @@ public class TestExportSnapshot {
     assertEquals(filesExpected, rootFiles.length);
     for (FileStatus fileStatus: rootFiles) {
       String name = fileStatus.getPath().getName();
-      assertTrue(fileStatus.isDir());
+      assertTrue(fileStatus.isDirectory());
       assertTrue(name.equals(HConstants.SNAPSHOT_DIR_NAME) ||
                  name.equals(HConstants.HFILE_ARCHIVE_DIRECTORY));
     }
@@ -381,7 +381,7 @@ public class TestExportSnapshot {
     if (list != null) {
       for (FileStatus fstat: list) {
         LOG.debug(fstat.getPath());
-        if (fstat.isDir()) {
+        if (fstat.isDirectory()) {
           files.addAll(listFiles(fs, root, fstat.getPath()));
         } else {
           files.add(fstat.getPath().toString().substring(rootPrefix));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java
index 386fea6b2ad..7b5aed57d12 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java
@@ -158,7 +158,7 @@ public class HFileArchiveTestingUtil {
         Path parent = f.getPath().getParent();
         String shortName = name.split("[.]")[0];
         Path modPath = new Path(parent, shortName);
-        FileStatus file = new FileStatus(f.getLen(), f.isDir(), f.getReplication(),
+        FileStatus file = new FileStatus(f.getLen(), f.isDirectory(), f.getReplication(),
             f.getBlockSize(), f.getModificationTime(), modPath);
         backedupFiles.add(file);
       } else {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java
index 43749c5cf15..01ceb3c2d6b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java
@@ -84,7 +84,7 @@ public class TestFSVisitor {

   @After
   public void tearDown() throws Exception {
-    fs.delete(rootDir);
+    fs.delete(rootDir, true);
   }

   @Test
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java
index 4a7e798b2fd..134a953e696 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java
@@ -1717,7 +1717,7 @@ public class TestHBaseFsck {
         continue;
       }
       for (FileStatus hfs : hfFss) {
-        if (!hfs.isDir()) {
+        if (!hfs.isDirectory()) {
           return hfs.getPath();
         }
       }
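One non-mechanical detail worth noting: the FSUtils hunks earlier in this patch sit inside reflection-based helpers ("Object ret = m.invoke(fs, path)"), because the Path-taking overloads do not exist on every Hadoop version HBase builds against; the deprecated zero-argument call is kept as the fallback. A minimal sketch of that probe-and-fallback shape, assuming only the two overloads shown in the patch (hypothetical helper, not code from HBase):

    import java.lang.reflect.Method;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class BlockSizeShim {
      /** Prefer getDefaultBlockSize(Path); fall back to the deprecated zero-argument form. */
      public static long defaultBlockSize(FileSystem fs, Path path) {
        Method m = null;
        try {
          m = fs.getClass().getMethod("getDefaultBlockSize", Path.class);
        } catch (NoSuchMethodException e) {
          // Older Hadoop: only the zero-argument variant exists.
        }
        if (m == null) {
          return fs.getDefaultBlockSize();  // deprecated fallback
        }
        try {
          return (Long) m.invoke(fs, path);
        } catch (Exception e) {
          return fs.getDefaultBlockSize();  // deprecated fallback
        }
      }

      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.getLocal(new Configuration());
        System.out.println(defaultBlockSize(fs, new Path("/tmp")));  // hypothetical path
      }
    }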