HBASE-5545 Region can't be opened for a long time because creating the file failed. (Ram)

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1327677 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
larsh 2012-04-18 20:54:59 +00:00
parent e8ec0b9f8c
commit 5eb1259399
3 changed files with 64 additions and 0 deletions

View File

@ -729,6 +729,15 @@ public class HRegion implements HeapSize { // , Writable{
// and then create the file // and then create the file
Path tmpPath = new Path(getTmpDir(), REGIONINFO_FILE); Path tmpPath = new Path(getTmpDir(), REGIONINFO_FILE);
// if datanode crashes or if the RS goes down just before the close is called while trying to
// close the created regioninfo file in the .tmp directory then on next
// creation we will be getting AlreadyCreatedException.
// Hence delete and create the file if exists.
if (FSUtils.isExists(fs, tmpPath)) {
FSUtils.delete(fs, tmpPath, true);
}
FSDataOutputStream out = FSUtils.create(fs, tmpPath, perms); FSDataOutputStream out = FSUtils.create(fs, tmpPath, perms);
try { try {

View File

@ -1016,4 +1016,30 @@ public abstract class FSUtils {
if (status == null || status.length < 1) return null; if (status == null || status.length < 1) return null;
return status; return status;
} }
/**
* Calls fs.delete() and returns the value returned by the fs.delete()
*
* @param fs
* @param path
* @param recursive
* @return
* @throws IOException
*/
/**
 * Deletes the given path from the file system, optionally recursing into
 * directory contents, and reports the underlying result.
 *
 * @param fs the FileSystem to operate on
 * @param path the path to delete
 * @param recursive if true, directory contents are deleted recursively
 * @return the value returned by {@link FileSystem#delete(Path, boolean)}
 * @throws IOException if the delete operation fails
 */
public static boolean delete(final FileSystem fs, final Path path, final boolean recursive)
    throws IOException {
  final boolean deleted = fs.delete(path, recursive);
  return deleted;
}
/**
* Calls fs.exists(). Checks if the specified path exists
*
* @param fs
* @param path
* @return
* @throws IOException
*/
/**
 * Checks whether the specified path exists on the given file system.
 *
 * @param fs the FileSystem to query
 * @param path the path whose existence is checked
 * @return true if the path exists, as reported by {@link FileSystem#exists(Path)}
 * @throws IOException if the existence check fails
 */
public static boolean isExists(final FileSystem fs, final Path path) throws IOException {
  final boolean present = fs.exists(path);
  return present;
}
} }

View File

@ -186,6 +186,35 @@ public class TestFSUtils {
} }
} }
@Test
public void testDeleteAndExists() throws Exception {
  Configuration conf = HBaseConfiguration.create();
  conf.setBoolean(HConstants.ENABLE_DATA_FILE_UMASK, true);
  FileSystem fs = FileSystem.get(conf);
  FsPermission perms = FSUtils.getFilePermissions(fs, conf, HConstants.DATA_FILE_UMASK_KEY);
  // Use a random name so reruns do not collide with leftovers from earlier runs.
  String file = UUID.randomUUID().toString();
  Path p = new Path("temptarget" + File.separator + file);
  Path p1 = new Path("temppath" + File.separator + file);
  try {
    FSDataOutputStream out = FSUtils.create(fs, p, perms);
    out.close();
    assertTrue("The created file should be present", FSUtils.isExists(fs, p));
    // Delete the file with recursion as false. Only the file will be deleted.
    FSUtils.delete(fs, p, false);
    // Verify the non-recursive delete actually removed the file.
    assertFalse("The deleted file should not be present", FSUtils.isExists(fs, p));
    // Create another file.
    FSDataOutputStream out1 = FSUtils.create(fs, p1, perms);
    out1.close();
    // Delete the file with recursion as true. Still only the file is deleted.
    FSUtils.delete(fs, p1, true);
    assertFalse("The deleted file should not be present", FSUtils.isExists(fs, p1));
  } finally {
    // Best-effort cleanup of both paths regardless of test outcome.
    FSUtils.delete(fs, p, true);
    FSUtils.delete(fs, p1, true);
  }
}
@org.junit.Rule @org.junit.Rule
public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu = public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
new org.apache.hadoop.hbase.ResourceCheckerJUnitRule(); new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();