HBASE-6667 TestCatalogJanitor occasionally fails; PATCH THAT ADDS DEBUG AROUND FAILING TEST

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1379682 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2012-09-01 00:36:06 +00:00
parent 8393cf10d6
commit 46349a89d9
3 changed files with 51 additions and 19 deletions

CatalogJanitor.java

@@ -245,6 +245,7 @@ class CatalogJanitor extends Chore {
this.services.getAssignmentManager().regionOffline(parent);
}
FileSystem fs = this.services.getMasterFileSystem().getFileSystem();
LOG.debug("Archiving parent region:" + parent);
HFileArchiver.archiveRegion(fs, parent);
MetaEditor.deleteRegion(this.server.getCatalogTracker(), parent);
result = true;
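
The archiving call itself is unchanged here; this hunk only adds the LOG.debug line ahead of HFileArchiver.archiveRegion. For reference, the test below verifies the outcome of this call by resolving the per-store archive directory and listing it. A minimal sketch of that check follows, reusing the same calls and variable names that appear in the test (services, parent, tabledir, htd, fs) rather than any new API:

// Sketch only: locate the archive directory for one store of the parent region
// and log/verify what was moved there. All variables are assumed to come from
// the surrounding test setup; the calls mirror those already used in the test.
Path storeArchive = HFileArchiveUtil.getStoreArchivePath(services.getConfiguration(), parent,
    tabledir, htd.getColumnFamilies()[0].getName());
FileStatus[] archived = fs.listStatus(storeArchive);
for (FileStatus file : archived) {
  LOG.debug("Archived file:" + file.getPath());
}
assertTrue("Expected archived store files under " + storeArchive,
    archived != null && archived.length > 0);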

TestCatalogJanitor.java

@@ -32,6 +32,8 @@ import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
@@ -76,6 +78,8 @@ import com.google.protobuf.ServiceException;
@Category(SmallTests.class)
public class TestCatalogJanitor {
private static final Log LOG = LogFactory.getLog(TestCatalogJanitor.class);
/**
* Pseudo server for below tests.
* Be sure to call stop on the way out else could leave some mess around.
@@ -529,6 +533,10 @@ public class TestCatalogJanitor {
janitor.join();
}
/**
* Test that we correctly archive all the storefiles when a region is deleted
* @throws Exception
*/
@Test
public void testArchiveOldRegion() throws Exception {
String table = "table";
@@ -546,10 +554,10 @@
HRegionInfo parent = new HRegionInfo(htd.getName(), Bytes.toBytes("aaa"), Bytes.toBytes("eee"));
HRegionInfo splita = new HRegionInfo(htd.getName(), Bytes.toBytes("aaa"), Bytes.toBytes("ccc"));
HRegionInfo splitb = new HRegionInfo(htd.getName(), Bytes.toBytes("ccc"), Bytes.toBytes("eee"));
// Test that when both daughter regions are in place, that we do not
// remove the parent.
Result r = createResult(parent, splita, splitb);
Result parentMetaRow = createResult(parent, splita, splitb);
FileSystem fs = FileSystem.get(htu.getConfiguration());
Path rootdir = services.getMasterFileSystem().getRootDir();
// have to set the root directory since we use it in HFileDisposer to figure out to get to the
@@ -559,32 +567,53 @@
Path tabledir = HTableDescriptor.getTableDir(rootdir, htd.getName());
Path storedir = HStore.getStoreHomedir(tabledir, parent.getEncodedName(),
htd.getColumnFamilies()[0].getName());
// delete the file and ensure that the files have been archived
Path storeArchive = HFileArchiveUtil.getStoreArchivePath(services.getConfiguration(), parent,
tabledir, htd.getColumnFamilies()[0].getName());
LOG.debug("Table dir:" + tabledir);
LOG.debug("Store dir:" + storedir);
LOG.debug("Store archive dir:" + storeArchive);
// enable archiving, make sure that files get archived
addMockStoreFiles(2, services, storedir);
// add a couple of store files that we can check for
FileStatus[] mockFiles = addMockStoreFiles(2, services, storedir);
// get the current store files for comparison
FileStatus[] storeFiles = fs.listStatus(storedir);
int index = 0;
for (FileStatus file : storeFiles) {
System.out.println("Have store file:" + file.getPath());
LOG.debug("Have store file:" + file.getPath());
assertEquals("Got unexpected store file", mockFiles[index].getPath(),
storeFiles[index].getPath());
index++;
}
// do the cleaning of the parent
assertTrue(janitor.cleanParent(parent, r));
assertTrue(janitor.cleanParent(parent, parentMetaRow));
LOG.debug("Finished cleanup of parent region");
// and now check to make sure that the files have actually been archived
FileStatus[] archivedStoreFiles = fs.listStatus(storeArchive);
logFiles("archived files", storeFiles);
logFiles("archived files", archivedStoreFiles);
assertArchiveEqualToOriginal(storeFiles, archivedStoreFiles, fs);
// cleanup
FSUtils.delete(fs, rootdir, true);
services.stop("Test finished");
server.stop("shutdown");
server.stop("Test finished");
janitor.join();
}
/**
* @param description description of the files for logging
* @param storeFiles the status of the files to log
*/
private void logFiles(String description, FileStatus[] storeFiles) {
LOG.debug("Current " + description + ": ");
for (FileStatus file : storeFiles) {
LOG.debug(file.getPath());
}
}
/**
* Test that a store file with the same name as one already backed up causes the
* already-archived copy to be given a timestamped backup
@@ -657,7 +686,7 @@ public class TestCatalogJanitor {
janitor.join();
}
private void addMockStoreFiles(int count, MasterServices services, Path storedir)
private FileStatus[] addMockStoreFiles(int count, MasterServices services, Path storedir)
throws IOException {
// get the existing store files
FileSystem fs = services.getMasterFileSystem().getFileSystem();
@@ -669,9 +698,11 @@
dos.writeBytes("Some data: " + i);
dos.close();
}
LOG.debug("Adding " + count + " store files to the storedir:" + storedir);
// make sure the mock store files are there
FileStatus[] storeFiles = fs.listStatus(storedir);
assertEquals(count, storeFiles.length);
assertEquals("Didn't have expected store files", count, storeFiles.length);
return storeFiles;
}
private String setRootDirAndCleanIt(final HBaseTestingUtility htu,
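
The diff shows only the beginning and end of the reworked addMockStoreFiles helper; the file-creation loop between the two hunks is not visible. A plausible self-contained version is sketched below, assuming the test class's existing imports (FileSystem, FSDataOutputStream, FileStatus, Path, JUnit asserts): the fs.mkdirs call and the "_store" + i file-name pattern are assumptions, while the remaining lines are taken verbatim from the hunks above.

// Hypothetical reconstruction of the full helper. Lines not shown in the diff
// (mkdirs and the file-name pattern) are assumptions, not the committed code.
private FileStatus[] addMockStoreFiles(int count, MasterServices services, Path storedir)
    throws IOException {
  // get the existing store files
  FileSystem fs = services.getMasterFileSystem().getFileSystem();
  fs.mkdirs(storedir);
  // create a few small placeholder files to stand in for store files
  for (int i = 0; i < count; i++) {
    Path storeFile = new Path(storedir, "_store" + i);
    FSDataOutputStream dos = fs.create(storeFile, true);
    dos.writeBytes("Some data: " + i);
    dos.close();
  }
  LOG.debug("Adding " + count + " store files to the storedir:" + storedir);
  // make sure the mock store files are there
  FileStatus[] storeFiles = fs.listStatus(storedir);
  assertEquals("Didn't have expected store files", count, storeFiles.length);
  return storeFiles;
}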

HFileArchiveTestingUtil.java

@@ -85,29 +85,29 @@ public class HFileArchiveTestingUtil {
/**
* Compare the archived files to the files in the original directory
* @param previous original files that should have been archived
* @param archived files that were archived
* @param expected original files that should have been archived
* @param actual files that were archived
@param fs filesystem on which the archiving took place
* @throws IOException
*/
public static void assertArchiveEqualToOriginal(FileStatus[] previous, FileStatus[] archived,
public static void assertArchiveEqualToOriginal(FileStatus[] expected, FileStatus[] actual,
FileSystem fs) throws IOException {
assertArchiveEqualToOriginal(previous, archived, fs, false);
assertArchiveEqualToOriginal(expected, actual, fs, false);
}
/**
* Compare the archived files to the files in the original directory
* @param previous original files that should have been archived
* @param archived files that were archived
* @param expected original files that should have been archived
* @param actual files that were archived
* @param fs {@link FileSystem} on which the archiving took place
* @param hasTimedBackup <tt>true</tt> if we expect to find an archive backup directory with a
* copy of the files in the archive directory (and the original files).
* @throws IOException
*/
public static void assertArchiveEqualToOriginal(FileStatus[] previous, FileStatus[] archived,
public static void assertArchiveEqualToOriginal(FileStatus[] expected, FileStatus[] actual,
FileSystem fs, boolean hasTimedBackup) throws IOException {
List<List<String>> lists = getFileLists(previous, archived);
List<List<String>> lists = getFileLists(expected, actual);
List<String> original = lists.get(0);
Collections.sort(original);
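
For context, the three-argument overload above simply delegates with hasTimedBackup set to false. A test that does expect a timestamped backup copy of previously archived files, as the second javadoc describes, would call the four-argument form directly; for example, reusing the arrays from testArchiveOldRegion:

// Expect both the freshly archived copies and a timestamped backup of the
// files that were already sitting in the archive directory.
assertArchiveEqualToOriginal(storeFiles, archivedStoreFiles, fs, true);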