HBASE-6439 Ignore .archive directory as a table

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1393916 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael Stack 2012-10-04 05:01:27 +00:00
parent 1ca8d32e63
commit 7d2479983a
7 changed files with 40 additions and 77 deletions

View File

@ -619,12 +619,6 @@ public final class HConstants {
*/
public static final float HBASE_CLUSTER_MINIMUM_MEMORY_THRESHOLD = 0.2f;
public static final List<String> HBASE_NON_USER_TABLE_DIRS = new ArrayList<String>(
Arrays.asList(new String[]{ HREGION_LOGDIR_NAME, HREGION_OLDLOGDIR_NAME,
CORRUPT_DIR_NAME, Bytes.toString(META_TABLE_NAME),
Bytes.toString(ROOT_TABLE_NAME), SPLIT_LOGDIR_NAME,
HBCK_SIDELINEDIR_NAME }));
public static final Pattern CP_HTD_ATTR_KEY_PATTERN = Pattern.compile
("^coprocessor\\$([0-9]+)$", Pattern.CASE_INSENSITIVE);
public static final Pattern CP_HTD_ATTR_VALUE_PATTERN =
@ -699,9 +693,6 @@ public final class HConstants {
/** delimiter used between portions of a region name */
public static final int DELIMITER = ',';
/** Configuration key for the directory to backup HFiles for a table */
public static final String HFILE_ARCHIVE_DIRECTORY = "hbase.table.archive.directory";
/**
* QOS attributes: these attributes are used to demarcate RPC call processing
* by different set of handlers. For example, HIGH_QOS tagged methods are
@ -712,6 +703,14 @@ public final class HConstants {
public static final int HIGH_QOS = 100;
public static final int REPLICATION_QOS = 5; // normal_QOS < replication_QOS < high_QOS
/** Directory under /hbase where archived hfiles are stored */
public static final String HFILE_ARCHIVE_DIRECTORY = ".archive";
public static final List<String> HBASE_NON_USER_TABLE_DIRS = new ArrayList<String>(
Arrays.asList(new String[] { HREGION_LOGDIR_NAME, HREGION_OLDLOGDIR_NAME, CORRUPT_DIR_NAME,
Bytes.toString(META_TABLE_NAME), Bytes.toString(ROOT_TABLE_NAME), SPLIT_LOGDIR_NAME,
HBCK_SIDELINEDIR_NAME, HFILE_ARCHIVE_DIRECTORY }));
private HConstants() {
// Can't be instantiated with this ctor.
}

View File

@ -21,34 +21,28 @@ package org.apache.hadoop.hbase.io;
import java.io.IOException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
/**
* HFileLink describes a link to an hfile.
*
* An hfile can be served from a region or from the hfile archive directory as
* specified by {@value HConstants.HFILE_ARCHIVE_DIRECTORY} conf property.
* An hfile can be served from a region or from the hfile archive directory (/hbase/.archive)
* HFileLink allows access to the referenced hfile regardless of the location where it is.
*
* <p>Searches for hfiles in the following order and locations:
* <ul>
* <li>/hbase/table/region/cf/hfile</li>
* <li>/hbase/archive/table/region/cf/hfile</li>
* <li>/hbase/.archive/table/region/cf/hfile</li>
* </ul>
*
* The link checks first in the original path if it is not present
@ -145,7 +139,7 @@ public class HFileLink extends FileLink {
/**
* The returned path can be the "original" file path like: /hbase/table/region/cf/hfile
* or a path to the archived file like: /hbase/archive/table/region/cf/hfile
* or a path to the archived file like: /hbase/.archive/table/region/cf/hfile
*
* @param fs {@link FileSystem} on which to check the HFileLink
* @param conf {@link Configuration} from which to extract specific archive locations
@ -161,7 +155,7 @@ public class HFileLink extends FileLink {
/**
* The returned path can be the "original" file path like: /hbase/table/region/cf/hfile
* or a path to the archived file like: /hbase/archive/table/region/cf/hfile
* or a path to the archived file like: /hbase/.archive/table/region/cf/hfile
*
* @param fs {@link FileSystem} on which to check the HFileLink
* @param rootdir root hbase directory

View File

@ -33,8 +33,6 @@ import org.apache.hadoop.hbase.regionserver.HStore;
* Helper class for all utilities related to archival/retrieval of HFiles
*/
public class HFileArchiveUtil {
public static final String DEFAULT_HFILE_ARCHIVE_DIRECTORY = ".archive";
private HFileArchiveUtil() {
// non-external instantiation - util class
}
@ -78,7 +76,7 @@ public class HFileArchiveUtil {
*/
public static Path getStoreArchivePath(Configuration conf, HRegionInfo region, Path tabledir,
byte[] family) {
Path tableArchiveDir = getTableArchivePath(conf, tabledir);
Path tableArchiveDir = getTableArchivePath(tabledir);
return HStore.getStoreHomedir(tableArchiveDir,
HRegionInfo.encodeRegionName(region.getRegionName()), family);
}
@ -93,7 +91,7 @@ public class HFileArchiveUtil {
*/
public static Path getRegionArchiveDir(Configuration conf, Path tabledir, Path regiondir) {
// get the archive directory for a table
Path archiveDir = getTableArchivePath(conf, tabledir);
Path archiveDir = getTableArchivePath(tabledir);
// then add on the region path under the archive
String encodedRegionName = regiondir.getName();
@ -103,19 +101,15 @@ public class HFileArchiveUtil {
/**
* Get the path to the table archive directory based on the configured archive directory.
* <p>
* Assumed that the table should already be archived.
* @param conf {@link Configuration} to read the archive directory property. Can be null
* Get the path to the table's archive directory.
* <p>
* Generally of the form: /hbase/.archive/[tablename]
* @param tabledir directory of the table to be archived. Cannot be null.
* @return {@link Path} to the archive directory for the table
*/
public static Path getTableArchivePath(Configuration conf, Path tabledir) {
String archiveName = getConfiguredArchiveDirName(conf);
public static Path getTableArchivePath(Path tabledir) {
Path root = tabledir.getParent();
// now build the archive directory path
// first the top-level archive directory
// generally "/hbase/.archive/[table]"
return archiveName.length() == 0 ? new Path(root, tabledir) : new Path(new Path(root,
archiveName), tabledir.getName());
return new Path(new Path(root,HConstants.HFILE_ARCHIVE_DIRECTORY), tabledir.getName());
}
/**
@ -131,18 +125,6 @@ public class HFileArchiveUtil {
return new Path(getArchivePath(conf), tableName);
}
/**
* Get the archive directory as per the configuration
* @param conf {@link Configuration} to read the archive directory from (can be null, in which
* case you get the default value). Can be null.
* @return the configured archived directory or the default specified by
* {@value HFileArchiveUtil#DEFAULT_HFILE_ARCHIVE_DIRECTORY}
*/
public static String getConfiguredArchiveDirName(Configuration conf) {
return conf == null ? HFileArchiveUtil.DEFAULT_HFILE_ARCHIVE_DIRECTORY : conf.get(
HConstants.HFILE_ARCHIVE_DIRECTORY, HFileArchiveUtil.DEFAULT_HFILE_ARCHIVE_DIRECTORY);
}
/**
* Get the full path to the archive directory on the configured {@link FileSystem}
* @param conf to look for archive directory name and root directory. Cannot be null. Notes for
@ -151,6 +133,6 @@ public class HFileArchiveUtil {
* @throws IOException if an unexpected error occurs
*/
public static Path getArchivePath(Configuration conf) throws IOException {
return new Path(FSUtils.getRootDir(conf), getConfiguredArchiveDirName(conf));
return new Path(FSUtils.getRootDir(conf), HConstants.HFILE_ARCHIVE_DIRECTORY);
}
}

View File

@ -34,6 +34,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.client.Put;
@ -277,7 +278,7 @@ public class TestZooKeeperTableArchiveClient {
}
private Path getArchiveDir() throws IOException {
return new Path(UTIL.getDataTestDir(), HFileArchiveUtil.DEFAULT_HFILE_ARCHIVE_DIRECTORY);
return new Path(UTIL.getDataTestDir(), HConstants.HFILE_ARCHIVE_DIRECTORY);
}
private Path getTableDir(String tableName) throws IOException {

View File

@ -26,11 +26,11 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.catalog.CatalogTracker;
import org.apache.hadoop.hbase.util.HFileArchiveUtil;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@ -48,8 +48,7 @@ public class TestHFileCleaner {
long ttl = 2000;
conf.setLong(TimeToLiveHFileCleaner.TTL_CONF_KEY, ttl);
Server server = new DummyServer();
Path archivedHfileDir = new Path(TEST_UTIL.getDataTestDir(),
HFileArchiveUtil.getConfiguredArchiveDirName(conf));
Path archivedHfileDir = new Path(TEST_UTIL.getDataTestDir(), HConstants.HFILE_ARCHIVE_DIRECTORY);
FileSystem fs = FileSystem.get(conf);
HFileCleaner cleaner = new HFileCleaner(1000, server, conf, fs, archivedHfileDir);

View File

@ -252,5 +252,18 @@ public class TestFSTableDescriptors {
}
}
@Test
public void testReadingArchiveDirectoryFromFS() throws IOException {
FileSystem fs = FileSystem.get(UTIL.getConfiguration());
try {
new FSTableDescriptors(fs, FSUtils.getRootDir(UTIL.getConfiguration()))
.get(HConstants.HFILE_ARCHIVE_DIRECTORY);
fail("Shouldn't be able to read a table descriptor for the archive directory.");
} catch (IOException e) {
LOG.debug("Correctly got error when reading a table descriptor from the archive directory: "
+ e.getMessage());
}
}
}

View File

@ -35,26 +35,10 @@ import org.mockito.Mockito;
@Category(SmallTests.class)
public class TestHFileArchiveUtil {
@Test
public void testGetConfiguredArchiveDir() {
assertEquals(HFileArchiveUtil.DEFAULT_HFILE_ARCHIVE_DIRECTORY,
HFileArchiveUtil.getConfiguredArchiveDirName(null));
Configuration conf = new Configuration();
assertEquals(HFileArchiveUtil.DEFAULT_HFILE_ARCHIVE_DIRECTORY,
HFileArchiveUtil.getConfiguredArchiveDirName(conf));
conf.set(HConstants.HFILE_ARCHIVE_DIRECTORY, "");
assertEquals("", HFileArchiveUtil.getConfiguredArchiveDirName(conf));
String archiveDir = "somearchive";
conf.set(HConstants.HFILE_ARCHIVE_DIRECTORY, archiveDir);
assertEquals(archiveDir, HFileArchiveUtil.getConfiguredArchiveDirName(conf));
}
@Test
public void testGetTableArchivePath() {
assertNotNull(HFileArchiveUtil.getTableArchivePath(null, new Path("table")));
Configuration conf = new Configuration();
conf.set(HConstants.HFILE_ARCHIVE_DIRECTORY, "");
assertNotNull(HFileArchiveUtil.getTableArchivePath(conf, new Path("root", new Path("table"))));
assertNotNull(HFileArchiveUtil.getTableArchivePath(new Path("table")));
assertNotNull(HFileArchiveUtil.getTableArchivePath(new Path("root", new Path("table"))));
}
@Test
@ -62,10 +46,6 @@ public class TestHFileArchiveUtil {
Configuration conf = new Configuration();
FSUtils.setRootDir(conf, new Path("root"));
assertNotNull(HFileArchiveUtil.getArchivePath(conf));
String archiveDir = "somearchive";
conf.set(HConstants.HFILE_ARCHIVE_DIRECTORY, archiveDir);
assertEquals(new Path(FSUtils.getRootDir(conf), archiveDir),
HFileArchiveUtil.getArchivePath(conf));
}
@Test
@ -84,8 +64,6 @@ public class TestHFileArchiveUtil {
assertNotNull(HFileArchiveUtil.getStoreArchivePath(conf, region, tabledir, family));
conf = new Configuration();
assertNotNull(HFileArchiveUtil.getStoreArchivePath(conf, region, tabledir, family));
conf.set(HConstants.HFILE_ARCHIVE_DIRECTORY, "archiveDir");
assertNotNull(HFileArchiveUtil.getStoreArchivePath(conf, region, tabledir, family));
// do a little mocking of a region to get the same results
HRegion mockRegion = Mockito.mock(HRegion.class);
@ -95,8 +73,5 @@ public class TestHFileArchiveUtil {
assertNotNull(HFileArchiveUtil.getStoreArchivePath(null, mockRegion, family));
conf = new Configuration();
assertNotNull(HFileArchiveUtil.getStoreArchivePath(conf, mockRegion, family));
conf.set(HConstants.HFILE_ARCHIVE_DIRECTORY, "archiveDir");
assertNotNull(HFileArchiveUtil.getStoreArchivePath(conf, mockRegion, family));
}
}