HADOOP-2079 HADOOP-2056 Fix generated HLog, HRegion names
HLog.splitLog was generating incorrect file names; HRegion was generating file names that could be far too long, especially for local file systems; and HMaster had a race condition in which an old HLog could get split by two threads simultaneously.

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk/src/contrib/hbase@586680 13f79535-47bb-0310-9956-ffa450edef68
parent cc202c4b74
commit fa1c77bd3e
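The core of the change (see the HRegionInfo hunk below) is to stop deriving on-disk region directory names from the Base64-encoded start key, which grows with the row key, and instead hash the full region name into a short, bounded token. The following is a minimal standalone sketch of that idea; the class and method names are illustrative only, not the patched API — in the patch itself this lives in HRegionInfo.encodeRegionName().

import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class RegionNameEncoding {
  /**
   * Hash a region name (table name + start key + region id) into a short,
   * filesystem-safe token. Mirrors the approach of the patch: SHA digest of
   * the name, rendered by appending the signed byte values.
   */
  static String encode(String regionName) throws NoSuchAlgorithmException {
    MessageDigest digest = MessageDigest.getInstance("SHA");
    byte[] bytes = digest.digest(regionName.getBytes());
    StringBuilder sb = new StringBuilder();
    for (byte b : bytes) {
      sb.append(b);   // append each signed byte value, as the patch does
    }
    return sb.toString();
  }

  public static void main(String[] args) throws NoSuchAlgorithmException {
    // Row keys may contain colons and be arbitrarily long (HADOOP-2056);
    // the encoded form is bounded by the digest size, not the key size.
    String regionName = "mytable,a:very:long:row:key:goes:here,1192729834331";
    System.out.println(encode(regionName));
  }
}

Because the digest length is constant, the resulting directory name stays within local-filesystem limits no matter how long the row keys embedded in the region name are.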
@@ -14,6 +14,7 @@ Trunk (unreleased changes)
                (e.g. nightly #272)
    HADOOP-2064 TestSplit assertion and NPE failures (Patch build #952 and #953)
    HADOOP-2056 A table with row keys containing colon fails to split regions
+   HADOOP-2079 HADOOP-2056 Fix generated HLog, HRegion names
 
   IMPROVEMENTS
    HADOOP-2401 Add convenience put method that takes writable
@@ -159,7 +159,7 @@ public class HLog implements HConstants {
       SequenceFile.Writer w = logWriters.get(regionName);
       if (w == null) {
         Path logfile = new Path(HRegion.getRegionDir(rootDir,
-          HRegionInfo.rootRegionInfo.getEncodedName()),
+          HRegionInfo.encodeRegionName(regionName)),
           HREGION_OLDLOGFILE_NAME);
 
         if (LOG.isDebugEnabled()) {
@@ -326,7 +326,8 @@ HMasterRegionInterface {
       if (!hasReferencesA && !hasReferencesB) {
         LOG.info("Deleting region " + parent.getRegionName() +
           " because daughter splits no longer hold references");
-        if (!HRegion.deleteRegion(fs, dir, parent.getEncodedName())) {
+        if (!HRegion.deleteRegion(fs, dir,
+            HRegionInfo.encodeRegionName(parent.getRegionName()))) {
           LOG.warn("Deletion of " + parent.getRegionName() + " failed");
         }
 
@@ -370,7 +371,8 @@ HMasterRegionInterface {
       }
       for (Text family: split.getTableDesc().families().keySet()) {
         Path p = HStoreFile.getMapDir(fs.makeQualified(dir),
-          split.getEncodedName(), HStoreKey.extractFamily(family));
+          HRegionInfo.encodeRegionName(split.getRegionName()),
+          HStoreKey.extractFamily(family));
 
         // Look for reference files.  Call listPaths with an anonymous
         // instance of PathFilter.
@@ -418,6 +420,7 @@ HMasterRegionInterface {
         return;
       }
       HServerInfo storedInfo = null;
+      boolean deadServer = false;
       if (serverName.length() != 0) {
         Map<Text, HRegionInfo> regionsToKill = killList.get(serverName);
         if (regionsToKill != null &&
@@ -432,6 +435,9 @@ HMasterRegionInterface {
         }
         synchronized (serversToServerInfo) {
           storedInfo = serversToServerInfo.get(serverName);
+          if (storedInfo != null && deadServers.contains(serverName)) {
+            deadServer = true;
+          }
         }
       }
       if (LOG.isDebugEnabled()) {
@@ -439,13 +445,17 @@ HMasterRegionInterface {
       }
       if (!(unassignedRegions.containsKey(info.getRegionName()) ||
           pendingRegions.contains(info.getRegionName()))
-          && (storedInfo == null || storedInfo.getStartCode() != startCode)) {
+          && (storedInfo == null ||
+              (storedInfo.getStartCode() != startCode && !deadServer))) {
         // The current assignment is no good
         if (LOG.isDebugEnabled()) {
           LOG.debug("Current assignment of " + info.getRegionName() +
             " is no good");
         }
         // Recover the region server's log if there is one.
+        // This is only done from here if we are restarting and there is stale
+        // data in the meta region. Once we are on-line, dead server log
+        // recovery is handled by lease expiration and PendingServerShutdown
         if (serverName.length() != 0) {
           StringBuilder dirName = new StringBuilder("log_");
           dirName.append(serverName.replace(":", "_"));
@@ -831,6 +841,10 @@ HMasterRegionInterface {
   final Map<String, HServerInfo> serversToServerInfo =
     new HashMap<String, HServerInfo>();
 
+  /** Set of known dead servers */
+  final Set<String> deadServers =
+    Collections.synchronizedSet(new HashSet<String>());
+
   /** SortedMap server load -> Set of server names */
   SortedMap<HServerLoad, Set<String>> loadToServers;
 
@@ -864,8 +878,8 @@ HMasterRegionInterface {
     this.fs = FileSystem.get(conf);
     this.rand = new Random();
 
-    Path rootRegionDir =
-      HRegion.getRegionDir(dir, HRegionInfo.rootRegionInfo.getEncodedName());
+    Path rootRegionDir = HRegion.getRegionDir(dir,
+      HRegionInfo.encodeRegionName(HRegionInfo.rootRegionInfo.getRegionName()));
     LOG.info("Root region dir: " + rootRegionDir.toString());
 
     try {
@@ -2107,6 +2121,7 @@ HMasterRegionInterface {
             Thread.currentThread().getName());
         }
       }
+      deadServers.remove(deadServerName);
       break;
 
     } catch (IOException e) {
@@ -2240,7 +2255,8 @@ HMasterRegionInterface {
 
     } else if (deleteRegion) {
       try {
-        HRegion.deleteRegion(fs, dir, regionInfo.getEncodedName());
+        HRegion.deleteRegion(fs, dir,
+          HRegionInfo.encodeRegionName(regionInfo.getRegionName()));
       } catch (IOException e) {
         e = RemoteExceptionHandler.checkIOException(e);
         LOG.error("failed delete region " + regionInfo.getRegionName(), e);
@@ -2857,7 +2873,8 @@ HMasterRegionInterface {
       // Delete the region
 
       try {
-        HRegion.deleteRegion(fs, dir, i.getEncodedName());
+        HRegion.deleteRegion(fs, dir,
+          HRegionInfo.encodeRegionName(i.getRegionName()));
 
       } catch (IOException e) {
         LOG.error("failed to delete region " + i.getRegionName(),
@@ -2924,8 +2941,9 @@ HMasterRegionInterface {
 
           // Delete the directories used by the column
 
-          fs.delete(HStoreFile.getMapDir(dir, i.getEncodedName(), columnName));
-          fs.delete(HStoreFile.getInfoDir(dir, i.getEncodedName(), columnName));
+          String encodedName = HRegionInfo.encodeRegionName(i.getRegionName());
+          fs.delete(HStoreFile.getMapDir(dir, encodedName, columnName));
+          fs.delete(HStoreFile.getInfoDir(dir, encodedName, columnName));
         }
       }
     }
@@ -2985,6 +3003,7 @@ HMasterRegionInterface {
           loadToServers.put(load, servers);
         }
       }
+      deadServers.add(server);
     }
     serversToServerInfo.notifyAll();
   }
@@ -121,8 +121,8 @@ public class HRegion implements HConstants {
     }
 
     HRegionInfo newRegionInfo = new HRegionInfo(tabledesc, startKey, endKey);
-    Path newRegionDir =
-      HRegion.getRegionDir(merges, newRegionInfo.getEncodedName());
+    Path newRegionDir = HRegion.getRegionDir(merges,
+      HRegionInfo.encodeRegionName(newRegionInfo.getRegionName()));
     if(fs.exists(newRegionDir)) {
       throw new IOException("Cannot merge; target file collision at " +
         newRegionDir);
@@ -138,9 +138,9 @@ public class HRegion implements HConstants {
     for (Map.Entry<Text, Vector<HStoreFile>> es : byFamily.entrySet()) {
       Text colFamily = es.getKey();
       Vector<HStoreFile> srcFiles = es.getValue();
-      HStoreFile dst =
-        new HStoreFile(conf, merges, newRegionInfo.getEncodedName(),
+      HStoreFile dst = new HStoreFile(conf, merges,
+        HRegionInfo.encodeRegionName(newRegionInfo.getRegionName()),
         colFamily, Math.abs(rand.nextLong()));
       dst.mergeStoreFiles(srcFiles, fs, conf);
     }
 
@@ -215,6 +215,7 @@ public class HRegion implements HConstants {
   private final HLocking lock = new HLocking();
   private long desiredMaxFileSize;
   private final long minSequenceId;
+  private final String encodedRegionName;
 
   //////////////////////////////////////////////////////////////////////////////
   // Constructor
@@ -247,6 +248,8 @@ public class HRegion implements HConstants {
     this.fs = fs;
     this.conf = conf;
     this.regionInfo = regionInfo;
+    this.encodedRegionName =
+      HRegionInfo.encodeRegionName(this.regionInfo.getRegionName());
     this.memcache = new HMemcache();
     this.threadWakeFrequency = conf.getLong(THREAD_WAKE_FREQUENCY, 10 * 1000);
     this.optionalFlushCount =
@@ -254,8 +257,7 @@ public class HRegion implements HConstants {
 
     // Declare the regionName.  This is a unique string for the region, used to
     // build a unique filename.
-    this.regiondir =
-      HRegion.getRegionDir(rootDir, this.regionInfo.getEncodedName());
+    this.regiondir = HRegion.getRegionDir(rootDir, this.encodedRegionName);
     Path oldLogFile = new Path(regiondir, HREGION_OLDLOGFILE_NAME);
 
     // Move prefab HStore files into place (if any).  This picks up split files
@@ -270,8 +272,8 @@ public class HRegion implements HConstants {
         this.regionInfo.getTableDesc().families().entrySet()) {
       Text colFamily = HStoreKey.extractFamily(e.getKey());
 
-      HStore store = new HStore(rootDir, this.regionInfo.getEncodedName(),
-        e.getValue(), fs, oldLogFile, conf);
+      HStore store = new HStore(rootDir, this.regionInfo.getRegionName(),
+        this.encodedRegionName, e.getValue(), fs, oldLogFile, conf);
 
       stores.put(colFamily, store);
 
@@ -420,13 +422,15 @@ public class HRegion implements HConstants {
     Path splits = getSplitsDir();
     HRegionInfo regionAInfo = new HRegionInfo(this.regionInfo.getTableDesc(),
       this.regionInfo.getStartKey(), midKey);
-    Path dirA = getSplitRegionDir(splits, regionAInfo.getEncodedName());
+    Path dirA = getSplitRegionDir(splits,
+      HRegionInfo.encodeRegionName(regionAInfo.getRegionName()));
     if(fs.exists(dirA)) {
       throw new IOException("Cannot split; target file collision at " + dirA);
     }
     HRegionInfo regionBInfo = new HRegionInfo(this.regionInfo.getTableDesc(),
       midKey, null);
-    Path dirB = getSplitRegionDir(splits, regionBInfo.getEncodedName());
+    Path dirB = getSplitRegionDir(splits,
+      HRegionInfo.encodeRegionName(regionBInfo.getRegionName()));
     if(this.fs.exists(dirB)) {
       throw new IOException("Cannot split; target file collision at " + dirB);
     }
@@ -457,18 +461,18 @@ public class HRegion implements HConstants {
     for(HStoreFile h: hstoreFilesToSplit) {
       // A reference to the bottom half of the hsf store file.
       HStoreFile.Reference aReference = new HStoreFile.Reference(
-        this.regionInfo.getEncodedName(), h.getFileId(), new HStoreKey(midKey),
+        this.encodedRegionName, h.getFileId(), new HStoreKey(midKey),
         HStoreFile.Range.bottom);
       HStoreFile a = new HStoreFile(this.conf, splits,
-        regionAInfo.getEncodedName(), h.getColFamily(),
-        Math.abs(rand.nextLong()), aReference);
+        HRegionInfo.encodeRegionName(regionAInfo.getRegionName()),
+        h.getColFamily(), Math.abs(rand.nextLong()), aReference);
       // Reference to top half of the hsf store file.
       HStoreFile.Reference bReference = new HStoreFile.Reference(
-        this.regionInfo.getEncodedName(), h.getFileId(), new HStoreKey(midKey),
+        this.encodedRegionName, h.getFileId(), new HStoreKey(midKey),
         HStoreFile.Range.top);
       HStoreFile b = new HStoreFile(this.conf, splits,
-        regionBInfo.getEncodedName(), h.getColFamily(),
-        Math.abs(rand.nextLong()), bReference);
+        HRegionInfo.encodeRegionName(regionBInfo.getRegionName()),
+        h.getColFamily(), Math.abs(rand.nextLong()), bReference);
       h.splitStoreFile(a, b, this.fs);
     }
 
@@ -1824,7 +1828,8 @@ public class HRegion implements HConstants {
    */
  static HRegion createHRegion(final HRegionInfo info, final Path rootDir,
     final Configuration conf, final Path initialFiles) throws IOException {
-    Path regionDir = HRegion.getRegionDir(rootDir, info.getEncodedName());
+    Path regionDir = HRegion.getRegionDir(rootDir,
+      HRegionInfo.encodeRegionName(info.getRegionName()));
     FileSystem fs = FileSystem.get(conf);
     fs.mkdirs(regionDir);
     return new HRegion(rootDir,
@@ -23,19 +23,50 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableComparable;
 
-import org.apache.hadoop.hbase.util.Base64;
-
 /**
  * HRegion information.
  * Contains HRegion id, start and end keys, a reference to this
  * HRegions' table descriptor, etc.
  */
 public class HRegionInfo implements WritableComparable {
+  private static MessageDigest encoder = null;
+
+  static {
+    try {
+      if (encoder == null) {
+        encoder = MessageDigest.getInstance("SHA");
+      }
+    } catch (NoSuchAlgorithmException e) {
+      e.printStackTrace();
+    }
+  }
+
+  /**
+   * @param regionName
+   * @return the encodedName
+   */
+  public static String encodeRegionName(final Text regionName) {
+    byte[] bytes = null;
+    synchronized (encoder) {
+      encoder.update(regionName.getBytes(), 0, regionName.getLength());
+      bytes = encoder.digest();
+      encoder.reset();
+    }
+    StringBuilder sb = new StringBuilder();
+    for (int i = 0; i < bytes.length; i++) {
+      sb.append(bytes[i]);
+    }
+    return sb.toString();
+  }
+
   /** delimiter used between portions of a region name */
-  public static final String DELIMITER = ",";
+  private static final String DELIMITER = ",";
 
   /** HRegionInfo for root region */
   public static final HRegionInfo rootRegionInfo =
@@ -62,35 +93,6 @@ public class HRegionInfo implements WritableComparable {
     return new Text(tableName);
   }
 
-  /**
-   * Converts an encoded region name to its unencoded form
-   *
-   * @param encodedName
-   * @return unencoded region name
-   */
-  public static Text decodeRegionName(String encodedName) {
-    int offset = encodedName.indexOf(DELIMITER);
-    if (offset == -1) {
-      throw new IllegalArgumentException(
-        "encoded region name does not contain '" + DELIMITER + "': " +
-        encodedName);
-    }
-    String regionName = encodedName.substring(0, offset++);
-    String remainder = encodedName.substring(offset);
-    offset = remainder.indexOf(DELIMITER);
-    if (offset == -1) {
-      throw new IllegalArgumentException(
-        "improperly formatted encoded region name " + encodedName);
-    }
-    Text startKey = new Text();
-    if (offset != 0) {
-      startKey.set(Base64.decode(remainder.substring(0, offset), Base64.ORDERED));
-    }
-    offset += 1;
-    return new Text(regionName + DELIMITER + startKey.toString() + DELIMITER +
-      remainder.substring(offset));
-  }
-
   private Text endKey;
   private boolean offLine;
   private long regionId;
@@ -189,16 +191,6 @@ public class HRegionInfo implements WritableComparable {
     return regionName;
   }
 
-  /**
-   * @return the encodedName
-   */
-  public String getEncodedName() {
-    return tableDesc.getName().toString() + DELIMITER +
-      (startKey == null || startKey.getLength() == 0 ? "" :
-        Base64.encodeBytes(startKey.getBytes(), Base64.ORDERED)) + DELIMITER +
-      regionId;
-  }
-
   /** @return the startKey */
   public Text getStartKey(){
     return startKey;
@@ -24,7 +24,6 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -72,6 +71,7 @@ class HStore implements HConstants {
   private static final String BLOOMFILTER_FILE_NAME = "filter";
 
   Path dir;
+  Text regionName;
   String encodedRegionName;
   HColumnDescriptor family;
   Text familyName;
@@ -131,19 +131,22 @@ class HStore implements HConstants {
    * file will be deleted (by whoever has instantiated the HStore).
    *
    * @param dir log file directory
-   * @param encodedRegionName filename friendly name of region
+   * @param regionName
+   * @param encodedName
    * @param family name of column family
    * @param fs file system object
    * @param reconstructionLog existing log file to apply if any
    * @param conf configuration object
    * @throws IOException
    */
-  HStore(Path dir, String encodedRegionName, HColumnDescriptor family,
-      FileSystem fs, Path reconstructionLog, Configuration conf)
+  HStore(Path dir, Text regionName, String encodedName,
+      HColumnDescriptor family, FileSystem fs, Path reconstructionLog,
+      Configuration conf)
       throws IOException {
     this.dir = dir;
     this.compactionDir = new Path(dir, "compaction.dir");
-    this.encodedRegionName = encodedRegionName;
+    this.regionName = regionName;
+    this.encodedRegionName = encodedName;
     this.family = family;
     this.familyName = HStoreKey.extractFamily(this.family.getName());
     this.compression = SequenceFile.CompressionType.NONE;
@@ -187,7 +190,7 @@ class HStore implements HConstants {
     // MapFiles are in a reliable state.  Every entry in 'mapdir' must have a
     // corresponding one in 'loginfodir'. Without a corresponding log info
     // file, the entry in 'mapdir' must be deleted.
-    Collection<HStoreFile> hstoreFiles = HStoreFile.loadHStoreFiles(conf, dir,
+    List<HStoreFile> hstoreFiles = HStoreFile.loadHStoreFiles(conf, dir,
       encodedRegionName, familyName, fs);
     for(HStoreFile hsf: hstoreFiles) {
       this.storefiles.put(Long.valueOf(hsf.loadInfo(fs)), hsf);
@@ -265,7 +268,6 @@ class HStore implements HConstants {
     SequenceFile.Reader login =
       new SequenceFile.Reader(this.fs, reconstructionLog, this.conf);
     try {
-      Text thisRegionName = HRegionInfo.decodeRegionName(encodedRegionName);
       HLogKey key = new HLogKey();
       HLogEdit val = new HLogEdit();
       while (login.next(key, val)) {
@@ -282,13 +284,13 @@ class HStore implements HConstants {
         // METACOLUMN info such as HBASE::CACHEFLUSH entries
         Text column = val.getColumn();
         if (column.equals(HLog.METACOLUMN)
-            || !key.getRegionName().equals(thisRegionName)
+            || !key.getRegionName().equals(regionName)
            || !HStoreKey.extractFamily(column).equals(this.familyName)) {
          if (LOG.isDebugEnabled()) {
            LOG.debug("Passing on edit " + key.getRegionName() + ", " +
              column.toString() + ": " +
              new String(val.getVal(), UTF8_ENCODING) +
-              ", my region: " + thisRegionName + ", my column: " +
+              ", my region: " + regionName + ", my column: " +
              this.familyName);
          }
          continue;
@@ -26,8 +26,9 @@ import java.io.DataOutputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.Random;
-import java.util.Vector;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -133,7 +134,7 @@ public class HStoreFile implements HConstants, WritableComparable {
    * Constructor that fully initializes the object
    * @param conf Configuration object
    * @param dir directory path
-   * @param regionName name of the region
+   * @param encodedRegionName name of the region
    * @param colFamily name of the column family
    * @param fileId file identifier
    */
@@ -377,15 +378,15 @@ public class HStoreFile implements HConstants, WritableComparable {
    * @return List of store file instances loaded from passed dir.
    * @throws IOException
    */
-  static Vector<HStoreFile> loadHStoreFiles(Configuration conf, Path dir,
+  static List<HStoreFile> loadHStoreFiles(Configuration conf, Path dir,
      String encodedRegionName, Text colFamily, FileSystem fs)
      throws IOException {
     // Look first at info files.  If a reference, these contain info we need
     // to create the HStoreFile.
     Path infodir = HStoreFile.getInfoDir(dir, encodedRegionName, colFamily);
     Path infofiles[] = fs.listPaths(new Path[] {infodir});
-    Vector<HStoreFile> results = new Vector<HStoreFile>(infofiles.length);
-    Vector<Path> mapfiles = new Vector<Path>(infofiles.length);
+    ArrayList<HStoreFile> results = new ArrayList<HStoreFile>(infofiles.length);
+    ArrayList<Path> mapfiles = new ArrayList<Path>(infofiles.length);
     for (int i = 0; i < infofiles.length; i++) {
       Path p = infofiles[i];
       Matcher m = REF_NAME_PARSER.matcher(p.getName());
@@ -534,7 +535,7 @@ public class HStoreFile implements HConstants, WritableComparable {
    * @param conf configuration object
    * @throws IOException
    */
-  void mergeStoreFiles(Vector<HStoreFile> srcFiles, FileSystem fs,
+  void mergeStoreFiles(List<HStoreFile> srcFiles, FileSystem fs,
      @SuppressWarnings("hiding") Configuration conf)
      throws IOException {
     // Copy all the source MapFile tuples into this HSF's MapFile
@@ -113,7 +113,8 @@ public abstract class HBaseTestCase extends TestCase {
 
   protected HRegion createNewHRegion(Path dir, Configuration c,
      HRegionInfo info) throws IOException {
-    Path regionDir = HRegion.getRegionDir(dir, info.getEncodedName());
+    Path regionDir = HRegion.getRegionDir(dir
+        , HRegionInfo.encodeRegionName(info.getRegionName()));
     FileSystem fs = dir.getFileSystem(c);
     fs.mkdirs(regionDir);
     return new HRegion(dir,
@@ -51,6 +51,7 @@ public class MultiRegionTable extends HBaseTestCase {
   * @param columnName
   * @throws IOException
   */
+  @SuppressWarnings("null")
  public static void makeMultiRegionTable(Configuration conf,
      MiniHBaseCluster cluster, FileSystem localFs, String tableName,
      String columnName)
@@ -130,7 +131,8 @@ public class MultiRegionTable extends HBaseTestCase {
      Writables.getHRegionInfoOrNull(data.get(HConstants.COL_SPLITA));
    HRegionInfo splitB =
      Writables.getHRegionInfoOrNull(data.get(HConstants.COL_SPLITB));
-    Path parentDir = HRegion.getRegionDir(d, parent.getEncodedName());
+    Path parentDir = HRegion.getRegionDir(d,
+      HRegionInfo.encodeRegionName(parent.getRegionName()));
    assertTrue(fs.exists(parentDir));
    LOG.info("Split happened. Parent is " + parent.getRegionName() +
      " and daughters are " + splitA.getRegionName() + ", " +
@@ -89,7 +89,8 @@ public class TestGet extends HBaseTestCase {
      desc.addFamily(new HColumnDescriptor(HConstants.COLUMN_FAMILY.toString()));
 
      HRegionInfo info = new HRegionInfo(desc, null, null);
-      Path regionDir = HRegion.getRegionDir(dir, info.getEncodedName());
+      Path regionDir = HRegion.getRegionDir(dir,
+        HRegionInfo.encodeRegionName(info.getRegionName()));
      fs.mkdirs(regionDir);
 
      HLog log = new HLog(fs, new Path(regionDir, "log"), conf);
@@ -141,7 +141,8 @@ public class TestScanner extends HBaseTestCase {
      Path dir = new Path("/hbase");
      fs.mkdirs(dir);
 
-      Path regionDir = HRegion.getRegionDir(dir, REGION_INFO.getEncodedName());
+      Path regionDir = HRegion.getRegionDir(dir,
+        HRegionInfo.encodeRegionName(REGION_INFO.getRegionName()));
      fs.mkdirs(regionDir);
 
      HLog log = new HLog(fs, new Path(regionDir, "log"), conf);