HBASE-9278 Reading Pre-namespace meta table edits kills the reader

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1518397 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2013-08-28 21:46:28 +00:00
parent 02de8c40d9
commit d196d1b742
3 changed files with 61 additions and 12 deletions
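
Background: with the namespaces work (HBASE-8015), TableName.valueOf() validates table names and rejects the pre-0.96 catalog names ".META." and "-ROOT-", so a WAL reader that hit such an edit died with an IllegalArgumentException. The sketch below is a minimal, self-contained illustration of that failure mode; validateQualifier() is a hypothetical stand-in, the real rules live in TableName.isLegalTableQualifierName().

// Illustrative only: mimics why ".META." and "-ROOT-" fail post-namespace
// validation (both start with a character that is illegal in first position).
public class OldCatalogNameSketch {
  static void validateQualifier(String qualifier) {
    if (qualifier.isEmpty() || !Character.isLetterOrDigit(qualifier.charAt(0))) {
      throw new IllegalArgumentException(
          "Illegal first character in table qualifier: " + qualifier);
    }
  }

  public static void main(String[] args) {
    validateQualifier("usertable"); // accepted
    validateQualifier(".META.");    // throws IllegalArgumentException
  }
}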

TableName.java

@@ -72,8 +72,18 @@ public final class TableName implements Comparable<TableName> {
   public static final TableName NAMESPACE_TABLE_NAME =
       valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "namespace");
 
-  private static final String OLD_META_STR = ".META.";
-  private static final String OLD_ROOT_STR = "-ROOT-";
+  public static final String OLD_META_STR = ".META.";
+  public static final String OLD_ROOT_STR = "-ROOT-";
+
+  /**
+   * TableName for old -ROOT- table. It is used to read/process old WALs which have
+   * ROOT edits.
+   */
+  public static final TableName OLD_ROOT_TABLE_NAME = getADummyTableName(OLD_ROOT_STR);
+  /**
+   * TableName for old .META. table. Used in testing.
+   */
+  public static final TableName OLD_META_TABLE_NAME = getADummyTableName(OLD_META_STR);
 
   private byte[] name;
   private String nameAsString;
@@ -231,6 +241,18 @@ public final class TableName implements Comparable<TableName> {
     return ret;
   }
 
+  /**
+   * It is used to create table names for old META, and ROOT table.
+   * @return a dummy TableName instance (with no validation) for the passed qualifier
+   */
+  private static TableName getADummyTableName(String qualifier) {
+    TableName ret = new TableName();
+    ret.namespaceAsString = NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR;
+    ret.qualifierAsString = qualifier;
+    ret.nameAsString = createFullyQualified(ret.namespaceAsString, ret.qualifierAsString);
+    ret.name = Bytes.toBytes(qualifier);
+    return ret;
+  }
+
   public static TableName valueOf(String namespaceAsString, String qualifierAsString) {
     TableName ret = new TableName();
     if(namespaceAsString == null || namespaceAsString.length() < 1) {
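
The getADummyTableName() helper above deliberately skips valueOf()'s validation so the two legacy names can still be represented as TableName constants. A sketch of how calling code might use them (assumes the hbase-common TableName class from this commit; isLegacyCatalogEdit() is an illustrative helper, not HBase API):

import org.apache.hadoop.hbase.TableName;

public class LegacyNameCheck {
  // True when a name decoded from an old WAL refers to a pre-namespace
  // catalog table rather than a user table.
  static boolean isLegacyCatalogEdit(TableName tn) {
    return TableName.OLD_ROOT_TABLE_NAME.equals(tn)
        || TableName.OLD_META_TABLE_NAME.equals(tn);
  }
}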

HLogKey.java

@@ -35,6 +35,7 @@ import java.util.UUID;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
@@ -115,7 +116,7 @@ public class HLogKey implements WritableComparable<HLogKey> {
   // The first element in the list is the cluster id on which the change has originated
   private List<UUID> clusterIds;
 
   private NavigableMap<byte[], Integer> scopes;
   private CompressionContext compressionContext;
@@ -148,7 +149,7 @@ public class HLogKey implements WritableComparable<HLogKey> {
       long logSeqNum, final long now, List<UUID> clusterIds) {
     init(encodedRegionName, tablename, logSeqNum, now, clusterIds);
   }
 
   protected void init(final byte [] encodedRegionName, final TableName tablename,
       long logSeqNum, final long now, List<UUID> clusterIds) {
     this.logSeqNum = logSeqNum;
@@ -254,9 +255,9 @@ public class HLogKey implements WritableComparable<HLogKey> {
   /**
    * Produces a string map for this key. Useful for programmatic use and
    * manipulation of the data stored in an HLogKey, for example, printing
    * as JSON.
    *
    * @return a Map containing data from this key
    */
   public Map<String, Object> toStringMap() {
@@ -375,6 +376,7 @@ public class HLogKey implements WritableComparable<HLogKey> {
     // @see Bytes#readByteArray(DataInput)
     this.scopes = null; // writable HLogKey does not contain scopes
     int len = WritableUtils.readVInt(in);
+    byte[] tablenameBytes = null;
     if (len < 0) {
       // what we just read was the version
       version = Version.fromCode(len);
@@ -387,12 +389,10 @@ public class HLogKey implements WritableComparable<HLogKey> {
     if (compressionContext == null || !version.atLeast(Version.COMPRESSED)) {
       this.encodedRegionName = new byte[len];
       in.readFully(this.encodedRegionName);
-      byte[] tablenameBytes = Bytes.readByteArray(in);
-      this.tablename = TableName.valueOf(tablenameBytes);
+      tablenameBytes = Bytes.readByteArray(in);
     } else {
       this.encodedRegionName = Compressor.readCompressed(in, compressionContext.regionDict);
-      byte[] tablenameBytes = Compressor.readCompressed(in, compressionContext.tableDict);
-      this.tablename = TableName.valueOf(tablenameBytes);
+      tablenameBytes = Compressor.readCompressed(in, compressionContext.tableDict);
     }
     this.logSeqNum = in.readLong();
@@ -413,6 +413,19 @@ public class HLogKey implements WritableComparable<HLogKey> {
       // Means it's a very old key, just continue
       }
     }
+    try {
+      this.tablename = TableName.valueOf(tablenameBytes);
+    } catch (IllegalArgumentException iae) {
+      if (Bytes.toString(tablenameBytes).equals(TableName.OLD_META_STR)) {
+        // It is a pre-namespace meta table edit, continue with new format.
+        LOG.info("Got an old META edit, continuing with new format ");
+        this.tablename = TableName.META_TABLE_NAME;
+        this.encodedRegionName = HRegionInfo.FIRST_META_REGIONINFO.getEncodedNameAsBytes();
+      } else if (Bytes.toString(tablenameBytes).equals(TableName.OLD_ROOT_STR)) {
+        this.tablename = TableName.OLD_ROOT_TABLE_NAME;
+        throw iae;
+      } else throw iae;
+    }
     // Do not need to read the clusters information as we are using protobufs from 0.95
   }
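
The readFields() change defers TableName.valueOf() until after the raw bytes are read, then maps known legacy values instead of dying: an old .META. edit is rewritten to the current meta table (and the meta region's encoded name), while an old -ROOT- edit is tagged OLD_ROOT_TABLE_NAME and the exception is rethrown so the caller can decide to skip it. A self-contained sketch of that parse-then-fallback pattern (parseStrict(), LEGACY_MAP and the string values are illustrative, not HBase API; Map.of needs Java 9+):

import java.util.Map;

public class LegacyFallback {
  static final Map<String, String> LEGACY_MAP = Map.of(".META.", "hbase:meta");

  // Strict parse: rejects names that start with '.' or '-'.
  static String parseStrict(String raw) {
    if (raw.startsWith(".") || raw.startsWith("-")) {
      throw new IllegalArgumentException("Illegal table name: " + raw);
    }
    return raw;
  }

  // Parse eagerly; on failure, map known legacy values before giving up.
  static String parseWithFallback(String raw) {
    try {
      return parseStrict(raw);
    } catch (IllegalArgumentException iae) {
      String mapped = LEGACY_MAP.get(raw);
      if (mapped != null) {
        return mapped; // old META: continue with the new name
      }
      throw iae;       // e.g. -ROOT-: rethrow for the caller to handle
    }
  }
}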

ReaderBase.java

@@ -21,16 +21,20 @@ package org.apache.hadoop.hbase.regionserver.wal;
 
 import java.io.IOException;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer;
 import org.apache.hadoop.hbase.util.FSUtils;
 
 @InterfaceAudience.Private
 public abstract class ReaderBase implements HLog.Reader {
+  private static final Log LOG = LogFactory.getLog(ReaderBase.class);
   protected Configuration conf;
   protected FileSystem fs;
   protected Path path;
@@ -95,7 +99,18 @@ public abstract class ReaderBase implements HLog.Reader {
       e.setCompressionContext(compressionContext);
     }
 
-    boolean hasEntry = readNext(e);
+    boolean hasEntry = false;
+    try {
+      hasEntry = readNext(e);
+    } catch (IllegalArgumentException iae) {
+      TableName tableName = e.getKey().getTablename();
+      if (tableName != null && tableName.equals(TableName.OLD_ROOT_TABLE_NAME)) {
+        // It is old ROOT table edit, ignore it
+        LOG.info("Got an old ROOT edit, ignoring ");
+        return next(e);
+      }
+      else throw iae;
+    }
     edit++;
     if (compressionContext != null && emptyCompressionContext) {
       emptyCompressionContext = false;
@@ -103,7 +118,6 @@ public abstract class ReaderBase implements HLog.Reader {
     return hasEntry ? e : null;
   }
 
   @Override
   public void seek(long pos) throws IOException {
     if (compressionContext != null && emptyCompressionContext) {
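
ReaderBase.next() completes the fix: when readNext() throws and HLogKey tagged the key as an old ROOT edit, the entry is logged and skipped by recursing to the next entry; anything else is rethrown. A sketch of the same skip-or-rethrow pattern, written as a loop instead of the patch's tail call (Entry and readNext() here are simplified stand-ins, not the HLog.Reader API):

public abstract class SkippingReaderSketch {
  // Minimal stand-in for HLog.Entry.
  static class Entry {
    String tablename;
  }

  // Decodes one WAL entry into e; returns false at end of log; may throw
  // IllegalArgumentException on a pre-namespace table name, after setting
  // e.tablename the way HLogKey.readFields() tags old ROOT edits.
  protected abstract boolean readNext(Entry e);

  public Entry next(Entry e) {
    while (true) {
      try {
        return readNext(e) ? e : null;
      } catch (IllegalArgumentException iae) {
        if ("-ROOT-".equals(e.tablename)) {
          // Old ROOT edit: nothing to replay, move on to the next entry.
          continue;
        }
        throw iae; // anything else is a genuine decode problem
      }
    }
  }
}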