HBASE-24500 The behavior of RegionInfoBuilder.newBuilder(RegionInfo) is strange (#1840)
Signed-off-by: Guanghao Zhang <zghao@apache.org>
Signed-off-by: Michael Stack <stack@apache.org>
commit ee6938abda
parent 60c9ae58fb
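In short, RegionInfoBuilder.newBuilder(RegionInfo) no longer carries the copied region name and encoded name into the built RegionInfo; both are recomputed from the table name, start key, region id and replica id, so changing any of those fields on the copy also changes its name. A minimal sketch of the post-patch behavior, mirroring the testNewBuilderWithRegionInfo test added below (the example class name and table name are illustrative only, not part of the patch):

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.RegionInfo;
    import org.apache.hadoop.hbase.client.RegionInfoBuilder;

    public class NewBuilderFromRegionInfoExample {
      public static void main(String[] args) {
        RegionInfo ri = RegionInfoBuilder.newBuilder(TableName.valueOf("t1")).build();

        // An unchanged copy is still equal to the original.
        RegionInfo copy = RegionInfoBuilder.newBuilder(ri).build();
        System.out.println(ri.equals(copy)); // true

        // Changing a field that is part of the region name (start key, region id,
        // replica id) now yields a recomputed region name and encoded name instead
        // of the stale ones copied from the source RegionInfo.
        RegionInfo replica = RegionInfoBuilder.newBuilder(ri).setReplicaId(1).build();
        System.out.println(ri.getRegionNameAsString());
        System.out.println(replica.getRegionNameAsString()); // differs: carries the _0001 replica suffix
      }
    }
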
RegionInfoBuilder.java

@@ -18,8 +18,6 @@
 package org.apache.hadoop.hbase.client;
 
 import java.util.Arrays;
 
-import org.apache.commons.lang3.ArrayUtils;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -58,8 +56,6 @@ public class RegionInfoBuilder {
   private int replicaId = RegionInfo.DEFAULT_REPLICA_ID;
   private boolean offLine = false;
   private boolean split = false;
-  private byte[] regionName = null;
-  private String encodedName = null;
 
   public static RegionInfoBuilder newBuilder(TableName tableName) {
     return new RegionInfoBuilder(tableName);
@@ -81,8 +77,6 @@ public class RegionInfoBuilder {
     this.split = regionInfo.isSplit();
     this.regionId = regionInfo.getRegionId();
     this.replicaId = regionInfo.getReplicaId();
-    this.regionName = regionInfo.getRegionName();
-    this.encodedName = regionInfo.getEncodedName();
   }
 
   public RegionInfoBuilder setStartKey(byte[] startKey) {
@@ -115,14 +109,9 @@ public class RegionInfoBuilder {
     return this;
   }
 
-  public RegionInfoBuilder setEncodedName(String encodedName) {
-    this.encodedName = encodedName;
-    return this;
-  }
-
   public RegionInfo build() {
     return new MutableRegionInfo(tableName, startKey, endKey, split,
-      regionId, replicaId, offLine, regionName, encodedName);
+      regionId, replicaId, offLine);
   }
 
   /**
@@ -210,32 +199,12 @@ public class RegionInfoBuilder {
    * first meta regions
    */
   private MutableRegionInfo(long regionId, TableName tableName, int replicaId) {
-    this(tableName,
-      HConstants.EMPTY_START_ROW,
-      HConstants.EMPTY_END_ROW,
-      false,
-      regionId,
-      replicaId,
-      false,
-      RegionInfo.createRegionName(tableName, null, regionId, replicaId, false));
+    this(tableName, HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW, false, regionId,
+      replicaId, false);
   }
 
-  MutableRegionInfo(final TableName tableName, final byte[] startKey,
-    final byte[] endKey, final boolean split, final long regionId,
-    final int replicaId, boolean offLine, byte[] regionName) {
-    this(checkTableName(tableName),
-      checkStartKey(startKey),
-      checkEndKey(endKey),
-      split, regionId,
-      checkReplicaId(replicaId),
-      offLine,
-      regionName,
-      RegionInfo.encodeRegionName(regionName));
-  }
-
-  MutableRegionInfo(final TableName tableName, final byte[] startKey,
-    final byte[] endKey, final boolean split, final long regionId,
-    final int replicaId, boolean offLine, byte[] regionName, String encodedName) {
+  MutableRegionInfo(final TableName tableName, final byte[] startKey, final byte[] endKey,
+    final boolean split, final long regionId, final int replicaId, boolean offLine) {
     this.tableName = checkTableName(tableName);
     this.startKey = checkStartKey(startKey);
     this.endKey = checkEndKey(endKey);
@@ -243,24 +212,14 @@ public class RegionInfoBuilder {
     this.regionId = regionId;
     this.replicaId = checkReplicaId(replicaId);
     this.offLine = offLine;
-    if (ArrayUtils.isEmpty(regionName)) {
-      this.regionName = RegionInfo.createRegionName(this.tableName, this.startKey, this.regionId, this.replicaId,
-        !this.tableName.equals(TableName.META_TABLE_NAME));
-      this.encodedName = RegionInfo.encodeRegionName(this.regionName);
-    } else {
-      this.regionName = regionName;
-      this.encodedName = encodedName;
-    }
-    this.hashCode = generateHashCode(
-      this.tableName,
-      this.startKey,
-      this.endKey,
-      this.regionId,
-      this.replicaId,
-      this.offLine,
-      this.regionName);
+    this.regionName = RegionInfo.createRegionName(this.tableName, this.startKey, this.regionId,
+      this.replicaId, !this.tableName.equals(TableName.META_TABLE_NAME));
+    this.encodedName = RegionInfo.encodeRegionName(this.regionName);
+    this.hashCode = generateHashCode(this.tableName, this.startKey, this.endKey, this.regionId,
+      this.replicaId, this.offLine, this.regionName);
     this.encodedNameAsBytes = Bytes.toBytes(this.encodedName);
   }
 
   /**
    * @return Return a short, printable name for this region
    * (usually encoded name) for us logging.
@@ -282,7 +241,7 @@ public class RegionInfoBuilder {
    * @see #getRegionNameAsString()
    */
   @Override
-  public byte [] getRegionName(){
+  public byte[] getRegionName() {
     return regionName;
   }
 
@@ -301,20 +260,19 @@ public class RegionInfoBuilder {
   }
 
   @Override
-  public byte [] getEncodedNameAsBytes() {
+  public byte[] getEncodedNameAsBytes() {
     return this.encodedNameAsBytes;
   }
 
   /** @return the startKey */
   @Override
-  public byte [] getStartKey(){
+  public byte[] getStartKey() {
     return startKey;
   }
 
-
   /** @return the endKey */
   @Override
-  public byte [] getEndKey(){
+  public byte[] getEndKey() {
     return endKey;
   }
 
@@ -406,7 +364,9 @@ public class RegionInfoBuilder {
    */
   @Override
   public boolean isSplitParent() {
-    if (!isSplit()) return false;
+    if (!isSplit()) {
+      return false;
+    }
     if (!isOffline()) {
       LOG.warn("Region is split but NOT offline: " + getRegionNameAsString());
     }

RegionReplicaUtil.java

@@ -64,23 +64,7 @@ public class RegionReplicaUtil {
     if (regionInfo.getReplicaId() == replicaId) {
       return regionInfo;
     }
-    if (regionInfo.isMetaRegion()) {
-      return RegionInfoBuilder.newBuilder(regionInfo.getTable())
-        .setRegionId(regionInfo.getRegionId())
-        .setReplicaId(replicaId)
-        .setOffline(regionInfo.isOffline())
-        .build();
-    } else {
-      return RegionInfoBuilder.newBuilder(regionInfo.getTable())
-        .setStartKey(regionInfo.getStartKey())
-        .setEndKey(regionInfo.getEndKey())
-        .setSplit(regionInfo.isSplit())
-        .setRegionId(regionInfo.getRegionId())
-        .setReplicaId(replicaId)
-        .setOffline(regionInfo.isOffline())
-        .build();
-    }
+    return RegionInfoBuilder.newBuilder(regionInfo).setReplicaId(replicaId).build();
   }
 
   /**

ProtobufUtil.java

@@ -117,6 +117,8 @@ import org.apache.hadoop.hbase.util.Methods;
 import org.apache.hadoop.hbase.util.VersionInfo;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.hbase.thirdparty.com.google.common.io.ByteStreams;
@@ -211,11 +213,12 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos;
 * users; e.g. Coprocessor Endpoints. If you make change in here, be sure to make change in
 * the companion class too (not the end of the world, especially if you are adding new functionality
 * but something to be aware of.
- * @see ProtobufUtil
 */
-// TODO: Generate the non-shaded protobufutil from this one.
 @InterfaceAudience.Private // TODO: some clients (Hive, etc) use this class
 public final class ProtobufUtil {
 
+  private static final Logger LOG = LoggerFactory.getLogger(ProtobufUtil.class);
+
   private ProtobufUtil() {
   }
 
@@ -2345,14 +2348,6 @@ public final class ProtobufUtil {
       .setQualifier(UnsafeByteOperations.unsafeWrap(tableName.getQualifier())).build();
   }
 
-  public static HBaseProtos.RegionInfo toProtoRegionInfo(
-      org.apache.hadoop.hbase.client.RegionInfo regionInfo) {
-    return HBaseProtos.RegionInfo.newBuilder()
-      .setRegionId(regionInfo.getRegionId())
-      .setRegionEncodedName(regionInfo.getEncodedName())
-      .setTableName(toProtoTableName(regionInfo.getTable())).build();
-  }
-
   public static List<TableName> toTableNameList(List<HBaseProtos.TableName> tableNamesList) {
     if (tableNamesList == null) {
       return new ArrayList<>();
@@ -3302,7 +3297,9 @@
    * @return the converted Proto RegionInfo
    */
  public static HBaseProtos.RegionInfo toRegionInfo(final org.apache.hadoop.hbase.client.RegionInfo info) {
-    if (info == null) return null;
+    if (info == null) {
+      return null;
+    }
     HBaseProtos.RegionInfo.Builder builder = HBaseProtos.RegionInfo.newBuilder();
     builder.setTableName(ProtobufUtil.toProtoTableName(info.getTable()));
     builder.setRegionId(info.getRegionId());
@@ -3326,7 +3323,9 @@
    * @return the converted RegionInfo
    */
  public static org.apache.hadoop.hbase.client.RegionInfo toRegionInfo(final HBaseProtos.RegionInfo proto) {
-    if (proto == null) return null;
+    if (proto == null) {
+      return null;
+    }
     TableName tableName = ProtobufUtil.toTableName(proto.getTableName());
     long regionId = proto.getRegionId();
     int defaultReplicaId = org.apache.hadoop.hbase.client.RegionInfo.DEFAULT_REPLICA_ID;
@@ -3355,8 +3354,10 @@
     if (proto.hasOffline()) {
       rib.setOffline(proto.getOffline());
     }
-    if (proto.hasRegionEncodedName()) {
-      rib.setEncodedName(proto.getRegionEncodedName());
+    org.apache.hadoop.hbase.client.RegionInfo ri = rib.build();
+    if (proto.hasRegionEncodedName() && !proto.getRegionEncodedName().equals(ri.getEncodedName())) {
+      LOG.warn("The converted region info is {}, but the encoded name in proto is {}", ri,
+        proto.getRegionEncodedName());
     }
     return rib.build();
   }

TestRegionInfoBuilder.java

@@ -15,51 +15,43 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase.regionserver;
+package org.apache.hadoop.hbase.client;
 
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 import java.io.IOException;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.RegionInfo;
-import org.apache.hadoop.hbase.client.RegionInfoBuilder;
-import org.apache.hadoop.hbase.client.TableDescriptor;
-import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.TableNameTestRule;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.hadoop.hbase.testclassification.RegionServerTests;
+import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.FSTableDescriptors;
 import org.apache.hadoop.hbase.util.MD5Hash;
 import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
 
 import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
 
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 
-@Category({RegionServerTests.class, SmallTests.class})
+@Category({ ClientTests.class, SmallTests.class })
 public class TestRegionInfoBuilder {
 
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
     HBaseClassTestRule.forClass(TestRegionInfoBuilder.class);
 
   @Rule
-  public TestName name = new TestName();
+  public TableNameTestRule name = new TableNameTestRule();
 
   @Test
   public void testBuilder() {
@@ -91,74 +83,36 @@ public class TestRegionInfoBuilder {
   @Test
   public void testPb() throws DeserializationException {
     RegionInfo ri = RegionInfoBuilder.FIRST_META_REGIONINFO;
-    byte [] bytes = RegionInfo.toByteArray(ri);
+    byte[] bytes = RegionInfo.toByteArray(ri);
     RegionInfo pbri = RegionInfo.parseFrom(bytes);
     assertTrue(RegionInfo.COMPARATOR.compare(ri, pbri) == 0);
   }
 
-  @Test
-  public void testReadAndWriteRegionInfoFile() throws IOException, InterruptedException {
-    HBaseTestingUtility htu = new HBaseTestingUtility();
-    RegionInfo ri = RegionInfoBuilder.FIRST_META_REGIONINFO;
-    Path basedir = htu.getDataTestDir();
-    // Create a region. That'll write the .regioninfo file.
-    FSTableDescriptors fsTableDescriptors = new FSTableDescriptors(htu.getConfiguration());
-    FSTableDescriptors.tryUpdateMetaTableDescriptor(htu.getConfiguration());
-    HRegion r = HBaseTestingUtility.createRegionAndWAL(convert(ri), basedir, htu.getConfiguration(),
-      fsTableDescriptors.get(TableName.META_TABLE_NAME));
-    // Get modtime on the file.
-    long modtime = getModTime(r);
-    HBaseTestingUtility.closeRegionAndWAL(r);
-    Thread.sleep(1001);
-    r = HRegion.openHRegion(basedir, convert(ri), fsTableDescriptors.get(TableName.META_TABLE_NAME),
-      null, htu.getConfiguration());
-    // Ensure the file is not written for a second time.
-    long modtime2 = getModTime(r);
-    assertEquals(modtime, modtime2);
-    // Now load the file.
-    RegionInfo deserializedRi = HRegionFileSystem.loadRegionInfoFileContent(
-      r.getRegionFileSystem().getFileSystem(), r.getRegionFileSystem().getRegionDir());
-    HBaseTestingUtility.closeRegionAndWAL(r);
-  }
-
-  long getModTime(final HRegion r) throws IOException {
-    FileStatus[] statuses = r.getRegionFileSystem().getFileSystem().listStatus(
-      new Path(r.getRegionFileSystem().getRegionDir(), HRegionFileSystem.REGION_INFO_FILE));
-    assertTrue(statuses != null && statuses.length == 1);
-    return statuses[0].getModificationTime();
-  }
-
   @Test
   public void testCreateRegionInfoName() throws Exception {
-    final String tableName = name.getMethodName();
-    final TableName tn = TableName.valueOf(tableName);
+    final TableName tn = name.getTableName();
     String startKey = "startkey";
     final byte[] sk = Bytes.toBytes(startKey);
     String id = "id";
 
     // old format region name
-    byte [] name = RegionInfo.createRegionName(tn, sk, id, false);
+    byte[] name = RegionInfo.createRegionName(tn, sk, id, false);
     String nameStr = Bytes.toString(name);
-    assertEquals(tableName + "," + startKey + "," + id, nameStr);
+    assertEquals(tn + "," + startKey + "," + id, nameStr);
 
     // new format region name.
     String md5HashInHex = MD5Hash.getMD5AsHex(name);
     assertEquals(RegionInfo.MD5_HEX_LENGTH, md5HashInHex.length());
     name = RegionInfo.createRegionName(tn, sk, id, true);
     nameStr = Bytes.toString(name);
-    assertEquals(tableName + "," + startKey + ","
-      + id + "." + md5HashInHex + ".",
-      nameStr);
+    assertEquals(tn + "," + startKey + "," + id + "." + md5HashInHex + ".", nameStr);
   }
 
   @Test
   public void testContainsRange() {
-    TableDescriptor tableDesc = TableDescriptorBuilder.newBuilder(
-      TableName.valueOf(name.getMethodName())).build();
+    TableDescriptor tableDesc = TableDescriptorBuilder.newBuilder(name.getTableName()).build();
     RegionInfo ri = RegionInfoBuilder.newBuilder(tableDesc.getTableName())
-      .setStartKey(Bytes.toBytes("a"))
-      .setEndKey(Bytes.toBytes("g")).build();
+      .setStartKey(Bytes.toBytes("a")).setEndKey(Bytes.toBytes("g")).build();
     // Single row range at start of region
     assertTrue(ri.containsRange(Bytes.toBytes("a"), Bytes.toBytes("a")));
     // Fully contained range
@@ -184,14 +138,11 @@ public class TestRegionInfoBuilder {
 
   @Test
   public void testLastRegionCompare() {
-    TableDescriptor tableDesc = TableDescriptorBuilder
-      .newBuilder(TableName.valueOf(name.getMethodName())).build();
+    TableDescriptor tableDesc = TableDescriptorBuilder.newBuilder(name.getTableName()).build();
     RegionInfo rip = RegionInfoBuilder.newBuilder(tableDesc.getTableName())
-      .setStartKey(Bytes.toBytes("a"))
-      .setEndKey(new byte[0]).build();
+      .setStartKey(Bytes.toBytes("a")).setEndKey(new byte[0]).build();
     RegionInfo ric = RegionInfoBuilder.newBuilder(tableDesc.getTableName())
-      .setStartKey(Bytes.toBytes("a"))
-      .setEndKey(Bytes.toBytes("b")).build();
+      .setStartKey(Bytes.toBytes("a")).setEndKey(Bytes.toBytes("b")).build();
     assertTrue(RegionInfo.COMPARATOR.compare(rip, ric) > 0);
   }
 
@@ -202,18 +153,12 @@ public class TestRegionInfoBuilder {
 
   @Test
   public void testComparator() {
-    final TableName tableName = TableName.valueOf(name.getMethodName());
+    final TableName tableName = name.getTableName();
     byte[] empty = new byte[0];
-    RegionInfo older = RegionInfoBuilder.newBuilder(tableName)
-      .setStartKey(empty)
-      .setEndKey(empty)
-      .setSplit(false)
-      .setRegionId(0L).build();
-    RegionInfo newer = RegionInfoBuilder.newBuilder(tableName)
-      .setStartKey(empty)
-      .setEndKey(empty)
-      .setSplit(false)
-      .setRegionId(1L).build();
+    RegionInfo older = RegionInfoBuilder.newBuilder(tableName).setStartKey(empty).setEndKey(empty)
+      .setSplit(false).setRegionId(0L).build();
+    RegionInfo newer = RegionInfoBuilder.newBuilder(tableName).setStartKey(empty).setEndKey(empty)
+      .setSplit(false).setRegionId(1L).build();
     assertTrue(RegionInfo.COMPARATOR.compare(older, newer) < 0);
     assertTrue(RegionInfo.COMPARATOR.compare(newer, older) > 0);
     assertTrue(RegionInfo.COMPARATOR.compare(older, older) == 0);
@@ -222,8 +167,7 @@ public class TestRegionInfoBuilder {
 
   @Test
   public void testRegionNameForRegionReplicas() throws Exception {
-    String tableName = name.getMethodName();
-    final TableName tn = TableName.valueOf(tableName);
+    final TableName tn = name.getTableName();
     String startKey = "startkey";
     final byte[] sk = Bytes.toBytes(startKey);
     String id = "id";
@@ -231,26 +175,28 @@ public class TestRegionInfoBuilder {
     // assert with only the region name without encoding
 
     // primary, replicaId = 0
-    byte [] name = RegionInfo.createRegionName(tn, sk, Bytes.toBytes(id), 0, false);
+    byte[] name = RegionInfo.createRegionName(tn, sk, Bytes.toBytes(id), 0, false);
     String nameStr = Bytes.toString(name);
-    assertEquals(tableName + "," + startKey + "," + id, nameStr);
+    assertEquals(tn + "," + startKey + "," + id, nameStr);
 
     // replicaId = 1
     name = RegionInfo.createRegionName(tn, sk, Bytes.toBytes(id), 1, false);
     nameStr = Bytes.toString(name);
-    assertEquals(tableName + "," + startKey + "," + id + "_" +
-      String.format(RegionInfo.REPLICA_ID_FORMAT, 1), nameStr);
+    assertEquals(
+      tn + "," + startKey + "," + id + "_" + String.format(RegionInfo.REPLICA_ID_FORMAT, 1),
+      nameStr);
 
     // replicaId = max
     name = RegionInfo.createRegionName(tn, sk, Bytes.toBytes(id), 0xFFFF, false);
     nameStr = Bytes.toString(name);
-    assertEquals(tableName + "," + startKey + "," + id + "_" +
-      String.format(RegionInfo.REPLICA_ID_FORMAT, 0xFFFF), nameStr);
+    assertEquals(
+      tn + "," + startKey + "," + id + "_" + String.format(RegionInfo.REPLICA_ID_FORMAT, 0xFFFF),
+      nameStr);
   }
 
   @Test
   public void testParseName() throws IOException {
-    final TableName tableName = TableName.valueOf(name.getMethodName());
+    final TableName tableName = name.getTableName();
     byte[] startKey = Bytes.toBytes("startKey");
     long regionId = System.currentTimeMillis();
     int replicaId = 42;
@@ -259,39 +205,34 @@ public class TestRegionInfoBuilder {
     byte[] regionName = RegionInfo.createRegionName(tableName, startKey, regionId, false);
 
     byte[][] fields = RegionInfo.parseRegionName(regionName);
-    assertArrayEquals(Bytes.toString(fields[0]),tableName.getName(), fields[0]);
-    assertArrayEquals(Bytes.toString(fields[1]),startKey, fields[1]);
-    assertArrayEquals(Bytes.toString(fields[2]), Bytes.toBytes(Long.toString(regionId)),fields[2]);
+    assertArrayEquals(Bytes.toString(fields[0]), tableName.getName(), fields[0]);
+    assertArrayEquals(Bytes.toString(fields[1]), startKey, fields[1]);
+    assertArrayEquals(Bytes.toString(fields[2]), Bytes.toBytes(Long.toString(regionId)), fields[2]);
     assertEquals(3, fields.length);
 
     // test with replicaId
-    regionName = RegionInfo.createRegionName(tableName, startKey, regionId,
-      replicaId, false);
+    regionName = RegionInfo.createRegionName(tableName, startKey, regionId, replicaId, false);
 
     fields = RegionInfo.parseRegionName(regionName);
-    assertArrayEquals(Bytes.toString(fields[0]),tableName.getName(), fields[0]);
-    assertArrayEquals(Bytes.toString(fields[1]),startKey, fields[1]);
-    assertArrayEquals(Bytes.toString(fields[2]), Bytes.toBytes(Long.toString(regionId)),fields[2]);
-    assertArrayEquals(Bytes.toString(fields[3]), Bytes.toBytes(
-      String.format(RegionInfo.REPLICA_ID_FORMAT, replicaId)), fields[3]);
+    assertArrayEquals(Bytes.toString(fields[0]), tableName.getName(), fields[0]);
+    assertArrayEquals(Bytes.toString(fields[1]), startKey, fields[1]);
+    assertArrayEquals(Bytes.toString(fields[2]), Bytes.toBytes(Long.toString(regionId)), fields[2]);
+    assertArrayEquals(Bytes.toString(fields[3]),
+      Bytes.toBytes(String.format(RegionInfo.REPLICA_ID_FORMAT, replicaId)), fields[3]);
   }
 
   @Test
   public void testConvert() {
-    final TableName tableName = TableName.valueOf("ns1:" + name.getMethodName());
+    final TableName tableName =
+      TableName.valueOf("ns1:" + name.getTableName().getQualifierAsString());
     byte[] startKey = Bytes.toBytes("startKey");
     byte[] endKey = Bytes.toBytes("endKey");
     boolean split = false;
     long regionId = System.currentTimeMillis();
     int replicaId = 42;
 
-    RegionInfo ri = RegionInfoBuilder.newBuilder(tableName)
-      .setStartKey(startKey)
-      .setEndKey(endKey)
-      .setSplit(split)
-      .setRegionId(regionId)
-      .setReplicaId(replicaId).build();
+    RegionInfo ri = RegionInfoBuilder.newBuilder(tableName).setStartKey(startKey).setEndKey(endKey)
+      .setSplit(split).setRegionId(regionId).setReplicaId(replicaId).build();
 
     // convert two times, compare
     RegionInfo convertedRi = ProtobufUtil.toRegionInfo(ProtobufUtil.toRegionInfo(ri));
@@ -302,30 +243,44 @@ public class TestRegionInfoBuilder {
     HBaseProtos.RegionInfo info = HBaseProtos.RegionInfo.newBuilder()
       .setTableName(HBaseProtos.TableName.newBuilder()
         .setQualifier(UnsafeByteOperations.unsafeWrap(tableName.getQualifier()))
-        .setNamespace(UnsafeByteOperations.unsafeWrap(tableName.getNamespace()))
-        .build())
+        .setNamespace(UnsafeByteOperations.unsafeWrap(tableName.getNamespace())).build())
       .setStartKey(UnsafeByteOperations.unsafeWrap(startKey))
-      .setEndKey(UnsafeByteOperations.unsafeWrap(endKey))
-      .setSplit(split)
-      .setRegionId(regionId)
+      .setEndKey(UnsafeByteOperations.unsafeWrap(endKey)).setSplit(split).setRegionId(regionId)
       .build();
 
     convertedRi = ProtobufUtil.toRegionInfo(info);
-    RegionInfo expectedRi = RegionInfoBuilder.newBuilder(tableName)
-      .setStartKey(startKey)
-      .setEndKey(endKey)
-      .setSplit(split)
-      .setRegionId(regionId)
-      .setReplicaId(0).build();
+    RegionInfo expectedRi = RegionInfoBuilder.newBuilder(tableName).setStartKey(startKey)
+      .setEndKey(endKey).setSplit(split).setRegionId(regionId).setReplicaId(0).build();
 
     assertEquals(expectedRi, convertedRi);
   }
 
-  // Duplicated method in TestRegionInfoDisplay too.
-  private HRegionInfo convert(RegionInfo ri) {
-    HRegionInfo hri = new HRegionInfo(
-      ri.getTable(), ri.getStartKey(), ri.getEndKey(), ri.isSplit(), ri.getRegionId());
-    hri.setOffline(ri.isOffline());
-    return hri;
-  }
+  private void assertRegionNameNotEquals(RegionInfo expected, RegionInfo actual) {
+    assertNotEquals(expected.getRegionNameAsString(), actual.getRegionNameAsString());
+    assertNotEquals(expected.getEncodedName(), actual.getEncodedName());
+  }
+
+  private void assertRegionNameEquals(RegionInfo expected, RegionInfo actual) {
+    assertEquals(expected.getRegionNameAsString(), actual.getRegionNameAsString());
+    assertEquals(expected.getEncodedName(), actual.getEncodedName());
+  }
+
+  @Test
+  public void testNewBuilderWithRegionInfo() {
+    RegionInfo ri = RegionInfoBuilder.newBuilder(name.getTableName()).build();
+    assertEquals(ri, RegionInfoBuilder.newBuilder(ri).build());
+
+    // make sure that the region name and encoded name are changed, see HBASE-24500 for more
+    // details.
+    assertRegionNameNotEquals(ri,
+      RegionInfoBuilder.newBuilder(ri).setStartKey(new byte[1]).build());
+    assertRegionNameNotEquals(ri,
+      RegionInfoBuilder.newBuilder(ri).setRegionId(ri.getRegionId() + 1).build());
+    assertRegionNameNotEquals(ri, RegionInfoBuilder.newBuilder(ri).setReplicaId(1).build());
+
+    // these fields are not in region name
+    assertRegionNameEquals(ri, RegionInfoBuilder.newBuilder(ri).setEndKey(new byte[1]).build());
+    assertRegionNameEquals(ri, RegionInfoBuilder.newBuilder(ri).setSplit(true).build());
+    assertRegionNameEquals(ri, RegionInfoBuilder.newBuilder(ri).setOffline(true).build());
+  }
 }

TestProtobufUtil.java

@@ -19,6 +19,8 @@ package org.apache.hadoop.hbase.shaded.protobuf;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
@@ -29,17 +31,26 @@ import org.apache.hadoop.hbase.CellComparatorImpl;
 import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.RegionInfoBuilder;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.MD5Hash;
+import org.apache.log4j.Appender;
+import org.apache.log4j.Level;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.spi.LoggingEvent;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.mockito.ArgumentCaptor;
 
 import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
 import org.apache.hbase.thirdparty.com.google.protobuf.Any;
@@ -54,6 +65,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationPr
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
@@ -479,4 +491,29 @@ public class TestProtobufUtil {
       + "\"sharedLockCount\":0"
       + "}]", lockJson);
   }
+
+  @Test
+  public void testRegionInfo() {
+    TableName tableName = TableName.valueOf("testRegionInfo");
+    RegionInfo expected = RegionInfoBuilder.newBuilder(tableName).build();
+    String md5 = MD5Hash.getMD5AsHex(new byte[0]);
+    HBaseProtos.RegionInfo proto =
+      ProtobufUtil.toRegionInfo(expected).toBuilder().setRegionEncodedName(md5).build();
+
+    Appender appender = mock(Appender.class);
+    LogManager.getRootLogger().addAppender(appender);
+    try {
+      RegionInfo actual = ProtobufUtil.toRegionInfo(proto);
+      assertEquals(expected, actual);
+      ArgumentCaptor<LoggingEvent> captor = ArgumentCaptor.forClass(LoggingEvent.class);
+      verify(appender).doAppend(captor.capture());
+      LoggingEvent event = captor.getValue();
+      assertEquals(Level.WARN, event.getLevel());
+      assertEquals(
+        "The converted region info is " + actual + ", but the encoded name in proto is " + md5,
+        event.getRenderedMessage());
+    } finally {
+      LogManager.getRootLogger().removeAppender(appender);
+    }
+  }
 }

TestReadAndWriteRegionInfoFile.java (new file)

@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.RegionInfoBuilder;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.hbase.util.FSTableDescriptors;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({ RegionServerTests.class, SmallTests.class })
+public class TestReadAndWriteRegionInfoFile {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+    HBaseClassTestRule.forClass(TestReadAndWriteRegionInfoFile.class);
+
+  private static final HBaseCommonTestingUtility UTIL = new HBaseTestingUtility();
+
+  private static final Configuration CONF = UTIL.getConfiguration();
+
+  private static FileSystem FS;
+
+  private static Path ROOT_DIR;
+
+  @BeforeClass
+  public static void setUp() throws IOException {
+    ROOT_DIR = UTIL.getDataTestDir();
+    FS = ROOT_DIR.getFileSystem(CONF);
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    UTIL.cleanupTestDir();
+  }
+
+  @Test
+  public void testReadAndWriteRegionInfoFile() throws IOException, InterruptedException {
+    RegionInfo ri = RegionInfoBuilder.FIRST_META_REGIONINFO;
+    // Create a region. That'll write the .regioninfo file.
+    FSTableDescriptors fsTableDescriptors = new FSTableDescriptors(FS, ROOT_DIR);
+    FSTableDescriptors.tryUpdateMetaTableDescriptor(CONF, FS, ROOT_DIR, null);
+    HRegion r = HBaseTestingUtility.createRegionAndWAL(ri, ROOT_DIR, CONF,
+      fsTableDescriptors.get(TableName.META_TABLE_NAME));
+    // Get modtime on the file.
+    long modtime = getModTime(r);
+    HBaseTestingUtility.closeRegionAndWAL(r);
+    Thread.sleep(1001);
+    r = HRegion.openHRegion(ROOT_DIR, ri, fsTableDescriptors.get(TableName.META_TABLE_NAME), null,
+      CONF);
+    // Ensure the file is not written for a second time.
+    long modtime2 = getModTime(r);
+    assertEquals(modtime, modtime2);
+    // Now load the file.
+    HRegionFileSystem.loadRegionInfoFileContent(r.getRegionFileSystem().getFileSystem(),
+      r.getRegionFileSystem().getRegionDir());
+    HBaseTestingUtility.closeRegionAndWAL(r);
+  }
+
+  private long getModTime(final HRegion r) throws IOException {
+    FileStatus[] statuses = r.getRegionFileSystem().getFileSystem().listStatus(
+      new Path(r.getRegionFileSystem().getRegionDir(), HRegionFileSystem.REGION_INFO_FILE));
+    assertTrue(statuses != null && statuses.length == 1);
+    return statuses[0].getModificationTime();
+  }
+}