HBASE-24500 The behavior of RegionInfoBuilder.newBuilder(RegionInfo) is strange (#1840)
Signed-off-by: Guanghao Zhang <zghao@apache.org>
Signed-off-by: Michael Stack <stack@apache.org>
parent 60c9ae58fb
commit ee6938abda
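
A minimal usage sketch of the behavior this change establishes. The wrapper class and table name below are illustrative, not part of the patch; the assertions mirror the new testNewBuilderWithRegionInfo test further down.

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;

public class RegionInfoBuilderSketch {
  public static void main(String[] args) {
    RegionInfo ri = RegionInfoBuilder.newBuilder(TableName.valueOf("t1")).build();

    // Rebuilding an unchanged copy still yields an equal RegionInfo.
    assertEquals(ri, RegionInfoBuilder.newBuilder(ri).build());

    // Changing a field that participates in the region name (start key, region id, replica id)
    // now yields a freshly computed region name and encoded name, because build() no longer
    // reuses the name copied from the source RegionInfo.
    RegionInfo replica = RegionInfoBuilder.newBuilder(ri).setReplicaId(1).build();
    assertNotEquals(ri.getRegionNameAsString(), replica.getRegionNameAsString());
    assertNotEquals(ri.getEncodedName(), replica.getEncodedName());
  }
}
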
@@ -18,8 +18,6 @@
package org.apache.hadoop.hbase.client;

import java.util.Arrays;

import org.apache.commons.lang3.ArrayUtils;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes;

@@ -58,8 +56,6 @@ public class RegionInfoBuilder {
  private int replicaId = RegionInfo.DEFAULT_REPLICA_ID;
  private boolean offLine = false;
  private boolean split = false;
  private byte[] regionName = null;
  private String encodedName = null;

  public static RegionInfoBuilder newBuilder(TableName tableName) {
    return new RegionInfoBuilder(tableName);

@@ -81,8 +77,6 @@ public class RegionInfoBuilder {
    this.split = regionInfo.isSplit();
    this.regionId = regionInfo.getRegionId();
    this.replicaId = regionInfo.getReplicaId();
    this.regionName = regionInfo.getRegionName();
    this.encodedName = regionInfo.getEncodedName();
  }

  public RegionInfoBuilder setStartKey(byte[] startKey) {

@@ -115,14 +109,9 @@ public class RegionInfoBuilder {
    return this;
  }

  public RegionInfoBuilder setEncodedName(String encodedName) {
    this.encodedName = encodedName;
    return this;
  }

  public RegionInfo build() {
    return new MutableRegionInfo(tableName, startKey, endKey, split,
      regionId, replicaId, offLine, regionName, encodedName);
      regionId, replicaId, offLine);
  }

  /**

@@ -210,32 +199,12 @@ public class RegionInfoBuilder {
     * first meta regions
     */
    private MutableRegionInfo(long regionId, TableName tableName, int replicaId) {
      this(tableName,
        HConstants.EMPTY_START_ROW,
        HConstants.EMPTY_END_ROW,
        false,
        regionId,
        replicaId,
        false,
        RegionInfo.createRegionName(tableName, null, regionId, replicaId, false));
      this(tableName, HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW, false, regionId,
        replicaId, false);
    }

    MutableRegionInfo(final TableName tableName, final byte[] startKey,
      final byte[] endKey, final boolean split, final long regionId,
      final int replicaId, boolean offLine, byte[] regionName) {
      this(checkTableName(tableName),
        checkStartKey(startKey),
        checkEndKey(endKey),
        split, regionId,
        checkReplicaId(replicaId),
        offLine,
        regionName,
        RegionInfo.encodeRegionName(regionName));
    }

    MutableRegionInfo(final TableName tableName, final byte[] startKey,
      final byte[] endKey, final boolean split, final long regionId,
      final int replicaId, boolean offLine, byte[] regionName, String encodedName) {
    MutableRegionInfo(final TableName tableName, final byte[] startKey, final byte[] endKey,
      final boolean split, final long regionId, final int replicaId, boolean offLine) {
      this.tableName = checkTableName(tableName);
      this.startKey = checkStartKey(startKey);
      this.endKey = checkEndKey(endKey);

@@ -243,24 +212,14 @@ public class RegionInfoBuilder {
      this.regionId = regionId;
      this.replicaId = checkReplicaId(replicaId);
      this.offLine = offLine;
      if (ArrayUtils.isEmpty(regionName)) {
        this.regionName = RegionInfo.createRegionName(this.tableName, this.startKey, this.regionId, this.replicaId,
          !this.tableName.equals(TableName.META_TABLE_NAME));
        this.encodedName = RegionInfo.encodeRegionName(this.regionName);
      } else {
        this.regionName = regionName;
        this.encodedName = encodedName;
      }
      this.hashCode = generateHashCode(
        this.tableName,
        this.startKey,
        this.endKey,
        this.regionId,
        this.replicaId,
        this.offLine,
        this.regionName);
      this.regionName = RegionInfo.createRegionName(this.tableName, this.startKey, this.regionId,
        this.replicaId, !this.tableName.equals(TableName.META_TABLE_NAME));
      this.encodedName = RegionInfo.encodeRegionName(this.regionName);
      this.hashCode = generateHashCode(this.tableName, this.startKey, this.endKey, this.regionId,
        this.replicaId, this.offLine, this.regionName);
      this.encodedNameAsBytes = Bytes.toBytes(this.encodedName);
    }

    /**
     * @return Return a short, printable name for this region
     * (usually encoded name) for us logging.

@@ -282,7 +241,7 @@ public class RegionInfoBuilder {
     * @see #getRegionNameAsString()
     */
    @Override
    public byte [] getRegionName(){
    public byte[] getRegionName() {
      return regionName;
    }

@@ -301,20 +260,19 @@ public class RegionInfoBuilder {
    }

    @Override
    public byte [] getEncodedNameAsBytes() {
    public byte[] getEncodedNameAsBytes() {
      return this.encodedNameAsBytes;
    }

    /** @return the startKey */
    @Override
    public byte [] getStartKey(){
    public byte[] getStartKey() {
      return startKey;
    }


    /** @return the endKey */
    @Override
    public byte [] getEndKey(){
    public byte[] getEndKey() {
      return endKey;
    }

@@ -406,7 +364,9 @@ public class RegionInfoBuilder {
     */
    @Override
    public boolean isSplitParent() {
      if (!isSplit()) return false;
      if (!isSplit()) {
        return false;
      }
      if (!isOffline()) {
        LOG.warn("Region is split but NOT offline: " + getRegionNameAsString());
      }
@@ -64,23 +64,7 @@ public class RegionReplicaUtil {
    if (regionInfo.getReplicaId() == replicaId) {
      return regionInfo;
    }

    if (regionInfo.isMetaRegion()) {
      return RegionInfoBuilder.newBuilder(regionInfo.getTable())
        .setRegionId(regionInfo.getRegionId())
        .setReplicaId(replicaId)
        .setOffline(regionInfo.isOffline())
        .build();
    } else {
      return RegionInfoBuilder.newBuilder(regionInfo.getTable())
        .setStartKey(regionInfo.getStartKey())
        .setEndKey(regionInfo.getEndKey())
        .setSplit(regionInfo.isSplit())
        .setRegionId(regionInfo.getRegionId())
        .setReplicaId(replicaId)
        .setOffline(regionInfo.isOffline())
        .build();
    }
    return RegionInfoBuilder.newBuilder(regionInfo).setReplicaId(replicaId).build();
  }

  /**
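
For illustration, a caller-side sketch of the simplified RegionReplicaUtil.getRegionInfoForReplica above. The wrapper class and table name are assumptions; only the RegionReplicaUtil and RegionInfoBuilder calls come from the patched API.

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;

public class RegionReplicaUtilSketch {
  public static void main(String[] args) {
    RegionInfo primary = RegionInfoBuilder.newBuilder(TableName.valueOf("t1")).build();
    // Derive the RegionInfo for replica 2 of the same region; only the replica id differs,
    // and the region name is recomputed by the builder rather than copied.
    RegionInfo replica = RegionReplicaUtil.getRegionInfoForReplica(primary, 2);
    assertEquals(2, replica.getReplicaId());
    assertTrue(RegionReplicaUtil.isReplicasForSameRegion(primary, replica));
  }
}
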
@@ -117,6 +117,8 @@ import org.apache.hadoop.hbase.util.Methods;
import org.apache.hadoop.hbase.util.VersionInfo;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hbase.thirdparty.com.google.common.io.ByteStreams;

@@ -211,11 +213,12 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos;
 * users; e.g. Coprocessor Endpoints. If you make change in here, be sure to make change in
 * the companion class too (not the end of the world, especially if you are adding new functionality
 * but something to be aware of.
 * @see ProtobufUtil
 */
// TODO: Generate the non-shaded protobufutil from this one.
@InterfaceAudience.Private // TODO: some clients (Hive, etc) use this class
public final class ProtobufUtil {

  private static final Logger LOG = LoggerFactory.getLogger(ProtobufUtil.class);

  private ProtobufUtil() {
  }

@@ -2345,14 +2348,6 @@ public final class ProtobufUtil {
      .setQualifier(UnsafeByteOperations.unsafeWrap(tableName.getQualifier())).build();
  }

  public static HBaseProtos.RegionInfo toProtoRegionInfo(
      org.apache.hadoop.hbase.client.RegionInfo regionInfo) {
    return HBaseProtos.RegionInfo.newBuilder()
      .setRegionId(regionInfo.getRegionId())
      .setRegionEncodedName(regionInfo.getEncodedName())
      .setTableName(toProtoTableName(regionInfo.getTable())).build();
  }

  public static List<TableName> toTableNameList(List<HBaseProtos.TableName> tableNamesList) {
    if (tableNamesList == null) {
      return new ArrayList<>();

@@ -3302,7 +3297,9 @@ public final class ProtobufUtil {
   * @return the converted Proto RegionInfo
   */
  public static HBaseProtos.RegionInfo toRegionInfo(final org.apache.hadoop.hbase.client.RegionInfo info) {
    if (info == null) return null;
    if (info == null) {
      return null;
    }
    HBaseProtos.RegionInfo.Builder builder = HBaseProtos.RegionInfo.newBuilder();
    builder.setTableName(ProtobufUtil.toProtoTableName(info.getTable()));
    builder.setRegionId(info.getRegionId());

@@ -3326,7 +3323,9 @@
   * @return the converted RegionInfo
   */
  public static org.apache.hadoop.hbase.client.RegionInfo toRegionInfo(final HBaseProtos.RegionInfo proto) {
    if (proto == null) return null;
    if (proto == null) {
      return null;
    }
    TableName tableName = ProtobufUtil.toTableName(proto.getTableName());
    long regionId = proto.getRegionId();
    int defaultReplicaId = org.apache.hadoop.hbase.client.RegionInfo.DEFAULT_REPLICA_ID;

@@ -3355,8 +3354,10 @@
    if (proto.hasOffline()) {
      rib.setOffline(proto.getOffline());
    }
    if (proto.hasRegionEncodedName()) {
      rib.setEncodedName(proto.getRegionEncodedName());
    org.apache.hadoop.hbase.client.RegionInfo ri = rib.build();
    if (proto.hasRegionEncodedName() && !proto.getRegionEncodedName().equals(ri.getEncodedName())) {
      LOG.warn("The converted region info is {}, but the encoded name in proto is {}", ri,
        proto.getRegionEncodedName());
    }
    return rib.build();
  }
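
A sketch of the proto round trip after this change, mirroring the new TestProtobufUtil#testRegionInfo below; the wrapper class and table name are illustrative. A mismatching encoded name in the proto no longer overrides the computed one; it only produces a WARN log.

import static org.junit.Assert.assertEquals;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.util.MD5Hash;

public class ProtobufRegionInfoSketch {
  public static void main(String[] args) {
    RegionInfo expected = RegionInfoBuilder.newBuilder(TableName.valueOf("t1")).build();
    // Force a stale encoded name into the proto, as an older client might send.
    String staleEncodedName = MD5Hash.getMD5AsHex(new byte[0]);
    HBaseProtos.RegionInfo proto =
      ProtobufUtil.toRegionInfo(expected).toBuilder().setRegionEncodedName(staleEncodedName).build();
    // The converter recomputes the encoded name from the other fields and only logs a WARN
    // about the mismatch, so the round trip still yields an equal RegionInfo.
    RegionInfo actual = ProtobufUtil.toRegionInfo(proto);
    assertEquals(expected, actual);
  }
}
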
@@ -15,51 +15,43 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;
package org.apache.hadoop.hbase.client;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.IOException;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.TableNameTestRule;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.MD5Hash;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;

@Category({RegionServerTests.class, SmallTests.class})
@Category({ ClientTests.class, SmallTests.class })
public class TestRegionInfoBuilder {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestRegionInfoBuilder.class);
    HBaseClassTestRule.forClass(TestRegionInfoBuilder.class);

  @Rule
  public TestName name = new TestName();
  public TableNameTestRule name = new TableNameTestRule();

  @Test
  public void testBuilder() {

@@ -91,74 +83,36 @@ public class TestRegionInfoBuilder {
  @Test
  public void testPb() throws DeserializationException {
    RegionInfo ri = RegionInfoBuilder.FIRST_META_REGIONINFO;
    byte [] bytes = RegionInfo.toByteArray(ri);
    byte[] bytes = RegionInfo.toByteArray(ri);
    RegionInfo pbri = RegionInfo.parseFrom(bytes);
    assertTrue(RegionInfo.COMPARATOR.compare(ri, pbri) == 0);
  }

  @Test
  public void testReadAndWriteRegionInfoFile() throws IOException, InterruptedException {
    HBaseTestingUtility htu = new HBaseTestingUtility();
    RegionInfo ri = RegionInfoBuilder.FIRST_META_REGIONINFO;
    Path basedir = htu.getDataTestDir();
    // Create a region. That'll write the .regioninfo file.
    FSTableDescriptors fsTableDescriptors = new FSTableDescriptors(htu.getConfiguration());
    FSTableDescriptors.tryUpdateMetaTableDescriptor(htu.getConfiguration());
    HRegion r = HBaseTestingUtility.createRegionAndWAL(convert(ri), basedir, htu.getConfiguration(),
      fsTableDescriptors.get(TableName.META_TABLE_NAME));
    // Get modtime on the file.
    long modtime = getModTime(r);
    HBaseTestingUtility.closeRegionAndWAL(r);
    Thread.sleep(1001);
    r = HRegion.openHRegion(basedir, convert(ri), fsTableDescriptors.get(TableName.META_TABLE_NAME),
      null, htu.getConfiguration());
    // Ensure the file is not written for a second time.
    long modtime2 = getModTime(r);
    assertEquals(modtime, modtime2);
    // Now load the file.
    RegionInfo deserializedRi = HRegionFileSystem.loadRegionInfoFileContent(
      r.getRegionFileSystem().getFileSystem(), r.getRegionFileSystem().getRegionDir());
    HBaseTestingUtility.closeRegionAndWAL(r);
  }

  long getModTime(final HRegion r) throws IOException {
    FileStatus[] statuses = r.getRegionFileSystem().getFileSystem().listStatus(
      new Path(r.getRegionFileSystem().getRegionDir(), HRegionFileSystem.REGION_INFO_FILE));
    assertTrue(statuses != null && statuses.length == 1);
    return statuses[0].getModificationTime();
  }

  @Test
  public void testCreateRegionInfoName() throws Exception {
    final String tableName = name.getMethodName();
    final TableName tn = TableName.valueOf(tableName);
    final TableName tn = name.getTableName();
    String startKey = "startkey";
    final byte[] sk = Bytes.toBytes(startKey);
    String id = "id";

    // old format region name
    byte [] name = RegionInfo.createRegionName(tn, sk, id, false);
    byte[] name = RegionInfo.createRegionName(tn, sk, id, false);
    String nameStr = Bytes.toString(name);
    assertEquals(tableName + "," + startKey + "," + id, nameStr);

    assertEquals(tn + "," + startKey + "," + id, nameStr);

    // new format region name.
    String md5HashInHex = MD5Hash.getMD5AsHex(name);
    assertEquals(RegionInfo.MD5_HEX_LENGTH, md5HashInHex.length());
    name = RegionInfo.createRegionName(tn, sk, id, true);
    nameStr = Bytes.toString(name);
    assertEquals(tableName + "," + startKey + ","
      + id + "." + md5HashInHex + ".",
      nameStr);
    assertEquals(tn + "," + startKey + "," + id + "." + md5HashInHex + ".", nameStr);
  }

  @Test
  public void testContainsRange() {
    TableDescriptor tableDesc = TableDescriptorBuilder.newBuilder(
      TableName.valueOf(name.getMethodName())).build();
    TableDescriptor tableDesc = TableDescriptorBuilder.newBuilder(name.getTableName()).build();
    RegionInfo ri = RegionInfoBuilder.newBuilder(tableDesc.getTableName())
      .setStartKey(Bytes.toBytes("a"))
      .setEndKey(Bytes.toBytes("g")).build();
      .setStartKey(Bytes.toBytes("a")).setEndKey(Bytes.toBytes("g")).build();
    // Single row range at start of region
    assertTrue(ri.containsRange(Bytes.toBytes("a"), Bytes.toBytes("a")));
    // Fully contained range

@@ -184,14 +138,11 @@ public class TestRegionInfoBuilder {

  @Test
  public void testLastRegionCompare() {
    TableDescriptor tableDesc = TableDescriptorBuilder
      .newBuilder(TableName.valueOf(name.getMethodName())).build();
    TableDescriptor tableDesc = TableDescriptorBuilder.newBuilder(name.getTableName()).build();
    RegionInfo rip = RegionInfoBuilder.newBuilder(tableDesc.getTableName())
      .setStartKey(Bytes.toBytes("a"))
      .setEndKey(new byte[0]).build();
      .setStartKey(Bytes.toBytes("a")).setEndKey(new byte[0]).build();
    RegionInfo ric = RegionInfoBuilder.newBuilder(tableDesc.getTableName())
      .setStartKey(Bytes.toBytes("a"))
      .setEndKey(Bytes.toBytes("b")).build();
      .setStartKey(Bytes.toBytes("a")).setEndKey(Bytes.toBytes("b")).build();
    assertTrue(RegionInfo.COMPARATOR.compare(rip, ric) > 0);
  }


@@ -202,18 +153,12 @@ public class TestRegionInfoBuilder {

  @Test
  public void testComparator() {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    final TableName tableName = name.getTableName();
    byte[] empty = new byte[0];
    RegionInfo older = RegionInfoBuilder.newBuilder(tableName)
      .setStartKey(empty)
      .setEndKey(empty)
      .setSplit(false)
      .setRegionId(0L).build();
    RegionInfo newer = RegionInfoBuilder.newBuilder(tableName)
      .setStartKey(empty)
      .setEndKey(empty)
      .setSplit(false)
      .setRegionId(1L).build();
    RegionInfo older = RegionInfoBuilder.newBuilder(tableName).setStartKey(empty).setEndKey(empty)
      .setSplit(false).setRegionId(0L).build();
    RegionInfo newer = RegionInfoBuilder.newBuilder(tableName).setStartKey(empty).setEndKey(empty)
      .setSplit(false).setRegionId(1L).build();
    assertTrue(RegionInfo.COMPARATOR.compare(older, newer) < 0);
    assertTrue(RegionInfo.COMPARATOR.compare(newer, older) > 0);
    assertTrue(RegionInfo.COMPARATOR.compare(older, older) == 0);

@@ -222,8 +167,7 @@ public class TestRegionInfoBuilder {

  @Test
  public void testRegionNameForRegionReplicas() throws Exception {
    String tableName = name.getMethodName();
    final TableName tn = TableName.valueOf(tableName);
    final TableName tn = name.getTableName();
    String startKey = "startkey";
    final byte[] sk = Bytes.toBytes(startKey);
    String id = "id";

@@ -231,26 +175,28 @@ public class TestRegionInfoBuilder {
    // assert with only the region name without encoding

    // primary, replicaId = 0
    byte [] name = RegionInfo.createRegionName(tn, sk, Bytes.toBytes(id), 0, false);
    byte[] name = RegionInfo.createRegionName(tn, sk, Bytes.toBytes(id), 0, false);
    String nameStr = Bytes.toString(name);
    assertEquals(tableName + "," + startKey + "," + id, nameStr);
    assertEquals(tn + "," + startKey + "," + id, nameStr);

    // replicaId = 1
    name = RegionInfo.createRegionName(tn, sk, Bytes.toBytes(id), 1, false);
    nameStr = Bytes.toString(name);
    assertEquals(tableName + "," + startKey + "," + id + "_" +
      String.format(RegionInfo.REPLICA_ID_FORMAT, 1), nameStr);
    assertEquals(
      tn + "," + startKey + "," + id + "_" + String.format(RegionInfo.REPLICA_ID_FORMAT, 1),
      nameStr);

    // replicaId = max
    name = RegionInfo.createRegionName(tn, sk, Bytes.toBytes(id), 0xFFFF, false);
    nameStr = Bytes.toString(name);
    assertEquals(tableName + "," + startKey + "," + id + "_" +
      String.format(RegionInfo.REPLICA_ID_FORMAT, 0xFFFF), nameStr);
    assertEquals(
      tn + "," + startKey + "," + id + "_" + String.format(RegionInfo.REPLICA_ID_FORMAT, 0xFFFF),
      nameStr);
  }

  @Test
  public void testParseName() throws IOException {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    final TableName tableName = name.getTableName();
    byte[] startKey = Bytes.toBytes("startKey");
    long regionId = System.currentTimeMillis();
    int replicaId = 42;

@@ -259,39 +205,34 @@ public class TestRegionInfoBuilder {
    byte[] regionName = RegionInfo.createRegionName(tableName, startKey, regionId, false);

    byte[][] fields = RegionInfo.parseRegionName(regionName);
    assertArrayEquals(Bytes.toString(fields[0]),tableName.getName(), fields[0]);
    assertArrayEquals(Bytes.toString(fields[1]),startKey, fields[1]);
    assertArrayEquals(Bytes.toString(fields[2]), Bytes.toBytes(Long.toString(regionId)),fields[2]);
    assertArrayEquals(Bytes.toString(fields[0]), tableName.getName(), fields[0]);
    assertArrayEquals(Bytes.toString(fields[1]), startKey, fields[1]);
    assertArrayEquals(Bytes.toString(fields[2]), Bytes.toBytes(Long.toString(regionId)), fields[2]);
    assertEquals(3, fields.length);

    // test with replicaId
    regionName = RegionInfo.createRegionName(tableName, startKey, regionId,
      replicaId, false);
    regionName = RegionInfo.createRegionName(tableName, startKey, regionId, replicaId, false);

    fields = RegionInfo.parseRegionName(regionName);
    assertArrayEquals(Bytes.toString(fields[0]),tableName.getName(), fields[0]);
    assertArrayEquals(Bytes.toString(fields[1]),startKey, fields[1]);
    assertArrayEquals(Bytes.toString(fields[2]), Bytes.toBytes(Long.toString(regionId)),fields[2]);
    assertArrayEquals(Bytes.toString(fields[3]), Bytes.toBytes(
      String.format(RegionInfo.REPLICA_ID_FORMAT, replicaId)), fields[3]);
    assertArrayEquals(Bytes.toString(fields[0]), tableName.getName(), fields[0]);
    assertArrayEquals(Bytes.toString(fields[1]), startKey, fields[1]);
    assertArrayEquals(Bytes.toString(fields[2]), Bytes.toBytes(Long.toString(regionId)), fields[2]);
    assertArrayEquals(Bytes.toString(fields[3]),
      Bytes.toBytes(String.format(RegionInfo.REPLICA_ID_FORMAT, replicaId)), fields[3]);
  }

  @Test
  public void testConvert() {
    final TableName tableName = TableName.valueOf("ns1:" + name.getMethodName());
    final TableName tableName =
      TableName.valueOf("ns1:" + name.getTableName().getQualifierAsString());
    byte[] startKey = Bytes.toBytes("startKey");
    byte[] endKey = Bytes.toBytes("endKey");
    boolean split = false;
    long regionId = System.currentTimeMillis();
    int replicaId = 42;


    RegionInfo ri = RegionInfoBuilder.newBuilder(tableName)
      .setStartKey(startKey)
      .setEndKey(endKey)
      .setSplit(split)
      .setRegionId(regionId)
      .setReplicaId(replicaId).build();
    RegionInfo ri = RegionInfoBuilder.newBuilder(tableName).setStartKey(startKey).setEndKey(endKey)
      .setSplit(split).setRegionId(regionId).setReplicaId(replicaId).build();

    // convert two times, compare
    RegionInfo convertedRi = ProtobufUtil.toRegionInfo(ProtobufUtil.toRegionInfo(ri));

@@ -302,30 +243,44 @@ public class TestRegionInfoBuilder {
    HBaseProtos.RegionInfo info = HBaseProtos.RegionInfo.newBuilder()
      .setTableName(HBaseProtos.TableName.newBuilder()
        .setQualifier(UnsafeByteOperations.unsafeWrap(tableName.getQualifier()))
        .setNamespace(UnsafeByteOperations.unsafeWrap(tableName.getNamespace()))
        .build())
        .setNamespace(UnsafeByteOperations.unsafeWrap(tableName.getNamespace())).build())
      .setStartKey(UnsafeByteOperations.unsafeWrap(startKey))
      .setEndKey(UnsafeByteOperations.unsafeWrap(endKey))
      .setSplit(split)
      .setRegionId(regionId)
      .setEndKey(UnsafeByteOperations.unsafeWrap(endKey)).setSplit(split).setRegionId(regionId)
      .build();

    convertedRi = ProtobufUtil.toRegionInfo(info);
    RegionInfo expectedRi = RegionInfoBuilder.newBuilder(tableName)
      .setStartKey(startKey)
      .setEndKey(endKey)
      .setSplit(split)
      .setRegionId(regionId)
      .setReplicaId(0).build();
    RegionInfo expectedRi = RegionInfoBuilder.newBuilder(tableName).setStartKey(startKey)
      .setEndKey(endKey).setSplit(split).setRegionId(regionId).setReplicaId(0).build();

    assertEquals(expectedRi, convertedRi);
  }

  // Duplicated method in TestRegionInfoDisplay too.
  private HRegionInfo convert(RegionInfo ri) {
    HRegionInfo hri = new HRegionInfo(
      ri.getTable(), ri.getStartKey(), ri.getEndKey(), ri.isSplit(), ri.getRegionId());
    hri.setOffline(ri.isOffline());
    return hri;
  private void assertRegionNameNotEquals(RegionInfo expected, RegionInfo actual) {
    assertNotEquals(expected.getRegionNameAsString(), actual.getRegionNameAsString());
    assertNotEquals(expected.getEncodedName(), actual.getEncodedName());
  }

  private void assertRegionNameEquals(RegionInfo expected, RegionInfo actual) {
    assertEquals(expected.getRegionNameAsString(), actual.getRegionNameAsString());
    assertEquals(expected.getEncodedName(), actual.getEncodedName());
  }

  @Test
  public void testNewBuilderWithRegionInfo() {
    RegionInfo ri = RegionInfoBuilder.newBuilder(name.getTableName()).build();
    assertEquals(ri, RegionInfoBuilder.newBuilder(ri).build());

    // make sure that the region name and encoded name are changed, see HBASE-24500 for more
    // details.
    assertRegionNameNotEquals(ri,
      RegionInfoBuilder.newBuilder(ri).setStartKey(new byte[1]).build());
    assertRegionNameNotEquals(ri,
      RegionInfoBuilder.newBuilder(ri).setRegionId(ri.getRegionId() + 1).build());
    assertRegionNameNotEquals(ri, RegionInfoBuilder.newBuilder(ri).setReplicaId(1).build());

    // these fields are not in region name
    assertRegionNameEquals(ri, RegionInfoBuilder.newBuilder(ri).setEndKey(new byte[1]).build());
    assertRegionNameEquals(ri, RegionInfoBuilder.newBuilder(ri).setSplit(true).build());
    assertRegionNameEquals(ri, RegionInfoBuilder.newBuilder(ri).setOffline(true).build());
  }
}
@@ -19,6 +19,8 @@ package org.apache.hadoop.hbase.shaded.protobuf;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import java.io.IOException;
import java.nio.ByteBuffer;

@@ -29,17 +31,26 @@ import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.MD5Hash;
import org.apache.log4j.Appender;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.log4j.spi.LoggingEvent;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.ArgumentCaptor;

import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
import org.apache.hbase.thirdparty.com.google.protobuf.Any;

@@ -54,6 +65,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationPr
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;

@@ -479,4 +491,29 @@ public class TestProtobufUtil {
      + "\"sharedLockCount\":0"
      + "}]", lockJson);
  }

  @Test
  public void testRegionInfo() {
    TableName tableName = TableName.valueOf("testRegionInfo");
    RegionInfo expected = RegionInfoBuilder.newBuilder(tableName).build();
    String md5 = MD5Hash.getMD5AsHex(new byte[0]);
    HBaseProtos.RegionInfo proto =
      ProtobufUtil.toRegionInfo(expected).toBuilder().setRegionEncodedName(md5).build();

    Appender appender = mock(Appender.class);
    LogManager.getRootLogger().addAppender(appender);
    try {
      RegionInfo actual = ProtobufUtil.toRegionInfo(proto);
      assertEquals(expected, actual);
      ArgumentCaptor<LoggingEvent> captor = ArgumentCaptor.forClass(LoggingEvent.class);
      verify(appender).doAppend(captor.capture());
      LoggingEvent event = captor.getValue();
      assertEquals(Level.WARN, event.getLevel());
      assertEquals(
        "The converted region info is " + actual + ", but the encoded name in proto is " + md5,
        event.getRenderedMessage());
    } finally {
      LogManager.getRootLogger().removeAppender(appender);
    }
  }
}
@@ -0,0 +1,98 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category({ RegionServerTests.class, SmallTests.class })
public class TestReadAndWriteRegionInfoFile {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestReadAndWriteRegionInfoFile.class);

  private static final HBaseCommonTestingUtility UTIL = new HBaseTestingUtility();

  private static final Configuration CONF = UTIL.getConfiguration();

  private static FileSystem FS;

  private static Path ROOT_DIR;

  @BeforeClass
  public static void setUp() throws IOException {
    ROOT_DIR = UTIL.getDataTestDir();
    FS = ROOT_DIR.getFileSystem(CONF);
  }

  @AfterClass
  public static void tearDown() {
    UTIL.cleanupTestDir();
  }

  @Test
  public void testReadAndWriteRegionInfoFile() throws IOException, InterruptedException {
    RegionInfo ri = RegionInfoBuilder.FIRST_META_REGIONINFO;
    // Create a region. That'll write the .regioninfo file.
    FSTableDescriptors fsTableDescriptors = new FSTableDescriptors(FS, ROOT_DIR);
    FSTableDescriptors.tryUpdateMetaTableDescriptor(CONF, FS, ROOT_DIR, null);
    HRegion r = HBaseTestingUtility.createRegionAndWAL(ri, ROOT_DIR, CONF,
      fsTableDescriptors.get(TableName.META_TABLE_NAME));
    // Get modtime on the file.
    long modtime = getModTime(r);
    HBaseTestingUtility.closeRegionAndWAL(r);
    Thread.sleep(1001);
    r = HRegion.openHRegion(ROOT_DIR, ri, fsTableDescriptors.get(TableName.META_TABLE_NAME), null,
      CONF);
    // Ensure the file is not written for a second time.
    long modtime2 = getModTime(r);
    assertEquals(modtime, modtime2);
    // Now load the file.
    HRegionFileSystem.loadRegionInfoFileContent(r.getRegionFileSystem().getFileSystem(),
      r.getRegionFileSystem().getRegionDir());
    HBaseTestingUtility.closeRegionAndWAL(r);
  }

  private long getModTime(final HRegion r) throws IOException {
    FileStatus[] statuses = r.getRegionFileSystem().getFileSystem().listStatus(
      new Path(r.getRegionFileSystem().getRegionDir(), HRegionFileSystem.REGION_INFO_FILE));
    assertTrue(statuses != null && statuses.length == 1);
    return statuses[0].getModificationTime();
  }
}