Revert "HBASE-24500 The behavior of RegionInfoBuilder.newBuilder(RegionInfo) is strange (#1840)"
This reverts commit ee6938abda.

parent ee6938abda
commit 08c97d3a87
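For context, a minimal, illustrative sketch of the builder behavior this revert restores, as read from the hunks below (the class and table names here are made up): RegionInfoBuilder.newBuilder(RegionInfo) goes back to copying the source region's regionName and encodedName, and build() keeps them when they are non-empty, so changing a field such as the replica id no longer regenerates the names. That is why the RegionReplicaUtil hunk below rebuilds replicas field by field instead of copying the whole RegionInfo.

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;

public class NewBuilderBehaviorSketch {
  public static void main(String[] args) {
    RegionInfo primary = RegionInfoBuilder.newBuilder(TableName.valueOf("t1")).build();

    // With this revert applied, newBuilder(RegionInfo) copies regionName/encodedName
    // from the source region, and build() keeps them because they are non-empty.
    RegionInfo replica = RegionInfoBuilder.newBuilder(primary).setReplicaId(1).build();

    // Both lines print the same encoded name even though the replica id differs;
    // HBASE-24500 (reverted here) made the builder regenerate the names instead.
    System.out.println(primary.getEncodedName());
    System.out.println(replica.getEncodedName());
  }
}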
@@ -18,6 +18,8 @@
package org.apache.hadoop.hbase.client;

import java.util.Arrays;

import org.apache.commons.lang3.ArrayUtils;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes;

@@ -56,6 +58,8 @@ public class RegionInfoBuilder {
private int replicaId = RegionInfo.DEFAULT_REPLICA_ID;
private boolean offLine = false;
private boolean split = false;
private byte[] regionName = null;
private String encodedName = null;

public static RegionInfoBuilder newBuilder(TableName tableName) {
return new RegionInfoBuilder(tableName);

@@ -77,6 +81,8 @@ public class RegionInfoBuilder {
this.split = regionInfo.isSplit();
this.regionId = regionInfo.getRegionId();
this.replicaId = regionInfo.getReplicaId();
this.regionName = regionInfo.getRegionName();
this.encodedName = regionInfo.getEncodedName();
}

public RegionInfoBuilder setStartKey(byte[] startKey) {

@@ -109,9 +115,14 @@ public class RegionInfoBuilder {
return this;
}

public RegionInfoBuilder setEncodedName(String encodedName) {
this.encodedName = encodedName;
return this;
}

public RegionInfo build() {
return new MutableRegionInfo(tableName, startKey, endKey, split,
regionId, replicaId, offLine);
regionId, replicaId, offLine, regionName, encodedName);
}

/**

@@ -199,12 +210,32 @@ public class RegionInfoBuilder {
* first meta regions
*/
private MutableRegionInfo(long regionId, TableName tableName, int replicaId) {
this(tableName, HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW, false, regionId,
replicaId, false);
this(tableName,
HConstants.EMPTY_START_ROW,
HConstants.EMPTY_END_ROW,
false,
regionId,
replicaId,
false,
RegionInfo.createRegionName(tableName, null, regionId, replicaId, false));
}

MutableRegionInfo(final TableName tableName, final byte[] startKey, final byte[] endKey,
final boolean split, final long regionId, final int replicaId, boolean offLine) {
MutableRegionInfo(final TableName tableName, final byte[] startKey,
final byte[] endKey, final boolean split, final long regionId,
final int replicaId, boolean offLine, byte[] regionName) {
this(checkTableName(tableName),
checkStartKey(startKey),
checkEndKey(endKey),
split, regionId,
checkReplicaId(replicaId),
offLine,
regionName,
RegionInfo.encodeRegionName(regionName));
}

MutableRegionInfo(final TableName tableName, final byte[] startKey,
final byte[] endKey, final boolean split, final long regionId,
final int replicaId, boolean offLine, byte[] regionName, String encodedName) {
this.tableName = checkTableName(tableName);
this.startKey = checkStartKey(startKey);
this.endKey = checkEndKey(endKey);

@@ -212,14 +243,24 @@ public class RegionInfoBuilder {
this.regionId = regionId;
this.replicaId = checkReplicaId(replicaId);
this.offLine = offLine;
this.regionName = RegionInfo.createRegionName(this.tableName, this.startKey, this.regionId,
this.replicaId, !this.tableName.equals(TableName.META_TABLE_NAME));
if (ArrayUtils.isEmpty(regionName)) {
this.regionName = RegionInfo.createRegionName(this.tableName, this.startKey, this.regionId, this.replicaId,
!this.tableName.equals(TableName.META_TABLE_NAME));
this.encodedName = RegionInfo.encodeRegionName(this.regionName);
this.hashCode = generateHashCode(this.tableName, this.startKey, this.endKey, this.regionId,
this.replicaId, this.offLine, this.regionName);
} else {
this.regionName = regionName;
this.encodedName = encodedName;
}
this.hashCode = generateHashCode(
this.tableName,
this.startKey,
this.endKey,
this.regionId,
this.replicaId,
this.offLine,
this.regionName);
this.encodedNameAsBytes = Bytes.toBytes(this.encodedName);
}

/**
* @return Return a short, printable name for this region
* (usually encoded name) for us logging.

@@ -270,6 +311,7 @@ public class RegionInfoBuilder {
return startKey;
}


/** @return the endKey */
@Override
public byte [] getEndKey(){

@@ -364,9 +406,7 @@ public class RegionInfoBuilder {
*/
@Override
public boolean isSplitParent() {
if (!isSplit()) {
return false;
}
if (!isSplit()) return false;
if (!isOffline()) {
LOG.warn("Region is split but NOT offline: " + getRegionNameAsString());
}

@@ -64,7 +64,23 @@ public class RegionReplicaUtil
if (regionInfo.getReplicaId() == replicaId) {
return regionInfo;
}
return RegionInfoBuilder.newBuilder(regionInfo).setReplicaId(replicaId).build();

if (regionInfo.isMetaRegion()) {
return RegionInfoBuilder.newBuilder(regionInfo.getTable())
.setRegionId(regionInfo.getRegionId())
.setReplicaId(replicaId)
.setOffline(regionInfo.isOffline())
.build();
} else {
return RegionInfoBuilder.newBuilder(regionInfo.getTable())
.setStartKey(regionInfo.getStartKey())
.setEndKey(regionInfo.getEndKey())
.setSplit(regionInfo.isSplit())
.setRegionId(regionInfo.getRegionId())
.setReplicaId(replicaId)
.setOffline(regionInfo.isOffline())
.build();
}
}

/**

@@ -117,8 +117,6 @@ import org.apache.hadoop.hbase.util.Methods;
import org.apache.hadoop.hbase.util.VersionInfo;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hbase.thirdparty.com.google.common.io.ByteStreams;

@@ -213,12 +211,11 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos;
* users; e.g. Coprocessor Endpoints. If you make change in here, be sure to make change in
* the companion class too (not the end of the world, especially if you are adding new functionality
* but something to be aware of.
* @see ProtobufUtil
*/
// TODO: Generate the non-shaded protobufutil from this one.
@InterfaceAudience.Private // TODO: some clients (Hive, etc) use this class
public final class ProtobufUtil {

private static final Logger LOG = LoggerFactory.getLogger(ProtobufUtil.class);

private ProtobufUtil() {
}

@@ -2348,6 +2345,14 @@ public final class ProtobufUtil {
.setQualifier(UnsafeByteOperations.unsafeWrap(tableName.getQualifier())).build();
}

public static HBaseProtos.RegionInfo toProtoRegionInfo(
org.apache.hadoop.hbase.client.RegionInfo regionInfo) {
return HBaseProtos.RegionInfo.newBuilder()
.setRegionId(regionInfo.getRegionId())
.setRegionEncodedName(regionInfo.getEncodedName())
.setTableName(toProtoTableName(regionInfo.getTable())).build();
}

public static List<TableName> toTableNameList(List<HBaseProtos.TableName> tableNamesList) {
if (tableNamesList == null) {
return new ArrayList<>();

@@ -3297,9 +3302,7 @@ public final class ProtobufUtil {
* @return the converted Proto RegionInfo
*/
public static HBaseProtos.RegionInfo toRegionInfo(final org.apache.hadoop.hbase.client.RegionInfo info) {
if (info == null) {
return null;
}
if (info == null) return null;
HBaseProtos.RegionInfo.Builder builder = HBaseProtos.RegionInfo.newBuilder();
builder.setTableName(ProtobufUtil.toProtoTableName(info.getTable()));
builder.setRegionId(info.getRegionId());

@@ -3323,9 +3326,7 @@ public final class ProtobufUtil {
* @return the converted RegionInfo
*/
public static org.apache.hadoop.hbase.client.RegionInfo toRegionInfo(final HBaseProtos.RegionInfo proto) {
if (proto == null) {
return null;
}
if (proto == null) return null;
TableName tableName = ProtobufUtil.toTableName(proto.getTableName());
long regionId = proto.getRegionId();
int defaultReplicaId = org.apache.hadoop.hbase.client.RegionInfo.DEFAULT_REPLICA_ID;

@@ -3354,10 +3355,8 @@ public final class ProtobufUtil {
if (proto.hasOffline()) {
rib.setOffline(proto.getOffline());
}
org.apache.hadoop.hbase.client.RegionInfo ri = rib.build();
if (proto.hasRegionEncodedName() && !proto.getRegionEncodedName().equals(ri.getEncodedName())) {
LOG.warn("The converted region info is {}, but the encoded name in proto is {}", ri,
proto.getRegionEncodedName());
if (proto.hasRegionEncodedName()) {
rib.setEncodedName(proto.getRegionEncodedName());
}
return rib.build();
}

@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.shaded.protobuf;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import java.io.IOException;
import java.nio.ByteBuffer;

@@ -31,26 +29,17 @@ import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.MD5Hash;
import org.apache.log4j.Appender;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.log4j.spi.LoggingEvent;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.ArgumentCaptor;

import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
import org.apache.hbase.thirdparty.com.google.protobuf.Any;

@@ -65,7 +54,6 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationPr
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.DeleteType;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair;
import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;

@@ -491,29 +479,4 @@ public class TestProtobufUtil {
+ "\"sharedLockCount\":0"
+ "}]", lockJson);
}

@Test
public void testRegionInfo() {
TableName tableName = TableName.valueOf("testRegionInfo");
RegionInfo expected = RegionInfoBuilder.newBuilder(tableName).build();
String md5 = MD5Hash.getMD5AsHex(new byte[0]);
HBaseProtos.RegionInfo proto =
ProtobufUtil.toRegionInfo(expected).toBuilder().setRegionEncodedName(md5).build();

Appender appender = mock(Appender.class);
LogManager.getRootLogger().addAppender(appender);
try {
RegionInfo actual = ProtobufUtil.toRegionInfo(proto);
assertEquals(expected, actual);
ArgumentCaptor<LoggingEvent> captor = ArgumentCaptor.forClass(LoggingEvent.class);
verify(appender).doAppend(captor.capture());
LoggingEvent event = captor.getValue();
assertEquals(Level.WARN, event.getLevel());
assertEquals(
"The converted region info is " + actual + ", but the encoded name in proto is " + md5,
event.getRenderedMessage());
} finally {
LogManager.getRootLogger().removeAppender(appender);
}
}
}

@@ -1,98 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category({ RegionServerTests.class, SmallTests.class })
public class TestReadAndWriteRegionInfoFile {

@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestReadAndWriteRegionInfoFile.class);

private static final HBaseCommonTestingUtility UTIL = new HBaseTestingUtility();

private static final Configuration CONF = UTIL.getConfiguration();

private static FileSystem FS;

private static Path ROOT_DIR;

@BeforeClass
public static void setUp() throws IOException {
ROOT_DIR = UTIL.getDataTestDir();
FS = ROOT_DIR.getFileSystem(CONF);
}

@AfterClass
public static void tearDown() {
UTIL.cleanupTestDir();
}

@Test
public void testReadAndWriteRegionInfoFile() throws IOException, InterruptedException {
RegionInfo ri = RegionInfoBuilder.FIRST_META_REGIONINFO;
// Create a region. That'll write the .regioninfo file.
FSTableDescriptors fsTableDescriptors = new FSTableDescriptors(FS, ROOT_DIR);
FSTableDescriptors.tryUpdateMetaTableDescriptor(CONF, FS, ROOT_DIR, null);
HRegion r = HBaseTestingUtility.createRegionAndWAL(ri, ROOT_DIR, CONF,
fsTableDescriptors.get(TableName.META_TABLE_NAME));
// Get modtime on the file.
long modtime = getModTime(r);
HBaseTestingUtility.closeRegionAndWAL(r);
Thread.sleep(1001);
r = HRegion.openHRegion(ROOT_DIR, ri, fsTableDescriptors.get(TableName.META_TABLE_NAME), null,
CONF);
// Ensure the file is not written for a second time.
long modtime2 = getModTime(r);
assertEquals(modtime, modtime2);
// Now load the file.
HRegionFileSystem.loadRegionInfoFileContent(r.getRegionFileSystem().getFileSystem(),
r.getRegionFileSystem().getRegionDir());
HBaseTestingUtility.closeRegionAndWAL(r);
}

private long getModTime(final HRegion r) throws IOException {
FileStatus[] statuses = r.getRegionFileSystem().getFileSystem().listStatus(
new Path(r.getRegionFileSystem().getRegionDir(), HRegionFileSystem.REGION_INFO_FILE));
assertTrue(statuses != null && statuses.length == 1);
return statuses[0].getModificationTime();
}
}

@@ -15,35 +15,43 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.client;
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.IOException;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNameTestRule;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.MD5Hash;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;

@Category({ ClientTests.class, SmallTests.class })
@Category({RegionServerTests.class, SmallTests.class})
public class TestRegionInfoBuilder {

@ClassRule

@@ -51,7 +59,7 @@ public class TestRegionInfoBuilder {
HBaseClassTestRule.forClass(TestRegionInfoBuilder.class);

@Rule
public TableNameTestRule name = new TableNameTestRule();
public TestName name = new TestName();

@Test
public void testBuilder() {

@@ -88,9 +96,42 @@ public class TestRegionInfoBuilder {
assertTrue(RegionInfo.COMPARATOR.compare(ri, pbri) == 0);
}

@Test
public void testReadAndWriteRegionInfoFile() throws IOException, InterruptedException {
HBaseTestingUtility htu = new HBaseTestingUtility();
RegionInfo ri = RegionInfoBuilder.FIRST_META_REGIONINFO;
Path basedir = htu.getDataTestDir();
// Create a region. That'll write the .regioninfo file.
FSTableDescriptors fsTableDescriptors = new FSTableDescriptors(htu.getConfiguration());
FSTableDescriptors.tryUpdateMetaTableDescriptor(htu.getConfiguration());
HRegion r = HBaseTestingUtility.createRegionAndWAL(convert(ri), basedir, htu.getConfiguration(),
fsTableDescriptors.get(TableName.META_TABLE_NAME));
// Get modtime on the file.
long modtime = getModTime(r);
HBaseTestingUtility.closeRegionAndWAL(r);
Thread.sleep(1001);
r = HRegion.openHRegion(basedir, convert(ri), fsTableDescriptors.get(TableName.META_TABLE_NAME),
null, htu.getConfiguration());
// Ensure the file is not written for a second time.
long modtime2 = getModTime(r);
assertEquals(modtime, modtime2);
// Now load the file.
RegionInfo deserializedRi = HRegionFileSystem.loadRegionInfoFileContent(
r.getRegionFileSystem().getFileSystem(), r.getRegionFileSystem().getRegionDir());
HBaseTestingUtility.closeRegionAndWAL(r);
}

long getModTime(final HRegion r) throws IOException {
FileStatus[] statuses = r.getRegionFileSystem().getFileSystem().listStatus(
new Path(r.getRegionFileSystem().getRegionDir(), HRegionFileSystem.REGION_INFO_FILE));
assertTrue(statuses != null && statuses.length == 1);
return statuses[0].getModificationTime();
}

@Test
public void testCreateRegionInfoName() throws Exception {
final TableName tn = name.getTableName();
final String tableName = name.getMethodName();
final TableName tn = TableName.valueOf(tableName);
String startKey = "startkey";
final byte[] sk = Bytes.toBytes(startKey);
String id = "id";

@@ -98,21 +139,26 @@ public class TestRegionInfoBuilder {
// old format region name
byte [] name = RegionInfo.createRegionName(tn, sk, id, false);
String nameStr = Bytes.toString(name);
assertEquals(tn + "," + startKey + "," + id, nameStr);
assertEquals(tableName + "," + startKey + "," + id, nameStr);


// new format region name.
String md5HashInHex = MD5Hash.getMD5AsHex(name);
assertEquals(RegionInfo.MD5_HEX_LENGTH, md5HashInHex.length());
name = RegionInfo.createRegionName(tn, sk, id, true);
nameStr = Bytes.toString(name);
assertEquals(tn + "," + startKey + "," + id + "." + md5HashInHex + ".", nameStr);
assertEquals(tableName + "," + startKey + ","
+ id + "." + md5HashInHex + ".",
nameStr);
}

@Test
public void testContainsRange() {
TableDescriptor tableDesc = TableDescriptorBuilder.newBuilder(name.getTableName()).build();
TableDescriptor tableDesc = TableDescriptorBuilder.newBuilder(
TableName.valueOf(name.getMethodName())).build();
RegionInfo ri = RegionInfoBuilder.newBuilder(tableDesc.getTableName())
.setStartKey(Bytes.toBytes("a")).setEndKey(Bytes.toBytes("g")).build();
.setStartKey(Bytes.toBytes("a"))
.setEndKey(Bytes.toBytes("g")).build();
// Single row range at start of region
assertTrue(ri.containsRange(Bytes.toBytes("a"), Bytes.toBytes("a")));
// Fully contained range

@@ -138,11 +184,14 @@ public class TestRegionInfoBuilder {

@Test
public void testLastRegionCompare() {
TableDescriptor tableDesc = TableDescriptorBuilder.newBuilder(name.getTableName()).build();
TableDescriptor tableDesc = TableDescriptorBuilder
.newBuilder(TableName.valueOf(name.getMethodName())).build();
RegionInfo rip = RegionInfoBuilder.newBuilder(tableDesc.getTableName())
.setStartKey(Bytes.toBytes("a")).setEndKey(new byte[0]).build();
.setStartKey(Bytes.toBytes("a"))
.setEndKey(new byte[0]).build();
RegionInfo ric = RegionInfoBuilder.newBuilder(tableDesc.getTableName())
.setStartKey(Bytes.toBytes("a")).setEndKey(Bytes.toBytes("b")).build();
.setStartKey(Bytes.toBytes("a"))
.setEndKey(Bytes.toBytes("b")).build();
assertTrue(RegionInfo.COMPARATOR.compare(rip, ric) > 0);
}

@@ -153,12 +202,18 @@ public class TestRegionInfoBuilder {

@Test
public void testComparator() {
final TableName tableName = name.getTableName();
final TableName tableName = TableName.valueOf(name.getMethodName());
byte[] empty = new byte[0];
RegionInfo older = RegionInfoBuilder.newBuilder(tableName).setStartKey(empty).setEndKey(empty)
.setSplit(false).setRegionId(0L).build();
RegionInfo newer = RegionInfoBuilder.newBuilder(tableName).setStartKey(empty).setEndKey(empty)
.setSplit(false).setRegionId(1L).build();
RegionInfo older = RegionInfoBuilder.newBuilder(tableName)
.setStartKey(empty)
.setEndKey(empty)
.setSplit(false)
.setRegionId(0L).build();
RegionInfo newer = RegionInfoBuilder.newBuilder(tableName)
.setStartKey(empty)
.setEndKey(empty)
.setSplit(false)
.setRegionId(1L).build();
assertTrue(RegionInfo.COMPARATOR.compare(older, newer) < 0);
assertTrue(RegionInfo.COMPARATOR.compare(newer, older) > 0);
assertTrue(RegionInfo.COMPARATOR.compare(older, older) == 0);

@@ -167,7 +222,8 @@ public class TestRegionInfoBuilder {

@Test
public void testRegionNameForRegionReplicas() throws Exception {
final TableName tn = name.getTableName();
String tableName = name.getMethodName();
final TableName tn = TableName.valueOf(tableName);
String startKey = "startkey";
final byte[] sk = Bytes.toBytes(startKey);
String id = "id";

@@ -177,26 +233,24 @@ public class TestRegionInfoBuilder {
// primary, replicaId = 0
byte [] name = RegionInfo.createRegionName(tn, sk, Bytes.toBytes(id), 0, false);
String nameStr = Bytes.toString(name);
assertEquals(tn + "," + startKey + "," + id, nameStr);
assertEquals(tableName + "," + startKey + "," + id, nameStr);

// replicaId = 1
name = RegionInfo.createRegionName(tn, sk, Bytes.toBytes(id), 1, false);
nameStr = Bytes.toString(name);
assertEquals(
tn + "," + startKey + "," + id + "_" + String.format(RegionInfo.REPLICA_ID_FORMAT, 1),
nameStr);
assertEquals(tableName + "," + startKey + "," + id + "_" +
String.format(RegionInfo.REPLICA_ID_FORMAT, 1), nameStr);

// replicaId = max
name = RegionInfo.createRegionName(tn, sk, Bytes.toBytes(id), 0xFFFF, false);
nameStr = Bytes.toString(name);
assertEquals(
tn + "," + startKey + "," + id + "_" + String.format(RegionInfo.REPLICA_ID_FORMAT, 0xFFFF),
nameStr);
assertEquals(tableName + "," + startKey + "," + id + "_" +
String.format(RegionInfo.REPLICA_ID_FORMAT, 0xFFFF), nameStr);
}

@Test
public void testParseName() throws IOException {
final TableName tableName = name.getTableName();
final TableName tableName = TableName.valueOf(name.getMethodName());
byte[] startKey = Bytes.toBytes("startKey");
long regionId = System.currentTimeMillis();
int replicaId = 42;

@@ -211,28 +265,33 @@ public class TestRegionInfoBuilder {
assertEquals(3, fields.length);

// test with replicaId
regionName = RegionInfo.createRegionName(tableName, startKey, regionId, replicaId, false);
regionName = RegionInfo.createRegionName(tableName, startKey, regionId,
replicaId, false);

fields = RegionInfo.parseRegionName(regionName);
assertArrayEquals(Bytes.toString(fields[0]),tableName.getName(), fields[0]);
assertArrayEquals(Bytes.toString(fields[1]),startKey, fields[1]);
assertArrayEquals(Bytes.toString(fields[2]), Bytes.toBytes(Long.toString(regionId)),fields[2]);
assertArrayEquals(Bytes.toString(fields[3]),
Bytes.toBytes(String.format(RegionInfo.REPLICA_ID_FORMAT, replicaId)), fields[3]);
assertArrayEquals(Bytes.toString(fields[3]), Bytes.toBytes(
String.format(RegionInfo.REPLICA_ID_FORMAT, replicaId)), fields[3]);
}

@Test
public void testConvert() {
final TableName tableName =
TableName.valueOf("ns1:" + name.getTableName().getQualifierAsString());
final TableName tableName = TableName.valueOf("ns1:" + name.getMethodName());
byte[] startKey = Bytes.toBytes("startKey");
byte[] endKey = Bytes.toBytes("endKey");
boolean split = false;
long regionId = System.currentTimeMillis();
int replicaId = 42;

RegionInfo ri = RegionInfoBuilder.newBuilder(tableName).setStartKey(startKey).setEndKey(endKey)
.setSplit(split).setRegionId(regionId).setReplicaId(replicaId).build();

RegionInfo ri = RegionInfoBuilder.newBuilder(tableName)
.setStartKey(startKey)
.setEndKey(endKey)
.setSplit(split)
.setRegionId(regionId)
.setReplicaId(replicaId).build();

// convert two times, compare
RegionInfo convertedRi = ProtobufUtil.toRegionInfo(ProtobufUtil.toRegionInfo(ri));

@@ -243,44 +302,30 @@ public class TestRegionInfoBuilder {
HBaseProtos.RegionInfo info = HBaseProtos.RegionInfo.newBuilder()
.setTableName(HBaseProtos.TableName.newBuilder()
.setQualifier(UnsafeByteOperations.unsafeWrap(tableName.getQualifier()))
.setNamespace(UnsafeByteOperations.unsafeWrap(tableName.getNamespace())).build())
.setNamespace(UnsafeByteOperations.unsafeWrap(tableName.getNamespace()))
.build())
.setStartKey(UnsafeByteOperations.unsafeWrap(startKey))
.setEndKey(UnsafeByteOperations.unsafeWrap(endKey)).setSplit(split).setRegionId(regionId)
.setEndKey(UnsafeByteOperations.unsafeWrap(endKey))
.setSplit(split)
.setRegionId(regionId)
.build();

convertedRi = ProtobufUtil.toRegionInfo(info);
RegionInfo expectedRi = RegionInfoBuilder.newBuilder(tableName).setStartKey(startKey)
.setEndKey(endKey).setSplit(split).setRegionId(regionId).setReplicaId(0).build();
RegionInfo expectedRi = RegionInfoBuilder.newBuilder(tableName)
.setStartKey(startKey)
.setEndKey(endKey)
.setSplit(split)
.setRegionId(regionId)
.setReplicaId(0).build();

assertEquals(expectedRi, convertedRi);
}

private void assertRegionNameNotEquals(RegionInfo expected, RegionInfo actual) {
assertNotEquals(expected.getRegionNameAsString(), actual.getRegionNameAsString());
assertNotEquals(expected.getEncodedName(), actual.getEncodedName());
}

private void assertRegionNameEquals(RegionInfo expected, RegionInfo actual) {
assertEquals(expected.getRegionNameAsString(), actual.getRegionNameAsString());
assertEquals(expected.getEncodedName(), actual.getEncodedName());
}

@Test
public void testNewBuilderWithRegionInfo() {
RegionInfo ri = RegionInfoBuilder.newBuilder(name.getTableName()).build();
assertEquals(ri, RegionInfoBuilder.newBuilder(ri).build());

// make sure that the region name and encoded name are changed, see HBASE-24500 for more
// details.
assertRegionNameNotEquals(ri,
RegionInfoBuilder.newBuilder(ri).setStartKey(new byte[1]).build());
assertRegionNameNotEquals(ri,
RegionInfoBuilder.newBuilder(ri).setRegionId(ri.getRegionId() + 1).build());
assertRegionNameNotEquals(ri, RegionInfoBuilder.newBuilder(ri).setReplicaId(1).build());

// these fields are not in region name
assertRegionNameEquals(ri, RegionInfoBuilder.newBuilder(ri).setEndKey(new byte[1]).build());
assertRegionNameEquals(ri, RegionInfoBuilder.newBuilder(ri).setSplit(true).build());
assertRegionNameEquals(ri, RegionInfoBuilder.newBuilder(ri).setOffline(true).build());
// Duplicated method in TestRegionInfoDisplay too.
private HRegionInfo convert(RegionInfo ri) {
HRegionInfo hri = new HRegionInfo(
ri.getTable(), ri.getStartKey(), ri.getEndKey(), ri.isSplit(), ri.getRegionId());
hri.setOffline(ri.isOffline());
return hri;
}
}