HBASE-24247 Failed multi-merge because two regions not adjacent (legitimately) (#1570)

hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfo.java
 Add a new isOverlap method that takes an array of RegionInfos and checks
 whether the current RegionInfo overlaps any of the passed-in Regions.

    Signed-off-by: Jan Hentschel <jan.hentschel@ultratendency.com>
    Signed-off-by: Huaxiang Sun <huaxiangsun@apache.com>
Michael Stack, 2020-04-28 13:22:00 -07:00 (committed by GitHub)
parent 8eb2f6752c
commit 9e697d0ea6
4 changed files with 95 additions and 8 deletions

hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfo.java

@@ -796,6 +796,16 @@ public interface RegionInfo extends Comparable<RegionInfo> {
    return !isLast() && Bytes.compareTo(getStartKey(), getEndKey()) > 0;
  }

  default boolean isOverlap(RegionInfo [] ris) {
    for (RegionInfo ri: ris) {
      if (isOverlap(ri)) {
        return true;
      }
    }
    return false;
  }

  /**
   * @return True if an overlap in region range.
   * @see #isDegenerate()
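
For readers skimming the patch, a minimal sketch of the new default method in use follows. The class name, table name and keys are made up for the example and are not part of the patch:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class IsOverlapExample {
  public static void main(String[] args) {
    TableName tn = TableName.valueOf("example");
    // A wide region [b, f) and two narrower regions underneath it.
    RegionInfo wide = RegionInfoBuilder.newBuilder(tn)
      .setStartKey(Bytes.toBytes("b")).setEndKey(Bytes.toBytes("f")).build();
    RegionInfo bc = RegionInfoBuilder.newBuilder(tn)
      .setStartKey(Bytes.toBytes("b")).setEndKey(Bytes.toBytes("c")).build();
    RegionInfo de = RegionInfoBuilder.newBuilder(tn)
      .setStartKey(Bytes.toBytes("d")).setEndKey(Bytes.toBytes("e")).build();
    // bc and de are not adjacent (the [c, d) gap sits between them), but each
    // overlaps the wide region, so the array variant reports an overlap.
    System.out.println(bc.isAdjacent(de));                           // false
    System.out.println(bc.isOverlap(new RegionInfo[] { wide, de })); // true
    System.out.println(de.isOverlap(new RegionInfo[] { wide, bc })); // true
  }
}

This is the situation MergeTableRegionsProcedure runs into when MetaFixer asks it to merge the pieces under a large overlap, as the next file shows.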

MergeTableRegionsProcedure.java

@@ -133,7 +133,7 @@ public class MergeTableRegionsProcedure
           LOG.warn(msg);
           throw new MergeRegionException(msg);
         }
-        if (!force && !ri.isAdjacent(previous) && !ri.isOverlap(previous)) {
+        if (!force && !ri.isAdjacent(previous) && !ri.isOverlap(regions)) {
           String msg = "Unable to merge non-adjacent or non-overlapping regions " +
             previous.getShortNameToLog() + ", " + ri.getShortNameToLog() + " when force=false";
           LOG.warn(msg);
@@ -142,7 +142,7 @@ public class MergeTableRegionsProcedure
       }
       if (ri.getReplicaId() != RegionInfo.DEFAULT_REPLICA_ID) {
-        throw new MergeRegionException("Can't merge non-default replicas; " + ri);
+        throw new MergeRegionException("Can't merge non-default/replicaId!=0 replicas; " + ri);
       }
       try {
         checkOnline(env, ri);
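
The intent of the one-line change above: with force=false, a region no longer has to be adjacent to its predecessor in the merge set, provided it overlaps the set of regions being merged. Below is a simplified, self-contained sketch of that precondition; it is not the actual MergeTableRegionsProcedure#checkRegionsToMerge (which also checks for duplicates, same table, replica id and region state), and the sorting step is an assumption made for the example:

import java.util.Arrays;

import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.exceptions.MergeRegionException;

public final class MergePreconditionSketch {
  // Simplified sketch only; iterating in key order is an assumption here.
  static void checkAdjacencyOrOverlap(RegionInfo[] regions, boolean force)
      throws MergeRegionException {
    RegionInfo[] sorted = regions.clone();
    Arrays.sort(sorted, RegionInfo.COMPARATOR);
    RegionInfo previous = null;
    for (RegionInfo ri : sorted) {
      if (previous != null && !force && !ri.isAdjacent(previous)
        // HBASE-24247: consult the whole merge set, not just 'previous'.
        && !ri.isOverlap(sorted)) {
        throw new MergeRegionException("Unable to merge non-adjacent or non-overlapping regions "
          + previous.getShortNameToLog() + ", " + ri.getShortNameToLog() + " when force=false");
      }
      previous = ri;
    }
  }
}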

TestMetaFixer.java

@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.master.assignment.GCRegionProcedure;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.util.Threads;
@@ -175,6 +176,51 @@ public class TestMetaFixer {
    });
  }

  /**
   * Make it so a big overlap spans many Regions, some of which are non-contiguous. Make it so
   * we can fix this condition. HBASE-24247
   */
  @Test
  public void testOverlapWithMergeOfNonContiguous() throws Exception {
    TableName tn = TableName.valueOf(this.name.getMethodName());
    TEST_UTIL.createMultiRegionTable(tn, HConstants.CATALOG_FAMILY);
    List<RegionInfo> ris = MetaTableAccessor.getTableRegions(TEST_UTIL.getConnection(), tn);
    assertTrue(ris.size() > 5);
    MasterServices services = TEST_UTIL.getHBaseCluster().getMaster();
    services.getCatalogJanitor().scan();
    CatalogJanitor.Report report = services.getCatalogJanitor().getLastReport();
    assertTrue(report.isEmpty());
    // Make an overlap spanning from the second region through the sixth region.
    makeOverlap(services, ris.get(1), ris.get(5));
    // Now delete a region under the overlap to manufacture non-contiguous sub regions.
    RegionInfo deletedRegion = ris.get(3);
    long pid = services.getAssignmentManager().unassign(deletedRegion);
    while (!services.getMasterProcedureExecutor().isFinished(pid)) {
      Threads.sleep(100);
    }
    GCRegionProcedure procedure =
      new GCRegionProcedure(services.getMasterProcedureExecutor().getEnvironment(), ris.get(3));
    pid = services.getMasterProcedureExecutor().submitProcedure(procedure);
    while (!services.getMasterProcedureExecutor().isFinished(pid)) {
      Threads.sleep(100);
    }
    Threads.sleep(10000);
    services.getCatalogJanitor().scan();
    report = services.getCatalogJanitor().getLastReport();
    assertEquals(1, MetaFixer.calculateMerges(10, report.getOverlaps()).size());
    MetaFixer fixer = new MetaFixer(services);
    fixer.fixOverlaps(report);
    await(10, () -> {
      try {
        services.getCatalogJanitor().scan();
        final CatalogJanitor.Report postReport = services.getCatalogJanitor().getLastReport();
        return postReport.isEmpty();
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    });
  }
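
The await(10, ...) call above uses a polling helper declared further down in this test class; only its javadoc survives in this hunk. A plausible shape, assuming a BooleanSupplier-based signature (the real helper may differ, e.g. by giving up after a timeout):

import java.util.function.BooleanSupplier;

import org.apache.hadoop.hbase.util.Threads;

final class AwaitSketch {
  // Assumed shape of TestMetaFixer's await helper; illustrative only.
  static void await(long sleepMillis, BooleanSupplier condition) {
    while (!condition.getAsBoolean()) {
      Threads.sleep(sleepMillis);
    }
  }
}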
  /**
   * Await the successful return of {@code condition}, sleeping {@code sleepMillis} between
   * invocations.

TestHRegionInfo.java

@@ -16,14 +16,12 @@
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
@@ -48,9 +46,7 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
@Category({RegionServerTests.class, SmallTests.class})
@@ -131,6 +127,39 @@ public class TestHRegionInfo {
    assertTrue(adri.isOverlap(abri));
  }

  /**
   * Tests {@link RegionInfo#isOverlap(RegionInfo[])}
   */
  @Test
  public void testIsOverlaps() {
    byte[] a = Bytes.toBytes("a");
    byte[] b = Bytes.toBytes("b");
    byte[] c = Bytes.toBytes("c");
    byte[] d = Bytes.toBytes("d");
    byte[] e = Bytes.toBytes("e");
    byte[] f = Bytes.toBytes("f");
    org.apache.hadoop.hbase.client.RegionInfo ari =
      org.apache.hadoop.hbase.client.RegionInfoBuilder.newBuilder(TableName.META_TABLE_NAME).
        setEndKey(a).build();
    org.apache.hadoop.hbase.client.RegionInfo abri =
      org.apache.hadoop.hbase.client.RegionInfoBuilder.newBuilder(TableName.META_TABLE_NAME).
        setStartKey(a).setEndKey(b).build();
    org.apache.hadoop.hbase.client.RegionInfo eri =
      org.apache.hadoop.hbase.client.RegionInfoBuilder.newBuilder(TableName.META_TABLE_NAME).
        setEndKey(e).build();
    org.apache.hadoop.hbase.client.RegionInfo cdri =
      org.apache.hadoop.hbase.client.RegionInfoBuilder.newBuilder(TableName.META_TABLE_NAME).
        setStartKey(c).setEndKey(d).build();
    org.apache.hadoop.hbase.client.RegionInfo efri =
      org.apache.hadoop.hbase.client.RegionInfoBuilder.newBuilder(TableName.META_TABLE_NAME).
        setStartKey(e).setEndKey(f).build();
    RegionInfo [] subsumes = new RegionInfo [] {ari, abri, eri};
    assertTrue(abri.isOverlap(subsumes));
    subsumes = new RegionInfo [] {ari, cdri, eri};
    assertTrue(cdri.isOverlap(subsumes));
    assertFalse(efri.isOverlap(subsumes));
  }

  @Test
  public void testPb() throws DeserializationException {
    HRegionInfo hri = HRegionInfo.FIRST_META_REGIONINFO;
@@ -159,9 +188,11 @@
    long modtime2 = getModTime(r);
    assertEquals(modtime, modtime2);
    // Now load the file.
-   org.apache.hadoop.hbase.client.RegionInfo deserializedHri = HRegionFileSystem.loadRegionInfoFileContent(
+   org.apache.hadoop.hbase.client.RegionInfo deserializedHri =
+     HRegionFileSystem.loadRegionInfoFileContent(
        r.getRegionFileSystem().getFileSystem(), r.getRegionFileSystem().getRegionDir());
-   assertTrue(org.apache.hadoop.hbase.client.RegionInfo.COMPARATOR.compare(hri, deserializedHri) == 0);
+   assertEquals(0,
+     org.apache.hadoop.hbase.client.RegionInfo.COMPARATOR.compare(hri, deserializedHri));
    HBaseTestingUtility.closeRegionAndWAL(r);
  }