HBASE-23624 Add a tool to dump the procedure info in HFile (#975)
Signed-off-by: stack <stack@apache.org>
parent b81685cfcc
commit bee7f4e08c
CellUtil.java

@@ -34,7 +34,7 @@ import java.util.List;
 import java.util.Map.Entry;
 import java.util.NavigableMap;
 import java.util.Optional;
-
+import java.util.function.Function;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
@@ -1297,17 +1297,25 @@ public final class CellUtil {
   * @return The Key portion of the passed <code>cell</code> as a String.
   */
  public static String getCellKeyAsString(Cell cell) {
-    StringBuilder sb = new StringBuilder(
-      Bytes.toStringBinary(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()));
+    return getCellKeyAsString(cell,
+      c -> Bytes.toStringBinary(c.getRowArray(), c.getRowOffset(), c.getRowLength()));
+  }
+
+  /**
+   * @param cell the cell to convert
+   * @param rowConverter used to convert the row of the cell to a string
+   * @return The Key portion of the passed <code>cell</code> as a String.
+   */
+  public static String getCellKeyAsString(Cell cell, Function<Cell, String> rowConverter) {
+    StringBuilder sb = new StringBuilder(rowConverter.apply(cell));
     sb.append('/');
-    sb.append(cell.getFamilyLength() == 0 ? ""
-      : Bytes.toStringBinary(cell.getFamilyArray(), cell.getFamilyOffset(),
-        cell.getFamilyLength()));
+    sb.append(cell.getFamilyLength() == 0 ? "" :
+      Bytes.toStringBinary(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength()));
     // KeyValue only added ':' if family is non-null. Do same.
     if (cell.getFamilyLength() > 0) sb.append(':');
-    sb.append(cell.getQualifierLength() == 0 ? ""
-      : Bytes.toStringBinary(cell.getQualifierArray(), cell.getQualifierOffset(),
-        cell.getQualifierLength()));
+    sb.append(cell.getQualifierLength() == 0 ? "" :
+      Bytes.toStringBinary(cell.getQualifierArray(), cell.getQualifierOffset(),
+        cell.getQualifierLength()));
     sb.append('/');
     sb.append(KeyValue.humanReadableTimestamp(cell.getTimestamp()));
     sb.append('/');
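
Note: the single-argument getCellKeyAsString() keeps its old behavior by delegating with a
toStringBinary row converter, while the new overload lets callers substitute their own row
rendering. This is exactly what the printer added below does for its 8-byte procedure-id rows:

  String key = CellUtil.getCellKeyAsString(cell,
    c -> Long.toString(Bytes.toLong(c.getRowArray(), c.getRowOffset(), c.getRowLength())));
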
HFile.java

@@ -634,7 +634,7 @@ public class HFile {
   * @return The list of files found.
   * @throws IOException When scanning the files fails.
   */
-  static List<Path> getStoreFiles(FileSystem fs, Path regionDir)
+  public static List<Path> getStoreFiles(FileSystem fs, Path regionDir)
      throws IOException {
    List<Path> regionHFiles = new ArrayList<>();
    PathFilter dirFilter = new FSUtils.DirFilter(fs);
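
Note: getStoreFiles() goes from package-private to public so code outside this package (the new
tool below) can enumerate a region's HFiles. A minimal caller sketch, assuming you already hold
the region directory Path:

  FileSystem fs = regionDir.getFileSystem(conf);
  for (Path hfile : HFile.getStoreFiles(fs, regionDir)) {
    System.out.println(hfile); // one entry per HFile across the region's family directories
  }
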
HFileProcedurePrettyPrinter.java (new file)

@@ -0,0 +1,175 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.procedure2.store.region;
+
+import java.io.IOException;
+import java.io.PrintStream;
+import java.io.UncheckedIOException;
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.PrivateCellUtil;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.io.hfile.HFile;
+import org.apache.hadoop.hbase.io.hfile.HFileScanner;
+import org.apache.hadoop.hbase.procedure2.Procedure;
+import org.apache.hadoop.hbase.procedure2.ProcedureUtil;
+import org.apache.hadoop.hbase.util.AbstractHBaseTool;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.CommonFSUtils;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
+
+import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
+import org.apache.hbase.thirdparty.org.apache.commons.cli.Option;
+import org.apache.hbase.thirdparty.org.apache.commons.cli.OptionGroup;
+
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
+
+/**
+ * A tool to dump the procedures in the HFiles.
+ * <p/>
+ * The difference between this and {@link org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter} is
+ * that, this class will decode the procedure in the cell for better debugging. You are free to use
+ * {@link org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter} to dump the same file as well.
+ */
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
+@InterfaceStability.Evolving
+public class HFileProcedurePrettyPrinter extends AbstractHBaseTool {
+
+  private Long procId;
+
+  private List<Path> files = new ArrayList<>();
+
+  private final PrintStream out;
+
+  public HFileProcedurePrettyPrinter() {
+    this(System.out);
+  }
+
+  public HFileProcedurePrettyPrinter(PrintStream out) {
+    this.out = out;
+  }
+
+  @Override
+  protected void addOptions() {
+    addOptWithArg("w", "seekToPid", "Seek to this procedure id and print this procedure only");
+    OptionGroup files = new OptionGroup();
+    files.addOption(new Option("f", "file", true,
+      "File to scan. Pass full-path; e.g. hdfs://a:9000/MasterProcs/master/procedure/p/xxx"));
+    files.addOption(new Option("a", "all", false, "Scan the whole procedure region."));
+    files.setRequired(true);
+    options.addOptionGroup(files);
+  }
+
+  private void addAllHFiles() throws IOException {
+    Path masterProcDir =
+      new Path(CommonFSUtils.getWALRootDir(conf), RegionProcedureStore.MASTER_PROCEDURE_DIR);
+    Path tableDir = CommonFSUtils.getTableDir(masterProcDir, RegionProcedureStore.TABLE_NAME);
+    FileSystem fs = tableDir.getFileSystem(conf);
+    Path regionDir =
+      fs.listStatus(tableDir, p -> RegionInfo.isEncodedRegionName(Bytes.toBytes(p.getName())))[0]
+        .getPath();
+    List<Path> regionFiles = HFile.getStoreFiles(fs, regionDir);
+    files.addAll(regionFiles);
+  }
+
+  @Override
+  protected void processOptions(CommandLine cmd) {
+    if (cmd.hasOption("w")) {
+      String key = cmd.getOptionValue("w");
+      if (key != null && key.length() != 0) {
+        procId = Long.parseLong(key);
+      } else {
+        throw new IllegalArgumentException("Invalid row is specified.");
+      }
+    }
+    if (cmd.hasOption("f")) {
+      files.add(new Path(cmd.getOptionValue("f")));
+    }
+    if (cmd.hasOption("a")) {
+      try {
+        addAllHFiles();
+      } catch (IOException e) {
+        throw new UncheckedIOException(e);
+      }
+    }
+  }
+
+  private void printCell(Cell cell) throws IOException {
+    out.print("K: " + CellUtil.getCellKeyAsString(cell,
+      c -> Long.toString(Bytes.toLong(c.getRowArray(), c.getRowOffset(), c.getRowLength()))));
+    if (cell.getType() == Cell.Type.Put) {
+      if (cell.getValueLength() == 0) {
+        out.println(" V: mark deleted");
+      } else {
+        Procedure<?> proc = ProcedureUtil.convertToProcedure(ProcedureProtos.Procedure.parser()
+          .parseFrom(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()));
+        out.println(" V: " + proc.toStringDetails());
+      }
+    } else {
+      out.println();
+    }
+  }
+
+  private void processFile(Path file) throws IOException {
+    out.println("Scanning -> " + file);
+    FileSystem fs = file.getFileSystem(conf);
+    try (HFile.Reader reader = HFile.createReader(fs, file, CacheConfig.DISABLED, true, conf);
+      HFileScanner scanner = reader.getScanner(false, false, false)) {
+      if (procId != null) {
+        if (scanner
+          .seekTo(PrivateCellUtil.createFirstOnRow(Bytes.toBytes(procId.longValue()))) != -1) {
+          do {
+            Cell cell = scanner.getCell();
+            long currentProcId =
+              Bytes.toLong(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
+            if (currentProcId != procId.longValue()) {
+              break;
+            }
+            printCell(cell);
+          } while (scanner.next());
+        }
+      } else {
+        if (scanner.seekTo()) {
+          do {
+            Cell cell = scanner.getCell();
+            printCell(cell);
+          } while (scanner.next());
+        }
+      }
+    }
+  }
+
+  @Override
+  protected int doWork() throws Exception {
+    for (Path file : files) {
+      processFile(file);
+    }
+    return 0;
+  }
+
+  public static void main(String[] args) {
+    new HFileProcedurePrettyPrinter().doStaticMain(args);
+  }
+}
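
Note: the tool can also be driven programmatically through ToolRunner, as the new test below
does. A hedged sketch (the procedure id and HFile path are illustrative; the path format comes
from the "-f" option's own help text):

  Configuration conf = HBaseConfiguration.create();
  // Print only procedure id 42 from a single HFile:
  int ret = ToolRunner.run(conf, new HFileProcedurePrettyPrinter(),
    new String[] { "-w", "42", "-f", "hdfs://a:9000/MasterProcs/master/procedure/p/xxx" });
  // Or scan every HFile in the procedure region:
  ret = ToolRunner.run(conf, new HFileProcedurePrettyPrinter(), new String[] { "-a" });
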
RegionProcedureStore.java

@@ -112,17 +112,15 @@ public class RegionProcedureStore extends ProcedureStoreBase {
 
   static final String LOGCLEANER_PLUGINS = "hbase.procedure.store.region.logcleaner.plugins";
 
-  private static final String DATA_DIR = "data";
-
-  private static final String REPLAY_EDITS_DIR = "replay";
+  private static final String REPLAY_EDITS_DIR = "recovered.wals";
 
   private static final String DEAD_WAL_DIR_SUFFIX = "-dead";
 
-  private static final TableName TABLE_NAME = TableName.valueOf("master:procedure");
+  static final TableName TABLE_NAME = TableName.valueOf("master:procedure");
 
   static final byte[] FAMILY = Bytes.toBytes("p");
 
-  private static final byte[] PROC_QUALIFIER = Bytes.toBytes("d");
+  static final byte[] PROC_QUALIFIER = Bytes.toBytes("d");
 
   private static final int REGION_ID = 1;
 
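
Note: TABLE_NAME and PROC_QUALIFIER lose their private modifier so the new pretty printer and
tests can reference the store layout. For orientation, a sketch of what one stored procedure
looks like in that layout (this Put is illustrative, not code from the commit; it assumes
ProcedureUtil.convertToProtoProcedure as the serializer):

  // row = procedure id as an 8-byte long, family "p", qualifier "d" = serialized procedure
  Put put = new Put(Bytes.toBytes(proc.getProcId()));
  put.addColumn(RegionProcedureStore.FAMILY, RegionProcedureStore.PROC_QUALIFIER,
    ProcedureUtil.convertToProtoProcedure(proc).toByteArray());
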
@@ -231,27 +229,26 @@ public class RegionProcedureStore extends ProcedureStoreBase {
     return wal;
   }
 
-  private HRegion bootstrap(Configuration conf, FileSystem fs, Path rootDir, Path dataDir)
-    throws IOException {
+  private HRegion bootstrap(Configuration conf, FileSystem fs, Path rootDir) throws IOException {
     RegionInfo regionInfo = RegionInfoBuilder.newBuilder(TABLE_NAME).setRegionId(REGION_ID).build();
-    Path tmpDataDir = new Path(dataDir.getParent(), dataDir.getName() + "-tmp");
-    if (fs.exists(tmpDataDir) && !fs.delete(tmpDataDir, true)) {
-      throw new IOException("Can not delete partial created proc region " + tmpDataDir);
+    Path tmpTableDir = CommonFSUtils.getTableDir(rootDir, TableName
+      .valueOf(TABLE_NAME.getNamespaceAsString(), TABLE_NAME.getQualifierAsString() + "-tmp"));
+    if (fs.exists(tmpTableDir) && !fs.delete(tmpTableDir, true)) {
+      throw new IOException("Can not delete partial created proc region " + tmpTableDir);
     }
-    Path tableDir = CommonFSUtils.getTableDir(tmpDataDir, TABLE_NAME);
-    HRegion.createHRegion(conf, regionInfo, fs, tableDir, TABLE_DESC).close();
-    if (!fs.rename(tmpDataDir, dataDir)) {
-      throw new IOException("Can not rename " + tmpDataDir + " to " + dataDir);
+    HRegion.createHRegion(conf, regionInfo, fs, tmpTableDir, TABLE_DESC).close();
+    Path tableDir = CommonFSUtils.getTableDir(rootDir, TABLE_NAME);
+    if (!fs.rename(tmpTableDir, tableDir)) {
+      throw new IOException("Can not rename " + tmpTableDir + " to " + tableDir);
     }
     WAL wal = createWAL(fs, rootDir, regionInfo);
     return HRegion.openHRegionFromTableDir(conf, fs, tableDir, regionInfo, TABLE_DESC, wal, null,
       null);
   }
 
-  private HRegion open(Configuration conf, FileSystem fs, Path rootDir, Path dataDir)
-    throws IOException {
+  private HRegion open(Configuration conf, FileSystem fs, Path rootDir) throws IOException {
     String factoryId = server.getServerName().toString();
-    Path tableDir = CommonFSUtils.getTableDir(dataDir, TABLE_NAME);
+    Path tableDir = CommonFSUtils.getTableDir(rootDir, TABLE_NAME);
     Path regionDir =
       fs.listStatus(tableDir, p -> RegionInfo.isEncodedRegionName(Bytes.toBytes(p.getName())))[0]
         .getPath();
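
Note: bootstrap() now builds the region under a sibling "<table>-tmp" directory and renames it
into the final table dir, so a crash mid-bootstrap never leaves a half-created region where
open() would look for one. The pattern in outline (a sketch of the code above, not new logic):

  // 1. clear any leftover tmp dir from an earlier failed bootstrap
  // 2. create the region (and all its files) entirely under tmpTableDir
  // 3. publish with a single rename; on HDFS a rename is atomic, so readers see all or nothing
  if (!fs.rename(tmpTableDir, tableDir)) {
    throw new IOException("Can not rename " + tmpTableDir + " to " + tableDir);
  }
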
@@ -391,13 +388,13 @@ public class RegionProcedureStore extends ProcedureStoreBase {
     walRoller.start();
 
     walFactory = new WALFactory(conf, server.getServerName().toString());
-    Path dataDir = new Path(rootDir, DATA_DIR);
-    if (fs.exists(dataDir)) {
+    Path tableDir = CommonFSUtils.getTableDir(rootDir, TABLE_NAME);
+    if (fs.exists(tableDir)) {
       // load the existing region.
-      region = open(conf, fs, rootDir, dataDir);
+      region = open(conf, fs, rootDir);
     } else {
       // bootstrapping...
-      region = bootstrap(conf, fs, rootDir, dataDir);
+      region = bootstrap(conf, fs, rootDir);
     }
     flusherAndCompactor = new RegionFlusherAndCompactor(conf, server, region);
     walRoller.setFlusherAndCompactor(flusherAndCompactor);
WALProcedurePrettyPrinter.java

@@ -49,7 +49,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
  * <p/>
  * The difference between this and {@link WALPrettyPrinter} is that, this class will decode the
  * procedure in the WALEdit for better debugging. You are free to use {@link WALPrettyPrinter} to
- * dump the safe file as well.
+ * dump the same file as well.
  */
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
 @InterfaceStability.Evolving
RegionProcedureStoreTestBase.java (new file)

@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.procedure2.store.region;
+
+import java.io.IOException;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
+import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.LoadCounter;
+import org.apache.hadoop.hbase.regionserver.MemStoreLAB;
+import org.apache.hadoop.hbase.util.CommonFSUtils;
+import org.junit.After;
+import org.junit.Before;
+
+public class RegionProcedureStoreTestBase {
+
+  protected HBaseCommonTestingUtility htu;
+
+  protected RegionProcedureStore store;
+
+  @Before
+  public void setUp() throws IOException {
+    htu = new HBaseCommonTestingUtility();
+    htu.getConfiguration().setBoolean(MemStoreLAB.USEMSLAB_KEY, false);
+    Path testDir = htu.getDataTestDir();
+    CommonFSUtils.setWALRootDir(htu.getConfiguration(), testDir);
+    store = RegionProcedureStoreTestHelper.createStore(htu.getConfiguration(), new LoadCounter());
+  }
+
+  @After
+  public void tearDown() throws IOException {
+    store.stop(true);
+    htu.cleanupTestDir();
+  }
+}
TestHFileProcedurePrettyPrinter.java (new file)

@@ -0,0 +1,153 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.procedure2.store.region;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.fail;
+
+import java.io.BufferedReader;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.commons.lang3.mutable.MutableLong;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.io.hfile.HFile;
+import org.apache.hadoop.hbase.testclassification.MasterTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.CommonFSUtils;
+import org.apache.hadoop.util.ToolRunner;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Category({ MasterTests.class, MediumTests.class })
+public class TestHFileProcedurePrettyPrinter extends RegionProcedureStoreTestBase {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+    HBaseClassTestRule.forClass(TestHFileProcedurePrettyPrinter.class);
+
+  private static final Logger LOG = LoggerFactory.getLogger(TestHFileProcedurePrettyPrinter.class);
+
+  private List<String> checkOutput(BufferedReader reader, MutableLong putCount,
+    MutableLong deleteCount, MutableLong markDeletedCount) throws IOException {
+    putCount.setValue(0);
+    deleteCount.setValue(0);
+    markDeletedCount.setValue(0);
+    List<String> fileScanned = new ArrayList<>();
+    for (;;) {
+      String line = reader.readLine();
+      if (line == null) {
+        return fileScanned;
+      }
+      LOG.info(line);
+      if (line.contains("V: mark deleted")) {
+        markDeletedCount.increment();
+      } else if (line.contains("/Put/")) {
+        putCount.increment();
+      } else if (line.contains("/DeleteFamily/")) {
+        deleteCount.increment();
+      } else if (line.startsWith("Scanning -> ")) {
+        fileScanned.add(line.split(" -> ")[1]);
+      } else {
+        fail("Unrecognized output: " + line);
+      }
+    }
+  }
+
+  @Test
+  public void test() throws Exception {
+    HFileProcedurePrettyPrinter printer = new HFileProcedurePrettyPrinter();
+    // -a or -f is required so passing empty args will cause an error and return a non-zero value.
+    assertNotEquals(0, ToolRunner.run(htu.getConfiguration(), printer, new String[0]));
+    List<RegionProcedureStoreTestProcedure> procs = new ArrayList<>();
+    for (int i = 0; i < 10; i++) {
+      RegionProcedureStoreTestProcedure proc = new RegionProcedureStoreTestProcedure();
+      store.insert(proc, null);
+      procs.add(proc);
+    }
+    store.region.flush(true);
+    for (int i = 0; i < 5; i++) {
+      store.delete(procs.get(i).getProcId());
+    }
+    store.region.flush(true);
+    store.cleanup();
+    store.region.flush(true);
+    Path tableDir = CommonFSUtils.getTableDir(
+      new Path(htu.getDataTestDir(), RegionProcedureStore.MASTER_PROCEDURE_DIR),
+      RegionProcedureStore.TABLE_NAME);
+    FileSystem fs = tableDir.getFileSystem(htu.getConfiguration());
+    Path regionDir =
+      fs.listStatus(tableDir, p -> RegionInfo.isEncodedRegionName(Bytes.toBytes(p.getName())))[0]
+        .getPath();
+    List<Path> storefiles = HFile.getStoreFiles(fs, regionDir);
+    ByteArrayOutputStream bos = new ByteArrayOutputStream();
+    PrintStream out = new PrintStream(bos);
+    MutableLong putCount = new MutableLong();
+    MutableLong deleteCount = new MutableLong();
+    MutableLong markDeletedCount = new MutableLong();
+    for (Path file : storefiles) {
+      bos.reset();
+      printer = new HFileProcedurePrettyPrinter(out);
+      assertEquals(0,
+        ToolRunner.run(htu.getConfiguration(), printer, new String[] { "-f", file.toString() }));
+      try (BufferedReader reader =
+        new BufferedReader(new InputStreamReader(new ByteArrayInputStream(bos.toByteArray()),
+          StandardCharsets.UTF_8))) {
+        List<String> fileScanned = checkOutput(reader, putCount, deleteCount, markDeletedCount);
+        assertEquals(1, fileScanned.size());
+        assertEquals(file.toString(), fileScanned.get(0));
+        if (putCount.longValue() == 10) {
+          assertEquals(0, deleteCount.longValue());
+          assertEquals(0, markDeletedCount.longValue());
+        } else if (deleteCount.longValue() == 5) {
+          assertEquals(0, putCount.longValue());
+          assertEquals(0, markDeletedCount.longValue());
+        } else if (markDeletedCount.longValue() == 5) {
+          assertEquals(0, putCount.longValue());
+          assertEquals(0, deleteCount.longValue());
+        } else {
+          fail("Should have entered one of the above 3 branches");
+        }
+      }
+    }
+    bos.reset();
+    printer = new HFileProcedurePrettyPrinter(out);
+    assertEquals(0, ToolRunner.run(htu.getConfiguration(), printer, new String[] { "-a" }));
+    try (BufferedReader reader = new BufferedReader(
+      new InputStreamReader(new ByteArrayInputStream(bos.toByteArray()), StandardCharsets.UTF_8))) {
+      List<String> fileScanned = checkOutput(reader, putCount, deleteCount, markDeletedCount);
+      assertEquals(3, fileScanned.size());
+      assertEquals(10, putCount.longValue());
+      assertEquals(5, deleteCount.longValue());
+      assertEquals(5, markDeletedCount.longValue());
+    }
+  }
+}
TestRegionProcedureStore.java

@@ -21,23 +21,16 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
-import java.io.IOException;
 import java.util.HashSet;
 import java.util.Set;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.procedure2.Procedure;
 import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
 import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.LoadCounter;
-import org.apache.hadoop.hbase.regionserver.MemStoreLAB;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.CommonFSUtils;
-import org.junit.After;
-import org.junit.Before;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -45,7 +38,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @Category({ MasterTests.class, MediumTests.class })
-public class TestRegionProcedureStore {
+public class TestRegionProcedureStore extends RegionProcedureStoreTestBase {
 
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
@@ -53,25 +46,6 @@ public class TestRegionProcedureStore {
 
   private static final Logger LOG = LoggerFactory.getLogger(TestRegionProcedureStore.class);
 
-  private HBaseCommonTestingUtility htu;
-
-  private RegionProcedureStore store;
-
-  @Before
-  public void setUp() throws IOException {
-    htu = new HBaseCommonTestingUtility();
-    htu.getConfiguration().setBoolean(MemStoreLAB.USEMSLAB_KEY, false);
-    Path testDir = htu.getDataTestDir();
-    CommonFSUtils.setWALRootDir(htu.getConfiguration(), testDir);
-    store = RegionProcedureStoreTestHelper.createStore(htu.getConfiguration(), new LoadCounter());
-  }
-
-  @After
-  public void tearDown() throws IOException {
-    store.stop(true);
-    htu.cleanupTestDir();
-  }
-
   private void verifyProcIdsOnRestart(final Set<Long> procIds) throws Exception {
     LOG.debug("expected: " + procIds);
     LoadCounter loader = new LoadCounter();
TestWALProcedurePrettyPrinter.java

@@ -22,7 +22,6 @@ import static org.junit.Assert.assertEquals;
 import java.io.BufferedReader;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
-import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.PrintStream;
 import java.nio.charset.StandardCharsets;
@@ -31,16 +30,10 @@ import java.util.List;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.LoadCounter;
-import org.apache.hadoop.hbase.regionserver.MemStoreLAB;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.apache.hadoop.hbase.util.CommonFSUtils;
 import org.apache.hadoop.util.ToolRunner;
-import org.junit.After;
-import org.junit.Before;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -48,7 +41,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @Category({ MasterTests.class, MediumTests.class })
-public class TestWALProcedurePrettyPrinter {
+public class TestWALProcedurePrettyPrinter extends RegionProcedureStoreTestBase {
 
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
@@ -56,25 +49,6 @@ public class TestWALProcedurePrettyPrinter {
 
   private static final Logger LOG = LoggerFactory.getLogger(TestWALProcedurePrettyPrinter.class);
 
-  private HBaseCommonTestingUtility htu;
-
-  private RegionProcedureStore store;
-
-  @Before
-  public void setUp() throws IOException {
-    htu = new HBaseCommonTestingUtility();
-    htu.getConfiguration().setBoolean(MemStoreLAB.USEMSLAB_KEY, false);
-    Path testDir = htu.getDataTestDir();
-    CommonFSUtils.setWALRootDir(htu.getConfiguration(), testDir);
-    store = RegionProcedureStoreTestHelper.createStore(htu.getConfiguration(), new LoadCounter());
-  }
-
-  @After
-  public void tearDown() throws IOException {
-    store.stop(true);
-    htu.cleanupTestDir();
-  }
-
   @Test
   public void test() throws Exception {
     List<RegionProcedureStoreTestProcedure> procs = new ArrayList<>();