HBASE-11845 HFile tool should implement Tool, disable blockcache by default

The hfile tool (HFilePrettyPrinter) now accepts configuration via -D
arguments. The block cache is disabled by default.
This commit is contained in:
Nick Dimiduk 2014-09-09 13:48:29 -07:00
parent d283818c51
commit 42bc104d0b
3 changed files with 35 additions and 14 deletions

View File

@ -289,7 +289,7 @@ elif [ "$COMMAND" = "hbck" ] ; then
elif [ "$COMMAND" = "hlog" ] ; then
CLASS='org.apache.hadoop.hbase.regionserver.wal.HLogPrettyPrinter'
elif [ "$COMMAND" = "hfile" ] ; then
CLASS='org.apache.hadoop.hbase.io.hfile.HFile'
CLASS='org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter'
elif [ "$COMMAND" = "zkcli" ] ; then
CLASS="org.apache.hadoop.hbase.zookeeper.ZooKeeperMainServer"
elif [ "$COMMAND" = "upgrade" ] ; then

View File

@ -800,11 +800,6 @@ public class HFile {
return res;
}
public static void main(String[] args) throws IOException {
HFilePrettyPrinter prettyPrinter = new HFilePrettyPrinter();
System.exit(prettyPrinter.run(args));
}
/**
* Checks the given {@link HFile} format version, and throws an exception if
* invalid. Note that if the version number comes from an input file and has
@ -822,4 +817,9 @@ public class HFile {
+ MAX_FORMAT_VERSION + ")");
}
}
/**
 * Command-line entry point retained for backward compatibility: callers that
 * still invoke {@code HFile} as a tool are forwarded to
 * {@link HFilePrettyPrinter#main(String[])}, which now owns the CLI behavior.
 *
 * @param args command-line arguments, passed through unchanged
 * @throws Exception propagated from the delegate's main method
 */
public static void main(String[] args) throws Exception {
  // delegate to preserve old behavior
  HFilePrettyPrinter.main(args);
}
}

View File

@ -40,8 +40,11 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValue;
@ -56,6 +59,8 @@ import org.apache.hadoop.hbase.util.ByteBloomFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import com.yammer.metrics.core.Histogram;
import com.yammer.metrics.core.Metric;
@ -69,7 +74,7 @@ import com.yammer.metrics.reporting.ConsoleReporter;
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class HFilePrettyPrinter {
public class HFilePrettyPrinter extends Configured implements Tool {
private static final Log LOG = LogFactory.getLog(HFilePrettyPrinter.class);
@ -89,7 +94,6 @@ public class HFilePrettyPrinter {
* The row which the user wants to specify and print all the KeyValues for.
*/
private byte[] row = null;
private Configuration conf;
private List<Path> files = new ArrayList<Path>();
private int count;
@ -97,6 +101,16 @@ public class HFilePrettyPrinter {
private static final String FOUR_SPACES = " ";
/**
 * No-arg constructor. Leaves the {@link Configured} base without a
 * configuration (callers such as ToolRunner are expected to inject one via
 * {@code setConf}); registers the command-line options via {@link #init()}.
 */
public HFilePrettyPrinter() {
  super();
  init();
}
/**
 * Constructs a printer backed by the given configuration (stored by the
 * {@link Configured} base and read back through {@code getConf()}), then
 * registers the command-line options via {@link #init()}.
 *
 * @param conf configuration used for filesystem access and cache settings
 */
public HFilePrettyPrinter(Configuration conf) {
  super(conf);
  init();
}
private void init() {
options.addOption("v", "verbose", false,
"Verbose output; emits file and meta data delimiters");
options.addOption("p", "printkv", false, "Print key/value pairs");
@ -153,13 +167,13 @@ public class HFilePrettyPrinter {
String regionName = cmd.getOptionValue("r");
byte[] rn = Bytes.toBytes(regionName);
byte[][] hri = HRegionInfo.parseRegionName(rn);
Path rootDir = FSUtils.getRootDir(conf);
Path rootDir = FSUtils.getRootDir(getConf());
Path tableDir = FSUtils.getTableDir(rootDir, TableName.valueOf(hri[0]));
String enc = HRegionInfo.encodeRegionName(rn);
Path regionDir = new Path(tableDir, enc);
if (verbose)
System.out.println("region dir -> " + regionDir);
List<Path> regionFiles = HFile.getStoreFiles(FileSystem.get(conf),
List<Path> regionFiles = HFile.getStoreFiles(FileSystem.get(getConf()),
regionDir);
if (verbose)
System.out.println("Number of region files found -> "
@ -182,9 +196,8 @@ public class HFilePrettyPrinter {
* exit code (zero for success, non-zero for failure).
*/
public int run(String[] args) {
conf = HBaseConfiguration.create();
try {
FSUtils.setFsDefault(conf, FSUtils.getRootDir(conf));
FSUtils.setFsDefault(getConf(), FSUtils.getRootDir(getConf()));
if (!parseOptions(args))
return 1;
} catch (IOException ex) {
@ -214,12 +227,12 @@ public class HFilePrettyPrinter {
private void processFile(Path file) throws IOException {
if (verbose)
System.out.println("Scanning -> " + file);
FileSystem fs = file.getFileSystem(conf);
FileSystem fs = file.getFileSystem(getConf());
if (!fs.exists(file)) {
System.err.println("ERROR, file doesnt exist: " + file);
}
HFile.Reader reader = HFile.createReader(fs, file, new CacheConfig(conf), conf);
HFile.Reader reader = HFile.createReader(fs, file, new CacheConfig(getConf()), getConf());
Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
@ -489,4 +502,12 @@ public class HFilePrettyPrinter {
stream.printf(Locale.getDefault(), " count = %d\n", histogram.count());
}
}
/**
 * Command-line entry point. Builds an HBase configuration with the block
 * cache disabled (pretty-printing a file gains nothing from caching its
 * blocks), runs this tool through {@link ToolRunner} so generic {@code -D}
 * options are honored, and exits with the tool's return code.
 *
 * @param args command-line arguments, parsed by the tool
 * @throws Exception propagated from {@link ToolRunner#run}
 */
public static void main(String[] args) throws Exception {
  final Configuration conf = HBaseConfiguration.create();
  // Reading a file once for printing does not benefit from a block cache.
  conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0);
  System.exit(ToolRunner.run(conf, new HFilePrettyPrinter(), args));
}
}