From 1942a99b831bb4c41c0e09d6b93df5e1d060f58e Mon Sep 17 00:00:00 2001
From: Jerry He
Date: Wed, 10 Feb 2016 15:02:58 -0800
Subject: [PATCH] HBASE-15223 Make convertScanToString public for Spark

---
 .../hbase/mapreduce/TableInputFormatBase.java       | 10 +++++-----
 .../hadoop/hbase/mapreduce/TableMapReduceUtil.java  |  4 ++--
 .../apache/hadoop/hbase/mapreduce/TableSplit.java   | 13 ++++++++++++-
 3 files changed, 19 insertions(+), 8 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
index 918232fa29a..b2f115cb403 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
@@ -266,7 +266,7 @@ extends InputFormat<ImmutableBytesWritable, Result> {
       }
       List<InputSplit> splits = new ArrayList<>(1);
       long regionSize = sizeCalculator.getRegionSize(regLoc.getRegionInfo().getRegionName());
-      TableSplit split = new TableSplit(tableName,
+      TableSplit split = new TableSplit(tableName, scan,
          HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY, regLoc
              .getHostnamePort().split(Addressing.HOSTNAME_PORT_SEPARATOR)[0], regionSize);
       splits.add(split);
@@ -309,7 +309,7 @@ extends InputFormat<ImmutableBytesWritable, Result> {
 
         byte[] regionName = location.getRegionInfo().getRegionName();
         long regionSize = sizeCalculator.getRegionSize(regionName);
-        TableSplit split = new TableSplit(tableName,
+        TableSplit split = new TableSplit(tableName, scan,
           splitStart, splitStop, regionLocation, regionSize);
         splits.add(split);
         if (LOG.isDebugEnabled()) {
@@ -397,9 +397,9 @@ extends InputFormat<ImmutableBytesWritable, Result> {
         byte[] splitKey = getSplitKey(ts.getStartRow(), ts.getEndRow(), isTextKey);
         //Set the size of child TableSplit as 1/2 of the region size. The exact size of the
         // MapReduce input splits is not far off.
-        TableSplit t1 = new TableSplit(tableName, ts.getStartRow(), splitKey, regionLocation,
+        TableSplit t1 = new TableSplit(tableName, scan, ts.getStartRow(), splitKey, regionLocation,
                 regionSize / 2);
-        TableSplit t2 = new TableSplit(tableName, splitKey, ts.getEndRow(), regionLocation,
+        TableSplit t2 = new TableSplit(tableName, scan, splitKey, ts.getEndRow(), regionLocation,
                 regionSize - regionSize / 2);
         resultList.add(t1);
         resultList.add(t2);
@@ -426,7 +426,7 @@ extends InputFormat<ImmutableBytesWritable, Result> {
             break;
           }
         }
-        TableSplit t = new TableSplit(tableName, splitStartKey, splitEndKey,
+        TableSplit t = new TableSplit(tableName, scan, splitStartKey, splitEndKey,
                 regionLocation, totalSize);
         resultList.add(t);
       }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
index d43c4d97e7c..37e4e44b35d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
@@ -561,7 +561,7 @@ public class TableMapReduceUtil {
    * @return The scan saved in a Base64 encoded string.
    * @throws IOException When writing the scan fails.
    */
-  static String convertScanToString(Scan scan) throws IOException {
+  public static String convertScanToString(Scan scan) throws IOException {
     ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
     return Base64.encodeBytes(proto.toByteArray());
   }
@@ -573,7 +573,7 @@ public class TableMapReduceUtil {
    * @return The newly created Scan instance.
    * @throws IOException When reading the scan instance fails.
    */
-  static Scan convertStringToScan(String base64) throws IOException {
+  public static Scan convertStringToScan(String base64) throws IOException {
     byte [] decoded = Base64.decode(base64);
     ClientProtos.Scan scan;
     try {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
index e8e59a2d740..850db81112e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
@@ -301,7 +301,18 @@ implements Writable, Comparable<TableSplit> {
     StringBuilder sb = new StringBuilder();
     sb.append("HBase table split(");
     sb.append("table name: ").append(tableName);
-    sb.append(", scan: ").append(scan);
+    // null scan input is represented by ""
+    String printScan = "";
+    if (!scan.equals("")) {
+      try {
+        // get the real scan here in toString, not the Base64 string
+        printScan = TableMapReduceUtil.convertStringToScan(scan).toString();
+      }
+      catch (IOException e) {
+        printScan = "";
+      }
+    }
+    sb.append(", scan: ").append(printScan);
     sb.append(", start row: ").append(Bytes.toStringBinary(startRow));
     sb.append(", end row: ").append(Bytes.toStringBinary(endRow));
     sb.append(", region location: ").append(regionLocation);
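
Note (not part of the patch): with convertScanToString and convertStringToScan now public, a Spark driver can build its own Scan, serialize it, and pass it to TableInputFormat through the standard configuration key instead of copying the helper. Below is a minimal sketch of that usage; it assumes Spark's JavaSparkContext with a local master, and an illustrative table "t1" with column family "cf", none of which come from this patch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class SparkScanExample {
  public static void main(String[] args) throws Exception {
    JavaSparkContext sc = new JavaSparkContext("local[*]", "hbase-scan-example");

    // Build the Scan this Spark job should run, e.g. restrict to one column family.
    Scan scan = new Scan();
    scan.addFamily(Bytes.toBytes("cf"));   // column family "cf" is illustrative
    scan.setCaching(500);

    // Serialize the Scan with the now-public helper and hand it to TableInputFormat
    // through its configuration properties.
    Configuration conf = HBaseConfiguration.create();
    conf.set(TableInputFormat.INPUT_TABLE, "t1");   // table name "t1" is illustrative
    conf.set(TableInputFormat.SCAN, TableMapReduceUtil.convertScanToString(scan));

    // Read the table as an RDD of (row key, Result) pairs.
    JavaPairRDD<ImmutableBytesWritable, Result> rdd =
        sc.newAPIHadoopRDD(conf, TableInputFormat.class,
            ImmutableBytesWritable.class, Result.class);
    System.out.println("rows scanned: " + rdd.count());
    sc.stop();
  }
}

The TableInputFormat.SCAN property used above is the same key the MapReduce path populates (initTableMapperJob calls convertScanToString internally), so Spark and MapReduce consumers deserialize the Scan the same way via convertStringToScan.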