diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
index 82378d191d6..d72c17711af 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
@@ -264,7 +264,7 @@ extends InputFormat<ImmutableBytesWritable, Result> {
       }
       List<InputSplit> splits = new ArrayList<InputSplit>(1);
       long regionSize = sizeCalculator.getRegionSize(regLoc.getRegionInfo().getRegionName());
-      TableSplit split = new TableSplit(table.getName(),
+      TableSplit split = new TableSplit(table.getName(), scan,
           HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY, regLoc
               .getHostnamePort().split(Addressing.HOSTNAME_PORT_SEPARATOR)[0], regionSize);
       splits.add(split);
@@ -307,7 +307,7 @@ extends InputFormat<ImmutableBytesWritable, Result> {
         byte[] regionName = location.getRegionInfo().getRegionName();
         long regionSize = sizeCalculator.getRegionSize(regionName);
-        TableSplit split = new TableSplit(table.getName(),
+        TableSplit split = new TableSplit(table.getName(), scan,
             splitStart, splitStop, regionLocation, regionSize);
         splits.add(split);
         if (LOG.isDebugEnabled()) {
@@ -398,10 +398,10 @@ extends InputFormat<ImmutableBytesWritable, Result> {
           byte[] splitKey = getSplitKey(ts.getStartRow(), ts.getEndRow(), isTextKey);
           //Set the size of child TableSplit as 1/2 of the region size. The exact size of the
           // MapReduce input splits is not far off.
-          TableSplit t1 = new TableSplit(table.getName(), ts.getStartRow(), splitKey, regionLocation,
-              regionSize / 2);
-          TableSplit t2 = new TableSplit(table.getName(), splitKey, ts.getEndRow(), regionLocation,
-              regionSize - regionSize / 2);
+          TableSplit t1 = new TableSplit(table.getName(), scan, ts.getStartRow(), splitKey,
+              regionLocation, regionSize / 2);
+          TableSplit t2 = new TableSplit(table.getName(), scan, splitKey, ts.getEndRow(),
+              regionLocation, regionSize - regionSize / 2);
           resultList.add(t1);
           resultList.add(t2);
           count++;
@@ -427,7 +427,7 @@ extends InputFormat<ImmutableBytesWritable, Result> {
             break;
          }
        }
-        TableSplit t = new TableSplit(table.getName(), splitStartKey, splitEndKey,
+        TableSplit t = new TableSplit(table.getName(), scan, splitStartKey, splitEndKey,
            regionLocation, totalSize);
        resultList.add(t);
      }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
index 8cad7ab840b..458464f199a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
@@ -552,7 +552,7 @@ public class TableMapReduceUtil {
    * @return The scan saved in a Base64 encoded string.
    * @throws IOException When writing the scan fails.
    */
-  static String convertScanToString(Scan scan) throws IOException {
+  public static String convertScanToString(Scan scan) throws IOException {
     ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
     return Base64.encodeBytes(proto.toByteArray());
   }
@@ -564,7 +564,7 @@ public class TableMapReduceUtil {
    * @return The newly created Scan instance.
    * @throws IOException When reading the scan instance fails.
    */
-  static Scan convertStringToScan(String base64) throws IOException {
+  public static Scan convertStringToScan(String base64) throws IOException {
     byte [] decoded = Base64.decode(base64);
     ClientProtos.Scan scan;
     try {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
index 7111668f2ed..bf8532ba9a6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
@@ -325,7 +325,18 @@ implements Writable, Comparable<TableSplit> {
     StringBuilder sb = new StringBuilder();
     sb.append("HBase table split(");
     sb.append("table name: ").append(tableName);
-    sb.append(", scan: ").append(scan);
+    // null scan input is represented by ""
+    String printScan = "";
+    if (!scan.equals("")) {
+      try {
+        // get the real scan here in toString, not the Base64 string
+        printScan = TableMapReduceUtil.convertStringToScan(scan).toString();
+      }
+      catch (IOException e) {
+        printScan = "";
+      }
+    }
+    sb.append(", scan: ").append(printScan);
     sb.append(", start row: ").append(Bytes.toStringBinary(startRow));
     sb.append(", end row: ").append(Bytes.toStringBinary(endRow));
     sb.append(", region location: ").append(regionLocation);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index f9b2b7b54d4..eff9060b8ea 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -2133,7 +2133,6 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
     RegionActionResult.Builder regionActionResultBuilder = RegionActionResult.newBuilder();
     Boolean processed = null;
-    RpcCallContext context = RpcServer.getCurrentCall();
     this.rpcMultiRequestCount.increment();
     for (RegionAction regionAction : request.getRegionActionList()) {
       this.requestCount.add(regionAction.getActionCount());
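A minimal sketch (not part of the patch) of the round trip that the new TableSplit.toString() relies on, using the convertScanToString/convertStringToScan helpers that this change makes public. The class name, column family, and caching value below are illustrative only.

import java.io.IOException;

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanStringRoundTrip {
  public static void main(String[] args) throws IOException {
    Scan scan = new Scan();
    scan.addFamily(Bytes.toBytes("cf"));
    scan.setCaching(500);

    // Serialize the Scan to the Base64 string that TableSplit now carries.
    String encoded = TableMapReduceUtil.convertScanToString(scan);

    // Decode it back; TableSplit.toString() does the same before printing,
    // so logs show the real Scan rather than the Base64 blob.
    Scan decoded = TableMapReduceUtil.convertStringToScan(encoded);
    System.out.println(decoded);
  }
}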