HBASE-15223 Make convertScanToString public for Spark
parent abb6cdce71
commit 1942a99b83

@@ -266,7 +266,7 @@ extends InputFormat<ImmutableBytesWritable, Result> {
       }
       List<InputSplit> splits = new ArrayList<InputSplit>(1);
       long regionSize = sizeCalculator.getRegionSize(regLoc.getRegionInfo().getRegionName());
-      TableSplit split = new TableSplit(tableName,
+      TableSplit split = new TableSplit(tableName, scan,
           HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY, regLoc
               .getHostnamePort().split(Addressing.HOSTNAME_PORT_SEPARATOR)[0], regionSize);
       splits.add(split);
@@ -309,7 +309,7 @@ extends InputFormat<ImmutableBytesWritable, Result> {
 
       byte[] regionName = location.getRegionInfo().getRegionName();
       long regionSize = sizeCalculator.getRegionSize(regionName);
-      TableSplit split = new TableSplit(tableName,
+      TableSplit split = new TableSplit(tableName, scan,
         splitStart, splitStop, regionLocation, regionSize);
       splits.add(split);
       if (LOG.isDebugEnabled()) {
@@ -397,9 +397,9 @@ extends InputFormat<ImmutableBytesWritable, Result> {
       byte[] splitKey = getSplitKey(ts.getStartRow(), ts.getEndRow(), isTextKey);
       //Set the size of child TableSplit as 1/2 of the region size. The exact size of the
       // MapReduce input splits is not far off.
-      TableSplit t1 = new TableSplit(tableName, ts.getStartRow(), splitKey, regionLocation,
+      TableSplit t1 = new TableSplit(tableName, scan, ts.getStartRow(), splitKey, regionLocation,
           regionSize / 2);
-      TableSplit t2 = new TableSplit(tableName, splitKey, ts.getEndRow(), regionLocation,
+      TableSplit t2 = new TableSplit(tableName, scan, splitKey, ts.getEndRow(), regionLocation,
           regionSize - regionSize / 2);
       resultList.add(t1);
       resultList.add(t2);
@@ -426,7 +426,7 @@ extends InputFormat<ImmutableBytesWritable, Result> {
           break;
         }
       }
-      TableSplit t = new TableSplit(tableName, splitStartKey, splitEndKey,
+      TableSplit t = new TableSplit(tableName, scan, splitStartKey, splitEndKey,
         regionLocation, totalSize);
       resultList.add(t);
     }

@@ -561,7 +561,7 @@ public class TableMapReduceUtil {
    * @return The scan saved in a Base64 encoded string.
    * @throws IOException When writing the scan fails.
    */
-  static String convertScanToString(Scan scan) throws IOException {
+  public static String convertScanToString(Scan scan) throws IOException {
     ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
     return Base64.encodeBytes(proto.toByteArray());
   }
@@ -573,7 +573,7 @@ public class TableMapReduceUtil {
    * @return The newly created Scan instance.
    * @throws IOException When reading the scan instance fails.
    */
-  static Scan convertStringToScan(String base64) throws IOException {
+  public static Scan convertStringToScan(String base64) throws IOException {
     byte [] decoded = Base64.decode(base64);
     ClientProtos.Scan scan;
     try {

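With convertScanToString and convertStringToScan now public, callers outside the org.apache.hadoop.hbase.mapreduce package can serialize a Scan into the job configuration themselves, which is what a Spark driver needs when reading through TableInputFormat. Below is a minimal sketch of that usage, assuming a Spark 1.x Java API: the HBase class and constant names (TableInputFormat.INPUT_TABLE, TableInputFormat.SCAN) are real, but the job setup, table name, and column family are illustrative assumptions, not part of this commit.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class SparkScanExample {
  // Builds an RDD over an HBase table using a custom Scan. The Scan is serialized
  // with the now-public TableMapReduceUtil.convertScanToString and handed to
  // TableInputFormat through the Configuration, which is what this change enables
  // for code outside the org.apache.hadoop.hbase.mapreduce package.
  public static JavaPairRDD<ImmutableBytesWritable, Result> scanTable(
      JavaSparkContext sc, String tableName) throws IOException {
    Scan scan = new Scan();
    scan.addFamily(Bytes.toBytes("cf"));  // example column family (assumption)

    Configuration conf = HBaseConfiguration.create();
    conf.set(TableInputFormat.INPUT_TABLE, tableName);
    conf.set(TableInputFormat.SCAN, TableMapReduceUtil.convertScanToString(scan));

    return sc.newAPIHadoopRDD(conf, TableInputFormat.class,
        ImmutableBytesWritable.class, Result.class);
  }
}
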
@@ -301,7 +301,18 @@ implements Writable, Comparable<TableSplit> {
     StringBuilder sb = new StringBuilder();
     sb.append("HBase table split(");
     sb.append("table name: ").append(tableName);
-    sb.append(", scan: ").append(scan);
+    // null scan input is represented by ""
+    String printScan = "";
+    if (!scan.equals("")) {
+      try {
+        // get the real scan here in toString, not the Base64 string
+        printScan = TableMapReduceUtil.convertStringToScan(scan).toString();
+      }
+      catch (IOException e) {
+        printScan = "";
+      }
+    }
+    sb.append(", scan: ").append(printScan);
     sb.append(", start row: ").append(Bytes.toStringBinary(startRow));
     sb.append(", end row: ").append(Bytes.toStringBinary(endRow));
     sb.append(", region location: ").append(regionLocation);