HBASE-3919 More places output binary data to text

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1127384 13f79535-47bb-0310-9956-ffa450edef68

parent 147e0054fd
commit 81aae08c4a
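The diff below replaces Bytes.toString with Bytes.toStringBinary wherever row keys, region names, column qualifiers, and split keys end up in log or exception messages. As a rough sketch of why that matters (not part of this commit; the class name and row key below are made up for illustration, and it assumes the HBase client's org.apache.hadoop.hbase.util.Bytes utility on the classpath): Bytes.toString decodes the byte array as UTF-8, so non-printable bytes render as control or replacement characters, while Bytes.toStringBinary keeps printable ASCII as-is and escapes everything else as \xNN, which keeps logged keys readable and copy-pasteable.

import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical demo class, not part of the commit.
public class ToStringBinaryExample {
  public static void main(String[] args) {
    // Made-up row key mixing printable text with raw binary bytes,
    // as HBase row keys and region names often do.
    byte[] rowKey = new byte[] { 'u', 's', 'e', 'r', 0x00, (byte) 0xC2, 0x1F };

    // Bytes.toString decodes as UTF-8; the binary bytes come out as
    // control characters or replacement glyphs in a log line.
    System.out.println("toString:       " + Bytes.toString(rowKey));

    // Bytes.toStringBinary escapes non-printable bytes as \xNN, so the
    // logged value identifies the key unambiguously.
    System.out.println("toStringBinary: " + Bytes.toStringBinary(rowKey));
  }
}

Bytes is a pure utility class, so this sketch only needs the client jar on the classpath; no running cluster is required.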
@@ -294,6 +294,7 @@ Release 0.90.4 - Unreleased
               sun jre provided by RHEL6 (Roman Shaposhnik)
    HBASE-3920 HLog hbase.regionserver.flushlogentries no longer supported
               (Dave Latham)
+   HBASE-3919 More places output binary data to text (Dave Latham)
 
 Release 0.90.3 - May 19th, 2011
 

@@ -470,7 +470,7 @@ public class CatalogTracker {
         throw e;
       }
     }
-    LOG.info("Failed verification of " + Bytes.toString(regionName) +
+    LOG.info("Failed verification of " + Bytes.toStringBinary(regionName) +
       " at address=" + address + "; " + t);
     return false;
   }

@@ -162,7 +162,7 @@ public class MetaEditor {
     addLocation(put, sn);
     server.put(catalogRegionName, put);
     LOG.info("Updated row " + regionInfo.getRegionNameAsString() +
-      " in region " + Bytes.toString(catalogRegionName) + " with " +
+      " in region " + Bytes.toStringBinary(catalogRegionName) + " with " +
       "serverName=" + sn.toString());
   }
 
@@ -199,7 +199,7 @@ public class MetaEditor {
     catalogTracker.waitForMetaServerConnectionDefault().
       delete(CatalogTracker.META_REGION, delete);
     LOG.info("Deleted daughter reference " + daughter.getRegionNameAsString() +
-      ", qualifier=" + Bytes.toString(qualifier) + ", from parent " +
+      ", qualifier=" + Bytes.toStringBinary(qualifier) + ", from parent " +
       parent.getRegionNameAsString());
   }
 

@@ -290,7 +290,7 @@ public class Delete implements Writable, Row, Comparable<Row> {
   public String toString() {
     StringBuilder sb = new StringBuilder();
     sb.append("row=");
-    sb.append(Bytes.toString(this.row));
+    sb.append(Bytes.toStringBinary(this.row));
     sb.append(", ts=");
     sb.append(this.ts);
     sb.append(", families={");

@@ -306,7 +306,7 @@ public class Get implements Writable, Row, Comparable<Row> {
   public String toString() {
     StringBuilder sb = new StringBuilder();
     sb.append("row=");
-    sb.append(Bytes.toString(this.row));
+    sb.append(Bytes.toStringBinary(this.row));
     sb.append(", maxVersions=");
     sb.append("").append(this.maxVersions);
     sb.append(", cacheBlocks=");
@@ -342,7 +342,7 @@ public class Get implements Writable, Row, Comparable<Row> {
       } else {
         moreThanOneB = true;
       }
-      sb.append(Bytes.toString(column));
+      sb.append(Bytes.toStringBinary(column));
     }
     sb.append("}");
   }

@@ -766,7 +766,7 @@ public class HBaseAdmin implements Abortable, Closeable {
     Pair<HRegionInfo, ServerName> pair = MetaReader.getRegion(ct, regionname);
     if (pair == null || pair.getSecond() == null) {
       LOG.info("No server in .META. for " +
-        Bytes.toString(regionname) + "; pair=" + pair);
+        Bytes.toStringBinary(regionname) + "; pair=" + pair);
     } else {
       closeRegion(pair.getSecond(), pair.getFirst());
     }
@@ -774,7 +774,7 @@ public class HBaseAdmin implements Abortable, Closeable {
     Pair<HRegionInfo, ServerName> pair = MetaReader.getRegion(ct, regionname);
     if (pair == null || pair.getSecond() == null) {
       LOG.info("No server in .META. for " +
-        Bytes.toString(regionname) + "; pair=" + pair);
+        Bytes.toStringBinary(regionname) + "; pair=" + pair);
     } else {
       closeRegion(pair.getSecond(), pair.getFirst());
     }
@@ -823,7 +823,7 @@ public class HBaseAdmin implements Abortable, Closeable {
       MetaReader.getRegion(ct, tableNameOrRegionName);
     if (pair == null || pair.getSecond() == null) {
       LOG.info("No server in .META. for " +
-        Bytes.toString(tableNameOrRegionName) + "; pair=" + pair);
+        Bytes.toStringBinary(tableNameOrRegionName) + "; pair=" + pair);
     } else {
       flush(pair.getSecond(), pair.getFirst());
     }
@@ -926,7 +926,7 @@ public class HBaseAdmin implements Abortable, Closeable {
       MetaReader.getRegion(ct, tableNameOrRegionName);
     if (pair == null || pair.getSecond() == null) {
       LOG.info("No server in .META. for " +
-        Bytes.toString(tableNameOrRegionName) + "; pair=" + pair);
+        Bytes.toStringBinary(tableNameOrRegionName) + "; pair=" + pair);
     } else {
       compact(pair.getSecond(), pair.getFirst(), major);
     }
@@ -1082,7 +1082,7 @@ public class HBaseAdmin implements Abortable, Closeable {
       MetaReader.getRegion(ct, tableNameOrRegionName);
     if (pair == null || pair.getSecond() == null) {
       LOG.info("No server in .META. for " +
-        Bytes.toString(tableNameOrRegionName) + "; pair=" + pair);
+        Bytes.toStringBinary(tableNameOrRegionName) + "; pair=" + pair);
     } else {
       split(pair.getSecond(), pair.getFirst(), splitPoint);
     }

@@ -225,7 +225,7 @@ public class Increment implements Writable {
   public String toString() {
     StringBuilder sb = new StringBuilder();
     sb.append("row=");
-    sb.append(Bytes.toString(this.row));
+    sb.append(Bytes.toStringBinary(this.row));
     if(this.familyMap.size() == 0) {
       sb.append(", no columns set to be incremented");
       return sb.toString();
@@ -254,7 +254,7 @@ public class Increment implements Writable {
       } else {
         moreThanOneB = true;
       }
-      sb.append(Bytes.toString(column.getKey()) + "+=" + column.getValue());
+      sb.append(Bytes.toStringBinary(column.getKey()) + "+=" + column.getValue());
     }
     sb.append("}");
   }

@@ -155,13 +155,13 @@ public class MetaScanner {
       HConstants.CATALOG_FAMILY);
     if (startRowResult == null) {
       throw new TableNotFoundException("Cannot find row in .META. for table: "
-        + Bytes.toString(tableName) + ", row=" + Bytes.toString(searchRow));
+        + Bytes.toString(tableName) + ", row=" + Bytes.toStringBinary(searchRow));
     }
     byte[] value = startRowResult.getValue(HConstants.CATALOG_FAMILY,
       HConstants.REGIONINFO_QUALIFIER);
     if (value == null || value.length == 0) {
       throw new IOException("HRegionInfo was null or empty in Meta for " +
-        Bytes.toString(tableName) + ", row=" + Bytes.toString(searchRow));
+        Bytes.toString(tableName) + ", row=" + Bytes.toStringBinary(searchRow));
     }
     HRegionInfo regionInfo = Writables.getHRegionInfo(value);
 
@@ -186,7 +186,7 @@ public class MetaScanner {
     final Scan scan = new Scan(startRow).addFamily(HConstants.CATALOG_FAMILY);
     if (LOG.isDebugEnabled()) {
       LOG.debug("Scanning " + Bytes.toString(metaTableName) +
-        " starting at row=" + Bytes.toString(startRow) + " for max=" +
+        " starting at row=" + Bytes.toStringBinary(startRow) + " for max=" +
         rowUpperLimit + " rows");
     }
     callable = new ScannerCallable(connection, metaTableName, scan);

@@ -414,7 +414,7 @@ public class Put implements HeapSize, Writable, Row, Comparable<Row> {
   public String toString() {
     StringBuilder sb = new StringBuilder();
     sb.append("row=");
-    sb.append(Bytes.toString(this.row));
+    sb.append(Bytes.toStringBinary(this.row));
     sb.append(", families={");
     boolean moreThanOne = false;
     for(Map.Entry<byte [], List<KeyValue>> entry : this.familyMap.entrySet()) {

@@ -664,7 +664,7 @@ public class Result implements Writable, WritableWithSize {
     throws Exception {
     if (res2 == null) {
       throw new Exception("There wasn't enough rows, we stopped at "
-        + Bytes.toString(res1.getRow()));
+        + Bytes.toStringBinary(res1.getRow()));
     }
     if (res1.size() != res2.size()) {
       throw new Exception("This row doesn't have the same number of KVs: "

@@ -258,7 +258,7 @@ public class HalfStoreFileReader extends StoreFile.Reader {
         return Bytes.toBytes(scanner.getKey());
       }
     } catch (IOException e) {
-      LOG.warn("Failed seekBefore " + Bytes.toString(this.splitkey), e);
+      LOG.warn("Failed seekBefore " + Bytes.toStringBinary(this.splitkey), e);
     }
     return null;
   }

@@ -101,7 +101,7 @@ implements Configurable {
     if (this.lastReduces != reduces) {
       this.splits = Bytes.split(this.startkey, this.endkey, reduces - 1);
       for (int i = 0; i < splits.length; i++) {
-        LOG.info(Bytes.toString(splits[i]));
+        LOG.info(Bytes.toStringBinary(splits[i]));
       }
     }
     int pos = Bytes.binarySearch(this.splits, key.get(), key.getOffset(),

@@ -822,7 +822,7 @@ implements HMasterInterface, HMasterRegionInterface, MasterServices, Server {
     Pair<HRegionInfo, ServerName> p =
       this.assignmentManager.getAssignment(encodedRegionName);
     if (p == null)
-      throw new UnknownRegionException(Bytes.toString(encodedRegionName));
+      throw new UnknownRegionException(Bytes.toStringBinary(encodedRegionName));
     HRegionInfo hri = p.getFirst();
     ServerName dest = null;
     if (destServerName == null || destServerName.length == 0) {
@@ -1295,7 +1295,7 @@ implements HMasterInterface, HMasterRegionInterface, MasterServices, Server {
     }
     Pair<HRegionInfo, ServerName> pair =
       MetaReader.getRegion(this.catalogTracker, regionName);
-    if (pair == null) throw new UnknownRegionException(Bytes.toString(regionName));
+    if (pair == null) throw new UnknownRegionException(Bytes.toStringBinary(regionName));
     HRegionInfo hri = pair.getFirst();
     if (force) this.assignmentManager.clearRegionFromTransition(hri);
     this.assignmentManager.unassign(hri, force);

@@ -3054,8 +3054,8 @@ public class HRegion implements HeapSize { // , Writable{
 
     LOG.info("starting merge of regions: " + a + " and " + b +
       " into new region " + newRegionInfo.toString() +
-      " with start key <" + Bytes.toString(startKey) + "> and end key <" +
-      Bytes.toString(endKey) + ">");
+      " with start key <" + Bytes.toStringBinary(startKey) + "> and end key <" +
+      Bytes.toStringBinary(endKey) + ">");
 
     // Move HStoreFiles under new region directory
     Map<byte [], List<StoreFile>> byFamily =
@@ -3494,7 +3494,7 @@ public class HRegion implements HeapSize { // , Writable{
     throws NoSuchColumnFamilyException {
     if(!regionInfo.getTableDesc().hasFamily(family)) {
       throw new NoSuchColumnFamilyException("Column family " +
-        Bytes.toString(family) + " does not exist in region " + this
+        Bytes.toStringBinary(family) + " does not exist in region " + this
         + " in table " + regionInfo.getTableDesc());
     }
   }

@@ -404,7 +404,7 @@ class MemStoreFlusher extends Thread implements FlushRequester {
       return false;
     } catch (IOException ex) {
       LOG.error("Cache flush failed" +
-        (region != null ? (" for region " + Bytes.toString(region.getRegionName())) : ""),
+        (region != null ? (" for region " + Bytes.toStringBinary(region.getRegionName())) : ""),
         RemoteExceptionHandler.checkIOException(ex));
       if (!server.checkFileSystem()) {
         return false;
@@ -533,7 +533,7 @@ class MemStoreFlusher extends Thread implements FlushRequester {
 
     @Override
     public String toString() {
-      return "[flush region " + Bytes.toString(region.getRegionName()) + "]";
+      return "[flush region " + Bytes.toStringBinary(region.getRegionName()) + "]";
     }
   }
 }

@@ -166,7 +166,7 @@ public class SplitTransaction {
     if (Bytes.equals(startKey, splitrow) ||
         !this.parent.getRegionInfo().containsRow(splitrow)) {
       LOG.info("Split row is not inside region key range or is equal to " +
-          "startkey: " + Bytes.toString(this.splitrow));
+          "startkey: " + Bytes.toStringBinary(this.splitrow));
       return false;
     }
     long rid = getDaughterRegionIdTimestamp(hri);

@@ -625,7 +625,7 @@ public class HLog implements Syncable {
       LOG.debug("Found " + logsToRemove + " hlogs to remove" +
         " out of total " + this.outputfiles.size() + ";" +
         " oldest outstanding sequenceid is " + oldestOutstandingSeqNum +
-        " from region " + Bytes.toString(oldestRegion));
+        " from region " + Bytes.toStringBinary(oldestRegion));
     }
     for (Long seq : sequenceNumbers) {
       archiveLogFile(this.outputfiles.remove(seq), seq);

@@ -209,8 +209,8 @@ class HMerge {
           mergedRegion);
         break;
       }
-      LOG.info("not merging regions " + Bytes.toString(currentRegion.getRegionName())
-          + " and " + Bytes.toString(nextRegion.getRegionName()));
+      LOG.info("not merging regions " + Bytes.toStringBinary(currentRegion.getRegionName())
+          + " and " + Bytes.toStringBinary(nextRegion.getRegionName()));
       currentRegion.close();
       currentRegion = nextRegion;
       currentSize = nextSize;
@@ -282,7 +282,7 @@ class HMerge {
       Result currentRow = metaScanner.next();
       boolean foundResult = false;
       while (currentRow != null) {
-        LOG.info("Row: <" + Bytes.toString(currentRow.getRow()) + ">");
+        LOG.info("Row: <" + Bytes.toStringBinary(currentRow.getRow()) + ">");
         byte[] regionInfoValue = currentRow.getValue(HConstants.CATALOG_FAMILY,
             HConstants.REGIONINFO_QUALIFIER);
         if (regionInfoValue == null || regionInfoValue.length == 0) {
@@ -324,7 +324,7 @@ class HMerge {
         Delete delete = new Delete(regionsToDelete[r]);
         table.delete(delete);
         if(LOG.isDebugEnabled()) {
-          LOG.debug("updated columns in row: " + Bytes.toString(regionsToDelete[r]));
+          LOG.debug("updated columns in row: " + Bytes.toStringBinary(regionsToDelete[r]));
         }
       }
       newRegion.getRegionInfo().setOffline(true);
@@ -336,7 +336,7 @@ class HMerge {
 
       if(LOG.isDebugEnabled()) {
         LOG.debug("updated columns in row: "
-            + Bytes.toString(newRegion.getRegionName()));
+            + Bytes.toStringBinary(newRegion.getRegionName()));
       }
     }
   }
@@ -417,7 +417,7 @@ class HMerge {
         root.delete(delete, null, true);
 
         if(LOG.isDebugEnabled()) {
-          LOG.debug("updated columns in row: " + Bytes.toString(regionsToDelete[r]));
+          LOG.debug("updated columns in row: " + Bytes.toStringBinary(regionsToDelete[r]));
        }
      }
      HRegionInfo newInfo = newRegion.getRegionInfo();
@@ -427,7 +427,7 @@ class HMerge {
           Writables.getBytes(newInfo));
       root.put(put);
       if(LOG.isDebugEnabled()) {
-        LOG.debug("updated columns in row: " + Bytes.toString(newRegion.getRegionName()));
+        LOG.debug("updated columns in row: " + Bytes.toStringBinary(newRegion.getRegionName()));
       }
     }
   }

@@ -196,22 +196,22 @@ public class Merge extends Configured implements Tool {
    * Merges two regions from a user table.
    */
   private void mergeTwoRegions() throws IOException {
-    LOG.info("Merging regions " + Bytes.toString(this.region1) + " and " +
-      Bytes.toString(this.region2) + " in table " + Bytes.toString(this.tableName));
+    LOG.info("Merging regions " + Bytes.toStringBinary(this.region1) + " and " +
+      Bytes.toStringBinary(this.region2) + " in table " + Bytes.toString(this.tableName));
     // Scan the root region for all the meta regions that contain the regions
     // we're merging.
     MetaScannerListener listener = new MetaScannerListener(region1, region2);
     this.utils.scanRootRegion(listener);
     HRegionInfo meta1 = listener.getMeta1();
     if (meta1 == null) {
-      throw new IOException("Could not find meta region for " + Bytes.toString(region1));
+      throw new IOException("Could not find meta region for " + Bytes.toStringBinary(region1));
     }
     HRegionInfo meta2 = listener.getMeta2();
     if (meta2 == null) {
-      throw new IOException("Could not find meta region for " + Bytes.toString(region2));
+      throw new IOException("Could not find meta region for " + Bytes.toStringBinary(region2));
     }
-    LOG.info("Found meta for region1 " + Bytes.toString(meta1.getRegionName()) +
-      ", meta for region2 " + Bytes.toString(meta2.getRegionName()));
+    LOG.info("Found meta for region1 " + Bytes.toStringBinary(meta1.getRegionName()) +
+      ", meta for region2 " + Bytes.toStringBinary(meta2.getRegionName()));
     HRegion metaRegion1 = this.utils.getMetaRegion(meta1);
     Get get = new Get(region1);
     get.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
@@ -219,7 +219,7 @@ public class Merge extends Configured implements Tool {
     HRegionInfo info1 = Writables.getHRegionInfo((cells1 == null)? null: cells1.get(0).getValue());
     if (info1== null) {
       throw new NullPointerException("info1 is null using key " +
-        Bytes.toString(region1) + " in " + meta1);
+        Bytes.toStringBinary(region1) + " in " + meta1);
     }
 
     HRegion metaRegion2;
@@ -244,7 +244,7 @@ public class Merge extends Configured implements Tool {
     HRegionInfo mergedInfo = listener.getMeta1();
     if (mergedInfo == null) {
       throw new IOException("Could not find meta region for " +
-        Bytes.toString(merged.getRegionName()));
+        Bytes.toStringBinary(merged.getRegionName()));
     }
     HRegion mergeMeta;
     if (Bytes.equals(mergedInfo.getRegionName(), meta1.getRegionName())) {
@@ -271,12 +271,12 @@ public class Merge extends Configured implements Tool {
       HRegion meta2)
   throws IOException {
     if (info1 == null) {
-      throw new IOException("Could not find " + Bytes.toString(region1) + " in " +
-        Bytes.toString(meta1.getRegionName()));
+      throw new IOException("Could not find " + Bytes.toStringBinary(region1) + " in " +
+        Bytes.toStringBinary(meta1.getRegionName()));
     }
     if (info2 == null) {
-      throw new IOException("Cound not find " + Bytes.toString(region2) + " in " +
-        Bytes.toString(meta2.getRegionName()));
+      throw new IOException("Cound not find " + Bytes.toStringBinary(region2) + " in " +
+        Bytes.toStringBinary(meta2.getRegionName()));
     }
     HRegion merged = null;
     HLog log = utils.getLog();
@@ -361,7 +361,7 @@ public class Merge extends Configured implements Tool {
 
   private boolean notInTable(final byte [] tn, final byte [] rn) {
     if (WritableComparator.compareBytes(tn, 0, tn.length, rn, 0, tn.length) != 0) {
-      LOG.error("Region " + Bytes.toString(rn) + " does not belong to table " +
+      LOG.error("Region " + Bytes.toStringBinary(rn) + " does not belong to table " +
         Bytes.toString(tn));
       return true;
     }

@@ -225,7 +225,7 @@ public class MetaUtils {
       info = Writables.getHRegionInfoOrNull(kv.getValue());
       if (info == null) {
         LOG.warn("Region info is null for row " +
-          Bytes.toString(kv.getRow()) + " in table " +
+          Bytes.toStringBinary(kv.getRow()) + " in table " +
           r.getTableDesc().getNameAsString());
       }
       continue;