HBASE-4229 Replace Jettison JSON encoding with Jackson in HLogPrettyPrinter

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1159914 13f79535-47bb-0310-9956-ffa450edef68

commit 38a046ea3c (parent 95c39ae2ab)
CHANGES.txt:

@@ -383,6 +383,8 @@ Release 0.91.0 - Unreleased
               o.a.h.h.HConstants (Mingjie Lai)
   HBASE-4227  Modify the webUI so that default values of column families are
               not shown (Nileema Shingte)
+  HBASE-4229  Replace Jettison JSON encoding with Jackson in HLogPrettyPrinter
+              (Riley Patterson)

  TASKS
   HBASE-3559  Move report of split to master OFF the heartbeat channel
@@ -42,11 +42,11 @@ import org.apache.hadoop.fs.FileSystem
 import org.apache.commons.logging.LogFactory

 # Name of this script
-NAME = "add_table"
+NAME = "add_region"

 # Print usage for this script
 def usage
-  puts 'Usage: %s.rb TABLE_DIR [alternate_tablename]' % NAME
+  puts 'Usage: %s.rb <PATH_TO_REGIONINFO>' % NAME
   exit!
 end
@@ -63,85 +63,20 @@ fs = FileSystem.get(c)
 LOG = LogFactory.getLog(NAME)

 # Check arguments
-if ARGV.size < 1 || ARGV.size > 2
+if ARGV.size != 1
   usage
 end

 # Get cmdline args.
-srcdir = fs.makeQualified(Path.new(java.lang.String.new(ARGV[0])))
+regioninfo = fs.makeQualified(Path.new(java.lang.String.new(ARGV[0])))

 if not fs.exists(srcdir)
-  raise IOError.new("src dir " + srcdir.toString() + " doesn't exist!")
-end
-
-# Get table name
-tableName = nil
-if ARGV.size > 1
-  tableName = ARGV[1]
-  raise IOError.new("Not supported yet")
-elsif
-  # If none provided use dirname
-  tableName = srcdir.getName()
-end
-HTableDescriptor.isLegalTableName(tableName.to_java_bytes)
-
-# Figure locations under hbase.rootdir
-# Move directories into place; be careful not to overwrite.
-rootdir = FSUtils.getRootDir(c)
-tableDir = fs.makeQualified(Path.new(rootdir, tableName))
-
-# If a directory currently in place, move it aside.
-if srcdir.equals(tableDir)
-  LOG.info("Source directory is in place under hbase.rootdir: " + srcdir.toString());
-elsif fs.exists(tableDir)
-  movedTableName = tableName + "." + java.lang.System.currentTimeMillis().to_s
-  movedTableDir = Path.new(rootdir, java.lang.String.new(movedTableName))
-  LOG.warn("Moving " + tableDir.toString() + " aside as " + movedTableDir.toString());
-  raise IOError.new("Failed move of " + tableDir.toString()) unless fs.rename(tableDir, movedTableDir)
-  LOG.info("Moving " + srcdir.toString() + " to " + tableDir.toString());
-  raise IOError.new("Failed move of " + srcdir.toString()) unless fs.rename(srcdir, tableDir)
-end
-
-# Clean mentions of table from .META.
-# Scan the .META. and remove all lines that begin with tablename
-LOG.info("Deleting mention of " + tableName + " from .META.")
-metaTable = HTable.new(c, HConstants::META_TABLE_NAME)
-tableNameMetaPrefix = tableName + HConstants::META_ROW_DELIMITER.chr
-scan = Scan.new((tableNameMetaPrefix + HConstants::META_ROW_DELIMITER.chr).to_java_bytes)
-scanner = metaTable.getScanner(scan)
-# Use java.lang.String doing compares. Ruby String is a bit odd.
-tableNameStr = java.lang.String.new(tableName)
-while (result = scanner.next())
-  rowid = Bytes.toString(result.getRow())
-  rowidStr = java.lang.String.new(rowid)
-  if not rowidStr.startsWith(tableNameMetaPrefix)
-    # Gone too far, break
-    break
-  end
-  LOG.info("Deleting row from catalog: " + rowid);
-  d = Delete.new(result.getRow())
-  metaTable.delete(d)
-end
-scanner.close()
-
-# Now, walk the table and per region, add an entry
-LOG.info("Walking " + srcdir.toString() + " adding regions to catalog table")
-statuses = fs.listStatus(srcdir)
-for status in statuses
-  next unless status.isDir()
-  next if status.getPath().getName() == "compaction.dir"
-  regioninfofile = Path.new(status.getPath(), HRegion::REGIONINFO_FILE)
-  unless fs.exists(regioninfofile)
-    LOG.warn("Missing .regioninfo: " + regioninfofile.toString())
-    next
-  end
-  is = fs.open(regioninfofile)
-  hri = HRegionInfo.new()
-  hri.readFields(is)
-  is.close()
-  # TODO: Need to redo table descriptor with passed table name and then recalculate the region encoded names.
-  p = Put.new(hri.getRegionName())
-  p.add(HConstants::CATALOG_FAMILY, HConstants::REGIONINFO_QUALIFIER, Writables.getBytes(hri))
-  metaTable.put(p)
-  LOG.info("Added to catalog: " + hri.toString())
+  raise IOError.new("regioninfo " + srcdir.toString() + " doesn't exist!")
 end
+
+is = fs.open(regioninfo)
+hri = HRegionInfo.new()
+hri.readFields(is)
+is.close()
+p = Put.new(hri.getRegionName())
+p.add(HConstants::CATALOG_FAMILY, HConstants::REGIONINFO_QUALIFIER, Writables.getBytes(hri))
+metaTable.put(p)
+LOG.info("Added to catalog: " + hri.toString())
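The rewritten script's core is now a single operation: deserialize one .regioninfo file and re-insert that region's row into the .META. catalog table. Below is a rough Java equivalent of those JRuby calls, sketched against the 0.90-era HBase client API; the class name and standalone-program framing are illustrative assumptions, not part of the commit.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Writables;

public class AddRegionSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(conf);
    Path regioninfo = fs.makeQualified(new Path(args[0]));

    // Deserialize the region descriptor the same way the script does:
    // HRegionInfo is a Writable in this HBase version.
    FSDataInputStream is = fs.open(regioninfo);
    HRegionInfo hri = new HRegionInfo();
    hri.readFields(is);
    is.close();

    // Re-insert the region's row into the .META. catalog table.
    HTable meta = new HTable(conf, HConstants.META_TABLE_NAME);
    Put p = new Put(hri.getRegionName());
    p.add(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER,
        Writables.getBytes(hri));
    meta.put(p);
    meta.close();
  }
}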
pom.xml (27 lines changed):

@@ -638,6 +638,7 @@
     https://issues.apache.org/jira/secure/attachment/12459473/hdfs-895-branch-20-append.txt
     -->
     <hadoop.version>0.20-append-r1057313</hadoop.version>
+    <jackson.version>1.5.5</jackson.version>
     <jasper.version>5.5.23</jasper.version>
     <jaxb-api.version>2.1</jaxb-api.version>
     <jetty.version>6.1.26</jetty.version>
@@ -677,9 +678,6 @@
     implementation of the same, because Hadoop also uses this version
     * javax.servlet:jsp-api in favour of org.mortbay.jetty:jsp-api-2.1
     * javax.xml.stream:stax-api in favour of stax:stax-api
-
-    Note: Both org.apache.avro:avro and com.sun.jersey:jersey-json depend on Jackson so the version
-    is chosen which comes first in the list of dependencies (jersey in this case)
     -->

     <!-- General dependencies -->
@@ -829,6 +827,29 @@
       <artifactId>servlet-api-2.5</artifactId>
       <version>${jetty.jspapi.version}</version>
     </dependency>
+    <!-- While jackson is also a dependency of both jersey and avro, these
+      can bring in jars from different, incompatible versions. We force
+      the same version with these dependencies -->
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-core-asl</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-mapper-asl</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-jaxrs</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-xc</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-api</artifactId>
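The comment in the hunk above is the heart of the pom change: jersey and avro can otherwise each pull their own Jackson onto the classpath, so the build pins every Jackson artifact to ${jackson.version}. One way to confirm which Jackson jar actually won at runtime is to read the package's manifest version; a minimal sketch, assuming the resolved jar's manifest carries an Implementation-Version attribute (it may not, in which case this prints null):

import org.codehaus.jackson.map.ObjectMapper;

public class JacksonVersionCheck {
  public static void main(String[] args) {
    // Reads the Implementation-Version from the manifest of whichever
    // jackson-mapper-asl jar the classloader actually resolved.
    Package pkg = ObjectMapper.class.getPackage();
    System.out.println("Jackson on classpath: "
        + (pkg == null ? "unknown" : pkg.getImplementationVersion()));
  }
}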
HLogPrettyPrinter.java:

@@ -20,8 +20,11 @@ package org.apache.hadoop.hbase.regionserver.wal;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.util.ArrayList;
 import java.util.Date;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;

 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
@@ -36,9 +39,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.regionserver.wal.HLog.Reader;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
+import org.codehaus.jackson.map.ObjectMapper;

 /**
  * HLogPrettyPrinter prints the contents of a given HLog with a variety of
@@ -65,6 +66,8 @@ public class HLogPrettyPrinter {
   private boolean firstTxn;
   // useful for programmatic capture of JSON output
   private PrintStream out;
+  // for JSON encoding
+  private ObjectMapper mapper;

   /**
    * Basic constructor that simply initializes values to reasonable defaults.
@@ -78,6 +81,7 @@ public class HLogPrettyPrinter {
     persistentOutput = false;
     firstTxn = true;
     out = System.out;
+    mapper = new ObjectMapper();
   }

   /**
@@ -239,23 +243,25 @@ public class HLogPrettyPrinter {
         HLogKey key = entry.getKey();
         WALEdit edit = entry.getEdit();
         // begin building a transaction structure
-        JSONObject txn = new JSONObject(key.toStringMap());
+        Map<String, Object> txn = key.toStringMap();
         // check output filters
         if (sequence >= 0 && ((Long) txn.get("sequence")) != sequence)
           continue;
         if (region != null && !((String) txn.get("region")).equals(region))
           continue;
         // initialize list into which we will store atomic actions
-        JSONArray actions = new JSONArray();
+        List<Map> actions = new ArrayList<Map>();
         for (KeyValue kv : edit.getKeyValues()) {
           // add atomic operation to txn
-          JSONObject op = new JSONObject(kv.toStringMap());
+          Map<String, Object> op =
+              new HashMap<String, Object>(kv.toStringMap());
           if (outputValues)
             op.put("value", Bytes.toStringBinary(kv.getValue()));
+          // check row output filter
           if (row == null || ((String) op.get("row")).equals(row))
-            actions.put(op);
+            actions.add(op);
         }
-        if (actions.length() == 0)
+        if (actions.size() == 0)
           continue;
         txn.put("actions", actions);
         if (outputJSON) {
@@ -264,27 +270,26 @@
             firstTxn = false;
           else
             out.print(",");
-          out.print(txn);
+          // encode and print JSON
+          out.print(mapper.writeValueAsString(txn));
         } else {
           // Pretty output, complete with indentation by atomic action
-          out.println("Sequence " + txn.getLong("sequence") + " "
-              + "from region " + txn.getString("region") + " " + "in table "
-              + txn.getString("table"));
-          for (int i = 0; i < actions.length(); i++) {
-            JSONObject op = actions.getJSONObject(i);
+          out.println("Sequence " + txn.get("sequence") + " "
+              + "from region " + txn.get("region") + " " + "in table "
+              + txn.get("table"));
+          for (int i = 0; i < actions.size(); i++) {
+            Map op = actions.get(i);
             out.println("  Action:");
-            out.println("    row: " + op.getString("row"));
-            out.println("    column: " + op.getString("family") + ":"
-                + op.getString("qualifier"));
+            out.println("    row: " + op.get("row"));
+            out.println("    column: " + op.get("family") + ":"
+                + op.get("qualifier"));
             out.println("    at time: "
-                + (new Date(op.getLong("timestamp"))));
+                + (new Date((Long) op.get("timestamp"))));
             if (outputValues)
               out.println("    value: " + op.get("value"));
           }
         }
       }
-    } catch (JSONException e) {
-      e.printStackTrace();
     } finally {
       log.close();
     }
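Taken together, the Java hunks swap Jettison's tree-building API (JSONObject, JSONArray, with its checked JSONException) for plain Map and List structures that Jackson's ObjectMapper serializes in a single writeValueAsString call. A self-contained sketch of that pattern against the Jackson 1.x API used here; the keys mirror the ones the printer emits, while the values are made-up sample data, not real WAL contents:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.codehaus.jackson.map.ObjectMapper;

public class MapToJsonDemo {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();

    // A "transaction" is just a Map; Jackson handles nested Maps and Lists.
    Map<String, Object> txn = new HashMap<String, Object>();
    txn.put("sequence", 42L);
    txn.put("region", "testregion");
    txn.put("table", "testtable");

    // Each atomic action is another Map, collected into a List.
    List<Map<String, Object>> actions = new ArrayList<Map<String, Object>>();
    Map<String, Object> op = new HashMap<String, Object>();
    op.put("row", "row1");
    op.put("family", "f");
    op.put("qualifier", "q");
    op.put("timestamp", 1313000000000L);
    actions.add(op);
    txn.put("actions", actions);

    // One call replaces the Jettison tree-building API.
    System.out.println(mapper.writeValueAsString(txn));
  }
}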