HDFS-9835. OIV: add ReverseXML processor which reconstructs an fsimage from an XML file (cmccabe)

This commit is contained in:
Colin Patrick Mccabe 2016-02-25 16:43:54 -08:00
parent 67880ccae6
commit 700b0e4019
7 changed files with 1983 additions and 52 deletions

View File

@ -17,8 +17,12 @@
*/
package org.apache.hadoop.test;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.StringWriter;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
@ -32,6 +36,7 @@ import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;
import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.fs.FileUtil;
@ -481,4 +486,55 @@ public abstract class GenericTestUtils {
Assume.assumeTrue(
Boolean.parseBoolean(System.getProperty("runningWithNative", "false")));
}
/**
 * Get the diff between two files.
 *
 * Lines are compared pairwise. When one file ends before the other, every
 * remaining line of the longer file is reported as an addition (" + ").
 *
 * @param a The first file.
 * @param b The second file.
 * @return The empty string if there is no diff; the diff, otherwise.
 *
 * @throws IOException If there is an error reading either file.
 */
public static String getFilesDiff(File a, File b) throws IOException {
  StringBuilder bld = new StringBuilder();
  BufferedReader ra = null, rb = null;
  try {
    ra = new BufferedReader(
        new InputStreamReader(new FileInputStream(a)));
    rb = new BufferedReader(
        new InputStreamReader(new FileInputStream(b)));
    while (true) {
      String la = ra.readLine();
      String lb = rb.readLine();
      if (la == null) {
        if (lb != null) {
          // File a is exhausted but b still has lines.  Report the line we
          // just read from b, then everything left in b.  (The previous code
          // drained the already-exhausted reader ra here, which silently
          // dropped b's extra trailing lines from the diff.)
          bld.append(" + ").append(lb).append("\n");
          addPlusses(bld, rb);
        }
        break;
      } else if (lb == null) {
        // File b is exhausted but a still has lines; symmetric to the above.
        bld.append(" + ").append(la).append("\n");
        addPlusses(bld, ra);
        break;
      }
      if (!la.equals(lb)) {
        bld.append(" - ").append(la).append("\n");
        bld.append(" + ").append(lb).append("\n");
      }
    }
  } finally {
    IOUtils.closeQuietly(ra);
    IOUtils.closeQuietly(rb);
  }
  return bld.toString();
}
/**
 * Drain the given reader, appending every remaining line to the builder
 * prefixed with " + " and terminated with a newline.
 *
 * @param bld Builder receiving the diff output.
 * @param r   Reader to read to end-of-stream.
 *
 * @throws IOException If reading fails.
 */
private static void addPlusses(StringBuilder bld, BufferedReader r)
    throws IOException {
  for (String line = r.readLine(); line != null; line = r.readLine()) {
    bld.append(" + ").append(line).append("\n");
  }
}
}

View File

@ -980,6 +980,8 @@ Release 2.9.0 - UNRELEASED
HDFS-9047. Retire libwebhdfs. (wheat9)
NEW FEATURES
HDFS-9835. OIV: add ReverseXML processor which reconstructs an fsimage from
an XML file (cmccabe)
IMPROVEMENTS

View File

@ -90,14 +90,14 @@ public final class FSImageFormatPBINode {
private static final AclEntryType[] ACL_ENTRY_TYPE_VALUES = AclEntryType
.values();
private static final int XATTR_NAMESPACE_MASK = 3;
private static final int XATTR_NAMESPACE_OFFSET = 30;
private static final int XATTR_NAME_MASK = (1 << 24) - 1;
private static final int XATTR_NAME_OFFSET = 6;
public static final int XATTR_NAMESPACE_MASK = 3;
public static final int XATTR_NAMESPACE_OFFSET = 30;
public static final int XATTR_NAME_MASK = (1 << 24) - 1;
public static final int XATTR_NAME_OFFSET = 6;
/* See the comments in fsimage.proto for an explanation of the following. */
private static final int XATTR_NAMESPACE_EXT_OFFSET = 5;
private static final int XATTR_NAMESPACE_EXT_MASK = 1;
public static final int XATTR_NAMESPACE_EXT_OFFSET = 5;
public static final int XATTR_NAMESPACE_EXT_MASK = 1;
private static final XAttr.NameSpace[] XATTR_NAMESPACE_VALUES =
XAttr.NameSpace.values();

View File

@ -60,6 +60,8 @@ public class OfflineImageViewerPB {
+ " * XML: This processor creates an XML document with all elements of\n"
+ " the fsimage enumerated, suitable for further analysis by XML\n"
+ " tools.\n"
+ " * reverseXML: This processor takes an XML file and creates a\n"
+ " binary fsimage containing the same elements.\n"
+ " * FileDistribution: This processor analyzes the file size\n"
+ " distribution in the image.\n"
+ " -maxSize specifies the range [0, maxSize] of file sizes to be\n"
@ -73,15 +75,18 @@ public class OfflineImageViewerPB {
+ " changed via the -delimiter argument.\n"
+ "\n"
+ "Required command line arguments:\n"
+ "-i,--inputFile <arg> FSImage file to process.\n"
+ "-i,--inputFile <arg> FSImage or XML file to process.\n"
+ "\n"
+ "Optional command line arguments:\n"
+ "-o,--outputFile <arg> Name of output file. If the specified\n"
+ " file exists, it will be overwritten.\n"
+ " (output to stdout by default)\n"
+ " If the input file was an XML file, we\n"
+ " will also create an <outputFile>.md5 file.\n"
+ "-p,--processor <arg> Select which type of processor to apply\n"
+ " against image file. (XML|FileDistribution|Web|Delimited)\n"
+ " (Web by default)\n"
+ " against image file. (XML|FileDistribution|\n"
+ " ReverseXML|Web|Delimited)\n"
+ " The default is Web.\n"
+ "-delimiter <arg> Delimiting string to use with Delimited processor. \n"
+ "-t,--temp <arg> Use temporary dir to cache intermediate result to generate\n"
+ " Delimited outputs. If not set, Delimited processor constructs\n"
@ -177,6 +182,16 @@ public class OfflineImageViewerPB {
new PBImageXmlWriter(conf, out).visit(
new RandomAccessFile(inputFile, "r"));
break;
case "ReverseXML":
try {
OfflineImageReconstructor.run(inputFile, outputFile);
} catch (Exception e) {
System.err.println("OfflineImageReconstructor failed: " +
e.getMessage());
e.printStackTrace(System.err);
System.exit(1);
}
break;
case "Web":
String addr = cmd.getOptionValue("addr", "localhost:5978");
try (WebImageViewer viewer = new WebImageViewer(
@ -200,6 +215,7 @@ public class OfflineImageViewerPB {
System.err.println("Input file ended unexpectedly. Exiting");
} catch (IOException e) {
System.err.println("Encountered exception. Exiting: " + e.getMessage());
e.printStackTrace(System.err);
}
return -1;
}

View File

@ -23,17 +23,24 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.io.RandomAccessFile;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.TimeZone;
import com.google.protobuf.ByteString;
import org.apache.commons.codec.binary.Hex;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.PermissionStatus;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoExpirationProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolInfoProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto;
import org.apache.hadoop.hdfs.protocol.proto.XAttrProtos;
import org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode;
import org.apache.hadoop.hdfs.server.namenode.FSImageFormatProtobuf.SectionName;
import org.apache.hadoop.hdfs.server.namenode.FSImageUtil;
@ -55,6 +62,14 @@ import org.apache.hadoop.hdfs.util.XMLUtils;
import org.apache.hadoop.util.LimitInputStream;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.apache.hadoop.util.VersionInfo;
import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAMESPACE_MASK;
import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAMESPACE_OFFSET;
import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAMESPACE_EXT_MASK;
import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAMESPACE_EXT_OFFSET;
import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAME_OFFSET;
import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAME_MASK;
/**
* PBImageXmlWriter walks over an fsimage structure and writes out
@ -64,11 +79,20 @@ import com.google.common.collect.Lists;
public final class PBImageXmlWriter {
private final Configuration conf;
private final PrintStream out;
private final SimpleDateFormat isoDateFormat;
private String[] stringTable;
/**
 * Create the formatter used for timestamps in the XML output: ISO-8601
 * style with millisecond precision, pinned to UTC so that the output does
 * not depend on the local timezone.
 *
 * NOTE: SimpleDateFormat is not thread-safe; each caller receives a fresh
 * instance.
 */
public static SimpleDateFormat createSimpleDateFormat() {
  final SimpleDateFormat fmt =
      new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS");
  fmt.setTimeZone(TimeZone.getTimeZone("UTC"));
  return fmt;
}
/**
 * Construct an XML writer for fsimage files.
 *
 * @param conf Hadoop configuration for the writer.
 * @param out  Stream that receives the generated XML.
 */
public PBImageXmlWriter(Configuration conf, PrintStream out) {
this.conf = conf;
this.out = out;
// Per-instance formatter: SimpleDateFormat is not thread-safe, so it must
// not be shared as a static.
this.isoDateFormat = createSimpleDateFormat();
}
public void visit(RandomAccessFile file) throws IOException {
@ -80,6 +104,16 @@ public final class PBImageXmlWriter {
try (FileInputStream fin = new FileInputStream(file.getFD())) {
out.print("<?xml version=\"1.0\"?>\n<fsimage>");
out.print("<version>");
o("layoutVersion", summary.getLayoutVersion());
o("onDiskVersion", summary.getOndiskVersion());
// Output the version of OIV (which is not necessarily the version of
// the fsimage file). This could be helpful in the case where a bug
// in OIV leads to information loss in the XML-- we can quickly tell
// if a specific fsimage XML file is affected by this bug.
o("oivRevision", VersionInfo.getRevision());
out.print("</version>\n");
ArrayList<FileSummary.Section> sections = Lists.newArrayList(summary
.getSectionsList());
Collections.sort(sections, new Comparator<FileSummary.Section>() {
@ -146,6 +180,8 @@ public final class PBImageXmlWriter {
out.print("<CacheManagerSection>");
CacheManagerSection s = CacheManagerSection.parseDelimitedFrom(is);
o("nextDirectiveId", s.getNextDirectiveId());
o("numDirectives", s.getNumDirectives());
o("numPools", s.getNumPools());
for (int i = 0; i < s.getNumPools(); ++i) {
CachePoolInfoProto p = CachePoolInfoProto.parseDelimitedFrom(is);
out.print("<pool>");
@ -163,7 +199,7 @@ public final class PBImageXmlWriter {
.o("replication", p.getReplication()).o("pool", p.getPool());
out.print("<expiration>");
CacheDirectiveInfoExpirationProto e = p.getExpiration();
o("millis", e.getMillis()).o("relatilve", e.getIsRelative());
o("millis", e.getMillis()).o("relative", e.getIsRelative());
out.print("</expiration>\n");
out.print("</directive>\n");
}
@ -187,13 +223,48 @@ public final class PBImageXmlWriter {
out.print("</FileUnderConstructionSection>\n");
}
/**
 * Dump an inode's extended attributes as <xattrs>/<xattr> elements.
 *
 * Each XAttrCompactProto packs the namespace and the name-table index into
 * a single int; the XATTR_* masks/offsets are imported from
 * FSImageFormatPBINode (see the comments in fsimage.proto).
 */
private void dumpXattrs(INodeSection.XAttrFeatureProto xattrs) {
out.print("<xattrs>");
for (INodeSection.XAttrCompactProto xattr : xattrs.getXAttrsList()) {
out.print("<xattr>");
int encodedName = xattr.getName();
// Namespace ordinal: the two low bits live at bits 30-31 of the encoded
// name; the third ("extension") bit lives at bit 5 and is shifted up to
// bit position 2.
int ns = (XATTR_NAMESPACE_MASK & (encodedName >> XATTR_NAMESPACE_OFFSET)) |
((XATTR_NAMESPACE_EXT_MASK & (encodedName >> XATTR_NAMESPACE_EXT_OFFSET)) << 2);
o("ns", XAttrProtos.XAttrProto.
XAttrNamespaceProto.valueOf(ns).toString());
// The attribute name is stored as an index into the image's string table.
o("name", stringTable[XATTR_NAME_MASK & (encodedName >> XATTR_NAME_OFFSET)]);
ByteString val = xattr.getValue();
if (val.isValidUtf8()) {
o("val", val.toStringUtf8());
} else {
// Values that are not valid UTF-8 are emitted as hex so the XML stays
// well-formed and the value round-trips losslessly.
o("valHex", Hex.encodeHexString(val.toByteArray()));
}
out.print("</xattr>");
}
out.print("</xattrs>");
}
/**
 * Dump the directory-specific fields of an inode: mtime, permission,
 * optional extended attributes, ACLs, namespace/diskspace quotas, and any
 * per-storage-type quotas.
 */
private void dumpINodeDirectory(INodeDirectory d) {
  o("mtime", d.getModificationTime()).o("permission",
      dumpPermission(d.getPermission()));
  if (d.hasXAttrs()) {
    dumpXattrs(d.getXAttrs());
  }
  dumpAcls(d.getAcl());
  if (d.hasDsQuota() && d.hasNsQuota()) {
    o("nsquota", d.getNsQuota()).o("dsquota", d.getDsQuota());
  }
  // Protobuf message getters never return null (an unset field yields a
  // default instance with an empty list), so the previous "!= null" check
  // was always true.  Test the field's presence bit instead; output is
  // unchanged either way since the default instance has no quota entries.
  if (d.hasTypeQuotas()) {
    INodeSection.QuotaByStorageTypeFeatureProto typeQuotas =
        d.getTypeQuotas();
    for (INodeSection.QuotaByStorageTypeEntryProto entry:
        typeQuotas.getQuotasList()) {
      out.print("<typeQuota>");
      o("type", entry.getStorageType().toString());
      o("quota", entry.getQuota());
      out.print("</typeQuota>");
    }
  }
}
private void dumpINodeDirectorySection(InputStream in) throws IOException {
@ -208,10 +279,10 @@ public final class PBImageXmlWriter {
out.print("<directory>");
o("parent", e.getParent());
for (long id : e.getChildrenList()) {
o("inode", id);
o("child", id);
}
for (int refId : e.getRefChildrenList()) {
o("inodereference-index", refId);
o("refChild", refId);
}
out.print("</directory>\n");
}
@ -244,6 +315,9 @@ public final class PBImageXmlWriter {
.o("atime", f.getAccessTime())
.o("preferredBlockSize", f.getPreferredBlockSize())
.o("permission", dumpPermission(f.getPermission()));
if (f.hasXAttrs()) {
dumpXattrs(f.getXAttrs());
}
dumpAcls(f.getAcl());
if (f.getBlocksCount() > 0) {
out.print("<blocks>");
@ -255,6 +329,12 @@ public final class PBImageXmlWriter {
}
out.print("</blocks>\n");
}
if (f.hasStoragePolicyID()) {
o("storagePolicyId", f.getStoragePolicyID());
}
if (f.getIsStriped()) {
out.print("<isStriped/>");
}
if (f.hasFileUC()) {
INodeSection.FileUnderConstructionFeature u = f.getFileUC();
@ -281,25 +361,28 @@ public final class PBImageXmlWriter {
INodeSection s = INodeSection.parseDelimitedFrom(in);
out.print("<INodeSection>");
o("lastInodeId", s.getLastInodeId());
o("numInodes", s.getNumInodes());
for (int i = 0; i < s.getNumInodes(); ++i) {
INodeSection.INode p = INodeSection.INode.parseDelimitedFrom(in);
out.print("<inode>");
o("id", p.getId()).o("type", p.getType()).o("name",
p.getName().toStringUtf8());
if (p.hasFile()) {
dumpINodeFile(p.getFile());
} else if (p.hasDirectory()) {
dumpINodeDirectory(p.getDirectory());
} else if (p.hasSymlink()) {
dumpINodeSymlink(p.getSymlink());
}
dumpINodeFields(p);
out.print("</inode>\n");
}
out.print("</INodeSection>\n");
}
/**
 * Write the fields shared by every inode (id, type, name), then dispatch
 * to the dumper matching the inode's concrete kind, if one is present.
 */
private void dumpINodeFields(INodeSection.INode p) {
  o("id", p.getId())
      .o("type", p.getType())
      .o("name", p.getName().toStringUtf8());
  // Check the sub-messages in the same order as before so the emitted XML
  // is unchanged.
  if (p.hasFile()) {
    dumpINodeFile(p.getFile());
  } else if (p.hasDirectory()) {
    dumpINodeDirectory(p.getDirectory());
  } else if (p.hasSymlink()) {
    dumpINodeSymlink(p.getSymlink());
  }
}
private void dumpINodeSymlink(INodeSymlink s) {
o("permission", dumpPermission(s.getPermission()))
.o("target", s.getTarget().toStringUtf8())
@ -308,7 +391,8 @@ public final class PBImageXmlWriter {
private void dumpNameSection(InputStream in) throws IOException {
NameSystemSection s = NameSystemSection.parseDelimitedFrom(in);
out.print("<NameSection>\n");
out.print("<NameSection>");
o("namespaceId", s.getNamespaceId());
o("genstampV1", s.getGenstampV1()).o("genstampV2", s.getGenstampV2())
.o("genstampV1Limit", s.getGenstampV1Limit())
.o("lastAllocatedBlockId", s.getLastAllocatedBlockId())
@ -317,18 +401,73 @@ public final class PBImageXmlWriter {
}
private String dumpPermission(long permission) {
return FSImageFormatPBINode.Loader.loadPermission(permission, stringTable)
.toString();
PermissionStatus permStatus = FSImageFormatPBINode.Loader.
loadPermission(permission, stringTable);
return String.format("%s:%s:%04o", permStatus.getUserName(),
permStatus.getGroupName(), permStatus.getPermission().toExtendedShort());
}
/**
 * Dump the delegation-token secret manager section: the section header
 * counters, then each delegation key, then each persisted token.
 *
 * The numDelegationKeys/numTokens counts are emitted explicitly so a
 * reader (e.g. the ReverseXML processor) knows how many delimited
 * sub-messages follow.
 */
private void dumpSecretManagerSection(InputStream is) throws IOException {
out.print("<SecretManagerSection>");
SecretManagerSection s = SecretManagerSection.parseDelimitedFrom(is);
int expectedNumDelegationKeys = s.getNumKeys();
int expectedNumTokens = s.getNumTokens();
o("currentId", s.getCurrentId()).o("tokenSequenceNumber",
s.getTokenSequenceNumber()).
o("numDelegationKeys", expectedNumDelegationKeys).
o("numTokens", expectedNumTokens);
// Delegation keys follow the header as length-delimited protobufs.
for (int i = 0; i < expectedNumDelegationKeys; i++) {
SecretManagerSection.DelegationKey dkey =
SecretManagerSection.DelegationKey.parseDelimitedFrom(is);
out.print("<delegationKey>");
o("id", dkey.getId());
// Key material is raw bytes; emit as hex to keep the XML well-formed.
o("key", Hex.encodeHexString(dkey.getKey().toByteArray()));
if (dkey.hasExpiryDate()) {
dumpDate("expiry", dkey.getExpiryDate());
}
out.print("</delegationKey>");
}
// Persisted tokens follow the delegation keys; every field is optional,
// so each is emitted only when present.
for (int i = 0; i < expectedNumTokens; i++) {
SecretManagerSection.PersistToken token =
SecretManagerSection.PersistToken.parseDelimitedFrom(is);
out.print("<token>");
if (token.hasVersion()) {
o("version", token.getVersion());
}
if (token.hasOwner()) {
o("owner", token.getOwner());
}
if (token.hasRenewer()) {
o("renewer", token.getRenewer());
}
if (token.hasRealUser()) {
o("realUser", token.getRealUser());
}
if (token.hasIssueDate()) {
dumpDate("issueDate", token.getIssueDate());
}
if (token.hasMaxDate()) {
dumpDate("maxDate", token.getMaxDate());
}
if (token.hasSequenceNumber()) {
o("sequenceNumber", token.getSequenceNumber());
}
if (token.hasMasterKeyId()) {
o("masterKeyId", token.getMasterKeyId());
}
if (token.hasExpiryDate()) {
dumpDate("expiryDate", token.getExpiryDate());
}
out.print("</token>");
}
out.print("</SecretManagerSection>");
}
/**
 * Print a single XML element whose body is the given epoch-millis value
 * rendered through the writer's UTC date formatter.
 *
 * @param tag  Element name.
 * @param date Milliseconds since the epoch.
 */
private void dumpDate(String tag, long date) {
  final String formatted = isoDateFormat.format(new Date(date));
  out.print("<" + tag + ">" + formatted + "</" + tag + ">");
}
private void dumpSnapshotDiffSection(InputStream in) throws IOException {
out.print("<SnapshotDiffSection>");
while (true) {
@ -337,30 +476,46 @@ public final class PBImageXmlWriter {
if (e == null) {
break;
}
out.print("<diff>");
o("inodeid", e.getInodeId());
switch (e.getType()) {
case FILEDIFF:
out.print("<fileDiffEntry>");
break;
case DIRECTORYDIFF:
out.print("<dirDiffEntry>");
break;
default:
throw new IOException("unknown DiffEntry type " + e.getType());
}
o("inodeId", e.getInodeId());
o("count", e.getNumOfDiff());
switch (e.getType()) {
case FILEDIFF: {
for (int i = 0; i < e.getNumOfDiff(); ++i) {
out.print("<filediff>");
out.print("<fileDiff>");
SnapshotDiffSection.FileDiff f = SnapshotDiffSection.FileDiff
.parseDelimitedFrom(in);
o("snapshotId", f.getSnapshotId()).o("size", f.getFileSize()).o(
"name", f.getName().toStringUtf8());
out.print("</filediff>\n");
out.print("</fileDiff>\n");
}
}
break;
case DIRECTORYDIFF: {
for (int i = 0; i < e.getNumOfDiff(); ++i) {
out.print("<dirdiff>");
out.print("<dirDiff>");
SnapshotDiffSection.DirectoryDiff d = SnapshotDiffSection.DirectoryDiff
.parseDelimitedFrom(in);
o("snapshotId", d.getSnapshotId())
.o("isSnapshotroot", d.getIsSnapshotRoot())
.o("childrenSize", d.getChildrenSize())
.o("name", d.getName().toStringUtf8());
.o("isSnapshotRoot", d.getIsSnapshotRoot())
.o("name", d.getName().toStringUtf8())
.o("createdListSize", d.getCreatedListSize());
for (long did : d.getDeletedINodeList()) {
o("deletedInode", did);
}
for (int dRefid : d.getDeletedINodeRefList()) {
o("deletedInoderef", dRefid);
}
for (int j = 0; j < d.getCreatedListSize(); ++j) {
SnapshotDiffSection.CreatedListEntry ce = SnapshotDiffSection.CreatedListEntry
.parseDelimitedFrom(in);
@ -368,24 +523,23 @@ public final class PBImageXmlWriter {
o("name", ce.getName().toStringUtf8());
out.print("</created>\n");
}
for (long did : d.getDeletedINodeList()) {
out.print("<deleted>");
o("inode", did);
out.print("</deleted>\n");
}
for (int dRefid : d.getDeletedINodeRefList()) {
out.print("<deleted>");
o("inodereference-index", dRefid);
out.print("</deleted>\n");
}
out.print("</dirdiff>\n");
out.print("</dirDiff>\n");
}
}
break;
}
default:
break;
}
out.print("</diff>");
switch (e.getType()) {
case FILEDIFF:
out.print("</fileDiffEntry>");
break;
case DIRECTORYDIFF:
out.print("</dirDiffEntry>");
break;
default:
throw new IOException("unknown DiffEntry type " + e.getType());
}
}
out.print("</SnapshotDiffSection>\n");
}
@ -394,6 +548,7 @@ public final class PBImageXmlWriter {
out.print("<SnapshotSection>");
SnapshotSection s = SnapshotSection.parseDelimitedFrom(in);
o("snapshotCounter", s.getSnapshotCounter());
o("numSnapshots", s.getNumSnapshots());
if (s.getSnapshottableDirCount() > 0) {
out.print("<snapshottableDir>");
for (long id : s.getSnapshottableDirList()) {
@ -404,7 +559,12 @@ public final class PBImageXmlWriter {
for (int i = 0; i < s.getNumSnapshots(); ++i) {
SnapshotSection.Snapshot pbs = SnapshotSection.Snapshot
.parseDelimitedFrom(in);
o("snapshot", pbs.getSnapshotId());
out.print("<snapshot>");
o("id", pbs.getSnapshotId());
out.print("<root>");
dumpINodeFields(pbs.getRoot());
out.print("</root>");
out.print("</snapshot>");
}
out.print("</SnapshotSection>\n");
}
@ -420,6 +580,14 @@ public final class PBImageXmlWriter {
}
private PBImageXmlWriter o(final String e, final Object v) {
if (v instanceof Boolean) {
// For booleans, the presence of the element indicates true, and its
// absence indicates false.
if ((Boolean)v != false) {
out.print("<" + e + "/>");
}
return this;
}
out.print("<" + e + ">" +
XMLUtils.mangleXmlString(v.toString(), true) + "</" + e + ">");
return this;

View File

@ -47,6 +47,8 @@ import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import com.google.common.io.Files;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.output.NullOutputStream;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -66,6 +68,8 @@ import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.log4j.Level;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
@ -89,8 +93,7 @@ public class TestOfflineImageViewer {
final static HashMap<String, FileStatus> writtenFiles = Maps.newHashMap();
static int dirCount = 0;
@Rule
public TemporaryFolder folder = new TemporaryFolder();
private static File tempDir;
// Create a populated namespace for later testing. Save its contents to a
// data structure and store its fsimage location.
@ -98,6 +101,7 @@ public class TestOfflineImageViewer {
// multiple tests.
@BeforeClass
public static void createOriginalFSImage() throws IOException {
tempDir = Files.createTempDir();
MiniDFSCluster cluster = null;
try {
Configuration conf = new Configuration();
@ -169,6 +173,9 @@ public class TestOfflineImageViewer {
hdfs.setXAttr(xattr, "user.a2", new byte[]{ 0x37, 0x38, 0x39 });
// OIV should be able to handle empty value XAttrs
hdfs.setXAttr(xattr, "user.a3", null);
// OIV should be able to handle XAttr values that can't be expressed
// as UTF8
hdfs.setXAttr(xattr, "user.a4", new byte[]{ -0x3d, 0x28 });
writtenFiles.put(xattr.toString(), hdfs.getFileStatus(xattr));
// Write results to the fsimage file
@ -190,6 +197,7 @@ public class TestOfflineImageViewer {
@AfterClass
public static void deleteOriginalFSImage() throws IOException {
FileUtils.deleteQuietly(tempDir);
if (originalFsimage != null && originalFsimage.exists()) {
originalFsimage.delete();
}
@ -204,7 +212,7 @@ public class TestOfflineImageViewer {
@Test(expected = IOException.class)
public void testTruncatedFSImage() throws IOException {
File truncatedFile = folder.newFile();
File truncatedFile = new File(tempDir, "truncatedFsImage");
PrintStream output = new PrintStream(NullOutputStream.NULL_OUTPUT_STREAM);
copyPartOfFile(originalFsimage, truncatedFile);
new FileDistributionCalculator(new Configuration(), 0, 0, output)
@ -449,4 +457,46 @@ public class TestOfflineImageViewer {
connection.connect();
assertEquals(expectedCode, connection.getResponseCode());
}
/**
 * Tests the ReverseXML processor.
 *
 * 1. Translate fsimage -> reverseImage.xml
 * 2. Translate reverseImage.xml -> reverseImage
 * 3. Translate reverseImage -> reverse2Image.xml
 * 4. Verify that reverseImage.xml and reverse2Image.xml match
 *
 * @throws Throwable
 */
@Test
public void testReverseXmlRoundTrip() throws Throwable {
  GenericTestUtils.setLogLevel(OfflineImageReconstructor.LOG,
      Level.TRACE);
  File reverseImageXml = new File(tempDir, "reverseImage.xml");
  File reverseImage = new File(tempDir, "reverseImage");
  File reverseImage2Xml = new File(tempDir, "reverseImage2.xml");
  LOG.info("Creating reverseImage.xml=" + reverseImageXml.getAbsolutePath() +
      ", reverseImage=" + reverseImage.getAbsolutePath() +
      ", reverseImage2Xml=" + reverseImage2Xml.getAbsolutePath());
  runOiv("XML", originalFsimage, reverseImageXml,
      "oiv returned failure creating first XML file.");
  runOiv("ReverseXML", reverseImageXml, reverseImage,
      "oiv returned failure recreating fsimage file.");
  runOiv("XML", reverseImage, reverseImage2Xml,
      "oiv returned failure creating second XML file.");
  // The XML file we wrote based on the re-created fsimage should be the
  // same as the one we dumped from the original fsimage.
  Assert.assertEquals("",
      GenericTestUtils.getFilesDiff(reverseImageXml, reverseImage2Xml));
}

/**
 * Run one OfflineImageViewerPB pass, throwing IOException with the given
 * message if the tool reports a nonzero exit code.
 */
private static void runOiv(String processor, File input, File output,
    String failureMessage) throws Exception {
  if (OfflineImageViewerPB.run(new String[] { "-p", processor,
      "-i", input.getAbsolutePath(),
      "-o", output.getAbsolutePath() }) != 0) {
    throw new IOException(failureMessage);
  }
}
}