HADOOP-9264. Port change to use Java untar API on Windows from branch-1-win to trunk. Contributed by Chris Nauroth.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1441172 13f79535-47bb-0310-9956-ffa450edef68
parent b04586e7eb
commit 71a57ded39
hadoop-common-project/hadoop-common/CHANGES.txt
@@ -325,6 +325,9 @@ Trunk (Unreleased)
     HADOOP-9249. hadoop-maven-plugins version-info goal causes build failure
     when running with Clover. (Chris Nauroth via suresh)
 
+    HADOOP-9264. Port change to use Java untar API on Windows from
+    branch-1-win to trunk. (Chris Nauroth via suresh)
+
   OPTIMIZATIONS
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)

hadoop-common-project/hadoop-common/pom.xml
@@ -241,6 +241,11 @@
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-compress</artifactId>
+      <version>1.4</version>
+    </dependency>
   </dependencies>
 
   <build>

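For context, the commons-compress dependency added above supplies the TarArchiveInputStream and TarArchiveEntry classes that the new Java untar path in FileUtil (further down in this diff) relies on. A minimal sketch of iterating a gzipped tar with that API; the class name and the "sample.tgz" path are placeholders, not part of the commit:

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.GZIPInputStream;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;

public class ListTarEntries {
  public static void main(String[] args) throws IOException {
    // "sample.tgz" is a hypothetical input file used only for illustration.
    InputStream in = new BufferedInputStream(new GZIPInputStream(
        new FileInputStream("sample.tgz")));
    TarArchiveInputStream tis = new TarArchiveInputStream(in);
    try {
      // getNextTarEntry() returns null once the archive is exhausted.
      for (TarArchiveEntry entry = tis.getNextTarEntry(); entry != null;
          entry = tis.getNextTarEntry()) {
        System.out.println(entry.getName() + " (" + entry.getSize() + " bytes)");
      }
    } finally {
      tis.close();
    }
  }
}
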
@@ -381,6 +386,23 @@
               </target>
             </configuration>
           </execution>
+          <execution>
+            <id>copy-test-tarballs</id>
+            <phase>process-test-resources</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <copy toDir="${test.cache.data}">
+                  <fileset dir="${basedir}/src/test/java/org/apache/hadoop/fs">
+                    <include name="test-untar.tar"/>
+                    <include name="test-untar.tgz"/>
+                  </fileset>
+                </copy>
+              </target>
+            </configuration>
+          </execution>
           <execution>
             <phase>pre-site</phase>
             <goals>

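The copy-test-tarballs execution above stages the two binary tarballs into ${test.cache.data} during process-test-resources; the new test in TestFileUtil (at the end of this diff) then resolves them through the test.cache.data system property. A small sketch of that lookup; the property name and "build/test/cache" fallback are taken from the test, the class name is hypothetical:

import java.io.File;

public class ResolveTestTarballs {
  public static void main(String[] args) {
    // Mirrors the lookup in TestFileUtil#testUntar; "build/test/cache" is the
    // fallback used when the Maven build has not set test.cache.data.
    String cacheDir = System.getProperty("test.cache.data", "build/test/cache");
    File tarFile = new File(cacheDir, "test-untar.tar");
    File tarGzFile = new File(cacheDir, "test-untar.tgz");
    System.out.println(tarFile + " exists? " + tarFile.exists());
    System.out.println(tarGzFile + " exists? " + tarGzFile.exists());
  }
}
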
@@ -485,6 +507,7 @@
             <exclude>src/test/all-tests</exclude>
             <exclude>src/test/resources/kdc/ldif/users.ldif</exclude>
             <exclude>src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4.c</exclude>
+            <exclude>src/test/java/org/apache/hadoop/fs/test-untar.tgz</exclude>
           </excludes>
         </configuration>
       </plugin>

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
@@ -21,9 +21,12 @@ package org.apache.hadoop.fs;
 import java.io.*;
 import java.util.Arrays;
 import java.util.Enumeration;
+import java.util.zip.GZIPInputStream;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipFile;
 
+import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
+import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -624,14 +627,28 @@ public class FileUtil {
    * @throws IOException
    */
   public static void unTar(File inFile, File untarDir) throws IOException {
-    if (!untarDir.mkdirs()) {
+    if (!untarDir.mkdirs()) {
       if (!untarDir.isDirectory()) {
         throw new IOException("Mkdirs failed to create " + untarDir);
       }
     }
 
-    StringBuilder untarCommand = new StringBuilder();
     boolean gzipped = inFile.toString().endsWith("gz");
+    if(Shell.WINDOWS) {
+      // Tar is not native to Windows. Use simple Java based implementation for
+      // tests and simple tar archives
+      unTarUsingJava(inFile, untarDir, gzipped);
+    }
+    else {
+      // spawn tar utility to untar archive for full fledged unix behavior such
+      // as resolving symlinks in tar archives
+      unTarUsingTar(inFile, untarDir, gzipped);
+    }
+  }
+
+  private static void unTarUsingTar(File inFile, File untarDir,
+      boolean gzipped) throws IOException {
+    StringBuffer untarCommand = new StringBuffer();
     if (gzipped) {
       untarCommand.append(" gzip -dc '");
       untarCommand.append(FileUtil.makeShellPath(inFile));
@@ -656,7 +673,62 @@ public class FileUtil {
           ". Tar process exited with exit code " + exitcode);
     }
   }
+
+  private static void unTarUsingJava(File inFile, File untarDir,
+      boolean gzipped) throws IOException {
+    InputStream inputStream = null;
+    if (gzipped) {
+      inputStream = new BufferedInputStream(new GZIPInputStream(
+          new FileInputStream(inFile)));
+    } else {
+      inputStream = new BufferedInputStream(new FileInputStream(inFile));
+    }
+
+    TarArchiveInputStream tis = new TarArchiveInputStream(inputStream);
+
+    for (TarArchiveEntry entry = tis.getNextTarEntry(); entry != null;) {
+      unpackEntries(tis, entry, untarDir);
+      entry = tis.getNextTarEntry();
+    }
+  }
+
+  private static void unpackEntries(TarArchiveInputStream tis,
+      TarArchiveEntry entry, File outputDir) throws IOException {
+    if (entry.isDirectory()) {
+      File subDir = new File(outputDir, entry.getName());
+      if (!subDir.mkdir() && !subDir.isDirectory()) {
+        throw new IOException("Mkdirs failed to create tar internal dir "
+            + outputDir);
+      }
+
+      for (TarArchiveEntry e : entry.getDirectoryEntries()) {
+        unpackEntries(tis, e, subDir);
+      }
+
+      return;
+    }
+
+    File outputFile = new File(outputDir, entry.getName());
+    if (!outputDir.exists()) {
+      if (!outputDir.mkdirs()) {
+        throw new IOException("Mkdirs failed to create tar internal dir "
+            + outputDir);
+      }
+    }
+
+    int count;
+    byte data[] = new byte[2048];
+    BufferedOutputStream outputStream = new BufferedOutputStream(
+        new FileOutputStream(outputFile));
+
+    while ((count = tis.read(data)) != -1) {
+      outputStream.write(data, 0, count);
+    }
+
+    outputStream.flush();
+    outputStream.close();
+  }
+
   /**
    * Class for creating hardlinks.
    * Supports Unix, Cygwin, WindXP.

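Taken together, FileUtil.unTar() now detects gzip compression from a "gz" file-name suffix and dispatches to unTarUsingJava() on Windows and to the external gzip/tar commands elsewhere. A minimal caller-side sketch; the archive and destination paths and the wrapper class are hypothetical, not part of the commit:

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.fs.FileUtil;

public class UntarExample {
  public static void main(String[] args) throws IOException {
    // On Windows the pure-Java commons-compress path is used; on Unix the
    // native tar utility is spawned, which also resolves symlinks in archives.
    File archive = new File("sample-data.tgz");      // hypothetical input archive
    File destination = new File("untarred-output");  // hypothetical output dir
    FileUtil.unTar(archive, destination);
  }
}
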
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
@@ -546,4 +546,44 @@ public class TestFileUtil {
     long expected = 2 * (3 + System.getProperty("line.separator").length());
     Assert.assertEquals(expected, du);
   }
+
+  private void doUntarAndVerify(File tarFile, File untarDir)
+      throws IOException {
+    if (untarDir.exists() && !FileUtil.fullyDelete(untarDir)) {
+      throw new IOException("Could not delete directory '" + untarDir + "'");
+    }
+    FileUtil.unTar(tarFile, untarDir);
+
+    String parentDir = untarDir.getCanonicalPath() + Path.SEPARATOR + "name";
+    File testFile = new File(parentDir + Path.SEPARATOR + "version");
+    Assert.assertTrue(testFile.exists());
+    Assert.assertTrue(testFile.length() == 0);
+    String imageDir = parentDir + Path.SEPARATOR + "image";
+    testFile = new File(imageDir + Path.SEPARATOR + "fsimage");
+    Assert.assertTrue(testFile.exists());
+    Assert.assertTrue(testFile.length() == 157);
+    String currentDir = parentDir + Path.SEPARATOR + "current";
+    testFile = new File(currentDir + Path.SEPARATOR + "fsimage");
+    Assert.assertTrue(testFile.exists());
+    Assert.assertTrue(testFile.length() == 4331);
+    testFile = new File(currentDir + Path.SEPARATOR + "edits");
+    Assert.assertTrue(testFile.exists());
+    Assert.assertTrue(testFile.length() == 1033);
+    testFile = new File(currentDir + Path.SEPARATOR + "fstime");
+    Assert.assertTrue(testFile.exists());
+    Assert.assertTrue(testFile.length() == 8);
+  }
+
+  @Test
+  public void testUntar() throws IOException {
+    String tarGzFileName = System.getProperty("test.cache.data",
+        "build/test/cache") + "/test-untar.tgz";
+    String tarFileName = System.getProperty("test.cache.data",
+        "build/test/cache") + "/test-untar.tar";
+    String dataDir = System.getProperty("test.build.data", "build/test/data");
+    File untarDir = new File(dataDir, "untarDir");
+
+    doUntarAndVerify(new File(tarGzFileName), untarDir);
+    doUntarAndVerify(new File(tarFileName), untarDir);
+  }
 }

src/test/java/org/apache/hadoop/fs/test-untar.tar (binary file not shown)
src/test/java/org/apache/hadoop/fs/test-untar.tgz (binary file not shown)