HADOOP-10462. DF#getFilesystem is not parsing the command output. Contributed by Akira AJISAKA.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1584571 13f79535-47bb-0310-9956-ffa450edef68
Author: Uma Maheswara Rao G
Date:   2014-04-04 09:10:51 +00:00
Parent: b6e31fd8ea
Commit: 362d284e17
3 changed files with 24 additions and 14 deletions
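
For context, org.apache.hadoop.fs.DF shells out to the platform's df command and exposes the result through getFilesystem() and getMount(). The DF.java hunks below show the bug: getFilesystem() used to call run() and return the filesystem field without calling parseOutput(), so callers could receive an unparsed (null) value, while getMount() already parsed the output. Below is a minimal caller sketch of the affected path; the DF(File, Configuration) constructor is assumed from DF's public API (it is not part of this diff), and the printed values are illustrative only.

import java.io.File;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.DF;

public class DFUsageSketch {
  public static void main(String[] args) throws Exception {
    // DF runs 'df' against the given path; getFilesystem() and getMount()
    // are only meaningful once the command output has been parsed.
    DF df = new DF(new File("."), new Configuration()); // constructor assumed from DF's public API

    // Before this change, getFilesystem() ran 'df' but skipped parseOutput(),
    // so the value below could come back unparsed; getMount() was unaffected.
    System.out.println("filesystem = " + df.getFilesystem());
    System.out.println("mount      = " + df.getMount());
  }
}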

hadoop-common-project/hadoop-common/CHANGES.txt

@@ -344,6 +344,9 @@ Release 2.5.0 - UNRELEASED
    HADOOP-10459. distcp V2 doesn't preserve root dir's attributes when -p is
    specified. (Yongjun Zhang via atm)

    HADOOP-10462. DF#getFilesystem is not parsing the command output.
    (Akira AJISAKA via umamahesh)

Release 2.4.1 - UNRELEASED

  INCOMPATIBLE CHANGES

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java

@@ -22,7 +22,6 @@ import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.NoSuchElementException;
import java.util.StringTokenizer;
@@ -75,6 +74,8 @@ public class DF extends Shell {
      return this.filesystem;
    } else {
      run();
      verifyExitCode();
      parseOutput();
      return filesystem;
    }
  }
@@ -114,14 +115,7 @@ public class DF extends Shell {
      this.mount = dirFile.getCanonicalPath().substring(0, 2);
    } else {
      run();
      // Skip parsing if df was not successful
      if (getExitCode() != 0) {
        StringBuffer sb = new StringBuffer("df could not be run successfully: ");
        for (String line: output) {
          sb.append(line);
        }
        throw new IOException(sb.toString());
      }
      verifyExitCode();
      parseOutput();
    }
@@ -204,6 +198,17 @@ }
    }
  }

  private void verifyExitCode() throws IOException {
    if (getExitCode() != 0) {
      StringBuilder sb =
          new StringBuilder("df could not be run successfully: ");
      for (String line : output) {
        sb.append(line);
      }
      throw new IOException(sb.toString());
    }
  }

  public static void main(String[] args) throws Exception {
    String path = ".";
    if (args.length > 0)

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java

@@ -25,7 +25,6 @@ import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.StringReader;
import java.util.EnumSet;
import java.util.Random;
import org.apache.hadoop.test.GenericTestUtils;
@@ -48,16 +47,19 @@ public class TestDFVariations {
  }

  @Test(timeout=5000)
  public void testMountAndFileSystem() throws Exception {
  public void testMount() throws Exception {
    XXDF df = new XXDF();
    String expectedMount =
        Shell.WINDOWS ? df.getDirPath().substring(0, 2) : "/foo/bar";
    String expectedFileSystem =
        Shell.WINDOWS ? df.getDirPath().substring(0, 2) : "/dev/sda3";
    assertEquals("Invalid mount point",
        expectedMount, df.getMount());
  }

  @Test(timeout=5000)
  public void testFileSystem() throws Exception {
    XXDF df = new XXDF();
    String expectedFileSystem =
        Shell.WINDOWS ? df.getDirPath().substring(0, 2) : "/dev/sda3";
    assertEquals("Invalid filesystem",
        expectedFileSystem, df.getFilesystem());
  }