HDFS-6062. TestRetryCacheWithHA#testConcat is flaky. Contributed by Jing Zhao.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1574997 13f79535-47bb-0310-9956-ffa450edef68
Haohui Mai 2014-03-06 18:51:04 +00:00
parent 5a3f614794
commit e65084b616
2 changed files with 9 additions and 3 deletions

CHANGES.txt

@@ -698,6 +698,8 @@ Release 2.4.0 - UNRELEASED
   HDFS-6058. Fix TestHDFSCLI failures after HADOOP-8691 change.
   (Akira Ajisaka via wheat9)
+  HDFS-6062. TestRetryCacheWithHA#testConcat is flaky. (Jing Zhao via wheat9)
   BREAKDOWN OF HDFS-5698 SUBTASKS AND RELATED JIRAS
   HDFS-5717. Save FSImage header in protobuf. (Haohui Mai via jing9)

TestRetryCacheWithHA.java

@@ -562,10 +562,12 @@ public class TestRetryCacheWithHA {
     @Override
     void prepare() throws Exception {
-      DFSTestUtil.createFile(dfs, new Path(target), BlockSize, DataNodes, 0);
+      final Path targetPath = new Path(target);
+      DFSTestUtil.createFile(dfs, targetPath, BlockSize, DataNodes, 0);
       for (int i = 0; i < srcPaths.length; i++) {
         DFSTestUtil.createFile(dfs, srcPaths[i], BlockSize, DataNodes, 0);
       }
+      assertEquals(BlockSize, dfs.getFileStatus(targetPath).getLen());
     }

     @Override
@@ -576,10 +578,12 @@ public class TestRetryCacheWithHA {
     @Override
     boolean checkNamenodeBeforeReturn() throws Exception {
       Path targetPath = new Path(target);
-      boolean done = dfs.exists(targetPath);
+      boolean done = dfs.getFileStatus(targetPath).getLen() == BlockSize
+          * (srcs.length + 1);
       for (int i = 0; i < CHECKTIMES && !done; i++) {
         Thread.sleep(1000);
-        done = dfs.exists(targetPath);
+        done = dfs.getFileStatus(targetPath).getLen() == BlockSize
+            * (srcs.length + 1);
       }
       return done;
     }
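
The patch replaces the bare existence check with a length check: the concat target file is created in prepare() and therefore already exists before the retried concat completes, so dfs.exists(targetPath) could report success too early, which is what made the test flaky. Polling the target's length until it equals one block per concatenated file is a reliable completion signal. A minimal standalone sketch of that polling pattern follows; the names waitForConcat, fs, BLOCK_SIZE and CHECK_TIMES are illustrative assumptions, not identifiers from the patch.

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

class ConcatCheckSketch {
  static final long BLOCK_SIZE = 1024;  // assumed length of each source file
  static final int CHECK_TIMES = 30;    // assumed number of polling attempts

  /**
   * Returns true once the target's length equals one block per concatenated
   * file (the original target plus every source), i.e. concat has completed.
   * Checking length rather than existence avoids the race, because the
   * target file exists before the concat operation finishes.
   */
  static boolean waitForConcat(FileSystem fs, Path target, int numSrcs)
      throws Exception {
    long expected = BLOCK_SIZE * (numSrcs + 1);
    for (int i = 0; i < CHECK_TIMES; i++) {
      if (fs.getFileStatus(target).getLen() == expected) {
        return true;
      }
      Thread.sleep(1000);  // wait before re-checking the file length
    }
    return false;
  }
}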