HDFS-15286. Concat on a same file deleting the file. Contributed by hemanthboyina.
(cherry picked from commit 5e0eda5d5f)
commit d504574d82
parent 62ee5970f8
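In short: FSDirConcatOp guarded against concat'ing a file onto itself by comparing the source and target INodes with ==. When the source path resolves through an inode reference (e.g. a file renamed while a snapshot still refers to it), the two lookups can return distinct objects for the same inode, so the guard was skipped and, per the JIRA title, the concat ended up deleting the file. The change below switches the guard to equals() and adds a regression test.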
@@ -150,7 +150,7 @@ class FSDirConcatOp {
             + " is referred by some other reference in some snapshot.");
       }
       // source file cannot be the same with the target file
-      if (srcINode == targetINode) {
+      if (srcINode.equals(targetINode)) {
         throw new HadoopIllegalArgumentException("concat: the src file " + src
             + " is the same with the target file " + targetIIP.getPath());
       }
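Why that one-line change is enough: == tests Java object identity, while INode equality can key on the stable inode id, so two different in-memory objects that denote the same file still compare equal. A minimal self-contained sketch of the distinction (SketchINode is a hypothetical stand-in with assumed id-based equality, not the real INode class):

// Two distinct objects carrying the same inode id: the identity check
// misses the match, id-based equals() catches it.
class SketchINode {
  final long id;

  SketchINode(long id) {
    this.id = id;
  }

  @Override
  public boolean equals(Object o) {
    // assumption for this sketch: equality is keyed on the inode id
    return o instanceof SketchINode && ((SketchINode) o).id == this.id;
  }

  @Override
  public int hashCode() {
    return Long.hashCode(id);
  }
}

public class IdentityVsEqualsSketch {
  public static void main(String[] args) {
    SketchINode target = new SketchINode(16386L);
    // same file, reached through a reference: a different Java object
    SketchINode viaReference = new SketchINode(16386L);
    System.out.println(target == viaReference);      // false: the old guard let this through
    System.out.println(target.equals(viaReference)); // true: the fixed guard rejects the concat
  }
}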
@@ -45,6 +45,7 @@ import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.test.LambdaTestUtils;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -525,4 +526,42 @@ public class TestHDFSConcat {
       GenericTestUtils.assertExceptionContains(errMsg, e);
     }
   }
+
+  /**
+   * Test concat on same source and target file which is an inode reference.
+   */
+  @Test
+  public void testConcatOnSameFile() throws Exception {
+    String dir = "/dir1";
+    Path trgDir = new Path(dir);
+    dfs.mkdirs(new Path(dir));
+
+    // create a source file
+    String dir2 = "/dir2";
+    Path srcDir = new Path(dir2);
+    dfs.mkdirs(srcDir);
+    dfs.allowSnapshot(srcDir);
+    Path src = new Path(srcDir, "file1");
+    DFSTestUtil.createFile(dfs, src, 512, (short) 2, 0);
+
+    // make the file an inode reference and delete the reference
+    dfs.createSnapshot(srcDir, "s1");
+    dfs.rename(src, trgDir);
+    dfs.deleteSnapshot(srcDir, "s1");
+    Path[] srcs = new Path[1];
+    srcs[0] = new Path(dir, "file1");
+
+    // perform concat
+    LambdaTestUtils.intercept(RemoteException.class,
+        "concat: the src file /dir1/file1 is the same with the target"
+            + " file /dir1/file1",
+        () -> dfs.concat(srcs[0], srcs));
+
+    // the file should still exist and remain readable
+    byte[] buff = new byte[1080];
+    FSDataInputStream stream = dfs.open(srcs[0]);
+    stream.readFully(0, buff, 0, 512);
+
+    assertEquals(1, dfs.getContentSummary(new Path(dir)).getFileCount());
+  }
 }
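The new test leans on LambdaTestUtils.intercept, which evaluates a lambda, fails the test unless it throws the given exception type with a message containing the expected substring, and returns the caught exception. A standalone sketch of the same pattern (InterceptPatternSketch and failingOp are illustrative names, not part of Hadoop):

import java.io.IOException;

import org.apache.hadoop.test.LambdaTestUtils;

public class InterceptPatternSketch {

  // stand-in for an operation expected to fail, like dfs.concat above
  static void failingOp() throws IOException {
    throw new IOException("concat: the src file /dir1/file1 is the same"
        + " with the target file /dir1/file1");
  }

  public static void main(String[] args) throws Exception {
    // intercept runs the lambda, asserts the exception type and message
    // fragment, and hands back the caught exception for further checks
    IOException ex = LambdaTestUtils.intercept(IOException.class,
        "is the same with the target", () -> failingOp());
    System.out.println("caught as expected: " + ex.getMessage());
  }
}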