HADOOP-15744. AbstractContractAppendTest fails against HDFS on HADOOP-15407 branch.
Contributed by Steve Loughran.
This commit is contained in:
parent
b4c23043d3
commit
1cf38a38da
|
@ -291,7 +291,7 @@ public class RawLocalFileSystem extends FileSystem {
|
|||
Progressable progress) throws IOException {
|
||||
FileStatus status = getFileStatus(f);
|
||||
if (status.isDirectory()) {
|
||||
throw new FileAlreadyExistsException("Cannot append to a directory: " + f);
|
||||
throw new IOException("Cannot append to a diretory (=" + f + " )");
|
||||
}
|
||||
return new FSDataOutputStream(new BufferedOutputStream(
|
||||
createOutputStreamWithMode(f, true, null), bufferSize), statistics,
|
||||
|
|
|
@ -18,12 +18,7 @@
|
|||
|
||||
package org.apache.hadoop.fs.contract;
|
||||
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.hadoop.fs.FSDataOutputStream;
|
||||
import org.apache.hadoop.fs.FileAlreadyExistsException;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.junit.Test;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -32,7 +27,6 @@ import org.slf4j.LoggerFactory;
|
|||
import static org.apache.hadoop.fs.contract.ContractTestUtils.createFile;
|
||||
import static org.apache.hadoop.fs.contract.ContractTestUtils.dataset;
|
||||
import static org.apache.hadoop.fs.contract.ContractTestUtils.touch;
|
||||
import static org.apache.hadoop.test.LambdaTestUtils.intercept;
|
||||
|
||||
/**
|
||||
* Test append -if supported
|
||||
|
@ -81,10 +75,15 @@ public abstract class AbstractContractAppendTest extends AbstractFSContractTestB
|
|||
|
||||
@Test
|
||||
public void testAppendNonexistentFile() throws Throwable {
|
||||
//expected
|
||||
handleExpectedException(
|
||||
intercept(Exception.class,
|
||||
() -> getFileSystem().append(target).close()));
|
||||
try {
|
||||
FSDataOutputStream out = getFileSystem().append(target);
|
||||
//got here: trouble
|
||||
out.close();
|
||||
fail("expected a failure");
|
||||
} catch (Exception e) {
|
||||
//expected
|
||||
handleExpectedException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -117,9 +116,15 @@ public abstract class AbstractContractAppendTest extends AbstractFSContractTestB
|
|||
|
||||
@Test
|
||||
public void testAppendMissingTarget() throws Throwable {
|
||||
handleExpectedException(
|
||||
intercept(Exception.class,
|
||||
() -> getFileSystem().append(target).close()));
|
||||
try {
|
||||
FSDataOutputStream out = getFileSystem().append(target);
|
||||
//got here: trouble
|
||||
out.close();
|
||||
fail("expected a failure");
|
||||
} catch (Exception e) {
|
||||
//expected
|
||||
handleExpectedException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -144,30 +149,4 @@ public abstract class AbstractContractAppendTest extends AbstractFSContractTestB
|
|||
dataset.length);
|
||||
ContractTestUtils.compareByteArrays(dataset, bytes, dataset.length);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAppendFileAfterDelete() throws Exception {
|
||||
final FileSystem fs = getFileSystem();
|
||||
final Path filePath = target;
|
||||
fs.create(filePath);
|
||||
fs.delete(filePath, false);
|
||||
intercept(FileNotFoundException.class,
|
||||
() -> fs.append(filePath));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAppendDirectory() throws Exception {
|
||||
final FileSystem fs = getFileSystem();
|
||||
|
||||
final Path folderPath = target;
|
||||
fs.mkdirs(folderPath);
|
||||
IOException ex = intercept(IOException.class,
|
||||
() -> fs.append(folderPath));
|
||||
if (ex instanceof FileAlreadyExistsException) {
|
||||
handleExpectedException(ex);
|
||||
} else {
|
||||
handleRelaxedException("Append to a directory",
|
||||
"FileAlreadyExistsException", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,19 +18,10 @@
|
|||
|
||||
package org.apache.hadoop.fs.azure.contract;
|
||||
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.FileAlreadyExistsException;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.fs.contract.AbstractContractAppendTest;
|
||||
import org.apache.hadoop.fs.contract.AbstractFSContract;
|
||||
import static org.apache.hadoop.fs.contract.ContractTestUtils.skip;
|
||||
import static org.apache.hadoop.test.LambdaTestUtils.intercept;
|
||||
|
||||
/**
|
||||
* Append test, skipping one of them.
|
||||
|
  public void testRenameFileBeingAppended() throws Throwable {
    // Azure/WASB cannot rename a file that has an open output stream,
    // so the base-class rename-while-appending test is skipped outright.
    skip("Skipping as renaming an opened file is not supported");
  }
|
||||
|
||||
/**
|
||||
* Wasb returns a different exception, so change the intercept logic here.
|
||||
*/
|
||||
@Override
|
||||
@Test
|
||||
public void testAppendDirectory() throws Exception {
|
||||
final FileSystem fs = getFileSystem();
|
||||
|
||||
final Path folderPath = path("testAppendDirectory");
|
||||
fs.mkdirs(folderPath);
|
||||
intercept(FileNotFoundException.class,
|
||||
() -> fs.append(folderPath));
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue