diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractConcatTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractConcatTest.java
index d30e0d66eff..2236f06e354 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractConcatTest.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractConcatTest.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.fs.contract;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.test.LambdaTestUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -62,14 +63,24 @@ public abstract class AbstractContractConcatTest extends AbstractFSContractTestB
public void testConcatEmptyFiles() throws Throwable {
touch(getFileSystem(), target);
handleExpectedException(intercept(Exception.class,
- () -> getFileSystem().concat(target, new Path[0])));
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ getFileSystem().concat(target, new Path[0]);
+ }
+ }));
}
@Test
public void testConcatMissingTarget() throws Throwable {
handleExpectedException(
intercept(Exception.class,
- () -> getFileSystem().concat(target, new Path[]{zeroByteFile})));
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ getFileSystem().concat(target, new Path[]{zeroByteFile});
+ }
+ }));
}
@Test
@@ -90,7 +101,12 @@ public abstract class AbstractContractConcatTest extends AbstractFSContractTestB
byte[] block = dataset(TEST_FILE_LEN, 0, 255);
createFile(getFileSystem(), target, false, block);
handleExpectedException(intercept(Exception.class,
- () -> getFileSystem().concat(target, new Path[]{target})));
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ getFileSystem().concat(target, new Path[]{target});
+ }
+ }));
}
}
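
Note on the pattern: every intercept(...) call that takes a Java 8 lambda on trunk is rewritten in this patch against an explicit LambdaTestUtils.VoidCallable, so the tests compile without lambda support. A minimal sketch of the translation (fs and missing are illustrative placeholders, not names from the patch):

    // Trunk form (Java 8+):
    //   intercept(FileNotFoundException.class, () -> fs.open(missing));
    //
    // Form used throughout this patch: the same check, spelled out as an
    // anonymous VoidCallable whose call() body performs the action.
    intercept(FileNotFoundException.class,
        new LambdaTestUtils.VoidCallable() {
          @Override
          public void call() throws Exception {
            fs.open(missing);
          }
        });
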
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractGetFileStatusTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractGetFileStatusTest.java
index cb706ede917..4529cc8b4e6 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractGetFileStatusTest.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractGetFileStatusTest.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.test.LambdaTestUtils;
import org.junit.Test;
import static org.apache.hadoop.fs.contract.ContractTestUtils.*;
@@ -277,21 +278,36 @@ public abstract class AbstractContractGetFileStatusTest extends
public void testLocatedStatusNoDir() throws Throwable {
describe("test the LocatedStatus call on a path which is not present");
intercept(FileNotFoundException.class,
- () -> getFileSystem().listLocatedStatus(path("missing")));
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ getFileSystem().listLocatedStatus(path("missing"));
+ }
+ });
}
@Test
public void testListStatusNoDir() throws Throwable {
describe("test the listStatus(path) call on a path which is not present");
intercept(FileNotFoundException.class,
- () -> getFileSystem().listStatus(path("missing")));
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ getFileSystem().listStatus(path("missing"));
+ }
+ });
}
@Test
public void testListStatusFilteredNoDir() throws Throwable {
describe("test the listStatus(path, filter) call on a missing path");
intercept(FileNotFoundException.class,
- () -> getFileSystem().listStatus(path("missing"), ALL_PATHS));
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ getFileSystem().listStatus(path("missing"), ALL_PATHS);
+ }
+ });
}
@Test
diff --git a/hadoop-tools/hadoop-azure/pom.xml b/hadoop-tools/hadoop-azure/pom.xml
index 1e3b5909841..3871c777683 100644
--- a/hadoop-tools/hadoop-azure/pom.xml
+++ b/hadoop-tools/hadoop-azure/pom.xml
@@ -159,13 +159,13 @@
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
-
+
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
@@ -177,13 +177,13 @@
<artifactId>jackson-core-asl</artifactId>
<scope>compile</scope>
-
+
@@ -237,13 +237,13 @@
<artifactId>log4j</artifactId>
<scope>test</scope>
-
+
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java
index f1ece3647ab..4d1156389b8 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java
@@ -973,8 +973,7 @@ public class AzureBlobFileSystemStore {
owner,
group,
null,
- path,
- hasAcl, false, false);
+ path);
this.version = version;
}
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsOutputStream.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsOutputStream.java
index 56fe0b11f56..5764bcb5517 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsOutputStream.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsOutputStream.java
@@ -89,7 +89,7 @@ public class AbfsOutputStream extends OutputStream implements Syncable, StreamCa
maxConcurrentRequestCount,
10L,
TimeUnit.SECONDS,
- new LinkedBlockingQueue<>());
+ new LinkedBlockingQueue());
this.completionService = new ExecutorCompletionService<>(this.threadExecutor);
}
@@ -244,6 +244,11 @@ public class AbfsOutputStream extends OutputStream implements Syncable, StreamCa
}
}
+ @Override
+ public void sync() throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
private synchronized void flushInternal(boolean isClose) throws IOException {
maybeThrowLastError();
writeCurrentBufferToService();
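
For context, a sketch (not the verbatim file) of how the executor is wired after the queue change above; the constructor arguments are assumed from the surrounding AbfsOutputStream code, since only part of the call is visible in the hunk:

    // The work queue is now a raw LinkedBlockingQueue instead of one created
    // with the diamond operator; behaviour is unchanged, only generics differ.
    ThreadPoolExecutor threadExecutor = new ThreadPoolExecutor(
        maxConcurrentRequestCount,   // core pool size
        maxConcurrentRequestCount,   // maximum pool size (assumed from context)
        10L, TimeUnit.SECONDS,       // keep-alive for idle worker threads
        new LinkedBlockingQueue());  // unbounded, raw-typed task queue
    ExecutorCompletionService<Void> completionService =
        new ExecutorCompletionService<>(threadExecutor);
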
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/utils/SSLSocketFactoryEx.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/utils/SSLSocketFactoryEx.java
index 00e7786fa4a..1fc81e207ab 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/utils/SSLSocketFactoryEx.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/utils/SSLSocketFactoryEx.java
@@ -32,8 +32,9 @@ import javax.net.ssl.SSLSocketFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+/*
import org.wildfly.openssl.OpenSSLProvider;
-
+*/
/**
* Extension to use native OpenSSL library instead of JSSE for better
@@ -85,11 +86,11 @@ public final class SSLSocketFactoryEx extends SSLSocketFactory {
public static SSLSocketFactoryEx getDefaultFactory() {
return instance;
}
-
+/*
static {
OpenSSLProvider.register();
}
-
+*/
private SSLSocketFactoryEx(SSLChannelMode preferredChannelMode)
throws IOException {
try {
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsClient.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsClient.java
index bc05e7d64d2..eb34999f63b 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsClient.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsClient.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.fs.azurebfs;
import java.util.UUID;
+import org.apache.hadoop.test.LambdaTestUtils;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
@@ -61,7 +62,7 @@ public final class ITestAbfsClient extends AbstractAbfsIntegrationTest {
public void testUnknownHost() throws Exception {
// When hitting hostName not found exception, the retry will take about 14 mins until failed.
// This test is to verify that the "Unknown host name: %s. Retrying to resolve the host name..." is logged as warning during the retry.
- AbfsConfiguration conf = this.getConfiguration();
+ final AbfsConfiguration conf = this.getConfiguration();
String accountName = this.getAccountName();
String fakeAccountName = "fake" + UUID.randomUUID() + accountName.substring(accountName.indexOf("."));
@@ -71,6 +72,11 @@ public final class ITestAbfsClient extends AbstractAbfsIntegrationTest {
intercept(AbfsRestOperationException.class,
"UnknownHostException: " + fakeAccountName,
- () -> FileSystem.get(conf.getRawConfiguration()));
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ FileSystem.get(conf.getRawConfiguration());
+ }
+ });
}
}
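
A side effect of dropping lambdas: any local variable referenced inside the anonymous VoidCallable must be final, which is why conf here (and testfile, dir, the stream handles and the aclSpec lists later in the patch) gains an explicit final modifier. A small illustration with placeholder names:

    // Pre-Java-8 compilers reject capturing a non-final local in an
    // anonymous inner class, so the local is declared final up front.
    final Path missing = new Path("/no-such-file");
    intercept(FileNotFoundException.class,
        new LambdaTestUtils.VoidCallable() {
          @Override
          public void call() throws Exception {
            fs.open(missing);  // 'missing' is captured; 'fs' is a placeholder
          }
        });
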
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsIdentityTransformer.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsIdentityTransformer.java
index 424361b247e..2316d6b596f 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsIdentityTransformer.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAbfsIdentityTransformer.java
@@ -239,7 +239,7 @@ public class ITestAbfsIdentityTransformer extends AbstractAbfsScaleTest{
List<AclEntry> aclEntriesToBeTransformed = Lists.newArrayList(
aclEntry(ACCESS, USER, DAEMON, ALL),
- aclEntry(ACCESS, USER, FULLY_QUALIFIED_NAME,ALL),
+ aclEntry(ACCESS, USER, FULLY_QUALIFIED_NAME, ALL),
aclEntry(DEFAULT, USER, SUPER_USER, ALL),
aclEntry(DEFAULT, USER, SERVICE_PRINCIPAL_ID, ALL),
aclEntry(DEFAULT, USER, SHORT_NAME, ALL),
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemAuthorization.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemAuthorization.java
index e579c14e2b1..af918e3bada 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemAuthorization.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemAuthorization.java
@@ -22,6 +22,7 @@ import java.util.Arrays;
import java.util.List;
import java.util.UUID;
+import org.apache.hadoop.test.LambdaTestUtils;
import org.junit.Test;
import org.apache.hadoop.fs.Path;
@@ -69,9 +70,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
public void testOpenFileWithInvalidPath() throws Exception {
final AzureBlobFileSystem fs = this.getFileSystem();
intercept(IllegalArgumentException.class,
- ()-> {
- fs.open(new Path("")).close();
- });
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.open(new Path("")).close();
+ }
+ });
}
@Test
@@ -86,9 +90,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
final AzureBlobFileSystem fs = this.getFileSystem();
fs.create(TEST_WRITE_ONLY_FILE_PATH_0).close();
intercept(AbfsAuthorizationException.class,
- ()-> {
- fs.open(TEST_WRITE_ONLY_FILE_PATH_0).close();
- });
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.open(TEST_WRITE_ONLY_FILE_PATH_0).close();
+ }
+ });
}
@Test
@@ -101,9 +108,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
public void testCreateFileUnauthorized() throws Exception {
final AzureBlobFileSystem fs = this.getFileSystem();
intercept(AbfsAuthorizationException.class,
- ()-> {
- fs.create(TEST_READ_ONLY_FILE_PATH_0).close();
- });
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.create(TEST_READ_ONLY_FILE_PATH_0).close();
+ }
+ });
}
@Test
@@ -118,9 +128,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
final AzureBlobFileSystem fs = this.getFileSystem();
fs.create(TEST_WRITE_THEN_READ_ONLY_PATH).close();
intercept(AbfsAuthorizationException.class,
- ()-> {
- fs.append(TEST_WRITE_THEN_READ_ONLY_PATH).close();
- });
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.append(TEST_WRITE_THEN_READ_ONLY_PATH).close();
+ }
+ });
}
@Test
@@ -133,9 +146,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
public void testRenameUnauthorized() throws Exception {
final AzureBlobFileSystem fs = this.getFileSystem();
intercept(AbfsAuthorizationException.class,
- ()-> {
- fs.rename(TEST_WRITE_ONLY_FILE_PATH_0, TEST_WRITE_ONLY_FILE_PATH_1);
- });
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.rename(TEST_WRITE_ONLY_FILE_PATH_0, TEST_WRITE_ONLY_FILE_PATH_1);
+ }
+ });
}
@Test
@@ -150,9 +166,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
final AzureBlobFileSystem fs = this.getFileSystem();
fs.create(TEST_WRITE_THEN_READ_ONLY_PATH).close();
intercept(AbfsAuthorizationException.class,
- ()-> {
- fs.delete(TEST_WRITE_THEN_READ_ONLY_PATH, false);
- });
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.delete(TEST_WRITE_THEN_READ_ONLY_PATH, false);
+ }
+ });
}
@Test
@@ -167,9 +186,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
final AzureBlobFileSystem fs = getFileSystem();
fs.create(TEST_WRITE_ONLY_FILE_PATH_0).close();
intercept(AbfsAuthorizationException.class,
- ()-> {
- fs.listStatus(TEST_WRITE_ONLY_FILE_PATH_0);
- });
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.listStatus(TEST_WRITE_ONLY_FILE_PATH_0);
+ }
+ });
}
@Test
@@ -182,9 +204,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
public void testMkDirsUnauthorized() throws Exception {
final AzureBlobFileSystem fs = getFileSystem();
intercept(AbfsAuthorizationException.class,
- ()-> {
- fs.mkdirs(TEST_READ_ONLY_FOLDER_PATH, new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE));
- });
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.mkdirs(TEST_READ_ONLY_FOLDER_PATH, new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE));
+ }
+ });
}
@Test
@@ -199,9 +224,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
final AzureBlobFileSystem fs = getFileSystem();
fs.create(TEST_WRITE_ONLY_FILE_PATH_0).close();
intercept(AbfsAuthorizationException.class,
- ()-> {
- fs.getFileStatus(TEST_WRITE_ONLY_FILE_PATH_0);
- });
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.getFileStatus(TEST_WRITE_ONLY_FILE_PATH_0);
+ }
+ });
}
@Test
@@ -218,9 +246,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
assumeTrue("This test case only runs when namespace is enabled", fs.getIsNamespaceEnabled());
fs.create(TEST_WRITE_THEN_READ_ONLY_PATH).close();
intercept(AbfsAuthorizationException.class,
- ()-> {
- fs.setOwner(TEST_WRITE_THEN_READ_ONLY_PATH, TEST_USER, TEST_GROUP);
- });
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.setOwner(TEST_WRITE_THEN_READ_ONLY_PATH, TEST_USER, TEST_GROUP);
+ }
+ });
}
@Test
@@ -237,9 +268,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
assumeTrue("This test case only runs when namespace is enabled", fs.getIsNamespaceEnabled());
fs.create(TEST_WRITE_THEN_READ_ONLY_PATH).close();
intercept(AbfsAuthorizationException.class,
- ()-> {
- fs.setPermission(TEST_WRITE_THEN_READ_ONLY_PATH, new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE));
- });
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.setPermission(TEST_WRITE_THEN_READ_ONLY_PATH, new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE));
+ }
+ });
}
@Test
@@ -256,11 +290,14 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
final AzureBlobFileSystem fs = getFileSystem();
assumeTrue("This test case only runs when namespace is enabled", fs.getIsNamespaceEnabled());
fs.create(TEST_WRITE_THEN_READ_ONLY_PATH).close();
- List<AclEntry> aclSpec = Arrays.asList(aclEntry(ACCESS, GROUP, BAR, FsAction.ALL));
+ final List<AclEntry> aclSpec = Arrays.asList(aclEntry(ACCESS, GROUP, BAR, FsAction.ALL));
intercept(AbfsAuthorizationException.class,
- ()-> {
- fs.modifyAclEntries(TEST_WRITE_THEN_READ_ONLY_PATH, aclSpec);
- });
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.modifyAclEntries(TEST_WRITE_THEN_READ_ONLY_PATH, aclSpec);
+ }
+ });
}
@Test
@@ -277,11 +314,14 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
final AzureBlobFileSystem fs = getFileSystem();
assumeTrue("This test case only runs when namespace is enabled", fs.getIsNamespaceEnabled());
fs.create(TEST_WRITE_THEN_READ_ONLY_PATH).close();
- List<AclEntry> aclSpec = Arrays.asList(aclEntry(ACCESS, GROUP, BAR, FsAction.ALL));
+ final List<AclEntry> aclSpec = Arrays.asList(aclEntry(ACCESS, GROUP, BAR, FsAction.ALL));
intercept(AbfsAuthorizationException.class,
- ()-> {
- fs.removeAclEntries(TEST_WRITE_THEN_READ_ONLY_PATH, aclSpec);
- });
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.removeAclEntries(TEST_WRITE_THEN_READ_ONLY_PATH, aclSpec);
+ }
+ });
}
@Test
@@ -298,9 +338,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
assumeTrue("This test case only runs when namespace is enabled", fs.getIsNamespaceEnabled());
fs.create(TEST_WRITE_THEN_READ_ONLY_PATH).close();
intercept(AbfsAuthorizationException.class,
- ()-> {
- fs.removeDefaultAcl(TEST_WRITE_THEN_READ_ONLY_PATH);
- });
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.removeDefaultAcl(TEST_WRITE_THEN_READ_ONLY_PATH);
+ }
+ });
}
@Test
@@ -317,9 +360,12 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
assumeTrue("This test case only runs when namespace is enabled", fs.getIsNamespaceEnabled());
fs.create(TEST_WRITE_THEN_READ_ONLY_PATH).close();
intercept(AbfsAuthorizationException.class,
- ()-> {
- fs.removeAcl(TEST_WRITE_THEN_READ_ONLY_PATH);
- });
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.removeAcl(TEST_WRITE_THEN_READ_ONLY_PATH);
+ }
+ });
}
@Test
@@ -336,11 +382,14 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
final AzureBlobFileSystem fs = getFileSystem();
assumeTrue("This test case only runs when namespace is enabled", fs.getIsNamespaceEnabled());
fs.create(TEST_WRITE_THEN_READ_ONLY_PATH).close();
- List<AclEntry> aclSpec = Arrays.asList(aclEntry(ACCESS, GROUP, BAR, FsAction.ALL));
+ final List<AclEntry> aclSpec = Arrays.asList(aclEntry(ACCESS, GROUP, BAR, FsAction.ALL));
intercept(AbfsAuthorizationException.class,
- ()-> {
- fs.setAcl(TEST_WRITE_THEN_READ_ONLY_PATH, aclSpec);
- });
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.setAcl(TEST_WRITE_THEN_READ_ONLY_PATH, aclSpec);
+ }
+ });
}
@Test
@@ -359,8 +408,11 @@ public class ITestAzureBlobFileSystemAuthorization extends AbstractAbfsIntegrati
fs.create(TEST_WRITE_ONLY_FILE_PATH_0).close();
List<AclEntry> aclSpec = Arrays.asList(aclEntry(ACCESS, GROUP, BAR, FsAction.ALL));
intercept(AbfsAuthorizationException.class,
- ()-> {
- fs.getAclStatus(TEST_WRITE_ONLY_FILE_PATH_0);
- });
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.getAclStatus(TEST_WRITE_ONLY_FILE_PATH_0);
+ }
+ });
}
}
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelete.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelete.java
index 486daca4f11..4cb1ea1e8ab 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelete.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelete.java
@@ -26,6 +26,7 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
+import org.apache.hadoop.test.LambdaTestUtils;
import org.junit.Test;
import org.apache.hadoop.fs.FileAlreadyExistsException;
@@ -69,12 +70,17 @@ public class ITestAzureBlobFileSystemDelete extends
@Test()
public void testOpenFileAfterDelete() throws Exception {
final AzureBlobFileSystem fs = getFileSystem();
- Path testfile = new Path("/testFile");
+ final Path testfile = new Path("/testFile");
touch(testfile);
assertDeleted(fs, testfile, false);
intercept(FileNotFoundException.class,
- () -> fs.open(testfile));
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.open(testfile);
+ }
+ });
}
@Test
@@ -122,10 +128,15 @@ public class ITestAzureBlobFileSystemDelete extends
}
es.shutdownNow();
- Path dir = new Path("/test");
+ final Path dir = new Path("/test");
// first try a non-recursive delete, expect failure
intercept(FileAlreadyExistsException.class,
- () -> fs.delete(dir, false));
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.delete(dir, false);
+ }
+ });
assertDeleted(fs, dir, true);
assertPathDoesNotExist(fs, "deleted", dir);
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemE2E.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemE2E.java
index ebc9c07e53e..94ed738377f 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemE2E.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemE2E.java
@@ -23,6 +23,7 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Random;
+import org.apache.hadoop.test.LambdaTestUtils;
import org.junit.Test;
import org.apache.hadoop.conf.Configuration;
@@ -174,12 +175,17 @@ public class ITestAzureBlobFileSystemE2E extends AbstractAbfsIntegrationTest {
final Path testFilePath = new Path(methodName.getMethodName());
testWriteOneByteToFile(testFilePath);
- FSDataInputStream inputStream = fs.open(testFilePath, TEST_DEFAULT_BUFFER_SIZE);
+ final FSDataInputStream inputStream = fs.open(testFilePath, TEST_DEFAULT_BUFFER_SIZE);
fs.delete(testFilePath, true);
assertFalse(fs.exists(testFilePath));
intercept(FileNotFoundException.class,
- () -> inputStream.read(new byte[1]));
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ inputStream.read(new byte[1]);
+ }
+ });
}
@Test
@@ -187,7 +193,7 @@ public class ITestAzureBlobFileSystemE2E extends AbstractAbfsIntegrationTest {
final AzureBlobFileSystem fs = getFileSystem();
final Path testFilePath = new Path(methodName.getMethodName());
- FSDataOutputStream stream = fs.create(testFilePath);
+ final FSDataOutputStream stream = fs.create(testFilePath);
assertTrue(fs.exists(testFilePath));
stream.write(TEST_BYTE);
@@ -196,7 +202,12 @@ public class ITestAzureBlobFileSystemE2E extends AbstractAbfsIntegrationTest {
// trigger append call
intercept(FileNotFoundException.class,
- () -> stream.close());
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ stream.close();
+ }
+ });
}
@Test
@@ -204,14 +215,19 @@ public class ITestAzureBlobFileSystemE2E extends AbstractAbfsIntegrationTest {
final AzureBlobFileSystem fs = getFileSystem();
final Path testFilePath = new Path(methodName.getMethodName());
- FSDataOutputStream stream = fs.create(testFilePath);
+ final FSDataOutputStream stream = fs.create(testFilePath);
assertTrue(fs.exists(testFilePath));
fs.delete(testFilePath, true);
assertFalse(fs.exists(testFilePath));
intercept(FileNotFoundException.class,
- () -> stream.close());
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ stream.close();
+ }
+ });
}
private void testWriteOneByteToFile(Path testFilePath) throws Exception {
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemListStatus.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemListStatus.java
index 25a15679263..a171d9176ed 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemListStatus.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemListStatus.java
@@ -27,6 +27,7 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
+import org.apache.hadoop.test.LambdaTestUtils;
import org.junit.Test;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -38,7 +39,7 @@ import org.apache.hadoop.fs.contract.ContractTestUtils;
import static org.apache.hadoop.fs.contract.ContractTestUtils.assertMkdirs;
import static org.apache.hadoop.fs.contract.ContractTestUtils.createFile;
import static org.apache.hadoop.fs.contract.ContractTestUtils.assertPathExists;
-import static org.apache.hadoop.fs.contract.ContractTestUtils.rename;
+import static org.apache.hadoop.fs.contract.ContractTestUtils.assertRenameOutcome;
import static org.apache.hadoop.test.LambdaTestUtils.intercept;
@@ -136,7 +137,7 @@ public class ITestAzureBlobFileSystemListStatus extends
assertEquals(1, fileStatuses.length);
assertEquals("sub", fileStatuses[0].getPath().getName());
assertIsDirectoryReference(fileStatuses[0]);
- Path childF = fs.makeQualified(new Path("/test/f"));
+ final Path childF = fs.makeQualified(new Path("/test/f"));
touch(childF);
fileStatuses = fs.listStatus(testDir);
assertEquals(2, fileStatuses.length);
@@ -154,7 +155,12 @@ public class ITestAzureBlobFileSystemListStatus extends
fs.delete(testDir, true);
intercept(FileNotFoundException.class,
- () -> fs.listFiles(childF, false).next());
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.listFiles(childF, false).next();
+ }
+ });
// do some final checks on the status (failing due to version checks)
assertEquals("Path mismatch of " + locatedChildStatus,
@@ -228,7 +234,7 @@ public class ITestAzureBlobFileSystemListStatus extends
createFile(fs, nonTrailingPeriodFile, false, new byte[0]);
try {
- rename(fs, nonTrailingPeriodFile, trailingPeriodFile);
+ assertRenameOutcome(fs, nonTrailingPeriodFile, trailingPeriodFile, true);
}
catch(IllegalArgumentException e) {
exceptionThrown = true;
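
The import swap above replaces ContractTestUtils.rename() with assertRenameOutcome(), which performs the rename and asserts on the boolean it returns in one call. A hedged usage sketch with placeholder paths:

    // import static org.apache.hadoop.fs.contract.ContractTestUtils.assertRenameOutcome;
    // Renames src to dst and fails the test unless rename() returned true.
    assertRenameOutcome(fs, src, dst, true);
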
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestGetNameSpaceEnabled.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestGetNameSpaceEnabled.java
index b9a23156696..12157568dd2 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestGetNameSpaceEnabled.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestGetNameSpaceEnabled.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.fs.azurebfs;
import java.io.IOException;
import java.util.UUID;
+import org.apache.hadoop.test.LambdaTestUtils;
import org.junit.Assume;
import org.junit.Test;
@@ -65,13 +66,16 @@ public class ITestGetNameSpaceEnabled extends AbstractAbfsIntegrationTest {
String testUri = this.getTestUrl();
String nonExistingFsUrl = getAbfsScheme() + "://" + UUID.randomUUID()
+ testUri.substring(testUri.indexOf("@"));
- AzureBlobFileSystem fs = this.getFileSystem(nonExistingFsUrl);
+ final AzureBlobFileSystem fs = this.getFileSystem(nonExistingFsUrl);
intercept(AbfsRestOperationException.class,
"\"The specified filesystem does not exist.\", 404",
- ()-> {
- fs.getIsNamespaceEnabled();
- });
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.getIsNamespaceEnabled();
+ }
+ });
}
@Test
@@ -86,11 +90,14 @@ public class ITestGetNameSpaceEnabled extends AbstractAbfsIntegrationTest {
secret = (char) (secret.charAt(0) + 1) + secret.substring(1);
config.set(configkKey, secret);
- AzureBlobFileSystem fs = this.getFileSystem(config);
+ final AzureBlobFileSystem fs = this.getFileSystem(config);
intercept(AbfsRestOperationException.class,
"\"Server failed to authenticate the request. Make sure the value of Authorization header is formed correctly including the signature.\", 403",
- ()-> {
- fs.getIsNamespaceEnabled();
- });
+ new LambdaTestUtils.VoidCallable() {
+ @Override
+ public void call() throws Exception {
+ fs.getIsNamespaceEnabled();
+ }
+ });
}
}
\ No newline at end of file
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java
index eeed6cec872..a78602b7c7d 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.fs.azurebfs;
import java.io.IOException;
import java.lang.reflect.Field;
+import java.nio.charset.StandardCharsets;
-import org.apache.commons.codec.Charsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys;
import org.apache.hadoop.fs.azurebfs.constants.TestConfigurationKeys;
@@ -98,8 +98,8 @@ public class TestAbfsConfigurationFieldsValidation {
public TestAbfsConfigurationFieldsValidation() throws Exception {
super();
this.accountName = "testaccount1.blob.core.windows.net";
- this.encodedString = Base64.encode("base64Value".getBytes(Charsets.UTF_8));
- this.encodedAccountKey = Base64.encode("someAccountKey".getBytes(Charsets.UTF_8));
+ this.encodedString = Base64.encode("base64Value".getBytes(StandardCharsets.UTF_8));
+ this.encodedAccountKey = Base64.encode("someAccountKey".getBytes(StandardCharsets.UTF_8));
Configuration configuration = new Configuration();
configuration.addResource(TestConfigurationKeys.TEST_CONFIGURATION_FILE_NAME);
configuration.set(INT_KEY, "1234565");
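
The Charsets change above simply swaps the commons-codec helper for the JDK constant; the produced bytes are identical. For reference:

    // old: "base64Value".getBytes(org.apache.commons.codec.Charsets.UTF_8)
    // new: the equivalent call using only the JDK
    byte[] utf8 = "base64Value".getBytes(java.nio.charset.StandardCharsets.UTF_8);
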
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockAbfsAuthorizer.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockAbfsAuthorizer.java
index 6820edd4ea2..c8c060c5da2 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockAbfsAuthorizer.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockAbfsAuthorizer.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.fs.azurebfs.extensions;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
@@ -46,6 +44,8 @@ public class MockAbfsAuthorizer implements AbfsAuthorizer {
private Set<Path> readOnlyPaths = new HashSet<Path>();
private Set<Path> writeOnlyPaths = new HashSet<Path>();
private Set<Path> readWritePaths = new HashSet<Path>();
+ private Set<Path> readPaths = new HashSet<>();
+ private Set<Path> writePaths = new HashSet<>();
private int writeThenReadOnly = 0;
public MockAbfsAuthorizer(Configuration conf) {
this.conf = conf;
@@ -61,6 +61,10 @@ public class MockAbfsAuthorizer implements AbfsAuthorizer {
writeOnlyPaths.add(new Path(TEST_WRITE_ONLY_FOLDER));
readWritePaths.add(new Path(TEST_READ_WRITE_FILE_0));
readWritePaths.add(new Path(TEST_READ_WRITE_FILE_1));
+ readPaths.addAll(readOnlyPaths);
+ readPaths.addAll(readWritePaths);
+ writePaths.addAll(writeOnlyPaths);
+ writePaths.addAll(readWritePaths);
}
@Override
@@ -70,12 +74,11 @@ public class MockAbfsAuthorizer implements AbfsAuthorizer {
paths.add(new Path(path.getName()));
}
- if (action.equals(FsAction.READ) && Stream.concat(readOnlyPaths.stream(), readWritePaths.stream()).collect(Collectors.toSet()).containsAll(paths)) {
+ if (action.equals(FsAction.READ) && readPaths.containsAll(paths)) {
return true;
} else if (action.equals(FsAction.READ) && paths.contains(new Path(TEST_WRITE_THEN_READ_ONLY)) && writeThenReadOnly == 1) {
return true;
- } else if (action.equals(FsAction.WRITE)
- && Stream.concat(writeOnlyPaths.stream(), readWritePaths.stream()).collect(Collectors.toSet()).containsAll(paths)) {
+ } else if (action.equals(FsAction.WRITE) && writePaths.containsAll(paths)) {
return true;
} else if (action.equals(FsAction.WRITE) && paths.contains(new Path(TEST_WRITE_THEN_READ_ONLY)) && writeThenReadOnly == 0) {
writeThenReadOnly = 1;
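
The MockAbfsAuthorizer change precomputes the read and write path unions once, so each access check is a plain containsAll() instead of a Java 8 Stream.concat(). A sketch of the idea, reusing the field names from the class:

    // Built once at init time: paths readable either read-only or read-write;
    // the write-side union is built the same way.
    Set<Path> readPaths = new HashSet<Path>();
    readPaths.addAll(readOnlyPaths);
    readPaths.addAll(readWritePaths);
    // Per-request check: no streams needed.
    boolean readAllowed = action.equals(FsAction.READ) && readPaths.containsAll(paths);
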