From 7051bd78b17b2666c2fa0f61823920285a060a76 Mon Sep 17 00:00:00 2001
From: Steve Loughran
Date: Wed, 3 Oct 2018 12:52:53 +0100
Subject: [PATCH] HADOOP-15795. Make HTTPS the default protocol for ABFS.
 Contributed by Da Zhou.

---
 .../hadoop/fs/azurebfs/AbfsConfiguration.java |   8 ++
 .../fs/azurebfs/AzureBlobFileSystem.java      |   4 +-
 .../fs/azurebfs/AzureBlobFileSystemStore.java |  12 +--
 .../azurebfs/SecureAzureBlobFileSystem.java   |   2 +-
 .../azurebfs/constants/ConfigurationKeys.java |   1 +
 .../constants/FileSystemConfigurations.java   |   2 +
 .../ITestAzureBlobFileSystemFileStatus.java   |   2 +-
 .../fs/azurebfs/ITestClientUrlScheme.java     | 101 ++++++++++++++++++
 .../fs/azurebfs/ITestOauthOverAbfsScheme.java |  63 +++++++++++
 .../contract/AbfsFileSystemContract.java      |   2 +-
 .../services/TestOauthFailOverHttp.java       |  55 ----------
 11 files changed, 185 insertions(+), 67 deletions(-)
 create mode 100644 hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestClientUrlScheme.java
 create mode 100644 hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestOauthOverAbfsScheme.java
 delete mode 100644 hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestOauthFailOverHttp.java

diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AbfsConfiguration.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AbfsConfiguration.java
index c57c34097c6..58e12a84ab2 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AbfsConfiguration.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AbfsConfiguration.java
@@ -162,6 +162,10 @@ public class AbfsConfiguration{
       DefaultValue = "")
   private String abfsExternalAuthorizationClass;
 
+  @BooleanConfigurationValidatorAnnotation(ConfigurationKey = FS_AZURE_ALWAYS_USE_HTTPS,
+      DefaultValue = DEFAULT_ENABLE_HTTPS)
+  private boolean alwaysUseHttps;
+
   private Map<String, String> storageAccountKeys;
 
   public AbfsConfiguration(final Configuration rawConfig, String accountName)
@@ -433,6 +437,10 @@ public AbfsDelegationTokenManager getDelegationTokenManager() throws IOException
     return new AbfsDelegationTokenManager(getRawConfiguration());
   }
 
+  public boolean isHttpsAlwaysUsed() {
+    return this.alwaysUseHttps;
+  }
+
   public AccessTokenProvider getTokenProvider() throws TokenAccessProviderException {
     AuthType authType = getEnum(FS_AZURE_ACCOUNT_AUTH_TYPE_PROPERTY_NAME, AuthType.SharedKey);
     if (authType == AuthType.OAuth) {
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java
index d84ea5ba747..7d805421d94 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystem.java
@@ -105,7 +105,7 @@ public void initialize(URI uri, Configuration configuration)
     this.uri = URI.create(uri.getScheme() + "://" + uri.getAuthority());
     this.userGroupInformation = UserGroupInformation.getCurrentUser();
     this.user = userGroupInformation.getUserName();
-    this.abfsStore = new AzureBlobFileSystemStore(uri, this.isSecure(), configuration, userGroupInformation);
+    this.abfsStore = new AzureBlobFileSystemStore(uri, this.isSecureScheme(), configuration, userGroupInformation);
     final AbfsConfiguration abfsConfiguration = abfsStore.getAbfsConfiguration();
 
     this.setWorkingDirectory(this.getHomeDirectory());
@@ -154,7 +154,7 @@ public String toString() {
     return sb.toString();
   }
 
-  public boolean isSecure() {
+  public boolean isSecureScheme() {
     return false;
   }
 
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java
index a735ce094ea..1ac1761352e 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/AzureBlobFileSystemStore.java
@@ -115,7 +115,7 @@ public class AzureBlobFileSystemStore {
   private boolean isNamespaceEnabledSet;
   private boolean isNamespaceEnabled;
 
-  public AzureBlobFileSystemStore(URI uri, boolean isSecure, Configuration configuration, UserGroupInformation userGroupInformation)
+  public AzureBlobFileSystemStore(URI uri, boolean isSecureScheme, Configuration configuration, UserGroupInformation userGroupInformation)
           throws AzureBlobFileSystemException, IOException {
     this.uri = uri;
 
@@ -142,13 +142,11 @@ public AzureBlobFileSystemStore(URI uri, boolean isSecure, Configuration configu
     this.azureAtomicRenameDirSet = new HashSet<>(Arrays.asList(
         abfsConfiguration.getAzureAtomicRenameDirs().split(AbfsHttpConstants.COMMA)));
 
-    if (AuthType.OAuth == abfsConfiguration.getEnum(FS_AZURE_ACCOUNT_AUTH_TYPE_PROPERTY_NAME, AuthType.SharedKey)
-        && !FileSystemUriSchemes.ABFS_SECURE_SCHEME.equals(uri.getScheme())) {
-      throw new IllegalArgumentException(
-          String.format("Incorrect URI %s, URI scheme must be abfss when authenticating using Oauth.", uri));
-    }
+    boolean usingOauth = (AuthType.OAuth == abfsConfiguration.getEnum(
+        FS_AZURE_ACCOUNT_AUTH_TYPE_PROPERTY_NAME, AuthType.SharedKey));
 
-    initializeClient(uri, fileSystemName, accountName, isSecure);
+    boolean useHttps = (usingOauth || abfsConfiguration.isHttpsAlwaysUsed()) ? true : isSecureScheme;
+    initializeClient(uri, fileSystemName, accountName, useHttps);
   }
 
   private String[] authorityParts(URI uri) throws InvalidUriAuthorityException, InvalidUriException {
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/SecureAzureBlobFileSystem.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/SecureAzureBlobFileSystem.java
index 15fe5427252..bc9530c66d2 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/SecureAzureBlobFileSystem.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/SecureAzureBlobFileSystem.java
@@ -28,7 +28,7 @@
 @InterfaceStability.Evolving
 public class SecureAzureBlobFileSystem extends AzureBlobFileSystem {
   @Override
-  public boolean isSecure() {
+  public boolean isSecureScheme() {
     return true;
   }
 
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/ConfigurationKeys.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/ConfigurationKeys.java
index 3e4ae21aeba..97217f7f47a 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/ConfigurationKeys.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/ConfigurationKeys.java
@@ -48,6 +48,7 @@ public final class ConfigurationKeys {
   public static final String AZURE_CREATE_REMOTE_FILESYSTEM_DURING_INITIALIZATION = "fs.azure.createRemoteFileSystemDuringInitialization";
   public static final String AZURE_SKIP_USER_GROUP_METADATA_DURING_INITIALIZATION = "fs.azure.skipUserGroupMetadataDuringInitialization";
   public static final String FS_AZURE_ENABLE_AUTOTHROTTLING = "fs.azure.enable.autothrottling";
+  public static final String FS_AZURE_ALWAYS_USE_HTTPS = "fs.azure.always.use.https";
   public static final String FS_AZURE_ATOMIC_RENAME_KEY = "fs.azure.atomic.rename.key";
   public static final String FS_AZURE_READ_AHEAD_QUEUE_DEPTH = "fs.azure.readaheadqueue.depth";
   public static final String FS_AZURE_ENABLE_FLUSH = "fs.azure.enable.flush";
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/FileSystemConfigurations.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/FileSystemConfigurations.java
index a9412a961c0..4949d59170a 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/FileSystemConfigurations.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/constants/FileSystemConfigurations.java
@@ -63,5 +63,7 @@ public final class FileSystemConfigurations {
       = SSLSocketFactoryEx.SSLChannelMode.Default;
 
   public static final boolean DEFAULT_ENABLE_DELEGATION_TOKEN = false;
+  public static final boolean DEFAULT_ENABLE_HTTPS = true;
+
   private FileSystemConfigurations() {}
 }
\ No newline at end of file
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemFileStatus.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemFileStatus.java
index 02f938f19f4..707a1452d50 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemFileStatus.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemFileStatus.java
@@ -70,7 +70,7 @@ private FileStatus validateStatus(final AzureBlobFileSystem fs, final Path name,
     String errorInStatus = "error in " + fileStatus + " from " + fs;
 
     // When running with Oauth, the owner and group info retrieved from server will be digit ids.
-    if (this.getAuthType() != AuthType.OAuth && !fs.isSecure()) {
+    if (this.getAuthType() != AuthType.OAuth && !fs.isSecureScheme()) {
       assertEquals(errorInStatus + ": owner",
               fs.getOwnerUser(), fileStatus.getOwner());
       assertEquals(errorInStatus + ": group",
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestClientUrlScheme.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestClientUrlScheme.java
new file mode 100644
index 00000000000..ad889838ff1
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestClientUrlScheme.java
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs;
+
+import java.lang.reflect.Field;
+import java.net.URL;
+import java.util.Arrays;
+
+import org.junit.Assert;
+import org.junit.Assume;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.azurebfs.services.AbfsClient;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.azurebfs.constants.FileSystemUriSchemes;
+import org.apache.hadoop.fs.azurebfs.services.AuthType;
+
+import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.FS_AZURE_ALWAYS_USE_HTTPS;
+
+/**
+ * Parameterized test of ABFS client URL scheme verification.
+ */
+
+@RunWith(Parameterized.class)
+public class ITestClientUrlScheme extends AbstractAbfsIntegrationTest {
+
+  @Parameterized.Parameter
+  public boolean useSecureScheme;
+
+  @Parameterized.Parameter(1)
+  public boolean alwaysUseHttps;
+
+  @Parameterized.Parameters
+  public static Iterable<Object[]> params() {
+    return Arrays.asList(
+        new Object[][]{
+            {false, false},
+            {false, true},
+            {true, true},
+            {true, false}
+        });
+  }
+
+  public ITestClientUrlScheme() throws Exception {
+    super();
+    // authentication like OAuth must use HTTPS
+    Assume.assumeTrue("ITestClientUrlScheme is skipped because auth type is not SharedKey",
+        getAuthType() == AuthType.SharedKey);
+  }
+
+  @Test
+  public void testClientUrlScheme() throws Exception {
+    String[] urlWithoutScheme = this.getTestUrl().split(":");
+    String fsUrl;
+    // update filesystem scheme
+    if (useSecureScheme) {
+      fsUrl = FileSystemUriSchemes.ABFS_SECURE_SCHEME + ":" + urlWithoutScheme[1];
+    } else {
+      fsUrl = FileSystemUriSchemes.ABFS_SCHEME + ":" + urlWithoutScheme[1];
+    }
+
+    Configuration config = getRawConfiguration();
+    config.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, fsUrl);
+    config.setBoolean(FS_AZURE_ALWAYS_USE_HTTPS, alwaysUseHttps);
+
+    AbfsClient client = this.getFileSystem(config).getAbfsClient();
+
+    Field baseUrlField = AbfsClient.class.
+        getDeclaredField("baseUrl");
+    baseUrlField.setAccessible(true);
+
+    String url = ((URL) baseUrlField.get(client)).toString();
+
+    // HTTP is used only when the "abfs://" scheme is used and FS_AZURE_ALWAYS_USE_HTTPS
+    // is set to false; otherwise HTTPS must be used.
+    if (!useSecureScheme && !alwaysUseHttps) {
+      Assert.assertTrue(url.startsWith(FileSystemUriSchemes.HTTP_SCHEME));
+    } else {
+      Assert.assertTrue(url.startsWith(FileSystemUriSchemes.HTTPS_SCHEME));
+    }
+  }
+}
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestOauthOverAbfsScheme.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestOauthOverAbfsScheme.java
new file mode 100644
index 00000000000..2c80ce85f4e
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestOauthOverAbfsScheme.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.azurebfs;
+
+import java.lang.reflect.Field;
+import java.net.URL;
+
+import org.junit.Assume;
+import org.junit.Test;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.azurebfs.constants.FileSystemUriSchemes;
+import org.apache.hadoop.fs.azurebfs.services.AbfsClient;
+import org.apache.hadoop.fs.azurebfs.services.AuthType;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+
+/**
+ * Test that OAuth authentication over the abfs scheme is routed to HTTPS.
+ */
+public class ITestOauthOverAbfsScheme extends AbstractAbfsIntegrationTest {
+
+  public ITestOauthOverAbfsScheme() throws Exception {
+    Assume.assumeTrue("ITestOauthOverAbfsScheme is skipped because auth type is not OAuth",
+        getAuthType() == AuthType.OAuth);
+  }
+
+  @Test
+  public void testOauthOverSchemeAbfs() throws Exception {
+    String[] urlWithoutScheme = this.getTestUrl().split(":");
+    String fsUrl;
+    // update filesystem scheme to use abfs://
+    fsUrl = FileSystemUriSchemes.ABFS_SCHEME + ":" + urlWithoutScheme[1];
+
+    Configuration config = getRawConfiguration();
+    config.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, fsUrl);
+
+    AbfsClient client = this.getFileSystem(config).getAbfsClient();
+
+    Field baseUrlField = AbfsClient.class.
+        getDeclaredField("baseUrl");
+    baseUrlField.setAccessible(true);
+    String url = ((URL) baseUrlField.get(client)).toString();
+
+    Assume.assumeTrue("OAuth authentication over scheme abfs must use HTTPS",
+        url.startsWith(FileSystemUriSchemes.HTTPS_SCHEME));
+
+  }
+}
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/contract/AbfsFileSystemContract.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/contract/AbfsFileSystemContract.java
index c0c5f91fabc..62bcca174ef 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/contract/AbfsFileSystemContract.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/contract/AbfsFileSystemContract.java
@@ -56,7 +56,7 @@ public Path getTestPath() {
   public String toString() {
     final StringBuilder sb = new StringBuilder(
         "AbfsFileSystemContract{");
-    sb.append("isSecure=").append(isSecure);
+    sb.append("isSecureScheme=").append(isSecure);
     sb.append(super.toString());
     sb.append('}');
     return sb.toString();
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestOauthFailOverHttp.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestOauthFailOverHttp.java
deleted file mode 100644
index de07c4b2b91..00000000000
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestOauthFailOverHttp.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.fs.azurebfs.services;
-
-import java.net.URI;
-
-import org.junit.Test;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.azurebfs.constants.FileSystemUriSchemes;
-
-import static org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys.FS_AZURE_ACCOUNT_AUTH_TYPE_PROPERTY_NAME;
-import static org.apache.hadoop.fs.azurebfs.constants.TestConfigurationKeys.FS_AZURE_ABFS_ACCOUNT_NAME;
-import static org.apache.hadoop.test.LambdaTestUtils.intercept;
-
-/**
- * Test Oauth fail fast when uri scheme is incorrect.
- */
-public class TestOauthFailOverHttp {
-
-  @Test
-  public void testOauthFailWithSchemeAbfs() throws Exception {
-    Configuration conf = new Configuration();
-    final String account = "fakeaccount.dfs.core.windows.net";
-    conf.set(FS_AZURE_ABFS_ACCOUNT_NAME, account);
-    conf.setEnum(FS_AZURE_ACCOUNT_AUTH_TYPE_PROPERTY_NAME, AuthType.OAuth);
-    URI defaultUri = new URI(FileSystemUriSchemes.ABFS_SCHEME,
-        "fakecontainer@" + account,
-        null,
-        null,
-        null);
-    conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, defaultUri.toString());
-    // IllegalArgumentException is expected
-    // when authenticating using Oauth and scheme is not abfss
-    intercept(IllegalArgumentException.class, "Incorrect URI",
-        () -> FileSystem.get(conf));
-  }
-}
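Note (not part of the patch): with DEFAULT_ENABLE_HTTPS true, an abfs:// URI is now served over HTTPS unless fs.azure.always.use.https is explicitly set to false, and OAuth always forces HTTPS regardless of that flag. Below is a minimal client-side sketch of opting back into plain HTTP for SharedKey authentication; the account and container names are placeholders, and the SharedKey credential settings are omitted.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class AbfsHttpOptOutExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Placeholder container/account; a real SharedKey setup also needs the
    // fs.azure.account.key.<account>.dfs.core.windows.net secret to be configured.
    conf.set("fs.defaultFS",
        "abfs://mycontainer@myaccount.dfs.core.windows.net");
    // New key introduced by this patch; it defaults to true, so plain HTTP
    // over the abfs:// scheme now requires an explicit opt-out.
    conf.setBoolean("fs.azure.always.use.https", false);
    FileSystem fs = FileSystem.get(conf);
    System.out.println(fs.getFileStatus(new Path("/")));
  }
}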