diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index 202cde4f162..d0b10cbbcf8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -152,7 +152,6 @@ public class WebHdfsFileSystem extends FileSystem
       + "/v" + VERSION;
   public static final String EZ_HEADER = "X-Hadoop-Accept-EZ";
   public static final String FEFINFO_HEADER = "X-Hadoop-feInfo";
-  public static final String DFS_HTTP_POLICY_KEY = "dfs.http.policy";
 
   /**
    * Default connection factory may be overridden in tests to use smaller
@@ -182,7 +181,6 @@ public class WebHdfsFileSystem extends FileSystem
 
   private DFSOpsCountStatistics storageStatistics;
   private KeyProvider testProvider;
-  private boolean isTLSKrb;
 
   /**
    * Return the protocol scheme for the FileSystem.
@@ -244,7 +242,6 @@ public class WebHdfsFileSystem extends FileSystem
           .newDefaultURLConnectionFactory(connectTimeout, readTimeout, conf);
     }
 
-    this.isTLSKrb = "HTTPS_ONLY".equals(conf.get(DFS_HTTP_POLICY_KEY));
 
     ugi = UserGroupInformation.getCurrentUser();
     this.uri = URI.create(uri.getScheme() + "://" + uri.getAuthority());
@@ -702,11 +699,6 @@ public class WebHdfsFileSystem extends FileSystem
       //redirect hostname and port
       redirectHost = null;
 
-      if (url.getProtocol().equals(getTransportScheme()) &&
-          UserGroupInformation.isSecurityEnabled() &&
-          isTLSKrb) {
-        throw new IOException("Access denied: dfs.http.policy is HTTPS_ONLY.");
-      }
 
       // resolve redirects for a DN operation unless already resolved
       if (op.getRedirect() && !redirected) {