diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewerPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewerPB.java
index 0f2ac81d8d5..e4afa994614 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewerPB.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewerPB.java
@@ -72,6 +72,7 @@ public class OfflineImageViewerPB {
       + "    rather than a number of bytes. (false by default)\n"
       + "  * Web: Run a viewer to expose read-only WebHDFS API.\n"
       + "    -addr specifies the address to listen. (localhost:5978 by default)\n"
+      + "    It does not support secure mode nor HTTPS.\n"
       + "  * Delimited (experimental): Generate a text file with all of the elements common\n"
       + "    to both inodes and inodes-under-construction, separated by a\n"
       + "    delimiter. The default delimiter is \\t, though this may be\n"
@@ -200,7 +201,7 @@ public class OfflineImageViewerPB {
       case "WEB":
         String addr = cmd.getOptionValue("addr", "localhost:5978");
         try (WebImageViewer viewer =
-            new WebImageViewer(NetUtils.createSocketAddr(addr))) {
+            new WebImageViewer(NetUtils.createSocketAddr(addr), conf)) {
           viewer.start(inputFile);
         }
         break;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/WebImageViewer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/WebImageViewer.java
index 087972f94cc..a50e828e4a1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/WebImageViewer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/WebImageViewer.java
@@ -34,6 +34,9 @@ import io.netty.handler.codec.string.StringEncoder;
 import io.netty.util.concurrent.GlobalEventExecutor;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.security.UserGroupInformation;
 
 import java.io.Closeable;
 import java.io.IOException;
@@ -53,8 +56,12 @@ public class WebImageViewer implements Closeable {
   private final EventLoopGroup bossGroup;
   private final EventLoopGroup workerGroup;
   private final ChannelGroup allChannels;
+  private final Configuration conf;
 
   public WebImageViewer(InetSocketAddress address) {
+    this(address, new Configuration());
+  }
+  public WebImageViewer(InetSocketAddress address, Configuration conf) {
     this.address = address;
     this.bossGroup = new NioEventLoopGroup();
     this.workerGroup = new NioEventLoopGroup();
@@ -62,15 +69,25 @@ public class WebImageViewer implements Closeable {
     this.bootstrap = new ServerBootstrap()
         .group(bossGroup, workerGroup)
         .channel(NioServerSocketChannel.class);
+    this.conf = conf;
+    UserGroupInformation.setConfiguration(conf);
   }
 
   /**
    * Start WebImageViewer and wait until the thread is interrupted.
    * @param fsimage the fsimage to load.
    * @throws IOException if failed to load the fsimage.
+   * @throws RuntimeException if security is enabled in configuration.
    */
   public void start(String fsimage) throws IOException {
     try {
+      if (UserGroupInformation.isSecurityEnabled()) {
+        throw new RuntimeException(
+            "WebImageViewer does not support secure mode. To start in " +
+            "non-secure mode, pass -D" +
+            CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION +
+            "=simple");
+      }
       initServer(fsimage);
       channel.closeFuture().await();
     } catch (InterruptedException e) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HdfsImageViewer.md b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HdfsImageViewer.md
index bd3a797bfa0..6b0c27c8a29 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HdfsImageViewer.md
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HdfsImageViewer.md
@@ -26,7 +26,8 @@ The Offline Image Viewer provides several output processors:
 
 1. Web is the default output processor. It launches a HTTP server
     that exposes read-only WebHDFS API. Users can investigate the namespace
-    interactively by using HTTP REST API.
+    interactively by using HTTP REST API. It does not support secure mode, nor
+    HTTPS.
 
 2. XML creates an XML document of the fsimage and includes all of the
     information within the fsimage. The
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java
index b8078049be3..c84237cb836 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java
@@ -18,6 +18,8 @@
 package org.apache.hadoop.hdfs.tools.offlineImageViewer;
 
 import com.google.common.collect.ImmutableMap;
+
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION;
 import static org.apache.hadoop.fs.permission.AclEntryScope.ACCESS;
 import static org.apache.hadoop.fs.permission.AclEntryType.GROUP;
 import static org.apache.hadoop.fs.permission.AclEntryType.OTHER;
@@ -100,8 +102,10 @@ import org.apache.hadoop.hdfs.server.namenode.NameNodeLayoutVersion;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.test.LambdaTestUtils;
 import org.apache.log4j.Level;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -583,6 +587,22 @@ public class TestOfflineImageViewer {
     }
   }
 
+  @Test
+  public void testWebImageViewerSecureMode() throws Exception {
+    Configuration conf = new Configuration();
+    conf.set(HADOOP_SECURITY_AUTHENTICATION, "kerberos");
+    try (WebImageViewer viewer =
+        new WebImageViewer(
+            NetUtils.createSocketAddr("localhost:0"), conf)) {
+      RuntimeException ex = LambdaTestUtils.intercept(RuntimeException.class,
+          "WebImageViewer does not support secure mode.",
+          () -> viewer.start("foo"));
+    } finally {
+      conf.set(HADOOP_SECURITY_AUTHENTICATION, "simple");
+      UserGroupInformation.setConfiguration(conf);
+    }
+  }
+
   @Test
   public void testPBDelimitedWriter() throws IOException, InterruptedException {
     testPBDelimitedWriter("");  // Test in memory db.
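
For context, a minimal sketch (not part of the patch) of how the new WebImageViewer(InetSocketAddress, Configuration) constructor added above can be driven from caller code, explicitly forcing simple authentication so the new security check passes. The launcher class name and the fsimage path are assumptions for illustration only.

// Illustrative launcher, not part of Hadoop; class name and fsimage path are placeholders.
import java.net.InetSocketAddress;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.hdfs.tools.offlineImageViewer.WebImageViewer;
import org.apache.hadoop.net.NetUtils;

public class OivWebLauncher {
  public static void main(String[] args) throws Exception {
    // Path to a downloaded fsimage file (placeholder).
    String fsimage = args.length > 0 ? args[0] : "/tmp/fsimage_0000000000000000000";

    // Explicitly force simple authentication. With a Kerberos-enabled
    // Configuration, start() now throws a RuntimeException instead of
    // serving the image without any authentication.
    Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
        "simple");

    InetSocketAddress addr = NetUtils.createSocketAddr("localhost:5978");
    try (WebImageViewer viewer = new WebImageViewer(addr, conf)) {
      viewer.start(fsimage); // blocks until the thread is interrupted
    }
  }
}

The stock CLI path, roughly hdfs oiv -i <fsimage> -p Web -addr localhost:5978, follows the same route through OfflineImageViewerPB and, after this change, refuses to start when the loaded Configuration enables Kerberos, as exercised by testWebImageViewerSecureMode above.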