diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/HostRestrictingAuthorizationFilterHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/HostRestrictingAuthorizationFilterHandler.java
index 798def0c716..8c5d538d233 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/HostRestrictingAuthorizationFilterHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/HostRestrictingAuthorizationFilterHandler.java
@@ -197,7 +197,7 @@ public String getRemoteAddr() {
     @Override
     public String getQueryString() {
       try {
-        return (new URI(req.getUri()).getQuery());
+        return (new URI(req.uri()).getQuery());
       } catch (URISyntaxException e) {
         return null;
       }
@@ -205,7 +205,7 @@ public String getQueryString() {
 
     @Override
     public String getRequestURI() {
-      String uri = req.getUri();
+      String uri = req.uri();
       // Netty's getUri includes the query string, while Servlet's does not
       return (uri.substring(0, uri.indexOf("?") >= 0 ? uri.indexOf("?")
           : uri.length()));
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/SimpleHttpProxyHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/SimpleHttpProxyHandler.java
index 9a2e0b71a44..51aeab024b8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/SimpleHttpProxyHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/SimpleHttpProxyHandler.java
@@ -98,7 +98,7 @@ public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
   @Override
   public void channelRead0
     (final ChannelHandlerContext ctx, final HttpRequest req) {
-    uri = req.getUri();
+    uri = req.uri();
     final Channel client = ctx.channel();
     Bootstrap proxiedServer = new Bootstrap()
       .group(client.eventLoop())
@@ -117,8 +117,7 @@ protected void initChannel(SocketChannel ch) throws Exception {
         public void operationComplete(ChannelFuture future) throws Exception {
           if (future.isSuccess()) {
             ctx.channel().pipeline().remove(HttpResponseEncoder.class);
-            HttpRequest newReq = new DefaultFullHttpRequest(HTTP_1_1,
-                req.getMethod(), req.getUri());
+            HttpRequest newReq = new DefaultFullHttpRequest(HTTP_1_1, req.method(), req.uri());
             newReq.headers().add(req.headers());
             newReq.headers().set(CONNECTION, Values.CLOSE);
             future.channel().writeAndFlush(newReq);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/URLDispatcher.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/URLDispatcher.java
index 8ec5bf6f64c..4545f733a8c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/URLDispatcher.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/URLDispatcher.java
@@ -43,7 +43,7 @@ class URLDispatcher extends SimpleChannelInboundHandler {
   @Override
   protected void channelRead0(ChannelHandlerContext ctx, HttpRequest req)
       throws Exception {
-    String uri = req.getUri();
+    String uri = req.uri();
    ChannelPipeline p = ctx.pipeline();
     if (uri.startsWith(WEBHDFS_PREFIX)) {
       WebHdfsHandler h = new WebHdfsHandler(conf, confForCreate);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java
index f2ced88aaa3..75671628408 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java
@@ -122,8 +122,8 @@ public WebHdfsHandler(Configuration conf, Configuration confForCreate)
   @Override
   public void channelRead0(final ChannelHandlerContext ctx,
                            final HttpRequest req) throws Exception {
-    Preconditions.checkArgument(req.getUri().startsWith(WEBHDFS_PREFIX));
-    QueryStringDecoder queryString = new QueryStringDecoder(req.getUri());
+    Preconditions.checkArgument(req.uri().startsWith(WEBHDFS_PREFIX));
+    QueryStringDecoder queryString = new QueryStringDecoder(req.uri());
     params = new ParameterParser(queryString, conf);
     DataNodeUGIProvider ugiProvider = new DataNodeUGIProvider(params);
     ugi = ugiProvider.ugi();
@@ -144,7 +144,7 @@ public Void run() throws Exception {
           LOG.warn("Error retrieving hostname: ", e);
           host = "unknown";
         }
-        REQLOG.info(host + " " + req.getMethod() + " " + req.getUri() + " " +
+        REQLOG.info(host + " " + req.method() + " " + req.uri() + " " +
             getResponseCode());
       }
       return null;
@@ -154,13 +154,13 @@ public Void run() throws Exception {
 
   int getResponseCode() {
     return (resp == null) ? INTERNAL_SERVER_ERROR.code() :
-        resp.getStatus().code();
+        resp.status().code();
   }
 
   public void handle(ChannelHandlerContext ctx, HttpRequest req)
       throws IOException, URISyntaxException {
     String op = params.op();
-    HttpMethod method = req.getMethod();
+    HttpMethod method = req.method();
     if (PutOpParam.Op.CREATE.name().equalsIgnoreCase(op)
         && method == PUT) {
       onCreate(ctx);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
index f14ee5f930f..bfa35aaf1e8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
@@ -76,7 +76,7 @@ public void channelActive(ChannelHandlerContext ctx) throws Exception {
   @Override
   public void channelRead0(ChannelHandlerContext ctx, HttpRequest request)
       throws Exception {
-    if (request.getMethod() != HttpMethod.GET) {
+    if (request.method() != HttpMethod.GET) {
       DefaultHttpResponse resp = new DefaultHttpResponse(HTTP_1_1,
           METHOD_NOT_ALLOWED);
       resp.headers().set(CONNECTION, CLOSE);
@@ -84,7 +84,7 @@ public void channelRead0(ChannelHandlerContext ctx, HttpRequest request)
       return;
     }
 
-    QueryStringDecoder decoder = new QueryStringDecoder(request.getUri());
+    QueryStringDecoder decoder = new QueryStringDecoder(request.uri());
     // check path. throw exception if path doesn't start with WEBHDFS_PREFIX
     String path = getPath(decoder);
     final String op = getOp(decoder);
@@ -140,7 +140,7 @@ public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
   public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause)
       throws Exception {
     Exception e = cause instanceof Exception ? (Exception) cause : new
-            Exception(cause);
+        Exception(cause);
     final String output = JsonUtil.toJsonString(e);
     ByteBuf content = Unpooled.wrappedBuffer(output.getBytes(Charsets.UTF_8));
     final DefaultFullHttpResponse resp = new DefaultFullHttpResponse(
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/TestHostRestrictingAuthorizationFilterHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/TestHostRestrictingAuthorizationFilterHandler.java
index 031ac0aa29d..a918a081e7f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/TestHostRestrictingAuthorizationFilterHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/TestHostRestrictingAuthorizationFilterHandler.java
@@ -48,7 +48,7 @@ public class TestHostRestrictingAuthorizationFilterHandler {
    * Test running in with no ACL rules (restrict all)
    */
   @Test
-  public void testRejectAll() throws Exception {
+  public void testRejectAll() {
     EmbeddedChannel channel = new CustomEmbeddedChannel("127.0.0.1", 1006,
         new HostRestrictingAuthorizationFilterHandler());
     FullHttpRequest httpRequest =
@@ -61,7 +61,7 @@ public void testRejectAll() throws Exception {
     DefaultHttpResponse channelResponse =
         (DefaultHttpResponse) channel.outboundMessages().poll();
     assertNotNull("Expected response to exist.", channelResponse);
-    assertEquals(HttpResponseStatus.FORBIDDEN, channelResponse.getStatus());
+    assertEquals(HttpResponseStatus.FORBIDDEN, channelResponse.status());
     assertFalse(channel.isOpen());
   }
 
@@ -70,7 +70,7 @@ public void testRejectAll() throws Exception {
    * reused
    */
   @Test
-  public void testMultipleAcceptedGETsOneChannel() throws Exception {
+  public void testMultipleAcceptedGETsOneChannel() {
     Configuration conf = new Configuration();
     conf.set(CONFNAME, "*,*,/allowed");
     HostRestrictingAuthorizationFilter filter =
@@ -102,7 +102,7 @@ public void testMultipleAcceptedGETsOneChannel() throws Exception {
    * single filter instance
    */
   @Test
-  public void testMultipleChannels() throws Exception {
+  public void testMultipleChannels() {
     Configuration conf = new Configuration();
     conf.set(CONFNAME, "*,*,/allowed");
     HostRestrictingAuthorizationFilter filter =
@@ -140,7 +140,7 @@ public void testMultipleChannels() throws Exception {
    * Test accepting a GET request for the file checksum
    */
   @Test
-  public void testAcceptGETFILECHECKSUM() throws Exception {
+  public void testAcceptGETFILECHECKSUM() {
     EmbeddedChannel channel = new CustomEmbeddedChannel("127.0.0.1", 1006,
         new HostRestrictingAuthorizationFilterHandler());
     FullHttpRequest httpRequest =
@@ -158,7 +158,7 @@ public void testAcceptGETFILECHECKSUM() {
    */
   protected static class CustomEmbeddedChannel extends EmbeddedChannel {
 
-    private InetSocketAddress socketAddress;
+    private final InetSocketAddress socketAddress;
 
     /*
     * A normal @{EmbeddedChannel} constructor which takes the remote client
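
Reviewer context: every hunk above is the same mechanical rename, from the bean-style getters that Netty deprecated (getUri(), getMethod(), getStatus()) to the terse accessors that replace them in Netty 4.1 (uri(), method(), status()). Behavior is unchanged; in particular, uri() still returns the path plus query string exactly as getUri() did, which is why the handlers keep stripping it with QueryStringDecoder or substring(). A minimal, self-contained sketch of the new accessors follows; the class name NettyAccessorDemo and the sample WebHDFS URI are illustrative only, not from the patch, and it assumes netty-codec-http 4.1.x on the classpath:

    import io.netty.handler.codec.http.DefaultFullHttpRequest;
    import io.netty.handler.codec.http.HttpMethod;
    import io.netty.handler.codec.http.HttpRequest;
    import io.netty.handler.codec.http.QueryStringDecoder;
    import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;

    public class NettyAccessorDemo {
      public static void main(String[] args) {
        // Build a request the same way SimpleHttpProxyHandler does post-patch.
        HttpRequest req = new DefaultFullHttpRequest(
            HTTP_1_1, HttpMethod.GET, "/webhdfs/v1/tmp?op=GETFILESTATUS");

        // uri() includes the query string, just as the deprecated getUri() did.
        String uri = req.uri();
        HttpMethod method = req.method();

        // Split path from query parameters, as WebHdfsHandler does.
        QueryStringDecoder decoder = new QueryStringDecoder(uri);
        // Prints: GET /webhdfs/v1/tmp {op=[GETFILESTATUS]}
        System.out.println(method + " " + decoder.path() + " " + decoder.parameters());
      }
    }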