From 3f23d50c9ae627fc5326055a0a5aed4fd16c5040 Mon Sep 17 00:00:00 2001
From: Andrew Wang
Date: Mon, 6 Mar 2017 15:04:13 -0800
Subject: [PATCH] HDFS-11498. Make RestCsrfPreventionHandler and
 WebHdfsHandler compatible with Netty 4.0.

(cherry picked from commit 5e74196ede9bfc20eb6d6fe3aa6a0e5c47a40fdd)
(cherry picked from commit 6e75c1e2f04be03410dd942df4dc3e94bf7b4b24)
---
 .../web/RestCsrfPreventionFilterHandler.java  |  6 ++--
 .../datanode/web/webhdfs/WebHdfsHandler.java  | 33 +++++++++----------
 hadoop-project/pom.xml                        |  2 +-
 3 files changed, 20 insertions(+), 21 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/RestCsrfPreventionFilterHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/RestCsrfPreventionFilterHandler.java
index f2f0533894d..4958bb59202 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/RestCsrfPreventionFilterHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/RestCsrfPreventionFilterHandler.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hdfs.server.datanode.web;
 
-import static io.netty.handler.codec.http.HttpHeaderNames.CONNECTION;
-import static io.netty.handler.codec.http.HttpHeaderValues.CLOSE;
+import static io.netty.handler.codec.http.HttpHeaders.Names.CONNECTION;
+import static io.netty.handler.codec.http.HttpHeaders.Values.CLOSE;
 import static io.netty.handler.codec.http.HttpResponseStatus.INTERNAL_SERVER_ERROR;
 import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
 
@@ -119,7 +119,7 @@ final class RestCsrfPreventionFilterHandler
 
     @Override
     public String getMethod() {
-      return req.method().name();
+      return req.getMethod().name();
     }
 
     @Override
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java
index 69cb89cc08e..d51b8bd894a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java
@@ -29,17 +29,6 @@ import io.netty.handler.codec.http.HttpMethod;
 import io.netty.handler.codec.http.HttpRequest;
 import io.netty.handler.codec.http.QueryStringDecoder;
 import io.netty.handler.stream.ChunkedStream;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.net.InetSocketAddress;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.nio.charset.StandardCharsets;
-import java.security.PrivilegedExceptionAction;
-import java.util.EnumSet;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -62,17 +51,27 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.LimitInputStream;
 
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
+import java.security.PrivilegedExceptionAction;
+import java.util.EnumSet;
+
+import static io.netty.handler.codec.http.HttpHeaders.Names.ACCEPT;
+import static io.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_ALLOW_HEADERS;
 import static io.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_ALLOW_METHODS;
 import static io.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_ALLOW_ORIGIN;
+import static io.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_MAX_AGE;
 import static io.netty.handler.codec.http.HttpHeaders.Names.CONNECTION;
 import static io.netty.handler.codec.http.HttpHeaders.Names.CONTENT_LENGTH;
 import static io.netty.handler.codec.http.HttpHeaders.Names.CONTENT_TYPE;
 import static io.netty.handler.codec.http.HttpHeaders.Names.LOCATION;
 import static io.netty.handler.codec.http.HttpHeaders.Values.CLOSE;
-import static io.netty.handler.codec.http.HttpHeaderNames.ACCEPT;
-import static io.netty.handler.codec.http.HttpHeaderNames.ACCESS_CONTROL_ALLOW_HEADERS;
-import static io.netty.handler.codec.http.HttpHeaderNames.ACCESS_CONTROL_MAX_AGE;
-import static io.netty.handler.codec.http.HttpHeaderValues.KEEP_ALIVE;
+import static io.netty.handler.codec.http.HttpHeaders.Values.KEEP_ALIVE;
 import static io.netty.handler.codec.http.HttpMethod.GET;
 import static io.netty.handler.codec.http.HttpMethod.OPTIONS;
 import static io.netty.handler.codec.http.HttpMethod.POST;
@@ -141,7 +140,7 @@ public class WebHdfsHandler extends SimpleChannelInboundHandler<HttpRequest> {
             LOG.warn("Error retrieving hostname: ", e);
             host = "unknown";
           }
-          REQLOG.info(host + " " + req.method() + " " + req.uri() + " " +
+          REQLOG.info(host + " " + req.getMethod() + " " + req.getUri() + " " +
               getResponseCode());
         }
         return null;
@@ -151,7 +150,7 @@ public class WebHdfsHandler extends SimpleChannelInboundHandler<HttpRequest> {
 
   int getResponseCode() {
     return (resp == null) ? INTERNAL_SERVER_ERROR.code() :
-        resp.status().code();
+        resp.getStatus().code();
   }
 
   public void handle(ChannelHandlerContext ctx, HttpRequest req)
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index f9e44df3b58..e5a31eadd21 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -612,7 +612,7 @@
       <dependency>
         <groupId>io.netty</groupId>
         <artifactId>netty-all</artifactId>
-        <version>4.1.0.Beta5</version>
+        <version>4.0.23.Final</version>
       </dependency>
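
Note (not part of the patch): a minimal sketch of the Netty 4.0-era calls this backport switches to, assuming Netty 4.0.23.Final on the classpath. Header constants come from HttpHeaders.Names and HttpHeaders.Values (Netty 4.1's HttpHeaderNames/HttpHeaderValues do not exist in 4.0.x), and request/response accessors keep the getMethod()/getUri()/getStatus() names rather than 4.1's method()/uri()/status(). The class and method names below are illustrative only.

import static io.netty.handler.codec.http.HttpHeaders.Names.CONNECTION;
import static io.netty.handler.codec.http.HttpHeaders.Values.CLOSE;
import static io.netty.handler.codec.http.HttpResponseStatus.OK;
import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;

import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpResponse;

// Illustrative sketch only; not part of HDFS-11498.
public class Netty40ApiSketch {

  // Netty 4.0 accessors: getMethod()/getUri()/getStatus() instead of the
  // method()/uri()/status() forms introduced in Netty 4.1.
  static String describeExchange(HttpRequest req, HttpResponse resp) {
    return req.getMethod().name() + " " + req.getUri() + " -> "
        + resp.getStatus().code();
  }

  // Netty 4.0 header constants: HttpHeaders.Names.* / HttpHeaders.Values.*
  // instead of HttpHeaderNames / HttpHeaderValues.
  static HttpResponse closedOkResponse() {
    DefaultFullHttpResponse resp = new DefaultFullHttpResponse(HTTP_1_1, OK);
    resp.headers().set(CONNECTION, CLOSE);
    return resp;
  }
}

The getXxx() accessors still compile against Netty 4.1 (where they are merely deprecated), but the 4.1-only names used on trunk do not exist in 4.0.x, which is why the pom pin also moves from 4.1.0.Beta5 back to 4.0.23.Final.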