diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/HostRestrictingAuthorizationFilterHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/HostRestrictingAuthorizationFilterHandler.java
index 8c5d538d233..c76d9346919 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/HostRestrictingAuthorizationFilterHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/HostRestrictingAuthorizationFilterHandler.java
@@ -43,8 +43,8 @@ import java.net.URISyntaxException;
 import java.util.List;
 import java.util.Map;
 
-import static io.netty.handler.codec.http.HttpHeaders.Names.CONNECTION;
-import static io.netty.handler.codec.http.HttpHeaders.Values.CLOSE;
+import static io.netty.handler.codec.http.HttpHeaderNames.CONNECTION;
+import static io.netty.handler.codec.http.HttpHeaderValues.CLOSE;
 import static io.netty.handler.codec.http.HttpResponseStatus.INTERNAL_SERVER_ERROR;
 import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/RestCsrfPreventionFilterHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/RestCsrfPreventionFilterHandler.java
index a2c2d370439..4e3c2d1ae7b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/RestCsrfPreventionFilterHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/RestCsrfPreventionFilterHandler.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hdfs.server.datanode.web;
 
-import static io.netty.handler.codec.http.HttpHeaders.Names.CONNECTION;
-import static io.netty.handler.codec.http.HttpHeaders.Values.CLOSE;
+import static io.netty.handler.codec.http.HttpHeaderNames.CONNECTION;
+import static io.netty.handler.codec.http.HttpHeaderValues.CLOSE;
 import static io.netty.handler.codec.http.HttpResponseStatus.INTERNAL_SERVER_ERROR;
 import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_REST_CSRF_ENABLED_DEFAULT;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/SimpleHttpProxyHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/SimpleHttpProxyHandler.java
index 51aeab024b8..ef8d90a4c4e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/SimpleHttpProxyHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/SimpleHttpProxyHandler.java
@@ -34,12 +34,12 @@ import io.netty.handler.codec.http.DefaultHttpResponse;
 import io.netty.handler.codec.http.HttpRequest;
 import io.netty.handler.codec.http.HttpRequestEncoder;
 import io.netty.handler.codec.http.HttpResponseEncoder;
+import io.netty.handler.codec.http.HttpHeaderValues;
 import org.slf4j.Logger;
 
 import java.net.InetSocketAddress;
 
-import static io.netty.handler.codec.http.HttpHeaders.Names.CONNECTION;
-import static io.netty.handler.codec.http.HttpHeaders.Values;
+import static io.netty.handler.codec.http.HttpHeaderNames.CONNECTION;
 import static io.netty.handler.codec.http.HttpResponseStatus.INTERNAL_SERVER_ERROR;
 import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
@@ -119,12 +119,12 @@ class SimpleHttpProxyHandler extends SimpleChannelInboundHandler<HttpRequest> {
         ctx.channel().pipeline().remove(HttpResponseEncoder.class);
         HttpRequest newReq = new DefaultFullHttpRequest(HTTP_1_1, req.method(), req.uri());
         newReq.headers().add(req.headers());
-        newReq.headers().set(CONNECTION, Values.CLOSE);
+        newReq.headers().set(CONNECTION, HttpHeaderValues.CLOSE);
         future.channel().writeAndFlush(newReq);
       } else {
         DefaultHttpResponse resp = new DefaultHttpResponse(HTTP_1_1, INTERNAL_SERVER_ERROR);
-        resp.headers().set(CONNECTION, Values.CLOSE);
+        resp.headers().set(CONNECTION, HttpHeaderValues.CLOSE);
         LOG.info("Proxy " + uri + " failed. Cause: ", future.cause());
         ctx.writeAndFlush(resp).addListener(ChannelFutureListener.CLOSE);
         client.close();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ExceptionHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ExceptionHandler.java
index 02ec25c13c8..6fe0851bb3f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ExceptionHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ExceptionHandler.java
@@ -33,8 +33,8 @@ import org.apache.hadoop.security.token.SecretManager;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 
-import static io.netty.handler.codec.http.HttpHeaders.Names.CONTENT_LENGTH;
-import static io.netty.handler.codec.http.HttpHeaders.Names.CONTENT_TYPE;
+import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_LENGTH;
+import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_TYPE;
 import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST;
 import static io.netty.handler.codec.http.HttpResponseStatus.FORBIDDEN;
 import static io.netty.handler.codec.http.HttpResponseStatus.INTERNAL_SERVER_ERROR;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/HdfsWriter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/HdfsWriter.java
index 3d928ff3eed..f01479a9b92 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/HdfsWriter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/HdfsWriter.java
@@ -30,8 +30,8 @@ import org.apache.hadoop.io.IOUtils;
 import java.io.IOException;
 import java.io.OutputStream;
 
-import static io.netty.handler.codec.http.HttpHeaders.Names.CONNECTION;
-import static io.netty.handler.codec.http.HttpHeaders.Values.CLOSE;
+import static io.netty.handler.codec.http.HttpHeaderNames.CONNECTION;
+import static io.netty.handler.codec.http.HttpHeaderValues.CLOSE;
 
 class HdfsWriter extends SimpleChannelInboundHandler<HttpContent> {
   private final DFSClient client;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java
index 75671628408..8de736ac0e2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java
@@ -62,17 +62,17 @@ import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.EnumSet;
 
-import static io.netty.handler.codec.http.HttpHeaders.Names.ACCEPT;
-import static io.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_ALLOW_HEADERS;
-import static io.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_ALLOW_METHODS;
-import static io.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_ALLOW_ORIGIN;
-import static io.netty.handler.codec.http.HttpHeaders.Names.ACCESS_CONTROL_MAX_AGE;
-import static io.netty.handler.codec.http.HttpHeaders.Names.CONNECTION;
-import static io.netty.handler.codec.http.HttpHeaders.Names.CONTENT_LENGTH;
-import static io.netty.handler.codec.http.HttpHeaders.Names.CONTENT_TYPE;
-import static io.netty.handler.codec.http.HttpHeaders.Names.LOCATION;
-import static io.netty.handler.codec.http.HttpHeaders.Values.CLOSE;
-import static io.netty.handler.codec.http.HttpHeaders.Values.KEEP_ALIVE;
+import static io.netty.handler.codec.http.HttpHeaderNames.ACCEPT;
+import static io.netty.handler.codec.http.HttpHeaderNames.ACCESS_CONTROL_ALLOW_HEADERS;
+import static io.netty.handler.codec.http.HttpHeaderNames.ACCESS_CONTROL_ALLOW_METHODS;
+import static io.netty.handler.codec.http.HttpHeaderNames.ACCESS_CONTROL_ALLOW_ORIGIN;
+import static io.netty.handler.codec.http.HttpHeaderNames.ACCESS_CONTROL_MAX_AGE;
+import static io.netty.handler.codec.http.HttpHeaderNames.CONNECTION;
+import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_LENGTH;
+import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_TYPE;
+import static io.netty.handler.codec.http.HttpHeaderNames.LOCATION;
+import static io.netty.handler.codec.http.HttpHeaderValues.CLOSE;
+import static io.netty.handler.codec.http.HttpHeaderValues.KEEP_ALIVE;
 import static io.netty.handler.codec.http.HttpMethod.GET;
 import static io.netty.handler.codec.http.HttpMethod.OPTIONS;
 import static io.netty.handler.codec.http.HttpMethod.POST;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
index bfa35aaf1e8..963406771b1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
@@ -40,10 +40,10 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Map;
 
-import static io.netty.handler.codec.http.HttpHeaders.Names.CONNECTION;
-import static io.netty.handler.codec.http.HttpHeaders.Names.CONTENT_LENGTH;
-import static io.netty.handler.codec.http.HttpHeaders.Names.CONTENT_TYPE;
-import static io.netty.handler.codec.http.HttpHeaders.Values.CLOSE;
+import static io.netty.handler.codec.http.HttpHeaderNames.CONNECTION;
+import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_LENGTH;
+import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_TYPE;
+import static io.netty.handler.codec.http.HttpHeaderValues.CLOSE;
 import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST;
 import static io.netty.handler.codec.http.HttpResponseStatus.FORBIDDEN;
 import static io.netty.handler.codec.http.HttpResponseStatus.INTERNAL_SERVER_ERROR;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/TestDatanodeHttpXFrame.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/TestDatanodeHttpXFrame.java
index 62827a2e5d3..b6ad5a9a5d6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/TestDatanodeHttpXFrame.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/TestDatanodeHttpXFrame.java
@@ -57,8 +57,7 @@ public class TestDatanodeHttpXFrame {
     cluster = createCluster(xFrameEnabled, null);
     HttpURLConnection conn = getConn(cluster);
     String xfoHeader = conn.getHeaderField("X-FRAME-OPTIONS");
-    Assert.assertTrue("X-FRAME-OPTIONS is absent in the header",
-        xfoHeader != null);
+    Assert.assertNotNull("X-FRAME-OPTIONS is absent in the header", xfoHeader);
     Assert.assertTrue(xfoHeader.endsWith(HttpServer2.XFrameOption
         .SAMEORIGIN.toString()));
   }
@@ -69,7 +68,7 @@ public class TestDatanodeHttpXFrame {
     cluster = createCluster(xFrameEnabled, null);
     HttpURLConnection conn = getConn(cluster);
     String xfoHeader = conn.getHeaderField("X-FRAME-OPTIONS");
-    Assert.assertTrue("unexpected X-FRAME-OPTION in header", xfoHeader == null);
+    Assert.assertNull("unexpected X-FRAME-OPTION in header", xfoHeader);
   }
 
   @Test
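For reviewers unfamiliar with the Netty 4.1 API: every import hunk above applies the same mechanical substitution. The deprecated `HttpHeaders.Names`/`HttpHeaders.Values` String constants are replaced by the `HttpHeaderNames`/`HttpHeaderValues` `AsciiString` constants, which the `HttpHeaders.set(CharSequence, Object)` overload accepts directly. A minimal standalone sketch of the before/after usage (the class name `HeaderNamesMigration` is illustrative, not part of this patch):

```java
import io.netty.handler.codec.http.DefaultHttpResponse;
import io.netty.handler.codec.http.HttpResponse;

import static io.netty.handler.codec.http.HttpHeaderNames.CONNECTION;
import static io.netty.handler.codec.http.HttpHeaderValues.CLOSE;
import static io.netty.handler.codec.http.HttpResponseStatus.INTERNAL_SERVER_ERROR;
import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;

public final class HeaderNamesMigration {
  public static void main(String[] args) {
    HttpResponse resp = new DefaultHttpResponse(HTTP_1_1, INTERNAL_SERVER_ERROR);
    // Netty 4.0 style, now deprecated:
    //   resp.headers().set(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.CLOSE);
    // Netty 4.1 style: the AsciiString constants go through the same
    // HttpHeaders.set(CharSequence, Object) overload, so call sites are unchanged
    // apart from the imports.
    resp.headers().set(CONNECTION, CLOSE);
    System.out.println(resp.headers().get(CONNECTION)); // prints "close"
  }
}
```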
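The test-side change in TestDatanodeHttpXFrame.java is likewise behavior-preserving: `Assert.assertNotNull`/`Assert.assertNull` fail in exactly the cases the old `assertTrue(msg, x != null)` / `assertTrue(msg, x == null)` forms did, just with clearer intent. A small sketch of the equivalence (class and variable names are illustrative):

```java
import org.junit.Assert;

public final class AssertionStyleSketch {
  public static void main(String[] args) {
    String xfoHeader = "SAMEORIGIN";
    // Old style: a boolean assertion wrapping a null check.
    Assert.assertTrue("X-FRAME-OPTIONS is absent in the header", xfoHeader != null);
    // New style: identical pass/fail behavior, and the assertion
    // states the expectation directly.
    Assert.assertNotNull("X-FRAME-OPTIONS is absent in the header", xfoHeader);
  }
}
```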