HBASE-25770 Http InfoServers should honor gzip encoding when requested (#3159)

Signed-off-by: Duo Zhang <zhangduo@apache.org>
Signed-off-by: Josh Elser <elserj@apache.org>
This commit is contained in:
Nick Dimiduk 2021-04-15 09:07:13 -07:00 committed by Nick Dimiduk
parent 819062bac3
commit 32207cd478
4 changed files with 95 additions and 5 deletions

View File

@ -220,6 +220,11 @@
<artifactId>junit</artifactId> <artifactId>junit</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-library</artifactId>
<scope>test</scope>
</dependency>
<dependency> <dependency>
<groupId>org.mockito</groupId> <groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId> <artifactId>mockito-core</artifactId>

View File

@ -66,7 +66,6 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability; import org.apache.yetus.audience.InterfaceStability;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists; import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
import org.apache.hbase.thirdparty.org.eclipse.jetty.http.HttpVersion; import org.apache.hbase.thirdparty.org.eclipse.jetty.http.HttpVersion;
@ -81,6 +80,7 @@ import org.apache.hbase.thirdparty.org.eclipse.jetty.server.SslConnectionFactory
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.handler.ContextHandlerCollection; import org.apache.hbase.thirdparty.org.eclipse.jetty.server.handler.ContextHandlerCollection;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.handler.HandlerCollection; import org.apache.hbase.thirdparty.org.eclipse.jetty.server.handler.HandlerCollection;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.handler.RequestLogHandler; import org.apache.hbase.thirdparty.org.eclipse.jetty.server.handler.RequestLogHandler;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.handler.gzip.GzipHandler;
import org.apache.hbase.thirdparty.org.eclipse.jetty.servlet.DefaultServlet; import org.apache.hbase.thirdparty.org.eclipse.jetty.servlet.DefaultServlet;
import org.apache.hbase.thirdparty.org.eclipse.jetty.servlet.FilterHolder; import org.apache.hbase.thirdparty.org.eclipse.jetty.servlet.FilterHolder;
import org.apache.hbase.thirdparty.org.eclipse.jetty.servlet.FilterMapping; import org.apache.hbase.thirdparty.org.eclipse.jetty.servlet.FilterMapping;
@ -575,6 +575,7 @@ public class HttpServer implements FilterContainer {
this.findPort = b.findPort; this.findPort = b.findPort;
this.authenticationEnabled = b.securityEnabled; this.authenticationEnabled = b.securityEnabled;
initializeWebServer(b.name, b.hostName, b.conf, b.pathSpecs, b); initializeWebServer(b.name, b.hostName, b.conf, b.pathSpecs, b);
this.webServer.setHandler(buildGzipHandler(this.webServer.getHandler()));
} }
private void initializeWebServer(String name, String hostName, private void initializeWebServer(String name, String hostName,
@ -662,6 +663,23 @@ public class HttpServer implements FilterContainer {
return ctx; return ctx;
} }
/**
 * Build a {@link GzipHandler} wrapping {@code wrapped}. For complicated deployments involving
 * several {@link WebAppContext} instances, the simplest approach is to install this handler
 * directly on the {@link Server} as the final step of its configuration, along the lines of
 * <pre>
 * Server server = new Server();
 * //...
 * server.setHandler(buildGzipHandler(server.getHandler()));
 * server.start();
 * </pre>
 */
public static GzipHandler buildGzipHandler(final Handler wrapped) {
  final GzipHandler handler = new GzipHandler();
  handler.setHandler(wrapped);
  return handler;
}
private static void addNoCacheFilter(WebAppContext ctxt) { private static void addNoCacheFilter(WebAppContext ctxt) {
defineFilter(ctxt, NO_CACHE_FILTER, NoCacheFilter.class.getName(), defineFilter(ctxt, NO_CACHE_FILTER, NoCacheFilter.class.getName(),
Collections.<String, String> emptyMap(), new String[] { "/*" }); Collections.<String, String> emptyMap(), new String[] { "/*" });

View File

@ -1,4 +1,4 @@
/** /*
* Licensed to the Apache Software Foundation (ASF) under one * Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file * or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information * distributed with this work for additional information
@ -17,11 +17,16 @@
*/ */
package org.apache.hadoop.hbase.http; package org.apache.hadoop.hbase.http;
import static org.hamcrest.Matchers.greaterThan;
import java.io.BufferedReader;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter; import java.io.PrintWriter;
import java.net.HttpURLConnection; import java.net.HttpURLConnection;
import java.net.URI; import java.net.URI;
import java.net.URL; import java.net.URL;
import java.nio.CharBuffer;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.Enumeration; import java.util.Enumeration;
@ -56,6 +61,13 @@ import org.apache.hadoop.security.Groups;
import org.apache.hadoop.security.ShellBasedUnixGroupsMapping; import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHeaders;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.hamcrest.MatcherAssert;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.Assert; import org.junit.Assert;
import org.junit.BeforeClass; import org.junit.BeforeClass;
@ -66,7 +78,6 @@ import org.junit.experimental.categories.Category;
import org.mockito.Mockito; import org.mockito.Mockito;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.ServerConnector; import org.apache.hbase.thirdparty.org.eclipse.jetty.server.ServerConnector;
import org.apache.hbase.thirdparty.org.eclipse.jetty.util.ajax.JSON; import org.apache.hbase.thirdparty.org.eclipse.jetty.util.ajax.JSON;
@ -269,6 +280,60 @@ public class TestHttpServer extends HttpServerFunctionalTest {
// assertEquals("text/html; charset=utf-8", conn.getContentType()); // assertEquals("text/html; charset=utf-8", conn.getContentType());
} }
@Test
public void testNegotiatesEncodingGzip() throws IOException {
  // Baseline: read the resource directly off the classpath.
  final InputStream stream = ClassLoader.getSystemResourceAsStream("webapps/static/test.css");
  assertNotNull(stream);
  final String expectedContent = readFully(stream);

  try (final CloseableHttpClient client = HttpClients.createMinimal()) {
    final String target = new URL(baseUrl, "/static/test.css").toString();

    // First, fetch without advertising any accepted content encoding.
    final HttpGet plainGet = new HttpGet(target);
    plainGet.setHeader(HttpHeaders.ACCEPT_ENCODING, null);
    final long plainLength;
    try (final CloseableHttpResponse response = client.execute(plainGet)) {
      final HttpEntity entity = response.getEntity();
      assertNotNull(entity);
      assertNull(entity.getContentEncoding());
      plainLength = entity.getContentLength();
      MatcherAssert.assertThat(plainLength, greaterThan(0L));
      assertEquals(expectedContent, readFully(entity.getContent()));
    }

    // Then, fetch again while advertising gzip support.
    final HttpGet gzipGet = new HttpGet(target);
    gzipGet.setHeader(HttpHeaders.ACCEPT_ENCODING, "gzip");
    final long gzipLength;
    try (final CloseableHttpResponse response = client.execute(gzipGet)) {
      final HttpEntity entity = response.getEntity();
      assertNotNull(entity);
      assertNotNull(entity.getContentEncoding());
      assertEquals("gzip", entity.getContentEncoding().getValue());
      gzipLength = entity.getContentLength();
      MatcherAssert.assertThat(gzipLength, greaterThan(0L));
      // the encoding/decoding process, as implemented in this specific combination of dependency
      // versions, does not perfectly preserve trailing whitespace. thus, `trim()`.
      assertEquals(expectedContent.trim(), readFully(entity.getContent()).trim());
    }

    // The compressed payload must come back smaller than the plain one.
    MatcherAssert.assertThat(plainLength, greaterThan(gzipLength));
  }
}
/**
 * Drain {@code input} into a String, decoding with the platform default charset.
 * <p>
 * NOTE(review): both values that callers compare pass through this method, so the
 * platform-default charset is at least applied consistently; an explicit
 * {@code StandardCharsets.UTF_8} would still be safer — confirm and tighten separately.
 * @param input the stream to consume; closed before this method returns.
 * @return the decoded contents of the stream.
 * @throws IOException if reading from the stream fails.
 */
private static String readFully(final InputStream input) throws IOException {
  // TODO: when the time comes, delete me and replace with a JDK11 IO helper API.
  try (final BufferedReader reader = new BufferedReader(new InputStreamReader(input))) {
    final StringBuilder sb = new StringBuilder();
    final CharBuffer buffer = CharBuffer.allocate(1024 * 2);
    while (reader.read(buffer) > 0) {
      // Flip before appending: viewed as a CharSequence, a CharBuffer spans position..limit.
      // After read(buffer), position sits past the chars just read, so appending without
      // flip() would copy the buffer's unwritten tail (NUL padding) instead of the data.
      buffer.flip();
      sb.append(buffer);
      buffer.clear();
    }
    return sb.toString();
  } finally {
    // Redundant with try-with-resources on the happy path, but covers a reader-construction
    // failure, matching the original contract that input is always closed.
    input.close();
  }
}
/** /**
* Dummy filter that mimics as an authentication filter. Obtains user identity * Dummy filter that mimics as an authentication filter. Obtains user identity
* from the request parameter user.name. Wraps around the request so that * from the request parameter user.name. Wraps around the request so that

View File

@ -89,10 +89,10 @@ import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableState; import org.apache.hadoop.hbase.client.TableState;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.exceptions.MasterStoppedException; import org.apache.hadoop.hbase.exceptions.MasterStoppedException;
import org.apache.hadoop.hbase.executor.ExecutorService.ExecutorConfig;
import org.apache.hadoop.hbase.executor.ExecutorType; import org.apache.hadoop.hbase.executor.ExecutorType;
import org.apache.hadoop.hbase.favored.FavoredNodesManager; import org.apache.hadoop.hbase.favored.FavoredNodesManager;
import org.apache.hadoop.hbase.favored.FavoredNodesPromoter; import org.apache.hadoop.hbase.favored.FavoredNodesPromoter;
import org.apache.hadoop.hbase.http.HttpServer;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException; import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
@ -543,7 +543,8 @@ public class HMaster extends HRegionServer implements MasterServices {
if (infoPort < 0 || infoServer == null) { if (infoPort < 0 || infoServer == null) {
return -1; return -1;
} }
if(infoPort == infoServer.getPort()) { if (infoPort == infoServer.getPort()) {
// server is already running
return infoPort; return infoPort;
} }
final String addr = conf.get("hbase.master.info.bindAddress", "0.0.0.0"); final String addr = conf.get("hbase.master.info.bindAddress", "0.0.0.0");
@ -565,6 +566,7 @@ public class HMaster extends HRegionServer implements MasterServices {
connector.setPort(infoPort); connector.setPort(infoPort);
masterJettyServer.addConnector(connector); masterJettyServer.addConnector(connector);
masterJettyServer.setStopAtShutdown(true); masterJettyServer.setStopAtShutdown(true);
masterJettyServer.setHandler(HttpServer.buildGzipHandler(masterJettyServer.getHandler()));
final String redirectHostname = final String redirectHostname =
StringUtils.isBlank(useThisHostnameInstead) ? null : useThisHostnameInstead; StringUtils.isBlank(useThisHostnameInstead) ? null : useThisHostnameInstead;