HBASE-17566 Jetty upgrade fixes
Fix issues caused by HBASE-12894 Upgrade Jetty to 9.2.6 commit.
- removed Jetty 6.x dependencies (org.mortbay.jetty.*)
- corrected @Ignore-d unit tests

Signed-off-by: Michael Stack <stack@apache.org>
commit e68ab09d5e
parent ffe7dac53d
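For context, a minimal sketch of the Jetty 6 to Jetty 9 migration pattern applied throughout this change: SelectChannelConnector is replaced by ServerConnector (which is bound to its Server at construction), org.mortbay.jetty.servlet.Context is replaced by WebAppContext, and DefaultServlet now comes from org.eclipse.jetty. The class name, bind address, port, and resource base below are illustrative placeholders, not HBase code or configuration.

import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.servlet.DefaultServlet;
import org.eclipse.jetty.webapp.WebAppContext;

public class JettyMigrationSketch {
  public static void main(String[] args) throws Exception {
    // Jetty 9 binds the connector to its Server at construction time,
    // replacing the Jetty 6 SelectChannelConnector + addConnector idiom.
    Server server = new Server();
    ServerConnector connector = new ServerConnector(server);
    connector.setHost("0.0.0.0");  // placeholder bind address
    connector.setPort(16010);      // placeholder port
    server.addConnector(connector);
    server.setStopAtShutdown(true);

    // WebAppContext replaces org.mortbay.jetty.servlet.Context as the
    // servlet context; DefaultServlet is the org.eclipse.jetty variant.
    WebAppContext context = new WebAppContext();
    context.setContextPath("/");
    context.setResourceBase(System.getProperty("java.io.tmpdir"));  // placeholder resource base
    context.addServlet(DefaultServlet.class, "/*");
    context.setServer(server);
    server.setHandler(context);

    server.start();
    server.join();
  }
}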
@@ -22,11 +22,12 @@ package org.apache.hadoop.hbase.rest.client;
 import java.io.IOException;
 import java.io.InputStream;
 
+import org.apache.http.Header;
+import org.apache.http.HttpResponse;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.mortbay.log.Log;
-import org.apache.http.Header;
-import org.apache.http.HttpResponse;
 
 /**
  * The HTTP result code, response headers, and body of a HTTP response.
@@ -34,6 +35,8 @@ import org.mortbay.log.Log;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class Response {
+  private static final Log LOG = LogFactory.getLog(Response.class);
+
   private int code;
   private Header[] headers;
   private byte[] body;
@@ -139,7 +142,7 @@ public class Response {
       try {
         body = Client.getResponseBody(resp);
       } catch (IOException ioe) {
-        Log.debug("encountered ioe when obtaining body", ioe);
+        LOG.debug("encountered ioe when obtaining body", ioe);
       }
     }
     return body;
@@ -25,7 +25,7 @@ import javax.servlet.http.HttpServletResponse;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.mortbay.jetty.servlet.DefaultServlet;
+import org.eclipse.jetty.servlet.DefaultServlet;
 
 /**
  * General servlet which is admin-authorized.
@@ -534,22 +534,23 @@ public class HttpServer implements FilterContainer {
 
     Preconditions.checkNotNull(webAppContext);
 
+    HandlerCollection handlerCollection = new HandlerCollection();
+
     ContextHandlerCollection contexts = new ContextHandlerCollection();
     RequestLog requestLog = HttpRequestLog.getRequestLog(name);
 
     if (requestLog != null) {
       RequestLogHandler requestLogHandler = new RequestLogHandler();
       requestLogHandler.setRequestLog(requestLog);
-      HandlerCollection handlers = new HandlerCollection();
-      handlers.setHandlers(new Handler[] { requestLogHandler, contexts });
-      webServer.setHandler(handlers);
-    } else {
-      webServer.setHandler(contexts);
+      handlerCollection.addHandler(requestLogHandler);
     }
 
     final String appDir = getWebAppsPath(name);
 
-    webServer.setHandler(webAppContext);
+    handlerCollection.addHandler(contexts);
+    handlerCollection.addHandler(webAppContext);
+
+    webServer.setHandler(handlerCollection);
 
     addDefaultApps(contexts, appDir, conf);
 
@@ -629,14 +630,13 @@ public class HttpServer implements FilterContainer {
       logDir = System.getProperty("hadoop.log.dir");
     }
     if (logDir != null) {
-      ServletContextHandler logContext = new ServletContextHandler(parent, "/*");
+      ServletContextHandler logContext = new ServletContextHandler(parent, "/logs");
       logContext.addServlet(AdminAuthorizedServlet.class, "/*");
       logContext.setResourceBase(logDir);
 
       if (conf.getBoolean(
           ServerConfigurationKeys.HBASE_JETTY_LOGS_SERVE_ALIASES,
           ServerConfigurationKeys.DEFAULT_HBASE_JETTY_LOGS_SERVE_ALIASES)) {
-        @SuppressWarnings("unchecked")
         Map<String, String> params = logContext.getInitParams();
         params.put(
             "org.mortbay.jetty.servlet.Default.aliases", "true");
@@ -1260,7 +1260,6 @@ public class HttpServer implements FilterContainer {
     /**
      * Return the set of parameter names, quoting each name.
      */
-    @SuppressWarnings("unchecked")
     @Override
     public Enumeration<String> getParameterNames() {
       return new Enumeration<String>() {
@@ -1301,7 +1300,6 @@ public class HttpServer implements FilterContainer {
       return result;
     }
 
-    @SuppressWarnings("unchecked")
     @Override
     public Map<String, String[]> getParameterMap() {
       Map<String, String[]> result = new HashMap<String,String[]>();
@@ -1,58 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.http;
-
-import org.mortbay.jetty.security.SslSocketConnector;
-
-import javax.net.ssl.SSLServerSocket;
-import java.io.IOException;
-import java.net.ServerSocket;
-import java.util.ArrayList;
-
-/**
- * This subclass of the Jetty SslSocketConnector exists solely to control
- * the TLS protocol versions allowed. This is fallout from the POODLE
- * vulnerability (CVE-2014-3566), which requires that SSLv3 be disabled.
- * Only TLS 1.0 and later protocols are allowed.
- */
-public class SslSocketConnectorSecure extends SslSocketConnector {
-
-  public SslSocketConnectorSecure() {
-    super();
-  }
-
-  /**
-   * Create a new ServerSocket that will not accept SSLv3 connections,
-   * but will accept TLSv1.x connections.
-   */
-  protected ServerSocket newServerSocket(String host, int port,int backlog)
-          throws IOException {
-    SSLServerSocket socket = (SSLServerSocket)
-            super.newServerSocket(host, port, backlog);
-    ArrayList<String> nonSSLProtocols = new ArrayList<String>();
-    for (String p : socket.getEnabledProtocols()) {
-      if (!p.contains("SSLv3")) {
-        nonSSLProtocols.add(p);
-      }
-    }
-    socket.setEnabledProtocols(nonSSLProtocols.toArray(
-            new String[nonSSLProtocols.size()]));
-    return socket;
-  }
-}
@@ -175,9 +175,10 @@ import org.apache.hadoop.hbase.zookeeper.ZKClusterId;
 import org.apache.hadoop.hbase.zookeeper.ZKUtil;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.apache.zookeeper.KeeperException;
-import org.mortbay.jetty.Connector;
-import org.mortbay.jetty.nio.SelectChannelConnector;
-import org.mortbay.jetty.servlet.Context;
+import org.eclipse.jetty.server.Connector;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.webapp.WebAppContext;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.Lists;
@@ -385,7 +386,7 @@ public class HMaster extends HRegionServer implements MasterServices {
   private FavoredNodesManager favoredNodesManager;
 
   /** jetty server for master to redirect requests to regionserver infoServer */
-  private org.mortbay.jetty.Server masterJettyServer;
+  private Server masterJettyServer;
 
   public static class RedirectServlet extends HttpServlet {
     private static final long serialVersionUID = 2894774810058302472L;
@@ -517,14 +518,17 @@ public class HMaster extends HRegionServer implements MasterServices {
     if(RedirectServlet.regionServerInfoPort == infoPort) {
       return infoPort;
     }
-    masterJettyServer = new org.mortbay.jetty.Server();
-    Connector connector = new SelectChannelConnector();
+    masterJettyServer = new Server();
+    ServerConnector connector = new ServerConnector(masterJettyServer);
     connector.setHost(addr);
     connector.setPort(infoPort);
     masterJettyServer.addConnector(connector);
     masterJettyServer.setStopAtShutdown(true);
-    Context context = new Context(masterJettyServer, "/", Context.NO_SESSIONS);
+
+    WebAppContext context = new WebAppContext(null, "/", null, null, null, null, WebAppContext.NO_SESSIONS);
     context.addServlet(RedirectServlet.class, "/*");
+    context.setServer(masterJettyServer);
+
     try {
       masterJettyServer.start();
     } catch (Exception e) {
@@ -17,35 +17,49 @@
  */
 package org.apache.hadoop.hbase.replication.regionserver;
 
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Queue;
+import java.util.Set;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.Abortable;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.Stoppable;
 import org.apache.hadoop.hbase.client.ClusterConnection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.replication.ReplicationAdmin;
 import org.apache.hadoop.hbase.io.WALLink;
 import org.apache.hadoop.hbase.procedure2.util.StringUtils;
-import org.apache.hadoop.hbase.replication.*;
 import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hadoop.hbase.replication.ReplicationException;
+import org.apache.hadoop.hbase.replication.ReplicationFactory;
+import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
+import org.apache.hadoop.hbase.replication.ReplicationPeers;
+import org.apache.hadoop.hbase.replication.ReplicationQueueInfo;
+import org.apache.hadoop.hbase.replication.ReplicationQueues;
+import org.apache.hadoop.hbase.replication.ReplicationQueuesClient;
+import org.apache.hadoop.hbase.replication.ReplicationQueuesClientArguments;
+import org.apache.hadoop.hbase.replication.ReplicationTracker;
 import org.apache.hadoop.hbase.zookeeper.ZKUtil;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.zookeeper.KeeperException;
-import org.mortbay.util.IO;
 
 import com.google.common.util.concurrent.AtomicLongMap;
 
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.util.*;
-
 /**
  * Provides information about the existing states of replication, replication peers and queues.
 *
@@ -19,13 +19,34 @@
 
 package org.apache.hadoop.hbase.client.locking;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Matchers.isA;
+import static org.mockito.Mockito.atLeastOnce;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.util.Random;
+import java.util.concurrent.TimeUnit;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.PerClientRandomNonceGenerator;
+import org.apache.hadoop.hbase.HBaseIOException;
+import org.apache.hadoop.hbase.client.PerClientRandomNonceGenerator;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.*;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockService;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Threads;
@@ -34,16 +55,11 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.mockito.ArgumentCaptor;
 import org.mockito.Mockito;
-import org.mortbay.log.Log;
-
-import static org.mockito.Mockito.*;
-import static org.junit.Assert.*;
-
-import java.util.Random;
-import java.util.concurrent.TimeUnit;
 
 @Category({ClientTests.class, SmallTests.class})
 public class TestEntityLocks {
+  private static final Log LOG = LogFactory.getLog(TestEntityLocks.class);
+
   private final Configuration conf = HBaseConfiguration.create();
 
   private final LockService.BlockingInterface master =
@@ -80,13 +96,13 @@ public class TestEntityLocks {
   private boolean waitLockTimeOut(EntityLock lock, long maxWaitTimeMillis) {
     long startMillis = System.currentTimeMillis();
     while (lock.isLocked()) {
-      Log.info("Sleeping...");
+      LOG.info("Sleeping...");
       Threads.sleepWithoutInterrupt(100);
       if (!lock.isLocked()) {
         return true;
       }
       if (System.currentTimeMillis() - startMillis > maxWaitTimeMillis) {
-        Log.info("Timedout...");
+        LOG.info("Timedout...");
         return false;
       }
     }
@@ -40,7 +40,7 @@ public class HttpServerFunctionalTest extends Assert {
   /** JVM property for the webapp test dir : {@value} */
   public static final String TEST_BUILD_WEBAPPS = "test.build.webapps";
   /** expected location of the test.build.webapps dir: {@value} */
-  private static final String BUILD_WEBAPPS_DIR = "build/test/webapps";
+  private static final String BUILD_WEBAPPS_DIR = "src/main/resources/hbase-webapps";
 
   /** name of the test webapp: {@value} */
   private static final String TEST = "test";
@@ -17,9 +17,6 @@
  */
 package org.apache.hadoop.hbase.http;
 
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.mock;
-
 import java.io.IOException;
 import java.io.PrintWriter;
 import java.net.HttpURLConnection;
@@ -48,22 +45,23 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletRequestWrapper;
 import javax.servlet.http.HttpServletResponse;
 
-import junit.framework.Assert;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.http.HttpServer.QuotingInputFilter.RequestQuoter;
 import org.apache.hadoop.hbase.http.resource.JerseyResource;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.Groups;
 import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.util.ajax.JSON;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Test;
@@ -71,9 +69,6 @@ import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;
 import org.mockito.internal.util.reflection.Whitebox;
 
-import org.eclipse.jetty.server.ServerConnector;
-import org.eclipse.jetty.util.ajax.JSON;
-
 @Category({MiscTests.class, SmallTests.class})
 public class TestHttpServer extends HttpServerFunctionalTest {
   private static final Log LOG = LogFactory.getLog(TestHttpServer.class);
@@ -83,7 +78,6 @@ public class TestHttpServer extends HttpServerFunctionalTest {
 
   @SuppressWarnings("serial")
   public static class EchoMapServlet extends HttpServlet {
-    @SuppressWarnings("unchecked")
     @Override
     public void doGet(HttpServletRequest request,
                       HttpServletResponse response
@@ -110,7 +104,6 @@ public class TestHttpServer extends HttpServerFunctionalTest {
 
   @SuppressWarnings("serial")
   public static class EchoServlet extends HttpServlet {
-    @SuppressWarnings("unchecked")
     @Override
     public void doGet(HttpServletRequest request,
                       HttpServletResponse response
@@ -238,7 +231,6 @@ public class TestHttpServer extends HttpServerFunctionalTest {
   }
 
   @Test
-  @Ignore
   public void testContentTypes() throws Exception {
     // Static CSS files should have text/css
     URL cssUrl = new URL(baseUrl, "/static/test.css");
@@ -252,7 +244,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
     conn = (HttpURLConnection)servletUrl.openConnection();
     conn.connect();
     assertEquals(200, conn.getResponseCode());
-    assertEquals("text/plain; charset=utf-8", conn.getContentType());
+    assertEquals("text/plain;charset=utf-8", conn.getContentType());
 
     // We should ignore parameters for mime types - ie a parameter
     // ending in .css should not change mime type
@@ -260,21 +252,22 @@ public class TestHttpServer extends HttpServerFunctionalTest {
     conn = (HttpURLConnection)servletUrl.openConnection();
     conn.connect();
     assertEquals(200, conn.getResponseCode());
-    assertEquals("text/plain; charset=utf-8", conn.getContentType());
+    assertEquals("text/plain;charset=utf-8", conn.getContentType());
 
     // Servlets that specify text/html should get that content type
     servletUrl = new URL(baseUrl, "/htmlcontent");
     conn = (HttpURLConnection)servletUrl.openConnection();
     conn.connect();
     assertEquals(200, conn.getResponseCode());
-    assertEquals("text/html; charset=utf-8", conn.getContentType());
+    assertEquals("text/html;charset=utf-8", conn.getContentType());
 
     // JSPs should default to text/html with utf8
-    servletUrl = new URL(baseUrl, "/testjsp.jsp");
-    conn = (HttpURLConnection)servletUrl.openConnection();
-    conn.connect();
-    assertEquals(200, conn.getResponseCode());
-    assertEquals("text/html; charset=utf-8", conn.getContentType());
+    // JSPs do not work from unit tests
+    // servletUrl = new URL(baseUrl, "/testjsp.jsp");
+    // conn = (HttpURLConnection)servletUrl.openConnection();
+    // conn.connect();
+    // assertEquals(200, conn.getResponseCode());
+    // assertEquals("text/html; charset=utf-8", conn.getContentType());
   }
 
   /**
@@ -183,7 +183,7 @@ public class TestServletFilter extends HttpServerFunctionalTest {
       http.start();
       fail("expecting exception");
     } catch (IOException e) {
-      GenericTestUtils.assertExceptionContains("Unable to initialize WebAppContext", e);
+      GenericTestUtils.assertExceptionContains("Problem starting http server", e);
     }
   }
 
@@ -29,9 +29,9 @@ import junit.framework.TestCase;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.eclipse.jetty.util.ajax.JSON;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.mortbay.util.ajax.JSON;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 import org.w3c.dom.Node;
@@ -32,7 +32,7 @@ import javax.ws.rs.core.Response;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.mortbay.util.ajax.JSON;
+import org.eclipse.jetty.util.ajax.JSON;
 
 /**
  * A simple Jersey resource class TestHttpServer.