From afb753ecc3a94a5824a510121aa186948fb317df Mon Sep 17 00:00:00 2001
From: Enis Soztutar
Date: Mon, 15 Dec 2014 11:09:20 -0800
Subject: [PATCH] HBASE-12683 Compilation with hadoop-2.7.0-SNAPSHOT is broken

---
 .../org/apache/hadoop/hbase/util/Threads.java |  45 ++++++-
 .../apache/hadoop/hbase/http/HttpServer.java  | 120 +++++++++---------
 .../hbase/master/MasterDumpServlet.java       |  29 +++--
 .../hbase/regionserver/RSDumpServlet.java     |  31 +++--
 .../hadoop/hbase/util/JVMClusterUtil.java     |   6 +-
 5 files changed, 141 insertions(+), 90 deletions(-)

diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
index 85cadd7521f..81178c4e6bc 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
@@ -18,8 +18,11 @@
  */
 package org.apache.hadoop.hbase.util;
 
+import java.io.PrintStream;
 import java.io.PrintWriter;
 import java.lang.Thread.UncaughtExceptionHandler;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.ThreadPoolExecutor;
@@ -127,7 +130,7 @@ public class Threads {
     while (t.isAlive()) {
       t.join(60 * 1000);
       if (t.isAlive()) {
-        ReflectionUtils.printThreadInfo(new PrintWriter(System.out),
+        printThreadInfo(System.out,
           "Automatic Stack Trace every 60 seconds waiting on " + t.getName());
       }
     }
@@ -262,4 +265,44 @@ public class Threads {
   public static void setLoggingUncaughtExceptionHandler(Thread t) {
     t.setUncaughtExceptionHandler(LOGGING_EXCEPTION_HANDLER);
   }
+
+  private static Method printThreadInfoMethod = null;
+  private static boolean printThreadInfoMethodWithPrintStream = true;
+
+  /**
+   * Print all of the thread's information and stack traces. Wrapper around Hadoop's method.
+   *
+   * @param stream the stream to print to
+   * @param title a string title for the stack trace
+   */
+  public static void printThreadInfo(PrintStream stream, String title) {
+
+    if (printThreadInfoMethod == null) {
+      try {
+        // Hadoop 2.7+ declares printThreadInfo(PrintStream, String)
+        printThreadInfoMethod = ReflectionUtils.class.getMethod("printThreadInfo",
+          PrintStream.class, String.class);
+      } catch (NoSuchMethodException e) {
+        // Hadoop 2.6 and earlier declares printThreadInfo(PrintWriter, String)
+        printThreadInfoMethodWithPrintStream = false;
+        try {
+          printThreadInfoMethod = ReflectionUtils.class.getMethod("printThreadInfo",
+            PrintWriter.class, String.class);
+        } catch (NoSuchMethodException e1) {
+          throw new RuntimeException("Cannot find method. Check hadoop jars linked", e1);
+        }
+      }
+      printThreadInfoMethod.setAccessible(true);
+    }
+
+    try {
+      if (printThreadInfoMethodWithPrintStream) {
+        printThreadInfoMethod.invoke(null, stream, title);
+      } else {
+        printThreadInfoMethod.invoke(null, new PrintWriter(stream), title);
+      }
+    } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
+      throw new RuntimeException(e.getCause());
+    }
+  }
 }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
index 68524ab8aad..c0bf6f985c5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.http;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InterruptedIOException;
+import java.io.PrintStream;
 import java.io.PrintWriter;
 import java.net.BindException;
 import java.net.InetSocketAddress;
@@ -56,6 +57,7 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.http.conf.ConfServlet;
 import org.apache.hadoop.hbase.http.jmx.JMXJsonServlet;
 import org.apache.hadoop.hbase.http.log.LogLevel;
+import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.metrics.MetricsServlet;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -205,7 +207,7 @@ public class HttpServer implements FilterContainer {
       this.hostName = hostName;
       return this;
     }
-    
+
     public Builder trustStore(String location, String password, String type) {
       this.trustStore = location;
       this.trustStorePassword = password;
@@ -260,37 +262,37 @@ public class HttpServer implements FilterContainer {
       this.port = port;
       return this;
     }
-    
+
     public Builder setFindPort(boolean findPort) {
       this.findPort = findPort;
       return this;
     }
-    
+
     public Builder setConf(Configuration conf) {
       this.conf = conf;
       return this;
     }
-    
+
     public Builder setConnector(Connector connector) {
       this.connector = connector;
       return this;
     }
-    
+
     public Builder setPathSpec(String[] pathSpec) {
       this.pathSpecs = pathSpec;
      return this;
     }
-    
+
     public Builder setACL(AccessControlList acl) {
       this.adminsAcl = acl;
       return this;
     }
-    
+
     public Builder setSecurityEnabled(boolean securityEnabled) {
       this.securityEnabled = securityEnabled;
       return this;
     }
-    
+
     public Builder setUsernameConfKey(String usernameConfKey) {
       this.usernameConfKey = usernameConfKey;
       return this;
@@ -335,7 +337,7 @@ public class HttpServer implements FilterContainer {
         hostName = endpoints.size() == 0 ? connector.getHost() : endpoints.get(
             0).getHost();
       }
-      
+
       if (this.conf == null) {
         conf = new Configuration();
       }
@@ -388,7 +390,7 @@ public class HttpServer implements FilterContainer {
     }
   }
-  
+
   /** Same as this(name, bindAddress, port, findPort, null); */
   @Deprecated
   public HttpServer(String name, String bindAddress, int port, boolean findPort
@@ -405,15 +407,15 @@ public class HttpServer implements FilterContainer {
   /**
    * Create a status server on the given port. Allows you to specify the
    * path specifications that this server will be serving so that they will be
-   * added to the filters properly. 
-   * 
+   * added to the filters properly.
+   *
    * @param name The name of the server
    * @param bindAddress The address for this server
    * @param port The port to use on the server
-   * @param findPort whether the server should start at the given port and 
+   * @param findPort whether the server should start at the given port and
    * increment by 1 until it finds a free port.
-   * @param conf Configuration 
-   * @param pathSpecs Path specifications that this httpserver will be serving. 
+   * @param conf Configuration
+   * @param pathSpecs Path specifications that this httpserver will be serving.
    * These will be added to any filters.
    */
   @Deprecated
   public HttpServer(String name, String bindAddress, int port,
     boolean findPort, Configuration conf, String[] pathSpecs) throws IOException {
     this(name, bindAddress, port, findPort, conf, null, null, pathSpecs);
   }
-  
+
   /**
    * Create a status server on the given port.
    * The jsp scripts are taken from src/webapps/.
    * @param name The name of the server
    * @param port The port to use on the server
-   * @param findPort whether the server should start at the given port and 
+   * @param findPort whether the server should start at the given port and
    * increment by 1 until it finds a free port.
-   * @param conf Configuration 
+   * @param conf Configuration
    */
   @Deprecated
   public HttpServer(String name, String bindAddress, int port,
       boolean findPort, Configuration conf) throws IOException {
     this(name, bindAddress, port, findPort, conf, null, null, null);
   }
 
@@ -439,7 +441,7 @@ public class HttpServer implements FilterContainer {
   @Deprecated
   public HttpServer(String name, String bindAddress, int port,
-      boolean findPort, Configuration conf, AccessControlList adminsAcl) 
+      boolean findPort, Configuration conf, AccessControlList adminsAcl)
       throws IOException {
     this(name, bindAddress, port, findPort, conf, adminsAcl, null, null);
   }
 
@@ -450,15 +452,15 @@ public class HttpServer implements FilterContainer {
    * @param name The name of the server
    * @param bindAddress The address for this server
    * @param port The port to use on the server
-   * @param findPort whether the server should start at the given port and 
+   * @param findPort whether the server should start at the given port and
    * increment by 1 until it finds a free port.
-   * @param conf Configuration 
+   * @param conf Configuration
    * @param adminsAcl {@link AccessControlList} of the admins
    * @param connector The jetty {@link Connector} to use
    */
   @Deprecated
   public HttpServer(String name, String bindAddress, int port,
-      boolean findPort, Configuration conf, AccessControlList adminsAcl, 
+      boolean findPort, Configuration conf, AccessControlList adminsAcl,
       Connector connector) throws IOException {
     this(name, bindAddress, port, findPort, conf, adminsAcl, connector, null);
   }
 
@@ -469,17 +471,17 @@ public class HttpServer implements FilterContainer {
    * @param name The name of the server
    * @param bindAddress The address for this server
    * @param port The port to use on the server
-   * @param findPort whether the server should start at the given port and 
+   * @param findPort whether the server should start at the given port and
    * increment by 1 until it finds a free port.
-   * @param conf Configuration 
+   * @param conf Configuration
    * @param adminsAcl {@link AccessControlList} of the admins
    * @param connector A jetty connection listener
-   * @param pathSpecs Path specifications that this httpserver will be serving. 
+   * @param pathSpecs Path specifications that this httpserver will be serving.
    * These will be added to any filters.
    */
   @Deprecated
   public HttpServer(String name, String bindAddress, int port,
-      boolean findPort, Configuration conf, AccessControlList adminsAcl, 
+      boolean findPort, Configuration conf, AccessControlList adminsAcl,
       Connector connector, String[] pathSpecs) throws IOException {
     this(new Builder().setName(name)
         .addEndpoint(URI.create("http://" + bindAddress + ":" + port))
@@ -584,7 +586,7 @@ public class HttpServer implements FilterContainer {
   public Connector createBaseListener(Configuration conf) throws IOException {
     return HttpServer.createDefaultChannelConnector();
   }
-  
+
   @InterfaceAudience.Private
   public static Connector createDefaultChannelConnector() {
     SelectChannelConnector ret = new SelectChannelConnector();
@@ -595,7 +597,7 @@ public class HttpServer implements FilterContainer {
     if(Shell.WINDOWS) {
       // result of setting the SO_REUSEADDR flag is different on Windows
       // http://msdn.microsoft.com/en-us/library/ms740621(v=vs.85).aspx
-      // without this 2 NN's can start on the same machine and listen on 
+      // without this 2 NN's can start on the same machine and listen on
       // the same port with indeterminate routing of incoming requests to them
       ret.setReuseAddress(false);
     }
@@ -629,7 +631,7 @@ public class HttpServer implements FilterContainer {
    */
  protected void addDefaultApps(ContextHandlerCollection parent,
       final String appDir, Configuration conf) throws IOException {
-    // set up the context for "/logs/" if "hadoop.log.dir" property is defined. 
+    // set up the context for "/logs/" if "hadoop.log.dir" property is defined.
     String logDir = this.logDir;
     if (logDir == null) {
       logDir = System.getProperty("hadoop.log.dir");
     }
@@ -659,7 +661,7 @@ public class HttpServer implements FilterContainer {
     setContextAttributes(staticContext, conf);
     defaultContexts.put(staticContext, true);
   }
-  
+
   private void setContextAttributes(Context context, Configuration conf) {
     context.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
     context.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
@@ -685,10 +687,10 @@ public class HttpServer implements FilterContainer {
   }
 
   /**
-   * Add a context 
+   * Add a context
    * @param pathSpec The path spec for the context
    * @param dir The directory containing the context
-   * @param isFiltered if true, the servlet is added to the filter path mapping 
+   * @param isFiltered if true, the servlet is added to the filter path mapping
    * @throws IOException
    */
   protected void addContext(String pathSpec, String dir, boolean isFiltered) throws IOException {
@@ -711,7 +713,7 @@ public class HttpServer implements FilterContainer {
     webAppContext.setAttribute(name, value);
   }
 
-  /** 
+  /**
    * Add a Jersey resource package.
    * @param packageName The Java package name containing the Jersey resource.
    * @param pathSpec The path spec for the servlet
@@ -740,11 +742,11 @@ public class HttpServer implements FilterContainer {
   }
 
   /**
-   * Add an internal servlet in the server. 
+   * Add an internal servlet in the server.
    * Note: This method is to be used for adding servlets that facilitate
    * internal communication and not for user facing functionality. For
-   * servlets added using this method, filters are not enabled. 
-   * 
+   * servlets added using this method, filters are not enabled.
+   *
    * @param name The name of the servlet (can be passed as null)
    * @param pathSpec The path spec for the servlet
    * @param clazz The servlet class
@@ -756,18 +758,18 @@ public class HttpServer implements FilterContainer {
 
   /**
    * Add an internal servlet in the server, specifying whether or not to
-   * protect with Kerberos authentication. 
+   * protect with Kerberos authentication.
    * Note: This method is to be used for adding servlets that facilitate
    * internal communication and not for user facing functionality. For
-   * servlets added using this method, filters (except internal Kerberos 
-   * filters) are not enabled. 
-   * 
+   * servlets added using this method, filters (except internal Kerberos
+   * filters) are not enabled.
+   *
    * @param name The name of the servlet (can be passed as null)
    * @param pathSpec The path spec for the servlet
    * @param clazz The servlet class
    * @param requireAuth Require Kerberos authenticate to access servlet
    */
-  public void addInternalServlet(String name, String pathSpec, 
+  public void addInternalServlet(String name, String pathSpec,
     Class clazz, boolean requireAuth) {
     ServletHolder holder = new ServletHolder(clazz);
     if (name != null) {
@@ -851,7 +853,7 @@ public class HttpServer implements FilterContainer {
       handler.addFilterMapping(fmap);
     }
   }
-  
+
   /**
    * Get the value in the webapp context.
    * @param name The name of the attribute
@@ -860,7 +862,7 @@ public class HttpServer implements FilterContainer {
    * @return The value of the attribute
    */
   public Object getAttribute(String name) {
     return webAppContext.getAttribute(name);
   }
-  
+
   public WebAppContext getWebAppContext(){
     return this.webAppContext;
   }
@@ -877,7 +879,7 @@ public class HttpServer implements FilterContainer {
    */
   protected String getWebAppsPath(String webapps, String appName) throws FileNotFoundException {
     URL url = getClass().getClassLoader().getResource(webapps + "/" + appName);
-    if (url == null) 
+    if (url == null)
       throw new FileNotFoundException(webapps + "/" + appName
           + " not found in CLASSPATH");
     String urlString = url.toString();
@@ -935,7 +937,7 @@ public class HttpServer implements FilterContainer {
       params.put("kerberos.keytab", httpKeytab);
     }
     params.put(AuthenticationFilter.AUTH_TYPE, "kerberos");
-    
+
     defineFilter(webAppContext, SPNEGO_FILTER,
                  AuthenticationFilter.class.getName(), params, null);
   }
@@ -1022,7 +1024,7 @@ public class HttpServer implements FilterContainer {
         }
       }
     }
-    
+
   /**
    * stop the server
    */
@@ -1140,7 +1142,7 @@ public class HttpServer implements FilterContainer {
   /**
    * Does the user sending the HttpServletRequest has the administrator ACLs? If
    * it isn't the case, response will be modified to send an error to the user.
-   * 
+   *
    * @param servletContext
    * @param request
   * @param response used to send the error response if user does not have admin access.
@@ -1165,7 +1167,7 @@ public class HttpServer implements FilterContainer {
           "authorized to access this page.");
       return false;
     }
-    
+
     if (servletContext.getAttribute(ADMINS_ACL) != null &&
         !userHasAdministratorAccess(servletContext, remoteUser)) {
       response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "User "
@@ -1179,7 +1181,7 @@ public class HttpServer implements FilterContainer {
   /**
    * Get the admin ACLs from the given ServletContext and check if the given
    * user is in the ACL.
-   * 
+   *
    * @param servletContext the context containing the admin ACL.
    * @param remoteUser the remote user to check for.
    * @return true if the user is present in the ACL, false if no ACL is set or
@@ -1212,12 +1214,14 @@ public class HttpServer implements FilterContainer {
       }
       response.setContentType("text/plain; charset=UTF-8");
       PrintWriter out = response.getWriter();
-      ReflectionUtils.printThreadInfo(out, "");
+      PrintStream ps = new PrintStream(response.getOutputStream(), false, "UTF-8");
+      Threads.printThreadInfo(ps, "");
+      ps.flush();
       out.close();
-      ReflectionUtils.logThreadInfo(LOG, "jsp requested", 1); 
+      ReflectionUtils.logThreadInfo(LOG, "jsp requested", 1);
     }
   }
-  
+
   /**
    * A Servlet input filter that quotes all HTML active characters in the
    * parameter names and values. The goal is to quote the characters to make
@@ -1233,7 +1237,7 @@ public class HttpServer implements FilterContainer {
       super(rawRequest);
       this.rawRequest = rawRequest;
     }
-    
+
     /**
     * Return the set of parameter names, quoting each name.
     */
@@ -1254,7 +1258,7 @@ public class HttpServer implements FilterContainer {
         }
       };
     }
-    
+
     /**
     * Unquote the name and quote the value.
     */
@@ -1263,7 +1267,7 @@ public class HttpServer implements FilterContainer {
       return HtmlQuoting.quoteHtmlChars(rawRequest.getParameter
                                      (HtmlQuoting.unquoteHtmlChars(name)));
     }
-    
+
     @Override
     public String[] getParameterValues(String name) {
       String unquoteName = HtmlQuoting.unquoteHtmlChars(name);
@@ -1293,7 +1297,7 @@ public class HttpServer implements FilterContainer {
       }
       return result;
     }
-    
+
     /**
     * Quote the url so that users specifying the HOST HTTP header
     * can't inject attacks.
@@ -1303,7 +1307,7 @@ public class HttpServer implements FilterContainer {
       String url = rawRequest.getRequestURL().toString();
       return new StringBuffer(HtmlQuoting.quoteHtmlChars(url));
     }
-    
+
     /**
     * Quote the server name so that users specifying the HOST HTTP header
     * can't inject attacks.
@@ -1324,11 +1328,11 @@ public class HttpServer implements FilterContainer {
     }
 
     @Override
-    public void doFilter(ServletRequest request, 
+    public void doFilter(ServletRequest request,
                          ServletResponse response,
                          FilterChain chain
                          ) throws IOException, ServletException {
-      HttpServletRequestWrapper quoted = 
+      HttpServletRequestWrapper quoted =
         new RequestQuoter((HttpServletRequest) request);
       HttpServletResponse httpResponse = (HttpServletResponse) response;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterDumpServlet.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterDumpServlet.java
index f8fceb6270c..dbb59fbfc89 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterDumpServlet.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterDumpServlet.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.master;
 
 import java.io.IOException;
 import java.io.OutputStream;
+import java.io.PrintStream;
 import java.io.PrintWriter;
 import java.util.Date;
 import java.util.Map;
@@ -35,14 +36,14 @@ import org.apache.hadoop.hbase.monitoring.LogMonitoring;
 import org.apache.hadoop.hbase.monitoring.StateDumpServlet;
 import org.apache.hadoop.hbase.monitoring.TaskMonitor;
 import org.apache.hadoop.hbase.regionserver.RSDumpServlet;
-import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.hbase.util.Threads;
 
 @InterfaceAudience.Private
 public class MasterDumpServlet extends StateDumpServlet {
   private static final long serialVersionUID = 1L;
   private static final String LINE =
     "===========================================================";
-  
+
   @Override
   public void doGet(HttpServletRequest request, HttpServletResponse response)
       throws IOException {
@@ -52,10 +53,10 @@ public class MasterDumpServlet extends StateDumpServlet {
     response.setContentType("text/plain");
     OutputStream os = response.getOutputStream();
     PrintWriter out = new PrintWriter(os);
-    
+
     out.println("Master status for " + master.getServerName()
       + " as of " + new Date());
-    
+
     out.println("\n\nVersion Info:");
     out.println(LINE);
     dumpVersionInfo(out);
@@ -63,34 +64,36 @@ public class MasterDumpServlet extends StateDumpServlet {
     out.println("\n\nTasks:");
     out.println(LINE);
     TaskMonitor.get().dumpAsText(out);
-    
+
     out.println("\n\nServers:");
     out.println(LINE);
     dumpServers(master, out);
-    
+
     out.println("\n\nRegions-in-transition:");
     out.println(LINE);
     dumpRIT(master, out);
-    
+
     out.println("\n\nExecutors:");
     out.println(LINE);
     dumpExecutors(master.getExecutorService(), out);
-    
+
     out.println("\n\nStacks:");
     out.println(LINE);
-    ReflectionUtils.printThreadInfo(out, "");
-    
+    PrintStream ps = new PrintStream(response.getOutputStream(), false, "UTF-8");
+    Threads.printThreadInfo(ps, "");
+    ps.flush();
+
     out.println("\n\nMaster configuration:");
     out.println(LINE);
     Configuration conf = master.getConfiguration();
     out.flush();
     conf.writeXml(os);
     os.flush();
-    
+
     out.println("\n\nRecent regionserver aborts:");
     out.println(LINE);
     master.getRegionServerFatalLogBuffer().dumpTo(out);
-    
+
     out.println("\n\nLogs");
     out.println(LINE);
     long tailKb = getTailKbParam(request);
@@ -103,7 +106,7 @@ public class MasterDumpServlet extends StateDumpServlet {
     }
     out.flush();
   }
-  
+
   private void dumpRIT(HMaster master, PrintWriter out) {
     AssignmentManager am = master.getAssignmentManager();
 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSDumpServlet.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSDumpServlet.java
index 16035f42c9a..a38ad0e6f2b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSDumpServlet.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSDumpServlet.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.regionserver;
 
 import java.io.IOException;
 import java.io.OutputStream;
+import java.io.PrintStream;
 import java.io.PrintWriter;
 import java.util.Date;
 
@@ -31,14 +32,14 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.monitoring.LogMonitoring;
 import org.apache.hadoop.hbase.monitoring.StateDumpServlet;
 import org.apache.hadoop.hbase.monitoring.TaskMonitor;
-import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.hbase.util.Threads;
 
 @InterfaceAudience.Private
 public class RSDumpServlet extends StateDumpServlet {
   private static final long serialVersionUID = 1L;
   private static final String LINE =
     "===========================================================";
-  
+
   @Override
   public void doGet(HttpServletRequest request, HttpServletResponse response)
       throws IOException {
@@ -47,7 +48,7 @@ public class RSDumpServlet extends StateDumpServlet {
     assert hrs != null : "No RS in context!";
 
     response.setContentType("text/plain");
-    
+
     if (!hrs.isOnline()) {
       response.getWriter().write("The RegionServer is initializing!");
       response.getWriter().close();
@@ -56,10 +57,10 @@ public class RSDumpServlet extends StateDumpServlet {
 
     OutputStream os = response.getOutputStream();
     PrintWriter out = new PrintWriter(os);
-    
+
     out.println("RegionServer status for " + hrs.getServerName()
       + " as of " + new Date());
-    
+
     out.println("\n\nVersion Info:");
     out.println(LINE);
     dumpVersionInfo(out);
@@ -67,40 +68,42 @@ public class RSDumpServlet extends StateDumpServlet {
     out.println("\n\nTasks:");
     out.println(LINE);
     TaskMonitor.get().dumpAsText(out);
-    
+
     out.println("\n\nExecutors:");
     out.println(LINE);
     dumpExecutors(hrs.getExecutorService(), out);
-    
+
     out.println("\n\nStacks:");
     out.println(LINE);
-    ReflectionUtils.printThreadInfo(out, "");
-    
+    PrintStream ps = new PrintStream(response.getOutputStream(), false, "UTF-8");
+    Threads.printThreadInfo(ps, "");
+    ps.flush();
+
     out.println("\n\nRS Configuration:");
     out.println(LINE);
     Configuration conf = hrs.getConfiguration();
     out.flush();
     conf.writeXml(os);
     os.flush();
-    
+
     out.println("\n\nLogs");
     out.println(LINE);
     long tailKb = getTailKbParam(request);
     LogMonitoring.dumpTailOfLogs(out, tailKb);
-    
+
     out.println("\n\nRS Queue:");
     out.println(LINE);
     if(isShowQueueDump(conf)) {
       dumpQueue(hrs, out);
-    } 
-    
+    }
+
     out.flush();
   }
 
   public static void dumpQueue(HRegionServer hrs, PrintWriter out)
       throws IOException {
     // 1. Print out Compaction/Split Queue
-    out.println("Compaction/Split Queue summary: " 
+    out.println("Compaction/Split Queue summary: "
         + hrs.compactSplitThread.toString() );
     out.println(hrs.compactSplitThread.dumpQueue());
 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java
index fb333deb798..4db20f21b72 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.util;
 
 import java.io.InterruptedIOException;
 import java.io.IOException;
-import java.io.PrintWriter;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
 import java.util.List;
@@ -32,7 +31,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CoordinatedStateManager;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
-import org.apache.hadoop.util.ReflectionUtils;
 
 /**
  * Utility used running a cluster all in the one JVM.
@@ -86,7 +84,7 @@ public class JVMClusterUtil {
       throws IOException {
     HRegionServer server;
     try {
-      
+
       Constructor ctor = hrsc.getConstructor(Configuration.class,
         CoordinatedStateManager.class);
       ctor.setAccessible(true);
@@ -222,7 +220,7 @@ public class JVMClusterUtil {
       }
       if (System.currentTimeMillis() > startTime + maxwait) {
         String msg = "Master not initialized after " + maxwait + "ms seconds";
-        ReflectionUtils.printThreadInfo(new PrintWriter(System.out),
+        Threads.printThreadInfo(System.out,
           "Thread dump because: " + msg);
         throw new RuntimeException(msg);
       }
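
Illustrative usage note (not part of the patch above): after this change, HBase code dumps stack traces through the new wrapper rather than calling Hadoop's ReflectionUtils.printThreadInfo directly, so the same call compiles against both the Hadoop 2.6 signature (PrintWriter) and the Hadoop 2.7+ signature (PrintStream). The sketch below shows the intended call pattern; the class name ThreadDumpExample is hypothetical, while Threads.printThreadInfo(PrintStream, String) is the method added to Threads.java in this patch.

import java.io.PrintStream;

import org.apache.hadoop.hbase.util.Threads;

// Hypothetical caller, mirroring what JVMClusterUtil and the dump servlets
// do after this patch: hand a PrintStream to the HBase wrapper and let it
// resolve the matching ReflectionUtils.printThreadInfo overload via reflection.
public class ThreadDumpExample {
  public static void main(String[] args) {
    PrintStream out = System.out;
    Threads.printThreadInfo(out, "Thread dump requested by ThreadDumpExample");
  }
}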