HBASE-12683 Compilation with hadoop-2.7.0-SNAPSHOT is broken
This commit is contained in:
parent
f7f7d37ada
commit
afb753ecc3
|
@ -18,8 +18,11 @@
|
|||
*/
|
||||
package org.apache.hadoop.hbase.util;
|
||||
|
||||
import java.io.PrintStream;
|
||||
import java.io.PrintWriter;
|
||||
import java.lang.Thread.UncaughtExceptionHandler;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.concurrent.LinkedBlockingQueue;
|
||||
import java.util.concurrent.ThreadFactory;
|
||||
import java.util.concurrent.ThreadPoolExecutor;
|
||||
|
@ -127,7 +130,7 @@ public class Threads {
|
|||
while (t.isAlive()) {
|
||||
t.join(60 * 1000);
|
||||
if (t.isAlive()) {
|
||||
ReflectionUtils.printThreadInfo(new PrintWriter(System.out),
|
||||
printThreadInfo(System.out,
|
||||
"Automatic Stack Trace every 60 seconds waiting on " +
|
||||
t.getName());
|
||||
}
|
||||
|
@ -262,4 +265,44 @@ public class Threads {
|
|||
/**
 * Installs {@code LOGGING_EXCEPTION_HANDLER} as the uncaught exception handler
 * of the given thread, so an exception that kills the thread is handled by that
 * handler instead of being lost.
 *
 * @param t the thread to install the handler on
 */
public static void setLoggingUncaughtExceptionHandler(Thread t) {
  t.setUncaughtExceptionHandler(LOGGING_EXCEPTION_HANDLER);
}
|
||||
|
||||
private static Method printThreadInfoMethod = null;
|
||||
private static boolean printThreadInfoMethodWithPrintStream = true;
|
||||
|
||||
/**
|
||||
* Print all of the thread's information and stack traces. Wrapper around Hadoop's method.
|
||||
*
|
||||
* @param stream the stream to
|
||||
* @param title a string title for the stack trace
|
||||
*/
|
||||
public static void printThreadInfo(PrintStream stream, String title) {
|
||||
|
||||
if (printThreadInfoMethod == null) {
|
||||
try {
|
||||
// Hadoop 2.7+ declares printThreadInfo(PrintStream, String)
|
||||
printThreadInfoMethod = ReflectionUtils.class.getMethod("printThreadInfo",
|
||||
PrintStream.class, String.class);
|
||||
} catch (NoSuchMethodException e) {
|
||||
// Hadoop 2.6 and earlier declares printThreadInfo(PrintWriter, String)
|
||||
printThreadInfoMethodWithPrintStream = false;
|
||||
try {
|
||||
printThreadInfoMethod = ReflectionUtils.class.getMethod("printThreadInfo",
|
||||
PrintWriter.class, String.class);
|
||||
} catch (NoSuchMethodException e1) {
|
||||
throw new RuntimeException("Cannot find method. Check hadoop jars linked", e1);
|
||||
}
|
||||
}
|
||||
printThreadInfoMethod.setAccessible(true);
|
||||
}
|
||||
|
||||
try {
|
||||
if (printThreadInfoMethodWithPrintStream) {
|
||||
printThreadInfoMethod.invoke(null, stream, title);
|
||||
} else {
|
||||
printThreadInfoMethod.invoke(null, new PrintWriter(stream), title);
|
||||
}
|
||||
} catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
|
||||
throw new RuntimeException(e.getCause());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.http;
|
|||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.io.InterruptedIOException;
|
||||
import java.io.PrintStream;
|
||||
import java.io.PrintWriter;
|
||||
import java.net.BindException;
|
||||
import java.net.InetSocketAddress;
|
||||
|
@ -56,6 +57,7 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience;
|
|||
import org.apache.hadoop.hbase.http.conf.ConfServlet;
|
||||
import org.apache.hadoop.hbase.http.jmx.JMXJsonServlet;
|
||||
import org.apache.hadoop.hbase.http.log.LogLevel;
|
||||
import org.apache.hadoop.hbase.util.Threads;
|
||||
import org.apache.hadoop.metrics.MetricsServlet;
|
||||
import org.apache.hadoop.security.SecurityUtil;
|
||||
import org.apache.hadoop.security.UserGroupInformation;
|
||||
|
@ -1212,7 +1214,9 @@ public class HttpServer implements FilterContainer {
|
|||
}
|
||||
response.setContentType("text/plain; charset=UTF-8");
|
||||
PrintWriter out = response.getWriter();
|
||||
ReflectionUtils.printThreadInfo(out, "");
|
||||
PrintStream ps = new PrintStream(response.getOutputStream(), false, "UTF-8");
|
||||
Threads.printThreadInfo(ps, "");
|
||||
ps.flush();
|
||||
out.close();
|
||||
ReflectionUtils.logThreadInfo(LOG, "jsp requested", 1);
|
||||
}
|
||||
|
|
|
@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.master;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.io.PrintWriter;
|
||||
import java.util.Date;
|
||||
import java.util.Map;
|
||||
|
@ -35,7 +36,7 @@ import org.apache.hadoop.hbase.monitoring.LogMonitoring;
|
|||
import org.apache.hadoop.hbase.monitoring.StateDumpServlet;
|
||||
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
|
||||
import org.apache.hadoop.hbase.regionserver.RSDumpServlet;
|
||||
import org.apache.hadoop.util.ReflectionUtils;
|
||||
import org.apache.hadoop.hbase.util.Threads;
|
||||
|
||||
@InterfaceAudience.Private
|
||||
public class MasterDumpServlet extends StateDumpServlet {
|
||||
|
@ -78,7 +79,9 @@ public class MasterDumpServlet extends StateDumpServlet {
|
|||
|
||||
out.println("\n\nStacks:");
|
||||
out.println(LINE);
|
||||
ReflectionUtils.printThreadInfo(out, "");
|
||||
PrintStream ps = new PrintStream(response.getOutputStream(), false, "UTF-8");
|
||||
Threads.printThreadInfo(ps, "");
|
||||
ps.flush();
|
||||
|
||||
out.println("\n\nMaster configuration:");
|
||||
out.println(LINE);
|
||||
|
|
|
@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.regionserver;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
import java.io.PrintStream;
|
||||
import java.io.PrintWriter;
|
||||
import java.util.Date;
|
||||
|
||||
|
@ -31,7 +32,7 @@ import org.apache.hadoop.conf.Configuration;
|
|||
import org.apache.hadoop.hbase.monitoring.LogMonitoring;
|
||||
import org.apache.hadoop.hbase.monitoring.StateDumpServlet;
|
||||
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
|
||||
import org.apache.hadoop.util.ReflectionUtils;
|
||||
import org.apache.hadoop.hbase.util.Threads;
|
||||
|
||||
@InterfaceAudience.Private
|
||||
public class RSDumpServlet extends StateDumpServlet {
|
||||
|
@ -74,7 +75,9 @@ public class RSDumpServlet extends StateDumpServlet {
|
|||
|
||||
out.println("\n\nStacks:");
|
||||
out.println(LINE);
|
||||
ReflectionUtils.printThreadInfo(out, "");
|
||||
PrintStream ps = new PrintStream(response.getOutputStream(), false, "UTF-8");
|
||||
Threads.printThreadInfo(ps, "");
|
||||
ps.flush();
|
||||
|
||||
out.println("\n\nRS Configuration:");
|
||||
out.println(LINE);
|
||||
|
|
|
@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.util;
|
|||
|
||||
import java.io.InterruptedIOException;
|
||||
import java.io.IOException;
|
||||
import java.io.PrintWriter;
|
||||
import java.lang.reflect.Constructor;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.util.List;
|
||||
|
@ -32,7 +31,6 @@ import org.apache.hadoop.conf.Configuration;
|
|||
import org.apache.hadoop.hbase.CoordinatedStateManager;
|
||||
import org.apache.hadoop.hbase.master.HMaster;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionServer;
|
||||
import org.apache.hadoop.util.ReflectionUtils;
|
||||
|
||||
/**
|
||||
* Utility used running a cluster all in the one JVM.
|
||||
|
@ -222,7 +220,7 @@ public class JVMClusterUtil {
|
|||
}
|
||||
if (System.currentTimeMillis() > startTime + maxwait) {
|
||||
String msg = "Master not initialized after " + maxwait + "ms seconds";
|
||||
ReflectionUtils.printThreadInfo(new PrintWriter(System.out),
|
||||
Threads.printThreadInfo(System.out,
|
||||
"Thread dump because: " + msg);
|
||||
throw new RuntimeException(msg);
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue