HBASE-547 UI shows hadoop version, not hbase version
We now show hadoop and hbase versions in master. Also cleaned up how the jsp
is generated, moving it into the main build.xml as a pre-compile step.

D src/java/org/apache/hadoop/hbase/generated
    Removed this directory. No longer check in jsps. Generate them every time.
M src/webapps/regionserver/regionserver.jsp
    Use the hbase VersionInfo so we get hbase version, not hadoop's.
M src/webapps/master/master.jsp
    Output hadoop and hbase versions.
D build-webapps.xml
    Remove. Integrated into main build.xml. The reason we kept this distinct
    -- CLASSPATH issues when compiling in hadoop context -- no longer prevails.
M build.xml
    (jspc): Added target and made it a prereq of compile.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@644034 13f79535-47bb-0310-9956-ffa450edef68
commit 2a38a13eaa (parent 2cea25052c)
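The crux of the fix is that Hadoop and HBase each ship a class named VersionInfo, and the old pages imported Hadoop's, so the "Version" row reported Hadoop's build. Below is a minimal standalone sketch (not part of the commit; it assumes the Hadoop and HBase jars of this era are on the classpath) showing how the two same-named classes are told apart by fully qualifying them, which is what the updated master.jsp hunk further down does:

```java
// Illustrative only: prints both build identities side by side, mirroring the
// scriptlets in the updated master.jsp. Both classes expose getVersion(),
// getRevision(), getDate(), and getUser(), as used in the diff below.
public class VersionReport {
  public static void main(String[] args) {
    // HBase build information (the class the fixed JSPs reference fully qualified)
    System.out.println("HBase " + org.apache.hadoop.hbase.util.VersionInfo.getVersion()
        + ", r" + org.apache.hadoop.hbase.util.VersionInfo.getRevision());
    // Hadoop build information (the class the old JSPs imported unqualified)
    System.out.println("Hadoop " + org.apache.hadoop.util.VersionInfo.getVersion()
        + ", r" + org.apache.hadoop.util.VersionInfo.getRevision());
  }
}
```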
D build-webapps.xml
@@ -1,74 +0,0 @@
<?xml version="1.0"?>

<!--
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements. See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License. You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
-->

<!--
This is a script to compile the jsp for the hbase webapps. Currently,
generation of java classes from jsp is done manually and the produced
java classes are then checked in. We do it this way keeping all to do
with jsp in a separate build file because trying to build jsp inline
we trip over the 'famous' commons-logging classloader problem:

  org.apache.commons.logging.LogConfigurationException: Invalid
  class loader hierarchy. You have more than one version of
  'org.apache.commons.logging.Log' visible, which is not allowed.

See http://www.qos.ch/logging/classloader.jsp. Its addressed
with later versions of jasper apparently (Using later versions of
jasper in this hbase subproject is not sufficent), so when hadoop
goes to jetty6 (HADOOP-1650), we should be able to integrate jsp compiling into
general compile (See http://issues.apache.org/bugzilla/show_bug.cgi?id=36968)

Meantime, if changes in jsps, just checkin the product of this script:
the generated java classes and the web.xml. To run, do following:

  $ ant -f build-webapps.xml
-->
<project name="build.hbase.jsp" default="jspc">
  <property name="lib.dir" value="${basedir}/lib" />
  <property name="src.webapps" value="${basedir}/src/webapps" />
  <property name="generated.webapps.src"
    value="${basedir}/src/java"/>

  <target name="jspc" >
    <path id="jspc.classpath">
      <fileset dir="${basedir}/lib/jetty-ext/">
        <include name="*jar" />
      </fileset>
      <fileset dir="${basedir}/lib/">
        <include name="servlet-api*jar" />
        <include name="commons-logging*jar" />
      </fileset>
    </path>
    <taskdef classname="org.apache.jasper.JspC" name="jspcompiler" >
      <classpath refid="jspc.classpath"/>
    </taskdef>
    <jspcompiler
     uriroot="${src.webapps}/master"
     outputdir="${generated.webapps.src}"
     package="org.apache.hadoop.hbase.generated.master"
     webxml="${src.webapps}/master/WEB-INF/web.xml">
    </jspcompiler>
    <jspcompiler
     uriroot="${src.webapps}/regionserver"
     outputdir="${generated.webapps.src}"
     package="org.apache.hadoop.hbase.generated.regionserver"
     webxml="${src.webapps}/regionserver/WEB-INF/web.xml">
    </jspcompiler>
  </target>
</project>
M build.xml
@@ -53,6 +53,7 @@
 <property name="build.javadoc" value="${build.docs}/api"/>
 <property name="build.encoding" value="ISO-8859-1"/>
 <property name="build.src" value="${build.dir}/src"/>
+<property name="generated.webapps.src" value="${build.src}"/>

 <property name="test.build.dir" value="${build.dir}/test"/>
 <property name="test.log.dir" value="${test.build.dir}/logs"/>
@@ -134,7 +135,7 @@
   />
 </target>

-<target name="compile" depends="init,javacc">
+<target name="compile" depends="init,jspc,javacc">
  <!--Compile whats under src and generated java classes made from jsp-->
  <javac
   encoding="${build.encoding}"
@@ -156,6 +157,33 @@
 </jar>
 </target>

+ <target name="jspc" >
+  <path id="jspc.classpath">
+   <fileset dir="${basedir}/lib/jetty-ext/">
+    <include name="*jar" />
+   </fileset>
+   <fileset dir="${basedir}/lib/">
+    <include name="servlet-api*jar" />
+    <include name="commons-logging*jar" />
+   </fileset>
+  </path>
+  <taskdef classname="org.apache.jasper.JspC" name="jspcompiler" >
+   <classpath refid="jspc.classpath"/>
+  </taskdef>
+  <jspcompiler
+   uriroot="${src.webapps}/master"
+   outputdir="${generated.webapps.src}"
+   package="org.apache.hadoop.hbase.generated.master"
+   webxml="${src.webapps}/master/WEB-INF/web.xml">
+  </jspcompiler>
+  <jspcompiler
+   uriroot="${src.webapps}/regionserver"
+   outputdir="${generated.webapps.src}"
+   package="org.apache.hadoop.hbase.generated.regionserver"
+   webxml="${src.webapps}/regionserver/WEB-INF/web.xml">
+  </jspcompiler>
+ </target>
+
 <!-- ================================================================== -->
 <!-- Package -->
 <!-- ================================================================== -->
D src/java/org/apache/hadoop/hbase/generated/master/hql_jsp.java
@@ -1,93 +0,0 @@
package org.apache.hadoop.hbase.generated.master;

import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.jsp.*;
import java.util.*;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.hql.TableFormatter;
import org.apache.hadoop.hbase.hql.ReturnMsg;
import org.apache.hadoop.hbase.hql.generated.HQLParser;
import org.apache.hadoop.hbase.hql.Command;
import org.apache.hadoop.hbase.hql.formatter.HtmlTableFormatter;

public final class hql_jsp extends org.apache.jasper.runtime.HttpJspBase
    implements org.apache.jasper.runtime.JspSourceDependent {

  private static java.util.Vector _jspx_dependants;

  public java.util.List getDependants() {
    return _jspx_dependants;
  }

  public void _jspService(HttpServletRequest request, HttpServletResponse response)
        throws java.io.IOException, ServletException {

    JspFactory _jspxFactory = null;
    PageContext pageContext = null;
    HttpSession session = null;
    ServletContext application = null;
    ServletConfig config = null;
    JspWriter out = null;
    Object page = this;
    JspWriter _jspx_out = null;
    PageContext _jspx_page_context = null;


    try {
      _jspxFactory = JspFactory.getDefaultFactory();
      response.setContentType("text/html;charset=UTF-8");
      pageContext = _jspxFactory.getPageContext(this, request, response,
          null, true, 8192, true);
      _jspx_page_context = pageContext;
      application = pageContext.getServletContext();
      config = pageContext.getServletConfig();
      session = pageContext.getSession();
      out = pageContext.getOut();
      _jspx_out = out;

      out.write("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \n \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\"> \n<html xmlns=\"http://www.w3.org/1999/xhtml\">\n<head><meta http-equiv=\"Content-Type\" content=\"text/html;charset=UTF-8\"/>\n<title>HQL</title>\n<link rel=\"stylesheet\" type=\"text/css\" href=\"/static/hbase.css\" />\n</head>\n\n<body>\n<a id=\"logo\" href=\"http://wiki.apache.org/lucene-hadoop/Hbase\"><img src=\"/static/hbase_logo_med.gif\" alt=\"Hbase Logo\" title=\"Hbase Logo\" /></a>\n<h1 id=\"page_title\"><a href=\"http://wiki.apache.org/lucene-hadoop/Hbase/HbaseShell\">HQL</a></h1>\n<p id=\"links_menu\"><a href=\"/master.jsp\">Home</a></p>\n<hr id=\"head_rule\" />\n");
      String query = request.getParameter("q");
      if (query == null) {
        query = "";
      }

      out.write("\n<form action=\"/hql.jsp\" method=\"get\">\n <p>\n <label for=\"query\">Query: </label>\n <input type=\"text\" name=\"q\" id=\"q\" size=\"60\" value=\"");
      out.print( query );
      out.write("\" />\n <input type=\"submit\" value=\"submit\" />\n </p>\n </form>\n <p>Enter 'help;' -- thats 'help' plus a semi-colon -- for the list of <em>HQL</em> commands.\n Data Definition, SHELL, INSERTS, DELETES, and UPDATE commands are disabled in this interface\n </p>\n \n ");

      if (query.length() > 0) {

        out.write("\n <hr/>\n ");

        HQLParser parser = new HQLParser(query, out, new HtmlTableFormatter(out));
        Command cmd = parser.terminatedCommand();
        if (cmd.getCommandType() != Command.CommandType.SELECT) {

          out.write("\n <p>");
          out.print( cmd.getCommandType() );
          out.write("-type commands are disabled in this interface.</p>\n ");

        } else {
          ReturnMsg rm = cmd.execute(new HBaseConfiguration());
          String summary = rm == null? "": rm.toString();

          out.write("\n <p>");
          out.print( summary );
          out.write("</p>\n ");
        }
      }

      out.write("\n</body>\n</html>\n");
    } catch (Throwable t) {
      if (!(t instanceof SkipPageException)){
        out = _jspx_out;
        if (out != null && out.getBufferSize() != 0)
          out.clearBuffer();
        if (_jspx_page_context != null) _jspx_page_context.handlePageException(t);
      }
    } finally {
      if (_jspxFactory != null) _jspxFactory.releasePageContext(_jspx_page_context);
    }
  }
}
D src/java/org/apache/hadoop/hbase/generated/master/master_jsp.java
@@ -1,168 +0,0 @@
package org.apache.hadoop.hbase.generated.master;

import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.jsp.*;
import java.util.*;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.VersionInfo;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.master.MetaRegion;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.HServerInfo;
import org.apache.hadoop.hbase.HServerAddress;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.hql.ShowCommand;
import org.apache.hadoop.hbase.hql.TableFormatter;
import org.apache.hadoop.hbase.hql.ReturnMsg;
import org.apache.hadoop.hbase.hql.formatter.HtmlTableFormatter;
import org.apache.hadoop.hbase.HTableDescriptor;

public final class master_jsp extends org.apache.jasper.runtime.HttpJspBase
    implements org.apache.jasper.runtime.JspSourceDependent {

  private static java.util.Vector _jspx_dependants;

  public java.util.List getDependants() {
    return _jspx_dependants;
  }

  public void _jspService(HttpServletRequest request, HttpServletResponse response)
        throws java.io.IOException, ServletException {

    JspFactory _jspxFactory = null;
    PageContext pageContext = null;
    HttpSession session = null;
    ServletContext application = null;
    ServletConfig config = null;
    JspWriter out = null;
    Object page = this;
    JspWriter _jspx_out = null;
    PageContext _jspx_page_context = null;


    try {
      _jspxFactory = JspFactory.getDefaultFactory();
      response.setContentType("text/html;charset=UTF-8");
      pageContext = _jspxFactory.getPageContext(this, request, response,
          null, true, 8192, true);
      _jspx_page_context = pageContext;
      application = pageContext.getServletContext();
      config = pageContext.getServletConfig();
      session = pageContext.getSession();
      out = pageContext.getOut();
      _jspx_out = out;


      HMaster master = (HMaster)getServletContext().getAttribute(HMaster.MASTER);
      HBaseConfiguration conf = master.getConfiguration();
      TableFormatter formatter = new HtmlTableFormatter(out);
      ShowCommand show = new ShowCommand(out, formatter, "tables");
      HServerAddress rootLocation = master.getRootRegionLocation();
      Map<Text, MetaRegion> onlineRegions = master.getOnlineMetaRegions();
      Map<String, HServerInfo> serverToServerInfos =
        master.getServersToServerInfo();
      int interval = conf.getInt("hbase.regionserver.msginterval", 6000)/1000;

      out.write("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \n \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\"> \n<html xmlns=\"http://www.w3.org/1999/xhtml\">\n<head><meta http-equiv=\"Content-Type\" content=\"text/html;charset=UTF-8\"/>\n<title>Hbase Master: ");
      out.print( master.getMasterAddress());
      out.write("</title>\n<link rel=\"stylesheet\" type=\"text/css\" href=\"/static/hbase.css\" />\n</head>\n\n<body>\n\n<a id=\"logo\" href=\"http://wiki.apache.org/lucene-hadoop/Hbase\"><img src=\"/static/hbase_logo_med.gif\" alt=\"Hbase Logo\" title=\"Hbase Logo\" /></a>\n<h1 id=\"page_title\">Master: ");
      out.print(master.getMasterAddress());
      out.write("</h1>\n<p id=\"links_menu\"><a href=\"/hql.jsp\">HQL</a>, <a href=\"/logs/\">Local logs</a>, <a href=\"/stacks\">Thread Dump</a>, <a href=\"/logLevel\">Log Level</a></p>\n<hr id=\"head_rule\" />\n\n<h2>Master Attributes</h2>\n<table>\n<tr><th>Attribute Name</th><th>Value</th><th>Description</th></tr>\n<tr><td>Version</td><td>");
      out.print( VersionInfo.getVersion() );
      out.write(',');
      out.write(' ');
      out.write('r');
      out.print( VersionInfo.getRevision() );
      out.write("</td><td>Hbase version and svn revision</td></tr>\n<tr><td>Compiled</td><td>");
      out.print( VersionInfo.getDate() );
      out.write(',');
      out.write(' ');
      out.print( VersionInfo.getUser() );
      out.write("</td><td>When this version was compiled and by whom</td></tr>\n<tr><td>Filesystem</td><td>");
      out.print( conf.get("fs.default.name") );
      out.write("</td><td>Filesystem hbase is running on</td></tr>\n<tr><td>Hbase Root Directory</td><td>");
      out.print( master.getRootDir().toString() );
      out.write("</td><td>Location of hbase home directory</td></tr>\n</table>\n\n<h2>Online META Regions</h2>\n");
      if (rootLocation != null) {
        out.write("\n<table>\n<tr><th>Name</th><th>Server</th></tr>\n<tr><td>");
        out.print( HConstants.ROOT_TABLE_NAME.toString() );
        out.write("</td><td>");
        out.print( rootLocation.toString() );
        out.write("</td></tr>\n");

        if (onlineRegions != null && onlineRegions.size() > 0) {
          out.write('\n');
          out.write(' ');
          out.write(' ');
          for (Map.Entry<Text, MetaRegion> e: onlineRegions.entrySet()) {
            MetaRegion meta = e.getValue();

            out.write("\n <tr><td>");
            out.print( meta.getRegionName().toString() );
            out.write("</td><td>");
            out.print( meta.getServer().toString() );
            out.write("</td></tr>\n ");
          }
        }
        out.write("\n</table>\n");
      }
      out.write("\n\n<h2>Tables</h2>\n");
      ReturnMsg msg = show.execute(conf);
      out.write("\n<p>");
      out.print(msg );
      out.write("</p>\n\n<h2>Region Servers</h2>\n");
      if (serverToServerInfos != null && serverToServerInfos.size() > 0) {
        out.write('\n');
        int totalRegions = 0;
        int totalRequests = 0;

        out.write("\n\n<table>\n<tr><th rowspan=");
        out.print( serverToServerInfos.size() + 1);
        out.write("></th><th>Address</th><th>Start Code</th><th>Load</th></tr>\n\n");
        for (Map.Entry<String, HServerInfo> e: serverToServerInfos.entrySet()) {
          HServerInfo hsi = e.getValue();
          String url = "http://" +
            hsi.getServerAddress().getBindAddress().toString() + ":" +
            hsi.getInfoPort() + "/";
          String load = hsi.getLoad().toString();
          totalRegions += hsi.getLoad().getNumberOfRegions();
          totalRequests += hsi.getLoad().getNumberOfRequests();
          long startCode = hsi.getStartCode();
          String address = hsi.getServerAddress().toString();

          out.write("\n<tr><td><a href=\"");
          out.print( url );
          out.write('"');
          out.write('>');
          out.print( address );
          out.write("</a></td><td>");
          out.print( startCode );
          out.write("</td><td>");
          out.print( load );
          out.write("</td></tr>\n");
        }
        out.write("\n<tr><th>Total: </th><td>servers: ");
        out.print( serverToServerInfos.size() );
        out.write("</td><td> </td><td>requests: ");
        out.print( totalRequests );
        out.write(" regions: ");
        out.print( totalRegions );
        out.write("</td></tr>\n</table>\n\n<p>Load is requests per <em>hbase.regionsserver.msginterval</em> (");
        out.print(interval);
        out.write(" second(s)) and count of regions loaded</p>\n");
      }
      out.write("\n</body>\n</html>\n");
    } catch (Throwable t) {
      if (!(t instanceof SkipPageException)){
        out = _jspx_out;
        if (out != null && out.getBufferSize() != 0)
          out.clearBuffer();
        if (_jspx_page_context != null) _jspx_page_context.handlePageException(t);
      }
    } finally {
      if (_jspxFactory != null) _jspxFactory.releasePageContext(_jspx_page_context);
    }
  }
}
D src/java/org/apache/hadoop/hbase/generated/regionserver/regionserver_jsp.java
@@ -1,100 +0,0 @@
package org.apache.hadoop.hbase.generated.regionserver;

import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.jsp.*;
import java.util.*;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.VersionInfo;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HServerInfo;
import org.apache.hadoop.hbase.HRegionInfo;

public final class regionserver_jsp extends org.apache.jasper.runtime.HttpJspBase
    implements org.apache.jasper.runtime.JspSourceDependent {

  private static java.util.Vector _jspx_dependants;

  public java.util.List getDependants() {
    return _jspx_dependants;
  }

  public void _jspService(HttpServletRequest request, HttpServletResponse response)
        throws java.io.IOException, ServletException {

    JspFactory _jspxFactory = null;
    PageContext pageContext = null;
    HttpSession session = null;
    ServletContext application = null;
    ServletConfig config = null;
    JspWriter out = null;
    Object page = this;
    JspWriter _jspx_out = null;
    PageContext _jspx_page_context = null;


    try {
      _jspxFactory = JspFactory.getDefaultFactory();
      response.setContentType("text/html;charset=UTF-8");
      pageContext = _jspxFactory.getPageContext(this, request, response,
          null, true, 8192, true);
      _jspx_page_context = pageContext;
      application = pageContext.getServletContext();
      config = pageContext.getServletConfig();
      session = pageContext.getSession();
      out = pageContext.getOut();
      _jspx_out = out;


      HRegionServer regionServer = (HRegionServer)getServletContext().getAttribute(HRegionServer.REGIONSERVER);
      HServerInfo serverInfo = regionServer.getServerInfo();
      SortedMap<Text, HRegion> onlineRegions = regionServer.getOnlineRegions();

      out.write("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \n \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\"> \n<html xmlns=\"http://www.w3.org/1999/xhtml\">\n<head><meta http-equiv=\"Content-Type\" content=\"text/html;charset=UTF-8\"/>\n<title>Hbase Region Server: ");
      out.print( serverInfo.getServerAddress().toString() );
      out.write("</title>\n<link rel=\"stylesheet\" type=\"text/css\" href=\"/static/hbase.css\" />\n</head>\n\n<body>\n<a id=\"logo\" href=\"http://wiki.apache.org/lucene-hadoop/Hbase\"><img src=\"/static/hbase_logo_med.gif\" alt=\"Hbase Logo\" title=\"Hbase Logo\" /></a>\n<h1 id=\"page_title\">Region Server: ");
      out.print( serverInfo.getServerAddress().toString() );
      out.write("</h1>\n<p id=\"links_menu\"><a href=\"/logs/\">Local logs</a>, <a href=\"/stacks\">Thread Dump</a>, <a href=\"/logLevel\">Log Level</a></p>\n<hr id=\"head_rule\" />\n\n<h2>Region Server Attributes</h2>\n<table>\n<tr><th>Attribute Name</th><th>Value</th><th>Description</th></tr>\n<tr><td>Version</td><td>");
      out.print( VersionInfo.getVersion() );
      out.write(',');
      out.write(' ');
      out.write('r');
      out.print( VersionInfo.getRevision() );
      out.write("</td><td>Hbase version and svn revision</td></tr>\n<tr><td>Compiled</td><td>");
      out.print( VersionInfo.getDate() );
      out.write(',');
      out.write(' ');
      out.print( VersionInfo.getUser() );
      out.write("</td><td>When this version was compiled and by whom</td></tr>\n<tr><td>Load</td><td>");
      out.print( serverInfo.getLoad().toString() );
      out.write("</td><td>Requests/<em>hbase.regionserver.msginterval</em> + count of loaded regions</td></tr>\n</table>\n\n<h2>Online Regions</h2>\n");
      if (onlineRegions != null && onlineRegions.size() > 0) {
        out.write("\n<table>\n<tr><th>Region Name</th><th>Start Key</th><th>End Key</th></tr>\n");
        for (HRegion r: onlineRegions.values()) {
          out.write("\n<tr><td>");
          out.print( r.getRegionName().toString() );
          out.write("</td><td>");
          out.print( r.getStartKey().toString() );
          out.write("</td><td>");
          out.print( r.getEndKey().toString() );
          out.write("</td></tr>\n");
        }
        out.write("\n</table>\n<p>Region names are made of the containing table's name, a comma,\nthe start key, a comma, and a randomly generated region id. To illustrate,\nthe region named\n<em>domains,apache.org,5464829424211263407</em> is party to the table \n<em>domains</em>, has an id of <em>5464829424211263407</em> and the first key\nin the region is <em>apache.org</em>. The <em>-ROOT-</em>\nand <em>.META.</em> 'tables' are internal sytem tables.\nThe -ROOT- keeps a list of all regions in the .META. table. The .META. table\nkeeps a list of all regions in the system. The empty key is used to denote\ntable start and table end. A region with an\nempty start key is the first region in a table. If region has both an empty\nstart and an empty end key, its the only region in the table. See\n<a href=\"http://wiki.apache.org/lucene-hadoop/Hbase\">Hbase Home</a> for\nfurther explication.<p>\n");
      } else {
        out.write("\n<p>Not serving regions</p>\n");
      }
      out.write("\n</body>\n</html>\n");
    } catch (Throwable t) {
      if (!(t instanceof SkipPageException)){
        out = _jspx_out;
        if (out != null && out.getBufferSize() != 0)
          out.clearBuffer();
        if (_jspx_page_context != null) _jspx_page_context.handlePageException(t);
      }
    } finally {
      if (_jspxFactory != null) _jspxFactory.releasePageContext(_jspx_page_context);
    }
  }
}
M src/webapps/master/master.jsp
@@ -1,7 +1,6 @@
 <%@ page contentType="text/html;charset=UTF-8"
   import="java.util.*"
   import="org.apache.hadoop.io.Text"
-  import="org.apache.hadoop.util.VersionInfo"
   import="org.apache.hadoop.hbase.master.HMaster"
   import="org.apache.hadoop.hbase.HConstants"
   import="org.apache.hadoop.hbase.master.MetaRegion"
@@ -28,13 +27,13 @@
   "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
 <html xmlns="http://www.w3.org/1999/xhtml">
 <head><meta http-equiv="Content-Type" content="text/html;charset=UTF-8"/>
-<title>Hbase Master: <%= master.getMasterAddress()%></title>
+<title>HBase Master: <%= master.getMasterAddress()%></title>
 <link rel="stylesheet" type="text/css" href="/static/hbase.css" />
 </head>

 <body>

-<a id="logo" href="http://wiki.apache.org/lucene-hadoop/Hbase"><img src="/static/hbase_logo_med.gif" alt="Hbase Logo" title="Hbase Logo" /></a>
+<a id="logo" href="http://wiki.apache.org/lucene-hadoop/Hbase"><img src="/static/hbase_logo_med.gif" alt="HBase Logo" title="HBase Logo" /></a>
 <h1 id="page_title">Master: <%=master.getMasterAddress()%></h1>
 <p id="links_menu"><a href="/hql.jsp">HQL</a>, <a href="/logs/">Local logs</a>, <a href="/stacks">Thread Dump</a>, <a href="/logLevel">Log Level</a></p>
 <hr id="head_rule" />
@@ -42,10 +41,12 @@
 <h2>Master Attributes</h2>
 <table>
 <tr><th>Attribute Name</th><th>Value</th><th>Description</th></tr>
-<tr><td>Version</td><td><%= VersionInfo.getVersion() %>, r<%= VersionInfo.getRevision() %></td><td>Hbase version and svn revision</td></tr>
-<tr><td>Compiled</td><td><%= VersionInfo.getDate() %>, <%= VersionInfo.getUser() %></td><td>When this version was compiled and by whom</td></tr>
-<tr><td>Filesystem</td><td><%= conf.get("fs.default.name") %></td><td>Filesystem hbase is running on</td></tr>
-<tr><td>Hbase Root Directory</td><td><%= master.getRootDir().toString() %></td><td>Location of hbase home directory</td></tr>
+<tr><td>HBase Version</td><td><%= org.apache.hadoop.hbase.util.VersionInfo.getVersion() %>, r<%= org.apache.hadoop.hbase.util.VersionInfo.getRevision() %></td><td>HBase version and svn revision</td></tr>
+<tr><td>HBase Compiled</td><td><%= org.apache.hadoop.hbase.util.VersionInfo.getDate() %>, <%= org.apache.hadoop.hbase.util.VersionInfo.getUser() %></td><td>When HBase version was compiled and by whom</td></tr>
+<tr><td>Hadoop Version</td><td><%= org.apache.hadoop.util.VersionInfo.getVersion() %>, r<%= org.apache.hadoop.util.VersionInfo.getRevision() %></td><td>Hadoop version and svn revision</td></tr>
+<tr><td>Hadoop Compiled</td><td><%= org.apache.hadoop.util.VersionInfo.getDate() %>, <%= org.apache.hadoop.util.VersionInfo.getUser() %></td><td>When Hadoop version was compiled and by whom</td></tr>
+<tr><td>Filesystem</td><td><%= conf.get("fs.default.name") %></td><td>Filesystem HBase is running on</td></tr>
+<tr><td>HBase Root Directory</td><td><%= master.getRootDir().toString() %></td><td>Location of HBase home directory</td></tr>
 </table>

 <h2>Online META Regions</h2>
M src/webapps/regionserver/regionserver.jsp
@@ -1,7 +1,6 @@
 <%@ page contentType="text/html;charset=UTF-8"
   import="java.util.*"
   import="org.apache.hadoop.io.Text"
-  import="org.apache.hadoop.util.VersionInfo"
   import="org.apache.hadoop.hbase.regionserver.HRegionServer"
   import="org.apache.hadoop.hbase.regionserver.HRegion"
   import="org.apache.hadoop.hbase.HConstants"
@@ -15,12 +14,12 @@
   "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
 <html xmlns="http://www.w3.org/1999/xhtml">
 <head><meta http-equiv="Content-Type" content="text/html;charset=UTF-8"/>
-<title>Hbase Region Server: <%= serverInfo.getServerAddress().toString() %></title>
+<title>HBase Region Server: <%= serverInfo.getServerAddress().toString() %></title>
 <link rel="stylesheet" type="text/css" href="/static/hbase.css" />
 </head>

 <body>
-<a id="logo" href="http://wiki.apache.org/lucene-hadoop/Hbase"><img src="/static/hbase_logo_med.gif" alt="Hbase Logo" title="Hbase Logo" /></a>
+<a id="logo" href="http://wiki.apache.org/lucene-hadoop/Hbase"><img src="/static/hbase_logo_med.gif" alt="HBase Logo" title="HBase Logo" /></a>
 <h1 id="page_title">Region Server: <%= serverInfo.getServerAddress().toString() %></h1>
 <p id="links_menu"><a href="/logs/">Local logs</a>, <a href="/stacks">Thread Dump</a>, <a href="/logLevel">Log Level</a></p>
 <hr id="head_rule" />
@@ -28,8 +27,8 @@
 <h2>Region Server Attributes</h2>
 <table>
 <tr><th>Attribute Name</th><th>Value</th><th>Description</th></tr>
-<tr><td>Version</td><td><%= VersionInfo.getVersion() %>, r<%= VersionInfo.getRevision() %></td><td>Hbase version and svn revision</td></tr>
-<tr><td>Compiled</td><td><%= VersionInfo.getDate() %>, <%= VersionInfo.getUser() %></td><td>When this version was compiled and by whom</td></tr>
+<tr><td>HBase Version</td><td><%= org.apache.hadoop.hbase.util.VersionInfo.getVersion() %>, r<%= org.apache.hadoop.hbase.util.VersionInfo.getRevision() %></td><td>HBase version and svn revision</td></tr>
+<tr><td>HBase Compiled</td><td><%= org.apache.hadoop.hbase.util.VersionInfo.getDate() %>, <%= org.apache.hadoop.hbase.util.VersionInfo.getUser() %></td><td>When HBase version was compiled and by whom</td></tr>
 <tr><td>Load</td><td><%= serverInfo.getLoad().toString() %></td><td>Requests/<em>hbase.regionserver.msginterval</em> + count of loaded regions</td></tr>
 </table>

@@ -53,7 +52,7 @@ keeps a list of all regions in the system. The empty key is used to denote
 table start and table end. A region with an
 empty start key is the first region in a table. If region has both an empty
 start and an empty end key, its the only region in the table. See
-<a href="http://wiki.apache.org/lucene-hadoop/Hbase">Hbase Home</a> for
+<a href="http://wiki.apache.org/lucene-hadoop/Hbase">HBase Home</a> for
 further explication.<p>
 <% } else { %>
 <p>Not serving regions</p>