HADOOP-1957 Web UI with report on cluster state and basic browsing of tables

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk/src/contrib/hbase@582443 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael Stack 2007-10-06 03:49:43 +00:00
parent 6c9ddf9f37
commit eb22494fb4
20 changed files with 1303 additions and 0 deletions

80
build-webapps.xml Normal file
View File

@ -0,0 +1,80 @@
<?xml version="1.0"?>
<!--
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-->
<!--
This is a script to compile the jsp for the hbase webapps. Currently,
generation of java classes from jsp is done manually and the produced
java classes are then checked in. We do it this way keeping all to do
with jsp in a separate build file because trying to build jsp inline
we trip over the 'famous' commons-logging classloader problem:
org.apache.commons.logging.LogConfigurationException: Invalid
class loader hierarchy. You have more than one version of
'org.apache.commons.logging.Log' visible, which is not allowed.
See http://www.qos.ch/logging/classloader.jsp. It's addressed
with later versions of jasper apparently (Using later versions of
jasper in this hbase subproject is not sufficient), so when hadoop
goes to jetty6 (HADOOP-1650), we should be able to integrate jsp compiling into
general compile (See http://issues.apache.org/bugzilla/show_bug.cgi?id=36968)
Meantime, if changes in jsps, just checkin the product of this script:
the generated java classes and the web.xml. To run, do following:
$ ant -f build-webapps.xml
-->
<project name="build.hbase.jsp" default="jspc">
  <!-- Where third-party jars live and where jsp sources / generated java go. -->
  <property name="lib.dir" value="${basedir}/lib" />
  <property name="hadoop.root" location="${basedir}/../../../"/>
  <property name="src.webapps" value="${basedir}/src/webapps" />
  <property name="generated.webapps.src"
    value="${basedir}/src/java"/>
  <!-- Compile master and regionserver jsps to java classes with jasper.
       The generated sources are checked in under ${generated.webapps.src}
       (see the explanation in the header comment of this file). -->
  <target name="jspc" >
    <!-- Classpath for the jasper JspC task: commons-el from this project,
         jasper itself from hadoop's jetty-ext, servlet-api and
         commons-logging from hadoop's lib. -->
    <path id="jspc.classpath">
      <fileset dir="${lib.dir}">
        <include name="commons-el*jar" />
      </fileset>
      <fileset dir="${hadoop.root}/lib/jetty-ext/">
        <include name="*jar" />
      </fileset>
      <fileset dir="${hadoop.root}/lib/">
        <include name="servlet-api*jar" />
        <include name="commons-logging*jar" />
      </fileset>
    </path>
    <taskdef classname="org.apache.jasper.JspC" name="jspcompiler" >
      <classpath refid="jspc.classpath"/>
    </taskdef>
    <!-- Master webapp: jsp -> java under the ...generated.master package,
         also (re)writes the webapp's web.xml. -->
    <jspcompiler
      uriroot="${src.webapps}/master"
      outputdir="${generated.webapps.src}"
      package="org.apache.hadoop.hbase.generated.master"
      webxml="${src.webapps}/master/WEB-INF/web.xml">
    </jspcompiler>
    <!-- Regionserver webapp, same treatment. -->
    <jspcompiler
      uriroot="${src.webapps}/regionserver"
      outputdir="${generated.webapps.src}"
      package="org.apache.hadoop.hbase.generated.regionserver"
      webxml="${src.webapps}/regionserver/WEB-INF/web.xml">
    </jspcompiler>
  </target>
</project>

View File

@ -0,0 +1,93 @@
package org.apache.hadoop.hbase.generated.master;
import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.jsp.*;
import java.util.*;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.shell.TableFormatter;
import org.apache.hadoop.hbase.shell.ReturnMsg;
import org.apache.hadoop.hbase.shell.generated.Parser;
import org.apache.hadoop.hbase.shell.Command;
import org.apache.hadoop.hbase.shell.formatter.HtmlTableFormatter;
/**
 * Servlet behind the master's <code>/hql.jsp</code> page: renders a query
 * form and, when a query was submitted, parses it with the HQL shell parser
 * and executes it, formatting results as an HTML table.
 * <p>NOTE(review): this class appears to be Jasper-generated from hql.jsp
 * (see build-webapps.xml); regenerate rather than editing by hand.
 */
public final class hql_jsp extends org.apache.jasper.runtime.HttpJspBase
    implements org.apache.jasper.runtime.JspSourceDependent {

  // Jasper dependency tracking; never populated for this page.
  private static java.util.Vector _jspx_dependants;

  public java.util.List getDependants() {
    return _jspx_dependants;
  }

  public void _jspService(HttpServletRequest request, HttpServletResponse response)
        throws java.io.IOException, ServletException {
    // Standard Jasper page scaffolding: acquire a PageContext and the
    // implicit jsp objects from the factory.
    JspFactory _jspxFactory = null;
    PageContext pageContext = null;
    HttpSession session = null;
    ServletContext application = null;
    ServletConfig config = null;
    JspWriter out = null;
    Object page = this;
    JspWriter _jspx_out = null;
    PageContext _jspx_page_context = null;

    try {
      _jspxFactory = JspFactory.getDefaultFactory();
      response.setContentType("text/html;charset=UTF-8");
      pageContext = _jspxFactory.getPageContext(this, request, response,
          null, true, 8192, true);
      _jspx_page_context = pageContext;
      application = pageContext.getServletContext();
      config = pageContext.getServletConfig();
      session = pageContext.getSession();
      out = pageContext.getOut();
      _jspx_out = out;

      // Static page header plus links back to the master page.
      out.write("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \n  \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\"> \n<html xmlns=\"http://www.w3.org/1999/xhtml\">\n<head><meta http-equiv=\"Content-Type\" content=\"text/html;charset=UTF-8\"/>\n<title>HQL</title>\n<link rel=\"stylesheet\" type=\"text/css\" href=\"/static/hbase.css\" />\n</head>\n\n<body>\n<h1><a href=\"http://wiki.apache.org/lucene-hadoop/Hbase/HbaseShell\">HQL</a></h1>\n<p><a href=\"/master.jsp\">Home</a></p>\n");
      // Query submitted via the form below; empty string when absent.
      String query = request.getParameter("query");
      if (query == null) {
        query = "";
      }
      // Echo the form, pre-populated with the last query.
      out.write("\n<form action=\"/hql.jsp\" method=\"post\">\n <p>\n <label for=\"query\">Query: </label>\n <input type=\"text\" name=\"query\" id=\"query\" size=\"40\" value=\"");
      out.print( query );
      out.write("\" />\n <input type=\"submit\" value=\"submit\" />\n </p>\n </form>\n <p>Enter 'help;' -- thats 'help' plus a semi-colon -- for a list of <em>HQL</em> commands.\n Data Definition, SHELL, INSERTS, DELETES, and UPDATE commands are disabled in this interface\n </p>\n \n ");
      if (query.length() > 0) {
        out.write("\n <hr/>\n ");
        // Parse the submitted HQL; results render through the HTML formatter.
        Parser parser = new Parser(query, out, new HtmlTableFormatter(out));
        Command cmd = parser.terminatedCommand();
        // Only read-only SELECTs are allowed from the web UI.
        if (cmd.getCommandType() != Command.CommandType.SELECT) {
          out.write("\n <p>");
          out.print( cmd.getCommandType() );
          out.write("-type commands are disabled in this interface.</p>\n ");
        } else {
          // Execute against a fresh configuration; print the result summary.
          ReturnMsg rm = cmd.execute(new HBaseConfiguration());
          String summary = rm == null? "": rm.toString();
          out.write("\n <p>");
          out.print( summary );
          out.write("</p>\n ");
        }
      }
      out.write("\n</body>\n</html>");
    } catch (Throwable t) {
      // Standard Jasper error handling: clear buffered output and hand the
      // exception to the page context unless the page was deliberately skipped.
      if (!(t instanceof SkipPageException)){
        out = _jspx_out;
        if (out != null && out.getBufferSize() != 0)
          out.clearBuffer();
        if (_jspx_page_context != null) _jspx_page_context.handlePageException(t);
      }
    } finally {
      if (_jspxFactory != null) _jspxFactory.releasePageContext(_jspx_page_context);
    }
  }
}

View File

@ -0,0 +1,140 @@
package org.apache.hadoop.hbase.generated.master;
import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.jsp.*;
import java.util.*;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.hbase.HMaster;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HMaster.MetaRegion;
import org.apache.hadoop.hbase.HBaseAdmin;
import org.apache.hadoop.hbase.HServerInfo;
import org.apache.hadoop.hbase.HServerAddress;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.shell.ShowCommand;
import org.apache.hadoop.hbase.shell.TableFormatter;
import org.apache.hadoop.hbase.shell.ReturnMsg;
import org.apache.hadoop.hbase.shell.formatter.HtmlTableFormatter;
import org.apache.hadoop.hbase.HTableDescriptor;
/**
 * Servlet behind the master's status page: shows master attributes, the
 * online META regions, the user tables (via the shell's SHOW command), and
 * the known region servers with links to their own status pages.
 * <p>NOTE(review): this class appears to be Jasper-generated from master.jsp
 * (see build-webapps.xml); regenerate rather than editing by hand.
 */
public final class master_jsp extends org.apache.jasper.runtime.HttpJspBase
    implements org.apache.jasper.runtime.JspSourceDependent {

  // Jasper dependency tracking; never populated for this page.
  private static java.util.Vector _jspx_dependants;

  public java.util.List getDependants() {
    return _jspx_dependants;
  }

  public void _jspService(HttpServletRequest request, HttpServletResponse response)
        throws java.io.IOException, ServletException {
    // Standard Jasper page scaffolding: acquire a PageContext and the
    // implicit jsp objects from the factory.
    JspFactory _jspxFactory = null;
    PageContext pageContext = null;
    HttpSession session = null;
    ServletContext application = null;
    ServletConfig config = null;
    JspWriter out = null;
    Object page = this;
    JspWriter _jspx_out = null;
    PageContext _jspx_page_context = null;

    try {
      _jspxFactory = JspFactory.getDefaultFactory();
      response.setContentType("text/html;charset=UTF-8");
      pageContext = _jspxFactory.getPageContext(this, request, response,
          null, true, 8192, true);
      _jspx_page_context = pageContext;
      application = pageContext.getServletContext();
      config = pageContext.getServletConfig();
      session = pageContext.getSession();
      out = pageContext.getOut();
      _jspx_out = out;

      // The running master instance is stashed in the servlet context by
      // the info server when the master starts its web UI.
      HMaster master = (HMaster)getServletContext().getAttribute(HMaster.MASTER);
      HBaseConfiguration conf = new HBaseConfiguration();
      // Reuse the shell's SHOW TABLES command to render the table listing.
      TableFormatter formatter = new HtmlTableFormatter(out);
      ShowCommand show = new ShowCommand(out, formatter, "tables");
      // Cluster state pulled from the master for display below.
      HServerAddress rootLocation = master.getRootRegionLocation();
      Map<Text, MetaRegion> onlineRegions = master.getOnlineMetaRegions();
      Map<String, HServerInfo> serverToServerInfos =
        master.getServersToServerInfo();
      out.write("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \n  \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\"> \n<html xmlns=\"http://www.w3.org/1999/xhtml\">\n<head><meta http-equiv=\"Content-Type\" content=\"text/html;charset=UTF-8\"/>\n<title>Hbase Master: ");
      out.print( master.getMasterAddress());
      out.write("</title>\n<link rel=\"stylesheet\" type=\"text/css\" href=\"/static/hbase.css\" />\n</head>\n\n<body>\n<h1><a href=\"http://wiki.apache.org/lucene-hadoop/Hbase\">Hbase</a> Master: ");
      out.print(master.getMasterAddress());
      out.write("</h1>\n<p><a href=\"/hql.jsp\">HQL</a>,\n<a href=\"/logs/\">Local logs</a>, <a href=\"/stacks\">Thread Dump</a></p>\n\n<h2>Master Attributes</h2>\n<table>\n<tr><th>Attribute Name</th><th>Value</th></tr>\n<tr><td>Filesystem</td><td>");
      out.print( conf.get("fs.default.name") );
      out.write("</td></tr>\n<tr><td>Hbase Root Directory</td><td>");
      out.print( master.getRootDir().toString() );
      out.write("</td></tr>\n</table>\n\n<h2>Online META Regions</h2>\n");
      // META section is only rendered once the root region has a location.
      if (rootLocation != null) {
        out.write("\n<table>\n<tr><th>Name</th><th>Server</th></tr>\n<tr><td>");
        out.print( HConstants.ROOT_TABLE_NAME.toString() );
        out.write("</td><td>");
        out.print( rootLocation.toString() );
        out.write("</td></tr>\n");
        if (onlineRegions != null && onlineRegions.size() > 0) {
          out.write('\n');
          out.write(' ');
          out.write(' ');
          // One row per online META region: name and hosting server.
          for (Map.Entry<Text, HMaster.MetaRegion> e: onlineRegions.entrySet()) {
            MetaRegion meta = e.getValue();
            out.write("\n <tr><td>");
            out.print( meta.getRegionName().toString() );
            out.write("</td><td>");
            out.print( meta.getServer().toString() );
            out.write("</td></tr>\n ");
          }
        }
        out.write("\n</table>\n");
      }
      out.write("\n\n<h2>Tables</h2>\n");
      // SHOW TABLES writes its own HTML table through 'formatter'; the
      // returned message is just the command's summary line.
      ReturnMsg msg = show.execute(conf);
      out.write("\n<p>");
      out.print(msg );
      out.write("</p>\n\n<h2>Region Servers</h2>\n");
      if (serverToServerInfos != null && serverToServerInfos.size() > 0) {
        out.write("\n<table>\n<tr><th>Address</th><th>Start Code</th><th>Load</th></tr>\n");
        // One row per region server, linking to its info-server web UI.
        for (Map.Entry<String, HServerInfo> e: serverToServerInfos.entrySet()) {
          HServerInfo hsi = e.getValue();
          String url = "http://" +
            hsi.getServerAddress().getBindAddress().toString() + ":" +
            hsi.getInfoPort() + "/";
          String load = hsi.getLoad().toString();
          long startCode = hsi.getStartCode();
          String address = hsi.getServerAddress().toString();
          out.write("\n<tr><td><a href=\"");
          out.print( url );
          out.write('"');
          out.write('>');
          out.print( address );
          out.write("</a></td><td>");
          out.print( startCode );
          out.write("</td><td>");
          out.print( load );
          out.write("</tr>\n");
        }
        out.write("\n</table>\n");
      }
      out.write("\n</body>\n</html>");
    } catch (Throwable t) {
      // Standard Jasper error handling: clear buffered output and hand the
      // exception to the page context unless the page was deliberately skipped.
      if (!(t instanceof SkipPageException)){
        out = _jspx_out;
        if (out != null && out.getBufferSize() != 0)
          out.clearBuffer();
        if (_jspx_page_context != null) _jspx_page_context.handlePageException(t);
      }
    } finally {
      if (_jspxFactory != null) _jspxFactory.releasePageContext(_jspx_page_context);
    }
  }
}

View File

@ -0,0 +1,88 @@
package org.apache.hadoop.hbase.generated.regionserver;
import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.jsp.*;
import java.util.*;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.hbase.HRegionServer;
import org.apache.hadoop.hbase.HRegion;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HServerInfo;
import org.apache.hadoop.hbase.HRegionInfo;
/**
 * Servlet behind the region server's status page: shows the server's
 * address and load, and lists the regions it is currently serving.
 * <p>NOTE(review): this class appears to be Jasper-generated from
 * regionserver.jsp (see build-webapps.xml); regenerate rather than editing
 * by hand.
 */
public final class regionserver_jsp extends org.apache.jasper.runtime.HttpJspBase
    implements org.apache.jasper.runtime.JspSourceDependent {

  // Jasper dependency tracking; never populated for this page.
  private static java.util.Vector _jspx_dependants;

  public java.util.List getDependants() {
    return _jspx_dependants;
  }

  public void _jspService(HttpServletRequest request, HttpServletResponse response)
        throws java.io.IOException, ServletException {
    // Standard Jasper page scaffolding: acquire a PageContext and the
    // implicit jsp objects from the factory.
    JspFactory _jspxFactory = null;
    PageContext pageContext = null;
    HttpSession session = null;
    ServletContext application = null;
    ServletConfig config = null;
    JspWriter out = null;
    Object page = this;
    JspWriter _jspx_out = null;
    PageContext _jspx_page_context = null;

    try {
      _jspxFactory = JspFactory.getDefaultFactory();
      response.setContentType("text/html;charset=UTF-8");
      pageContext = _jspxFactory.getPageContext(this, request, response,
          null, true, 8192, true);
      _jspx_page_context = pageContext;
      application = pageContext.getServletContext();
      config = pageContext.getServletConfig();
      session = pageContext.getSession();
      out = pageContext.getOut();
      _jspx_out = out;

      // The running region server is stashed in the servlet context by the
      // info server when the region server starts its web UI.
      HRegionServer regionServer = (HRegionServer)getServletContext().getAttribute(HRegionServer.REGIONSERVER);
      HServerInfo serverInfo = regionServer.getServerInfo();
      SortedMap<Text, HRegion> onlineRegions = regionServer.getOnlineRegions();
      out.write("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \n  \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\"> \n<html xmlns=\"http://www.w3.org/1999/xhtml\">\n<head><meta http-equiv=\"Content-Type\" content=\"text/html;charset=UTF-8\"/>\n<title>Hbase Region Server: ");
      out.print( serverInfo.getServerAddress().toString() );
      out.write("</title>\n<link rel=\"stylesheet\" type=\"text/css\" href=\"/static/hbase.css\" />\n</head>\n\n<body>\n<h1><a href=\"http://wiki.apache.org/lucene-hadoop/Hbase\">Hbase</a> Region Server: ");
      out.print( serverInfo.getServerAddress().toString() );
      out.write("</h1>\n<p><a href=\"/logs/\">Local logs</a>, <a href=\"/stacks\">Thread Dump</a></p>\n\n<h2>Region Server Attributes</h2>\n<table>\n<tr><th>Attribute Name</th><th>Value</th></tr>\n<tr><td>Load</td><td>");
      out.print( serverInfo.getLoad().toString() );
      out.write("</td></tr>\n</table>\n\n<h2>Online Regions</h2>\n");
      if (onlineRegions != null && onlineRegions.size() > 0) {
        out.write("\n<table>\n<tr><th>Region Name</th><th>Start Key</th><th>End Key</th></tr>\n");
        // One row per online region: name plus its key range.
        for (HRegion r: onlineRegions.values()) {
          out.write("\n<tr><td>");
          out.print( r.getRegionName().toString() );
          out.write("</td><td>");
          out.print( r.getStartKey().toString() );
          out.write("</td><td>");
          out.print( r.getEndKey().toString() );
          out.write("</td></tr>\n");
        }
        out.write("\n</table>\n<p>Region names are made of the containing table's name, a comma,\nthe start key, a comma, and a randomly generated region id. To illustrate,\nthe region named\n<em>domains,apache.org,5464829424211263407</em> is party to the table \n<em>domains</em>, has an id of <em>5464829424211263407</em> and the first key\nin the region is <em>apache.org</em>. The <em>-ROOT-</em>\nand <em>.META.</em> 'tables' are internal sytem tables.\nThe -ROOT- keeps a list of all regions in the .META. table. The .META. table\nkeeps a list of all regions in the system. The empty key is used to denote\ntable start and table end. A region with an\nempty start key is the first region in a table. If region has both an empty\nstart and an empty end key, its the only region in the table. See\n<a href=\"http://wiki.apache.org/lucene-hadoop/Hbase\">Hbase Home</a> for\nfurther explication.<p>\n");
      } else {
        out.write("\n<p>Not serving regions</p>\n");
      }
      out.write("\n</body>\n</html>");
    } catch (Throwable t) {
      // Standard Jasper error handling: clear buffered output and hand the
      // exception to the page context unless the page was deliberately skipped.
      if (!(t instanceof SkipPageException)){
        out = _jspx_out;
        if (out != null && out.getBufferSize() != 0)
          out.clearBuffer();
        if (_jspx_page_context != null) _jspx_page_context.handlePageException(t);
      }
    } finally {
      if (_jspxFactory != null) _jspxFactory.releasePageContext(_jspx_page_context);
    }
  }
}

View File

@ -0,0 +1,151 @@
package org.apache.hadoop.hbase.shell.formatter;
import java.io.IOException;
import java.io.Writer;
import org.apache.hadoop.hbase.shell.TableFormatter;
/**
 * Formatter that outputs data inside an ASCII table.
 * If only a single cell result, then no formatting is done. Presumption is
 * that client manages serial access outputting tables. Does not close passed
 * {@link Writer}.
 */
public class AsciiTableFormatter implements TableFormatter {
  private static final String COLUMN_DELIMITER = "| ";
  private static final String COLUMN_CLOSER = "|";
  private static final int DEFAULT_COLUMN_WIDTH = 26;

  // Width is a line of content + delimiter.
  private int columnWidth = DEFAULT_COLUMN_WIDTH;

  // Amount of width to use for a line of content.
  private int columnContentWidth =
    DEFAULT_COLUMN_WIDTH - COLUMN_DELIMITER.length();

  // COLUMN_HORIZ_LINE is put at head and foot of a column and per column, is
  // drawn as row delimiter. Computed in header() once column width is known.
  private String columnHorizLine;
  private final String COLUMN_HORIZ_LINE_CLOSER = "+";

  // Used padding content to fill column.
  private final String PADDING_CHAR = " ";

  // True if we are to output no formatting.
  private boolean noFormatting = false;

  private final Writer out;
  private final String LINE_SEPARATOR = System.getProperty("line.separator");

  // Not instantiable
  @SuppressWarnings("unused")
  private AsciiTableFormatter() {
    this(null);
  }

  /**
   * @param o Writer to emit the table on; never closed by this class.
   */
  public AsciiTableFormatter(final Writer o) {
    this.out = o;
  }

  public Writer getOut() {
    return this.out;
  }

  /**
   * @param titles List of titles.  Pass null if no formatting (i.e.
   * no header, no footer, etc.
   * @throws IOException
   */
  public void header(String[] titles) throws IOException {
    if (titles == null) {
      // print nothing.
      setNoFormatting(true);
      return;
    }
    // Calculate width of columns: single-column tables get triple width,
    // two-column tables a fixed 39, everything else the default.
    this.columnWidth = titles.length == 1? 3 * DEFAULT_COLUMN_WIDTH:
      titles.length == 2? 39: DEFAULT_COLUMN_WIDTH;
    this.columnContentWidth = this.columnWidth - COLUMN_DELIMITER.length();
    // Create the horizontal line to draw across the top of each column.
    this.columnHorizLine = calculateColumnHorizLine(this.columnWidth);
    // Print out a column topper per column, then the titles as a row.
    printRowDelimiter(titles.length);
    row(titles);
  }

  /**
   * Emit one logical row. Cell content wider than a column wraps onto
   * additional physical lines; shorter cells are space-padded.
   * @param cells One cell of content per column.
   * @throws IOException
   */
  public void row(String [] cells) throws IOException {
    if (isNoFormatting()) {
      getOut().write(cells[0]);
      getOut().flush();
      return;
    }
    // Ok.  Output cells a line at a time w/ delimiters between cells.
    // indexes[i] is how far into cells[i] we have emitted so far.
    int [] indexes = new int[cells.length];
    // BUGFIX: the previous implementation counted a column as finished only
    // when its final chunk was non-empty, so an empty cell ("") never
    // registered as done and the while loop below spun forever.  Track
    // completion explicitly, once per column.
    boolean [] finished = new boolean[cells.length];
    int unfinished = cells.length;
    while (unfinished > 0) {
      StringBuffer sb = new StringBuffer();
      for (int i = 0; i < cells.length; i++) {
        sb.append(COLUMN_DELIMITER);
        int offset = indexes[i];
        if (offset + this.columnContentWidth >= cells[i].length()) {
          // Remainder of this cell fits on this line.
          String substr = cells[i].substring(offset);
          if (!finished[i]) {
            finished[i] = true;
            unfinished--;
          }
          sb.append(substr);
          // Pad the column out to full content width.
          for (int j = 0; j < this.columnContentWidth - substr.length(); j++) {
            sb.append(PADDING_CHAR);
          }
          indexes[i] = cells[i].length();
        } else {
          // Emit the next columnContentWidth-sized chunk; cell continues on
          // the next physical line.
          String substr = cells[i].substring(indexes[i],
            indexes[i] + this.columnContentWidth);
          indexes[i] += this.columnContentWidth;
          sb.append(substr);
        }
      }
      sb.append(COLUMN_CLOSER);
      getOut().write(sb.toString());
      getOut().write(LINE_SEPARATOR);
      getOut().flush();
    }
    printRowDelimiter(cells.length);
  }

  public void footer() throws IOException {
    if (isNoFormatting()) {
      // If no formatting, output a newline to delimit cell and the
      // result summary output at end of every command.
      getOut().write(LINE_SEPARATOR);
      getOut().flush();
    }
    // We're done. Clear flag.
    setNoFormatting(false);
  }

  // Draw a horizontal rule: one columnHorizLine segment per column, closed
  // with '+' and a line separator.  Relies on header() having been called
  // first to initialize columnHorizLine.
  private void printRowDelimiter(final int columnCount) throws IOException {
    for (int i = 0; i < columnCount; i++) {
      getOut().write(this.columnHorizLine);
    }
    getOut().write(COLUMN_HORIZ_LINE_CLOSER);
    getOut().write(LINE_SEPARATOR);
    getOut().flush();
  }

  // Build "+---...-" of the given total width (the '+' counts as one char).
  private String calculateColumnHorizLine(final int width) {
    StringBuffer sb = new StringBuffer();
    sb.append("+");
    for (int i = 1; i < width; i++) {
      sb.append("-");
    }
    return sb.toString();
  }

  public boolean isNoFormatting() {
    return this.noFormatting;
  }

  public void setNoFormatting(boolean noFormatting) {
    this.noFormatting = noFormatting;
  }
}

View File

@ -0,0 +1,111 @@
package org.apache.hadoop.hbase.shell.formatter;
import java.io.IOException;
import java.io.Writer;
import org.apache.hadoop.hbase.shell.TableFormatter;
import org.znerd.xmlenc.LineBreak;
import org.znerd.xmlenc.XMLOutputter;
/**
 * Formatter that outputs data inside an HTML table.
 * If only a single cell result, then no formatting is done. Presumption is
 * that client manages serial access outputting tables. Does not close passed
 * {@link Writer}.
 * <p>TODO: Uses xmlenc. Hopefully it flushes every so often (Claims its a
 * stream-based outputter). Verify.
 * <p>For now, invoke it this way (until shell starts to take cmdline params);
 * <code>$ HBASE_OPTS='-Dhbaseshell.formatter=org.apache.hadoop.hbase.shell.TableFormatterFactory$HtmlTableFormatter' ./bin/hbase shell</code>
 */
public class HtmlTableFormatter implements TableFormatter {
  private final XMLOutputter outputter;
  private boolean noFormatting = false;
  private final Writer out;

  // Uninstantiable: always construct with a Writer.
  @SuppressWarnings("unused")
  private HtmlTableFormatter() {
    this(null);
  }

  /**
   * @param o Writer the HTML table is written on; never closed here.
   */
  public HtmlTableFormatter(final Writer o) {
    this.out = o;
    try {
      // Looking at the xmlenc source, there should be no issue w/ wrapping
      // the stream -- i.e. no hanging resources.
      this.outputter = new XMLOutputter(this.out, "UTF-8");
      // Shell likes the DOS output on windows hosts; UNIX otherwise.
      String osName = System.getProperty("os.name").toLowerCase();
      LineBreak lineBreak = osName.contains("windows")
        ? LineBreak.DOS
        : LineBreak.UNIX;
      this.outputter.setLineBreak(lineBreak);
      this.outputter.setIndentation(" ");
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * @param titles List of titles.  Pass null if no formatting (i.e.
   * no header, no footer, etc.
   * @throws IOException
   */
  public void header(String[] titles) throws IOException {
    if (titles == null) {
      // Raw mode requested: emit nothing and suppress all table markup.
      setNoFormatting(true);
      return;
    }
    // Open the table and write one header cell per title.
    // Can't add a 'border=1' attribute because its included on the end in
    this.outputter.startTag("table");
    this.outputter.startTag("tr");
    for (String title: titles) {
      this.outputter.startTag("th");
      this.outputter.pcdata(title);
      this.outputter.endTag();
    }
    this.outputter.endTag();
  }

  /**
   * Emit one table row, one <td> per cell; in raw mode just emit the
   * single cell's content.
   */
  public void row(String [] cells) throws IOException{
    if (isNoFormatting()) {
      this.outputter.pcdata(cells[0]);
      return;
    }
    this.outputter.startTag("tr");
    for (String cell: cells) {
      this.outputter.startTag("td");
      this.outputter.pcdata(cell);
      this.outputter.endTag();
    }
    this.outputter.endTag();
  }

  public void footer() throws IOException {
    if (!isNoFormatting()) {
      // Close the open <table> and finish the document.
      this.outputter.endTag();
      this.outputter.endDocument();
    }
    // Reset for the next command.
    this.setNoFormatting(false);
    // If no formatting, output a newline to delimit cell and the
    // result summary output at end of every command.  If html, also emit a
    // newline to delimit html and summary line.
    getOut().write(System.getProperty("line.separator"));
    getOut().flush();
  }

  public Writer getOut() {
    return this.out;
  }

  public boolean isNoFormatting() {
    return this.noFormatting;
  }

  public void setNoFormatting(boolean noFormatting) {
    this.noFormatting = noFormatting;
  }
}

View File

@ -0,0 +1,229 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.util;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URL;
import javax.servlet.http.HttpServlet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.mapred.StatusHttpServer;
import org.mortbay.http.HttpContext;
import org.mortbay.http.SocketListener;
import org.mortbay.http.handler.ResourceHandler;
import org.mortbay.jetty.servlet.WebApplicationContext;
/**
* Create a Jetty embedded server to answer http requests. The primary goal
* is to serve up status information for the server.
* There are three contexts:
* "/stacks/" -> points to stack trace
* "/static/" -> points to common static files (src/webapps/static)
* "/" -> the jsp server code from (src/webapps/<name>)
*/
public class InfoServer {
  // Bulk of this class is copied from
  // {@link org.apache.hadoop.mapred.StatusHttpServer}.  StatusHttpServer
  // is not amenable to subclassing.  It keeps webAppContext inaccessible
  // and will find webapps only in the jar the class StatusHttpServer was
  // loaded from.
  private static final Log LOG = LogFactory.getLog(InfoServer.class.getName());
  private org.mortbay.jetty.Server webServer;
  private SocketListener listener;
  // Whether to probe successive ports when the configured one is taken.
  private boolean findPort;
  // Context serving "/" jsp pages; attributes set here are visible to jsps.
  private WebApplicationContext webAppContext;

  /**
   * Create a status server on the given port.
   * The jsp scripts are taken from src/webapps/<code>name<code>.
   * @param name The name of the server
   * @param bindAddress address the listener binds to
   * @param port The port to use on the server
   * @param findPort whether the server should start at the given port and
   * increment by 1 until it finds a free port.
   */
  public InfoServer(String name, String bindAddress, int port, boolean findPort)
  throws IOException {
    this.webServer = new org.mortbay.jetty.Server();
    this.findPort = findPort;
    this.listener = new SocketListener();
    this.listener.setPort(port);
    this.listener.setHost(bindAddress);
    this.webServer.addListener(listener);
    // Set up the context for "/static/*"
    String appDir = getWebAppsPath();
    // Set up the context for "/logs/" if "hadoop.log.dir" property is defined.
    String logDir = System.getProperty("hadoop.log.dir");
    if (logDir != null) {
      HttpContext logContext = new HttpContext();
      logContext.setContextPath("/logs/*");
      logContext.setResourceBase(logDir);
      logContext.addHandler(new ResourceHandler());
      webServer.addContext(logContext);
    }
    HttpContext staticContext = new HttpContext();
    staticContext.setContextPath("/static/*");
    staticContext.setResourceBase(appDir + "/static");
    staticContext.addHandler(new ResourceHandler());
    this.webServer.addContext(staticContext);
    // set up the context for "/" jsp files
    String webappDir = null;
    try {
      webappDir = getWebAppsPath("webapps" + File.separator + name);
    } catch (FileNotFoundException e) {
      // Retry.  Resource may be inside jar on a windows machine.
      webappDir = getWebAppsPath("webapps/" + name);
    }
    this.webAppContext =
      this.webServer.addWebApplication("/", webappDir);
    // Expose thread dumps at /stacks (handler borrowed from hadoop).
    addServlet("stacks", "/stacks", StatusHttpServer.StackServlet.class);
  }

  /**
   * Set a value in the webapp context. These values are available to the jsp
   * pages as "application.getAttribute(name)".
   * @param name The name of the attribute
   * @param value The value of the attribute
   */
  public void setAttribute(String name, Object value) {
    this.webAppContext.setAttribute(name, value);
  }

  /**
   * Add a servlet in the server.
   * @param name The name of the servlet (can be passed as null)
   * @param pathSpec The path spec for the servlet
   * @param servletClass The servlet class
   */
  public <T extends HttpServlet> void addServlet(String name, String pathSpec,
      Class<T> servletClass) {
    WebApplicationContext context = webAppContext;
    try {
      if (name == null) {
        context.addServlet(pathSpec, servletClass.getName());
      } else {
        context.addServlet(name, pathSpec, servletClass.getName());
      }
    } catch (ClassNotFoundException ex) {
      throw makeRuntimeException("Problem instantiating class", ex);
    } catch (InstantiationException ex) {
      throw makeRuntimeException("Problem instantiating class", ex);
    } catch (IllegalAccessException ex) {
      throw makeRuntimeException("Problem instantiating class", ex);
    }
  }

  // Wrap a checked failure as a RuntimeException, preserving the cause.
  private static RuntimeException makeRuntimeException(String msg, Throwable cause) {
    RuntimeException result = new RuntimeException(msg);
    if (cause != null) {
      result.initCause(cause);
    }
    return result;
  }

  /**
   * Get the value in the webapp context.
   * @param name The name of the attribute
   * @return The value of the attribute
   */
  public Object getAttribute(String name) {
    return this.webAppContext.getAttribute(name);
  }

  /**
   * Get the pathname to the <code>webapps</code> files.
   * @return the pathname as a URL
   */
  private static String getWebAppsPath() throws IOException {
    return getWebAppsPath("webapps");
  }

  /**
   * Get the pathname to the <code>path</code> files.
   * @param path Path to find.
   * @return the pathname as a URL
   */
  private static String getWebAppsPath(final String path) throws IOException {
    URL url = InfoServer.class.getClassLoader().getResource(path);
    if (url == null)
      throw new IOException("webapps not found in CLASSPATH");
    return url.toString();
  }

  /**
   * Get the port that the server is on
   * @return the port
   */
  public int getPort() {
    return this.listener.getPort();
  }

  /**
   * Configure the listener's thread pool bounds.
   * @param min minimum number of threads
   * @param max maximum number of threads
   */
  public void setThreads(int min, int max) {
    this.listener.setMinThreads(min);
    this.listener.setMaxThreads(max);
  }

  /**
   * Start the server. Does not wait for the server to start.
   * If findPort was passed to the constructor, a BindException makes us
   * retry on the next port; any other failure is rethrown as IOException.
   */
  public void start() throws IOException {
    try {
      while (true) {
        try {
          this.webServer.start();
          break;
        } catch (org.mortbay.util.MultiException ex) {
          // look for the multi exception containing a bind exception,
          // in that case try the next port number.
          boolean needNewPort = false;
          for(int i=0; i < ex.size(); ++i) {
            Exception sub = ex.getException(i);
            if (sub instanceof java.net.BindException) {
              needNewPort = true;
              break;
            }
          }
          if (!findPort || !needNewPort) {
            throw ex;
          }
          this.listener.setPort(listener.getPort() + 1);
        }
      }
    } catch (IOException ie) {
      throw ie;
    } catch (Exception e) {
      IOException ie = new IOException("Problem starting http server");
      ie.initCause(e);
      throw ie;
    }
  }

  /**
   * stop the server
   */
  public void stop() throws InterruptedException {
    this.webServer.stop();
  }
}

View File

@ -81,6 +81,20 @@
Set to -1 if you do not want the info server to run.
</description>
</property>
<property>
<name>hbase.master.info.port</name>
<value>-1</value>
<description>The port for the hbase master web UI.
Set to -1 if you do not want the info server to run.
</description>
</property>
<property>
<name>hbase.regionserver.info.port</name>
<value>-1</value>
<description>The port for the hbase regionserver web UI.
Set to -1 if you do not want the info server to run.
</description>
</property>
<property>
<name>hbase.master.lease.thread.wakefrequency</name>
<value>3000</value>

View File

@ -0,0 +1,84 @@
/**
* Copyright 2007 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.net.URL;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.Text;
/**
 * Testing, info servers are disabled.  This test enables them and checks
 * that they serve pages.
 */
public class TestInfoServers extends HBaseTestCase {
  static final Log LOG = LogFactory.getLog(TestInfoServers.class);

  protected void setUp() throws Exception {
    super.setUp();
  }

  protected void tearDown() throws Exception {
    super.tearDown();
  }

  /**
   * Brings up a one-region-server mini cluster with info servers enabled
   * and verifies both the master and region server UIs serve their pages.
   */
  public void testInfoServersAreUp() throws Exception {
    // Bring up info servers on 'odd' port numbers in case the test is not
    // sourcing the src/test/hbase-default.xml.
    this.conf.setInt("hbase.master.info.port", 60011);
    this.conf.setInt("hbase.regionserver.info.port", 60031);
    MiniHBaseCluster miniHbase = new MiniHBaseCluster(this.conf, 1);
    // Create table so info servers are given time to spin up.
    HBaseAdmin a = new HBaseAdmin(conf);
    a.createTable(new HTableDescriptor(getName()));
    assertTrue(a.tableExists(new Text(getName())));
    try {
      int port = miniHbase.getMasterThread().getMaster().infoServer.getPort();
      assertHasExpectedContent(new URL("http://localhost:" + port +
        "/index.html"), "Master");
      port = miniHbase.getRegionThreads().get(0).getRegionServer().
        infoServer.getPort();
      assertHasExpectedContent(new URL("http://localhost:" + port +
        "/index.html"), "Region Server");
    } finally {
      miniHbase.shutdown();
    }
  }

  /**
   * Fetches <code>u</code> and asserts the body contains
   * <code>expected</code>.
   * @param u URL to fetch
   * @param expected text the page body must contain
   * @throws IOException if the fetch fails
   */
  private void assertHasExpectedContent(final URL u, final String expected)
  throws IOException {
    LOG.info("Testing " + u.toString() + " has " + expected);
    java.net.URLConnection c = u.openConnection();
    c.connect();
    assertTrue(c.getContentLength() > 0);
    StringBuilder sb = new StringBuilder(c.getContentLength());
    BufferedInputStream bis = new BufferedInputStream(c.getInputStream());
    try {
      byte [] bytes = new byte[1024];
      for (int read = -1; (read = bis.read(bytes)) != -1;) {
        sb.append(new String(bytes, 0, read));
      }
    } finally {
      // Close in a finally so the stream is not leaked if a read throws.
      bis.close();
    }
    String content = sb.toString();
    // The original code called content.matches(expected) and discarded the
    // result, so nothing was ever verified (and matches() requires a
    // full-string regex anyway).  Assert containment explicitly.
    assertTrue("Page at " + u + " should contain '" + expected + "'",
      content.contains(expected));
  }
}

View File

@ -0,0 +1,52 @@
/**
* Copyright 2007 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import junit.framework.TestCase;
/**
 * Tests that hbase data structures survive a Writable
 * serialization/deserialization round-trip.
 */
public class TestSerialization extends TestCase {
  protected void setUp() throws Exception {
    super.setUp();
  }

  protected void tearDown() throws Exception {
    super.tearDown();
  }

  /**
   * Round-trips an HServerInfo through write()/readFields() and checks the
   * deserialized copy equals the original.
   */
  public void testServerInfo() throws Exception {
    HServerInfo hsi = new HServerInfo(new HServerAddress("0.0.0.0:123"), -1,
      1245);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dao = new DataOutputStream(baos);
    try {
      hsi.write(dao);
    } finally {
      // Ensure the stream is flushed/closed even if write() throws.
      dao.close();
    }
    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    DataInputStream dis = new DataInputStream(bais);
    HServerInfo deserializedHsi = new HServerInfo();
    deserializedHsi.readFields(dis);
    // assertEquals reports both values on failure, unlike
    // assertTrue(a.equals(b)) which only says "false".
    assertEquals(hsi, deserializedHsi);
  }
}

View File

@ -0,0 +1,33 @@
<?xml version="1.0" encoding="ISO-8859-1"?>
<!DOCTYPE web-app
PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
"http://java.sun.com/dtd/web-app_2_3.dtd">
<!--
Automatically created by Tomcat JspC.
-->
<web-app>
<servlet>
<servlet-name>org.apache.hadoop.hbase.generated.master.hql_jsp</servlet-name>
<servlet-class>org.apache.hadoop.hbase.generated.master.hql_jsp</servlet-class>
</servlet>
<servlet>
<servlet-name>org.apache.hadoop.hbase.generated.master.master_jsp</servlet-name>
<servlet-class>org.apache.hadoop.hbase.generated.master.master_jsp</servlet-class>
</servlet>
<servlet-mapping>
<servlet-name>org.apache.hadoop.hbase.generated.master.hql_jsp</servlet-name>
<url-pattern>/hql.jsp</url-pattern>
</servlet-mapping>
<servlet-mapping>
<servlet-name>org.apache.hadoop.hbase.generated.master.master_jsp</servlet-name>
<url-pattern>/master.jsp</url-pattern>
</servlet-mapping>
</web-app>

View File

@ -0,0 +1,57 @@
<%@ page contentType="text/html;charset=UTF-8"
  import="java.util.*"
  import="org.apache.hadoop.hbase.HBaseConfiguration"
  import="org.apache.hadoop.hbase.shell.TableFormatter"
  import="org.apache.hadoop.hbase.shell.ReturnMsg"
  import="org.apache.hadoop.hbase.shell.generated.Parser"
  import="org.apache.hadoop.hbase.shell.Command"
  import="org.apache.hadoop.hbase.shell.formatter.HtmlTableFormatter"
%><?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><meta http-equiv="Content-Type" content="text/html;charset=UTF-8"/>
<title>HQL</title>
<link rel="stylesheet" type="text/css" href="/static/hbase.css" />
</head>

<body>
<h1><a href="http://wiki.apache.org/lucene-hadoop/Hbase/HbaseShell">HQL</a></h1>
<p><a href="/master.jsp">Home</a></p>
<% String query = request.getParameter("query");
   if (query == null) {
     query = "";
   }
   // Escape the raw query before echoing it back into the form field;
   // otherwise a crafted 'query' parameter is reflected into the page
   // unescaped (cross-site scripting).  The unescaped value is still the
   // one handed to the Parser below.
   String escapedQuery = query.replaceAll("&", "&amp;").
     replaceAll("<", "&lt;").replaceAll(">", "&gt;").
     replaceAll("\"", "&quot;");
%>
<form action="/hql.jsp" method="post">
 <p>
  <label for="query">Query: </label>
  <input type="text" name="query" id="query" size="40" value="<%= escapedQuery %>" />
  <input type="submit" value="submit" />
 </p>
</form>
<p>Enter 'help;' -- thats 'help' plus a semi-colon -- for a list of <em>HQL</em> commands.
Data Definition, SHELL, INSERTS, DELETES, and UPDATE commands are disabled in this interface
</p>

<%
  if (query.length() > 0) {
%>
<hr/>
<%
    Parser parser = new Parser(query, out, new HtmlTableFormatter(out));
    Command cmd = parser.terminatedCommand();
    if (cmd.getCommandType() != Command.CommandType.SELECT) {
%>
  <p><%= cmd.getCommandType() %>-type commands are disabled in this interface.</p>
<%
    } else {
      ReturnMsg rm = cmd.execute(new HBaseConfiguration());
      String summary = rm == null? "": rm.toString();
%>
  <p><%= summary %></p>
<%  }
  }
%>
</body>
</html>

View File

@ -0,0 +1 @@
<meta HTTP-EQUIV="REFRESH" content="0;url=master.jsp"/>

View File

@ -0,0 +1,84 @@
<%@ page contentType="text/html;charset=UTF-8"
  import="java.util.*"
  import="org.apache.hadoop.io.Text"
  import="org.apache.hadoop.hbase.HMaster"
  import="org.apache.hadoop.hbase.HConstants"
  import="org.apache.hadoop.hbase.HMaster.MetaRegion"
  import="org.apache.hadoop.hbase.HBaseAdmin"
  import="org.apache.hadoop.hbase.HServerInfo"
  import="org.apache.hadoop.hbase.HServerAddress"
  import="org.apache.hadoop.hbase.HRegionInfo"
  import="org.apache.hadoop.hbase.HBaseConfiguration"
  import="org.apache.hadoop.hbase.shell.ShowCommand"
  import="org.apache.hadoop.hbase.shell.TableFormatter"
  import="org.apache.hadoop.hbase.shell.ReturnMsg"
  import="org.apache.hadoop.hbase.shell.formatter.HtmlTableFormatter"
  import="org.apache.hadoop.hbase.HTableDescriptor" %><%
  <%-- Master status page: pulls the running HMaster out of the servlet
       context and reports cluster attributes, META regions, tables and
       region servers. --%>
  HMaster master = (HMaster)getServletContext().getAttribute(HMaster.MASTER);
  HBaseConfiguration conf = new HBaseConfiguration();
  TableFormatter formatter = new HtmlTableFormatter(out);
  ShowCommand show = new ShowCommand(out, formatter, "tables");
  HServerAddress rootLocation = master.getRootRegionLocation();
  Map<Text, MetaRegion> onlineRegions = master.getOnlineMetaRegions();
  Map<String, HServerInfo> serverToServerInfos =
    master.getServersToServerInfo();
%><?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><meta http-equiv="Content-Type" content="text/html;charset=UTF-8"/>
<title>Hbase Master: <%= master.getMasterAddress()%></title>
<link rel="stylesheet" type="text/css" href="/static/hbase.css" />
</head>

<body>

<h1><a href="http://wiki.apache.org/lucene-hadoop/Hbase">Hbase</a> Master: <%=master.getMasterAddress()%></h1>
<p><a href="/hql.jsp">HQL</a>,
<a href="/logs/">Local logs</a>, <a href="/stacks">Thread Dump</a></p>

<h2>Master Attributes</h2>
<table>
<tr><th>Attribute Name</th><th>Value</th></tr>
<tr><td>Filesystem</td><td><%= conf.get("fs.default.name") %></td></tr>
<tr><td>Hbase Root Directory</td><td><%= master.getRootDir().toString() %></td></tr>
</table>

<h2>Online META Regions</h2>
<% if (rootLocation != null) { %>
<table>
<tr><th>Name</th><th>Server</th></tr>
<tr><td><%= HConstants.ROOT_TABLE_NAME.toString() %></td><td><%= rootLocation.toString() %></td></tr>
<%
  if (onlineRegions != null && onlineRegions.size() > 0) { %>
  <% for (Map.Entry<Text, HMaster.MetaRegion> e: onlineRegions.entrySet()) {
    MetaRegion meta = e.getValue();
  %>
<tr><td><%= meta.getRegionName().toString() %></td><td><%= meta.getServer().toString() %></td></tr>
<% }
  } %>
</table>
<% } %>

<h2>Tables</h2>
<% ReturnMsg msg = show.execute(conf); %>
<p><%=msg %></p>

<h2>Region Servers</h2>
<% if (serverToServerInfos != null && serverToServerInfos.size() > 0) { %>
<table>
<tr><th>Address</th><th>Start Code</th><th>Load</th></tr>
<%   for (Map.Entry<String, HServerInfo> e: serverToServerInfos.entrySet()) {
       HServerInfo hsi = e.getValue();
       String url = "http://" +
         hsi.getServerAddress().getBindAddress().toString() + ":" +
         hsi.getInfoPort() + "/";
       String load = hsi.getLoad().toString();
       long startCode = hsi.getStartCode();
       String address = hsi.getServerAddress().toString();
%>
<%-- The original row left the last cell unclosed (missing </td>). --%>
<tr><td><a href="<%= url %>"><%= address %></a></td><td><%= startCode %></td><td><%= load %></td></tr>
<%   } %>
</table>
<% } %>
</body>
</html>

View File

@ -0,0 +1,23 @@
<?xml version="1.0" encoding="ISO-8859-1"?>
<!DOCTYPE web-app
PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
"http://java.sun.com/dtd/web-app_2_3.dtd">
<!--
Automatically created by Tomcat JspC.
-->
<web-app>
<servlet>
<servlet-name>org.apache.hadoop.hbase.generated.regionserver.regionserver_jsp</servlet-name>
<servlet-class>org.apache.hadoop.hbase.generated.regionserver.regionserver_jsp</servlet-class>
</servlet>
<servlet-mapping>
<servlet-name>org.apache.hadoop.hbase.generated.regionserver.regionserver_jsp</servlet-name>
<url-pattern>/regionserver.jsp</url-pattern>
</servlet-mapping>
</web-app>

View File

@ -0,0 +1 @@
<meta HTTP-EQUIV="REFRESH" content="0;url=regionserver.jsp"/>

View File

@ -0,0 +1,57 @@
<%@ page contentType="text/html;charset=UTF-8"
  import="java.util.*"
  import="org.apache.hadoop.io.Text"
  import="org.apache.hadoop.hbase.HRegionServer"
  import="org.apache.hadoop.hbase.HRegion"
  import="org.apache.hadoop.hbase.HConstants"
  import="org.apache.hadoop.hbase.HServerInfo"
  import="org.apache.hadoop.hbase.HRegionInfo" %><%
  <%-- Region server status page: reports load and the regions this
       server is currently hosting. --%>
  HRegionServer regionServer = (HRegionServer)getServletContext().getAttribute(HRegionServer.REGIONSERVER);
  HServerInfo serverInfo = regionServer.getServerInfo();
  SortedMap<Text, HRegion> onlineRegions = regionServer.getOnlineRegions();
%><?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><meta http-equiv="Content-Type" content="text/html;charset=UTF-8"/>
<title>Hbase Region Server: <%= serverInfo.getServerAddress().toString() %></title>
<link rel="stylesheet" type="text/css" href="/static/hbase.css" />
</head>

<body>
<h1><a href="http://wiki.apache.org/lucene-hadoop/Hbase">Hbase</a> Region Server: <%= serverInfo.getServerAddress().toString() %></h1>
<p><a href="/logs/">Local logs</a>, <a href="/stacks">Thread Dump</a></p>

<h2>Region Server Attributes</h2>
<table>
<tr><th>Attribute Name</th><th>Value</th></tr>
<tr><td>Load</td><td><%= serverInfo.getLoad().toString() %></td></tr>
</table>

<h2>Online Regions</h2>
<% if (onlineRegions != null && onlineRegions.size() > 0) { %>
<table>
<tr><th>Region Name</th><th>Start Key</th><th>End Key</th></tr>
<%   for (HRegion r: onlineRegions.values()) { %>
<tr><td><%= r.getRegionName().toString() %></td><td><%= r.getStartKey().toString() %></td><td><%= r.getEndKey().toString() %></td></tr>
<%   } %>
</table>
<p>Region names are made of the containing table's name, a comma,
the start key, a comma, and a randomly generated region id.  To illustrate,
the region named
<em>domains,apache.org,5464829424211263407</em> is party to the table
<em>domains</em>, has an id of <em>5464829424211263407</em> and the first key
in the region is <em>apache.org</em>.  The <em>-ROOT-</em>
and <em>.META.</em> 'tables' are internal system tables.
The -ROOT- keeps a list of all regions in the .META. table.  The .META. table
keeps a list of all regions in the system. The empty key is used to denote
table start and table end.  A region with an
empty start key is the first region in a table.  If region has both an empty
start and an empty end key, its the only region in the table.  See
<a href="http://wiki.apache.org/lucene-hadoop/Hbase">Hbase Home</a> for
further explication.</p>
<% } else { %>
<p>Not serving regions</p>
<% } %>
</body>
</html>

View File

@ -0,0 +1,5 @@
/* Shared styling for the hbase master and region server web UIs. */
h1, h2, h3 {
  color: DarkSlateBlue;
}
table, tr, td, th {
  border: thin solid DodgerBlue;
}