HADOOP-2650 Remove Writables.clone and use WritableUtils.clone from
            hadoop instead
HADOOP-2584 Web UI displays an IOException instead of the Tables



git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk/src/contrib/hbase@613923 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael Stack 2008-01-21 16:21:46 +00:00
parent 5ea6853f90
commit 6487762f94
12 changed files with 55 additions and 53 deletions
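For context, a minimal sketch of the replacement API named above, assuming the WritableUtils.clone(Writable, Configuration) signature that HADOOP-2469 introduced; the class name and Text payload here are illustrative only:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableUtils;

public class WritableCloneExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    Text original = new Text("row-key");
    // Deep copy via a serialize/deserialize round trip. Releases of this
    // vintage declare the return type as Writable, hence the cast, the
    // same one visible in the HRegion hunk below.
    Text copy = (Text) WritableUtils.clone(original, conf);
    System.out.println(copy.equals(original) && copy != original); // true
  }
}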

View File

@@ -140,6 +140,9 @@ Trunk (unreleased changes)
HADOOP-2619 Compaction errors after a region splits
HADOOP-2621 Memcache flush flushing every 60 secs with out considering
the max memcache size
HADOOP-2584 Web UI displays an IOException instead of the Tables
HADOOP-2650 Remove Writables.clone and use WritableUtils.clone from
hadoop instead
IMPROVEMENTS
HADOOP-2401 Add convenience put method that takes writable

View File

@@ -22,13 +22,12 @@ package org.apache.hadoop.hbase;
import java.io.IOException;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.SortedMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.io.HbaseMapWritable;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.hbase.io.HbaseMapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.ipc.RemoteException;

View File

@@ -249,12 +249,10 @@ public class HConnectionManager implements HConstants {
do {
try{
// turn the start row into a location
metaLocation =
locateRegion(META_TABLE_NAME, startRow);
metaLocation = locateRegion(META_TABLE_NAME, startRow);
// connect to the server hosting the .META. region
server =
getHRegionConnection(metaLocation.getServerAddress());
server = getHRegionConnection(metaLocation.getServerAddress());
// open a scanner over the meta region
scannerId = server.openScanner(
@@ -289,8 +287,9 @@ public class HConnectionManager implements HConstants {
// advance the startRow to the end key of the current region
startRow = metaLocation.getRegionInfo().getEndKey();
} catch (IOException e) {
// need retry logic?
throw e;
// Retry once.
metaLocation = relocateRegion(META_TABLE_NAME, startRow);
continue;
}
finally {
if (scannerId != -1L) {
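The hunk above replaces a bare rethrow with relocate-and-retry. A generic sketch of that pattern, all types and method names being hypothetical stand-ins rather than the real HConnectionManager API:

import java.io.IOException;

public class RetryOnceSketch {
  interface Locator {
    String locate(String row) throws IOException;   // may serve a cached entry
    String relocate(String row) throws IOException; // forces a fresh lookup
  }

  static String findRegion(Locator locator, String startRow) throws IOException {
    while (true) {
      try {
        String location = locator.locate(startRow);
        // ... open a scanner against location and read rows ...
        return location;
      } catch (IOException e) {
        // Drop the stale cache entry and go around again; relocate
        // itself throws if the fresh lookup fails as well.
        locator.relocate(startRow);
      }
    }
  }
}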

View File

@@ -3182,6 +3182,13 @@ public class HMaster extends Thread implements HConstants, HMasterInterface,
}
}
/**
* @return Configuration being used by this server.
*/
public HBaseConfiguration getConfiguration() {
return this.conf;
}
/*
* Main program
*/
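The new accessor appears to be here so the status pages changed below can reuse the master's live configuration instead of instantiating a fresh HBaseConfiguration from whatever sits on the servlet classpath, one way the UI could end up rendering an IOException (HADOOP-2584). A simplified sketch of the pattern with stand-in types:

import java.util.HashMap;
import java.util.Map;

public class LiveConfigSketch {
  // Stand-in for HMaster: owns the configuration it was started with.
  static class Master {
    private final Map<String, String> conf = new HashMap<String, String>();
    Master() { conf.put("hbase.master", "example-host:60000"); }
    Map<String, String> getConfiguration() { return conf; } // the new accessor
  }

  // Stand-in for the generated status page.
  static String render(Master master) {
    // Before: the page built its own configuration, which could disagree
    // with the running master. After: it asks the master directly.
    Map<String, String> conf = master.getConfiguration();
    return "hbase.master=" + conf.get("hbase.master");
  }

  public static void main(String[] args) {
    System.out.println(render(new Master()));
  }
}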

View File

@@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.io.BatchOperation;
import org.apache.hadoop.hbase.io.BatchUpdate;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.util.StringUtils;
/**
@@ -1460,15 +1461,15 @@ public class HRegion implements HConstants {
this.log.append(regionInfo.getRegionName(),
regionInfo.getTableDesc().getName(), updatesByColumn);
long memcacheSize = 0;
long size = 0;
for (Map.Entry<HStoreKey, byte[]> e: updatesByColumn.entrySet()) {
HStoreKey key = e.getKey();
byte[] val = e.getValue();
memcacheSize = this.memcacheSize.addAndGet(key.getSize() +
size = this.memcacheSize.addAndGet(key.getSize() +
(val == null ? 0 : val.length));
stores.get(HStoreKey.extractFamily(key.getColumn())).add(key, val);
}
if (this.flushListener != null && memcacheSize > this.memcacheFlushSize) {
if (this.flushListener != null && size > this.memcacheFlushSize) {
// Request a cache flush
this.flushListener.flushRequested(this);
}
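The rename above retires a local variable that shadowed the memcacheSize field; what the flush check needs is the running total returned by addAndGet. A minimal sketch with an illustrative threshold:

import java.util.concurrent.atomic.AtomicLong;

public class FlushTriggerSketch {
  private final AtomicLong memcacheSize = new AtomicLong(0);
  private final long memcacheFlushSize = 64L * 1024 * 1024; // illustrative

  // Returns true when this addition pushes the cache over the threshold.
  boolean add(long keyBytes, long valueBytes) {
    // addAndGet returns the new total; holding it in a local named "size"
    // keeps it distinct from the field, as in the patch.
    long size = memcacheSize.addAndGet(keyBytes + valueBytes);
    return size > memcacheFlushSize; // caller would request a cache flush
  }

  public static void main(String[] args) {
    FlushTriggerSketch t = new FlushTriggerSketch();
    System.out.println(t.add(16, 100)); // false: far under the threshold
  }
}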
@@ -1626,7 +1627,7 @@
// one shared across many rows. See HADOOP-2467.
scanners[i] = stores[i].getScanner(timestamp, cols, firstRow,
(i > 0 && filter != null)?
(RowFilterInterface)Writables.clone(filter, conf): filter);
(RowFilterInterface)WritableUtils.clone(filter, conf): filter);
}
} catch(IOException e) {
for (int i = 0; i < this.scanners.length; i++) {
@@ -1758,14 +1759,14 @@
}
} finally {
synchronized (activeScannerCount) {
int scanners = activeScannerCount.decrementAndGet();
if (scanners < 0) {
LOG.error("active scanner count less than zero: " + scanners +
int count = activeScannerCount.decrementAndGet();
if (count < 0) {
LOG.error("active scanner count less than zero: " + count +
" resetting to zero");
activeScannerCount.set(0);
scanners = 0;
count = 0;
}
if (scanners == 0) {
if (count == 0) {
activeScannerCount.notifyAll();
}
}
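The same rename discipline applies to the scanner bookkeeping above, which also clamps the counter at zero and wakes waiters when the last scanner closes. A compact sketch of that close path, mirroring the hunk:

import java.util.concurrent.atomic.AtomicInteger;

public class ScannerGateSketch {
  private final AtomicInteger activeScannerCount = new AtomicInteger(0);

  void scannerOpened() {
    activeScannerCount.incrementAndGet();
  }

  void scannerClosed() {
    // The counter object doubles as the monitor, as in the hunk above.
    synchronized (activeScannerCount) {
      int count = activeScannerCount.decrementAndGet();
      if (count < 0) {
        // Defensive clamp: a negative count means unbalanced bookkeeping.
        activeScannerCount.set(0);
        count = 0;
      }
      if (count == 0) {
        activeScannerCount.notifyAll(); // wake anything awaiting quiescence
      }
    }
  }
}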

View File

@@ -20,17 +20,15 @@
package org.apache.hadoop.hbase;
import java.io.IOException;
import java.util.List;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.atomic.AtomicReferenceArray;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -38,9 +36,9 @@ import org.apache.hadoop.hbase.filter.RowFilterInterface;
import org.apache.hadoop.hbase.filter.StopRowFilter;
import org.apache.hadoop.hbase.filter.WhileMatchRowFilter;
import org.apache.hadoop.hbase.io.BatchUpdate;
import org.apache.hadoop.hbase.io.HbaseMapWritable;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.hbase.io.HbaseMapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.ipc.RemoteException;
@@ -105,13 +103,16 @@ public class HTable implements HConstants {
}
/**
* Find region location hosting passed row using cached info
* Find region location hosting passed row
* @param row Row to find.
* @param reload If true, bypass the cache and re-locate; otherwise use any cached location.
* @return Location of row.
*/
HRegionLocation getRegionLocation(Text row, boolean reload) throws IOException {
checkClosed();
return this.connection.relocateRegion(this.tableName, row);
return reload?
this.connection.relocateRegion(this.tableName, row):
this.connection.locateRegion(tableName, row);
}
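Before this fix getRegionLocation ignored its reload flag and always re-located. A generic sketch of the restored cache-versus-reload split, every name a hypothetical stand-in; callers pass reload=true only after an RPC against the cached server fails, the pattern visible in the hunks below:

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

public class LocationCacheSketch {
  private final Map<String, String> cache = new HashMap<String, String>();

  // Mirrors the fixed method: honor reload rather than always re-locating.
  String getLocation(String row, boolean reload) throws IOException {
    return reload ? relocate(row) : locate(row);
  }

  String locate(String row) throws IOException {
    String cached = cache.get(row);
    return cached != null ? cached : relocate(row);
  }

  String relocate(String row) throws IOException {
    String fresh = "server-for-" + row; // stand-in for a .META. lookup
    cache.put(row, fresh);
    return fresh;
  }
}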
@@ -454,7 +455,7 @@
if (LOG.isDebugEnabled()) {
LOG.debug("reloading table servers because: " + e.getMessage());
}
r = getRegionLocation(row, true);
}
try {
Thread.sleep(this.pause);
@@ -832,7 +833,6 @@
if (LOG.isDebugEnabled()) {
LOG.debug("reloading table servers because: " + e.getMessage());
}
/* tableServers = connection.reloadTableServers(tableName);*/
r = getRegionLocation(row, true);
}
try {

View File

@@ -57,7 +57,7 @@ public final class master_jsp extends org.apache.jasper.runtime.HttpJspBase
HMaster master = (HMaster)getServletContext().getAttribute(HMaster.MASTER);
HBaseConfiguration conf = new HBaseConfiguration();
HBaseConfiguration conf = master.getConfiguration();
TableFormatter formatter = new HtmlTableFormatter(out);
ShowCommand show = new ShowCommand(out, formatter, "tables");
HServerAddress rootLocation = master.getRootRegionLocation();

View File

@@ -5,6 +5,7 @@ import javax.servlet.http.*;
import javax.servlet.jsp.*;
import java.util.*;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.VersionInfo;
import org.apache.hadoop.hbase.HRegionServer;
import org.apache.hadoop.hbase.HRegion;
import org.apache.hadoop.hbase.HConstants;
@@ -55,7 +56,18 @@ public final class regionserver_jsp extends org.apache.jasper.runtime.HttpJspBase
out.print( serverInfo.getServerAddress().toString() );
out.write("</title>\n<link rel=\"stylesheet\" type=\"text/css\" href=\"/static/hbase.css\" />\n</head>\n\n<body>\n<a id=\"logo\" href=\"http://wiki.apache.org/lucene-hadoop/Hbase\"><img src=\"/static/hbase_logo_med.gif\" alt=\"Hbase Logo\" title=\"Hbase Logo\" /></a>\n<h1 id=\"page_title\">Region Server: ");
out.print( serverInfo.getServerAddress().toString() );
out.write("</h1>\n<p id=\"links_menu\"><a href=\"/logs/\">Local logs</a>, <a href=\"/stacks\">Thread Dump</a>, <a href=\"/logLevel\">Log Level</a></p>\n<hr id=\"head_rule\" />\n\n<h2>Region Server Attributes</h2>\n<table>\n<tr><th>Attribute Name</th><th>Value</th><th>Description</th></tr>\n<tr><td>Load</td><td>");
out.write("</h1>\n<p id=\"links_menu\"><a href=\"/logs/\">Local logs</a>, <a href=\"/stacks\">Thread Dump</a>, <a href=\"/logLevel\">Log Level</a></p>\n<hr id=\"head_rule\" />\n\n<h2>Region Server Attributes</h2>\n<table>\n<tr><th>Attribute Name</th><th>Value</th><th>Description</th></tr>\n<tr><td>Version</td><td>");
out.print( VersionInfo.getVersion() );
out.write(',');
out.write(' ');
out.write('r');
out.print( VersionInfo.getRevision() );
out.write("</td><td>Hbase version and svn revision</td></tr>\n<tr><td>Compiled</td><td>");
out.print( VersionInfo.getDate() );
out.write(',');
out.write(' ');
out.print( VersionInfo.getUser() );
out.write("</td><td>When this version was compiled and by whom</td></tr>\n<tr><td>Load</td><td>");
out.print( serverInfo.getLoad().toString() );
out.write("</td><td>Requests/<em>hbase.regionserver.msginterval</em> + count of loaded regions</td></tr>\n</table>\n\n<h2>Online Regions</h2>\n");
if (onlineRegions != null && onlineRegions.size() > 0) {

View File

@@ -24,15 +24,11 @@ import java.io.DataOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
/**
* Utility class with methods for manipulating Writable objects
@@ -94,24 +90,6 @@ public class Writables {
}
}
/**
* Make a copy of a writable object using serialization to a buffer.
* Copied from WritableUtils; only the <code>conf</code> type is Configurable
* rather than JobConf (it doesn't need to be JobConf -- HADOOP-2469).
* @param orig The object to copy
* @return The copied object
*/
public static Writable clone(Writable orig, Configuration conf) {
try {
Writable newInst =
(Writable)ReflectionUtils.newInstance(orig.getClass(), conf);
WritableUtils.cloneInto(newInst, orig);
return newInst;
} catch (IOException e) {
throw new RuntimeException("Error writing/reading clone buffer", e);
}
}
/**
* @param bytes
* @return A HRegionInfo instance built out of passed <code>bytes</code>.

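The helper deleted above was a copy of the Hadoop utility it now defers to; both amount to reflective instantiation plus a buffer round trip. A sketch of that mechanism built from the same Hadoop calls the removed body used:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.util.ReflectionUtils;

public class CloneMechanismSketch {
  @SuppressWarnings("unchecked")
  static <T extends Writable> T cloneWritable(T orig, Configuration conf)
      throws IOException {
    // New, empty instance of the same class, wired up with conf ...
    T copy = (T) ReflectionUtils.newInstance(orig.getClass(), conf);
    // ... then populated by serializing orig to a buffer and reading it back.
    WritableUtils.cloneInto(copy, orig);
    return copy;
  }
}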
View File

@@ -3,7 +3,7 @@
import="org.apache.hadoop.hbase.HBaseConfiguration"
import="org.apache.hadoop.hbase.hql.TableFormatter"
import="org.apache.hadoop.hbase.hql.ReturnMsg"
import="org.apache.hadoop.hbase.hql.generated.Parser"
import="org.apache.hadoop.hbase.hql.generated.HQLParser"
import="org.apache.hadoop.hbase.hql.Command"
import="org.apache.hadoop.hbase.hql.formatter.HtmlTableFormatter"
%><?xml version="1.0" encoding="UTF-8" ?>

View File

@@ -16,7 +16,7 @@
import="org.apache.hadoop.hbase.hql.formatter.HtmlTableFormatter"
import="org.apache.hadoop.hbase.HTableDescriptor" %><%
HMaster master = (HMaster)getServletContext().getAttribute(HMaster.MASTER);
HBaseConfiguration conf = new HBaseConfiguration();
HBaseConfiguration conf = master.getConfiguration();
TableFormatter formatter = new HtmlTableFormatter(out);
ShowCommand show = new ShowCommand(out, formatter, "tables");
HServerAddress rootLocation = master.getRootRegionLocation();

View File

@@ -1,6 +1,7 @@
<%@ page contentType="text/html;charset=UTF-8"
import="java.util.*"
import="org.apache.hadoop.io.Text"
import="org.apache.hadoop.util.VersionInfo"
import="org.apache.hadoop.hbase.HRegionServer"
import="org.apache.hadoop.hbase.HRegion"
import="org.apache.hadoop.hbase.HConstants"
@@ -27,6 +28,8 @@
<h2>Region Server Attributes</h2>
<table>
<tr><th>Attribute Name</th><th>Value</th><th>Description</th></tr>
<tr><td>Version</td><td><%= VersionInfo.getVersion() %>, r<%= VersionInfo.getRevision() %></td><td>Hbase version and svn revision</td></tr>
<tr><td>Compiled</td><td><%= VersionInfo.getDate() %>, <%= VersionInfo.getUser() %></td><td>When this version was compiled and by whom</td></tr>
<tr><td>Load</td><td><%= serverInfo.getLoad().toString() %></td><td>Requests/<em>hbase.regionserver.msginterval</em> + count of loaded regions</td></tr>
</table>
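The rows added above come straight from Hadoop's VersionInfo; a tiny standalone probe prints the same build metadata:

import org.apache.hadoop.util.VersionInfo;

public class VersionProbe {
  public static void main(String[] args) {
    // The same four accessors the JSP renders in its new table rows.
    System.out.println("Version:  " + VersionInfo.getVersion()
        + ", r" + VersionInfo.getRevision());
    System.out.println("Compiled: " + VersionInfo.getDate()
        + " by " + VersionInfo.getUser());
  }
}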