HBASE-1974 Update to latest on hadoop 0.21 branch (November 11th, 2009)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@835241 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael Stack 2009-11-12 05:48:56 +00:00
parent 6babfad71f
commit d89e5f13b0
11 changed files with 24 additions and 31 deletions

View File

@ -1177,8 +1177,7 @@ public class HRegion implements HConstants, HeapSize { // , Writable{
if (writeToWAL) {
this.log.append(regionInfo.getRegionName(),
regionInfo.getTableDesc().getName(), kvs,
(regionInfo.isMetaRegion() || regionInfo.isRootRegion()), now);
regionInfo.getTableDesc().getName(), kvs, now);
}
flush = isFlushSize(size);
} finally {
@ -1451,8 +1450,7 @@ public class HRegion implements HConstants, HeapSize { // , Writable{
if (writeToWAL) {
long now = System.currentTimeMillis();
this.log.append(regionInfo.getRegionName(),
regionInfo.getTableDesc().getName(), edits,
(regionInfo.isMetaRegion() || regionInfo.isRootRegion()), now);
regionInfo.getTableDesc().getName(), edits, now);
}
long size = 0;
Store store = getStore(family);
@ -2363,8 +2361,7 @@ public class HRegion implements HConstants, HeapSize { // , Writable{
List<KeyValue> edits = new ArrayList<KeyValue>(1);
edits.add(newKv);
this.log.append(regionInfo.getRegionName(),
regionInfo.getTableDesc().getName(), edits,
(regionInfo.isMetaRegion() || regionInfo.isRootRegion()), now);
regionInfo.getTableDesc().getName(), edits, now);
}
// Now request the ICV to the store, this will set the timestamp
@ -2550,4 +2547,4 @@ public class HRegion implements HConstants, HeapSize { // , Writable{
if (bc != null) bc.shutdown();
}
}
}
}

View File

@ -649,9 +649,7 @@ public class HLog implements HConstants, Syncable {
// region being flushed is removed if the sequence number of the flush
// is greater than or equal to the value in lastSeqWritten.
this.lastSeqWritten.putIfAbsent(regionName, Long.valueOf(seqNum));
boolean sync = regionInfo.isMetaRegion() || regionInfo.isRootRegion();
doWrite(logKey, logEdit, sync, logKey.getWriteTime());
doWrite(logKey, logEdit, logKey.getWriteTime());
this.unflushedEntries.incrementAndGet();
this.numEntries.incrementAndGet();
}
@ -682,12 +680,11 @@ public class HLog implements HConstants, Syncable {
* @param regionName
* @param tableName
* @param edits
* @param sync
* @param now
* @throws IOException
*/
public void append(byte [] regionName, byte [] tableName, List<KeyValue> edits,
boolean sync, final long now)
final long now)
throws IOException {
if (this.closed) {
throw new IOException("Cannot append; log is closed");
@ -702,7 +699,7 @@ public class HLog implements HConstants, Syncable {
int counter = 0;
for (KeyValue kv: edits) {
HLogKey logKey = makeKey(regionName, tableName, seqNum[counter++], now);
doWrite(logKey, kv, sync, now);
doWrite(logKey, kv, now);
this.numEntries.incrementAndGet();
}
@ -808,13 +805,7 @@ public class HLog implements HConstants, Syncable {
logSyncerThread.addToSyncQueue(force);
}
/**
* Multiple threads will call sync() at the same time, only the winner
* will actually flush if there is any race or build up.
*
* @throws IOException
*/
protected void hflush() throws IOException {
public void hflush() throws IOException {
synchronized (this.updateLock) {
if (this.closed) {
return;
@ -822,6 +813,7 @@ public class HLog implements HConstants, Syncable {
if (this.forceSync ||
this.unflushedEntries.get() >= this.flushlogentries) {
try {
LOG.info("hflush remove");
this.writer.sync();
if (this.writer_out != null) {
this.writer_out.sync();
@ -837,20 +829,25 @@ public class HLog implements HConstants, Syncable {
}
}
public void hsync() throws IOException {
// Not yet implemented up in hdfs so just call hflush.
hflush();
}
private void requestLogRoll() {
if (this.listener != null) {
this.listener.logRollRequested();
}
}
private void doWrite(HLogKey logKey, KeyValue logEdit, boolean sync,
final long now)
private void doWrite(HLogKey logKey, KeyValue logEdit, final long now)
throws IOException {
if (!this.enabled) {
return;
}
try {
this.editsSize.addAndGet(logKey.heapSize() + logEdit.heapSize());
if (this.numEntries.get() % this.flushlogentries == 0) LOG.info("edit=" + this.numEntries.get() + ", write=" + logKey.toString());
this.writer.append(logKey, logEdit);
long took = System.currentTimeMillis() - now;
if (took > 1000) {

View File

@ -333,7 +333,7 @@ public class Merge extends Configured implements Tool {
*
* @throws IOException
*/
private int parseArgs(String[] args) {
private int parseArgs(String[] args) throws IOException {
GenericOptionsParser parser =
new GenericOptionsParser(this.getConf(), args);

View File

@ -46,7 +46,6 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.mapred.MiniMRCluster;
import com.sun.corba.se.pept.transport.Connection;
/**
* Facility for testing HBase. Added as tool to abet junit4 testing. Replaces
@ -471,4 +470,4 @@ public class HBaseTestingUtility {
((Jdk14Logger) l).getLogger().setLevel(java.util.logging.Level.ALL);
}
}
}
}

View File

@ -92,7 +92,7 @@ public class TestHLog extends HBaseTestCase implements HConstants {
System.currentTimeMillis(), column));
System.out.println("Region " + i + ": " + edit);
log.append(Bytes.toBytes("" + i), tableName, edit,
false, System.currentTimeMillis());
System.currentTimeMillis());
}
}
log.rollWriter();
@ -132,7 +132,7 @@ public class TestHLog extends HBaseTestCase implements HConstants {
for (int i = 0; i < total; i++) {
List<KeyValue> kvs = new ArrayList<KeyValue>();
kvs.add(new KeyValue(Bytes.toBytes(i), bytes, bytes));
wal.append(bytes, bytes, kvs, false, System.currentTimeMillis());
wal.append(bytes, bytes, kvs, System.currentTimeMillis());
}
// Now call sync and try reading. Opening a Reader before you sync just
// gives you EOFE.
@ -150,7 +150,7 @@ public class TestHLog extends HBaseTestCase implements HConstants {
for (int i = 0; i < total; i++) {
List<KeyValue> kvs = new ArrayList<KeyValue>();
kvs.add(new KeyValue(Bytes.toBytes(i), bytes, bytes));
wal.append(bytes, bytes, kvs, false, System.currentTimeMillis());
wal.append(bytes, bytes, kvs, System.currentTimeMillis());
}
reader = HLog.getReader(this.fs, walPath, this.conf);
count = 0;
@ -169,7 +169,7 @@ public class TestHLog extends HBaseTestCase implements HConstants {
for (int i = 0; i < total; i++) {
List<KeyValue> kvs = new ArrayList<KeyValue>();
kvs.add(new KeyValue(Bytes.toBytes(i), bytes, value));
wal.append(bytes, bytes, kvs, false, System.currentTimeMillis());
wal.append(bytes, bytes, kvs, System.currentTimeMillis());
}
// Now I should have written out lots of blocks. Sync then read.
wal.sync();
@ -238,7 +238,7 @@ public class TestHLog extends HBaseTestCase implements HConstants {
Bytes.toBytes(Integer.toString(i)),
timestamp, new byte[] { (byte)(i + '0') }));
}
log.append(regionName, tableName, cols, false, System.currentTimeMillis());
log.append(regionName, tableName, cols, System.currentTimeMillis());
long logSeqId = log.startCacheFlush();
log.completeCacheFlush(regionName, tableName, logSeqId);
log.close();
@ -275,4 +275,4 @@ public class TestHLog extends HBaseTestCase implements HConstants {
}
}
}
}
}