Update hbase trunk to latest on hadoop 0.21 branch so we can all test sync/append; add call out to sync

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@823404 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael Stack 2009-10-09 04:36:30 +00:00
parent a36d212a9a
commit 3234662c00
2 changed files with 25 additions and 2 deletions

View File

@ -23,6 +23,7 @@ import java.io.EOFException;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
@ -123,6 +124,9 @@ public class HLog implements HConstants, Syncable {
* Current log file.
*/
SequenceFile.Writer writer;
// This is the above writer's output stream. Its private but we use reflection
// to expose it so we can call sync on it.
FSDataOutputStream writer_out;
/*
* Map of all log files but the current one.
@ -351,11 +355,29 @@ public class HLog implements HConstants, Syncable {
protected SequenceFile.Writer createWriter(Path path,
Class<? extends HLogKey> keyClass, Class<? extends KeyValue> valueClass)
throws IOException {
return SequenceFile.createWriter(this.fs, this.conf, path, keyClass,
SequenceFile.Writer writer =
SequenceFile.createWriter(this.fs, this.conf, path, keyClass,
valueClass, fs.getConf().getInt("io.file.buffer.size", 4096),
fs.getDefaultReplication(), this.blocksize,
SequenceFile.CompressionType.NONE, new DefaultCodec(), null,
new Metadata());
// Get at the private FSDataOutputStream inside in SequenceFile so we can
// call sync on it. Make it accessible. Stash it aside for call up in
// the sync method above.
final Field fields[] = writer.getClass().getDeclaredFields();
final String fieldName = "out";
for (int i = 0; i < fields.length; ++i) {
if (fieldName.equals(fields[i].getName())) {
try {
fields[i].setAccessible(true);
this.writer_out = (FSDataOutputStream)fields[i].get(writer);
break;
} catch (IllegalAccessException ex) {
throw new IOException("Accessing " + fieldName, ex);
}
}
}
return writer;
}
/*
@ -614,6 +636,7 @@ public class HLog implements HConstants, Syncable {
}
} else {
this.writer.sync();
if (this.writer_out != null) this.writer_out.sync();
}
this.unflushedEntries.set(0);
}

View File

@ -151,6 +151,7 @@ public class TestHLog extends HBaseTestCase implements HConstants {
timestamp, new byte[] { (byte)(i + '0') }));
}
log.append(regionName, tableName, cols, false, System.currentTimeMillis());
log.sync();
long logSeqId = log.startCacheFlush();
log.completeCacheFlush(regionName, tableName, logSeqId);
log.close();
@ -187,5 +188,4 @@ public class TestHLog extends HBaseTestCase implements HConstants {
}
}
}
}