HBASE-2 hlog numbers should wrap around when they reach 999

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@619657 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2008-02-07 21:48:58 +00:00
parent 1d831be81a
commit 93a31f6a16
4 changed files with 11 additions and 6 deletions

View File

@@ -13,6 +13,8 @@ HBase Change Log
              (Stu Hood and Bryan Duxbury via Stack)
   HBASE-28   thrift put/mutateRow methods need to throw IllegalArgument exceptions
              (Dave Simpson via Bryan Duxbury via Stack)
+  HBASE-2    hlog numbers should wrap around when they reach 999
+             (Bryan Duxbury via Stack)
 
 IMPROVEMENTS
   HBASE-415  Rewrite leases to use DelayedBlockingQueue instead of polling

View File

@@ -23,6 +23,7 @@
 
 # The java implementation to use. Required.
 # export JAVA_HOME=/usr/lib/j2sdk1.5-sun
+export JAVA_HOME=/usr
 
 # Extra Java CLASSPATH elements. Optional.
 # export HBASE_CLASSPATH=

View File

@@ -119,6 +119,7 @@ public class HLog implements HConstants {
   volatile long logSeqNum = 0;
   volatile long filenum = 0;
+  volatile long old_filenum = -1;
   volatile int numEntries = 0;
@@ -215,7 +216,7 @@ public class HLog implements HConstants {
       if (this.writer != null) {
         // Close the current writer, get a new one.
         this.writer.close();
-        Path p = computeFilename(filenum - 1);
+        Path p = computeFilename(old_filenum);
         if (LOG.isDebugEnabled()) {
           LOG.debug("Closing current log writer " + p.toString() +
             " to get a new one");
@@ -226,7 +227,9 @@ public class HLog implements HConstants {
           }
         }
       }
-      Path newPath = computeFilename(filenum++);
+      old_filenum = filenum;
+      filenum = System.currentTimeMillis();
+      Path newPath = computeFilename(filenum);
       this.writer = SequenceFile.createWriter(this.fs, this.conf, newPath,
           HLogKey.class, HLogEdit.class, getCompressionType(this.conf));
       LOG.info("new log writer created at " + newPath);
@@ -294,8 +297,7 @@ public class HLog implements HConstants {
    * file-number.
    */
   Path computeFilename(final long fn) {
-    return new Path(dir,
-      HLOG_DATFILE + String.format("%1$03d", Long.valueOf(fn)));
+    return new Path(dir, HLOG_DATFILE + new Long(fn).toString());
   }
 
   /**
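In short, rollWriter() now remembers the file it is closing in old_filenum and takes the next file number from System.currentTimeMillis(), so computeFilename() no longer zero-pads a small counter that could wrap at 999. Below is a minimal standalone sketch of that naming scheme (not the HBase HLog class itself; the log directory and the HLOG_DATFILE prefix are assumed placeholder values used only for illustration):

// Minimal standalone sketch of the new naming scheme (not the HBase HLog
// class). The directory and HLOG_DATFILE prefix below are assumed values.
public class HLogNameSketch {
  static final String DIR = "/hbase/log";          // assumed log directory
  static final String HLOG_DATFILE = "hlog.dat.";  // assumed file-name prefix
  static long filenum = 0;
  static long old_filenum = -1;

  // Mirrors the patched computeFilename(): the raw long is appended as-is,
  // so there is no three-digit "%03d" counter left to wrap at 999.
  static String computeFilename(final long fn) {
    return DIR + "/" + HLOG_DATFILE + Long.toString(fn);
  }

  // Mirrors the naming part of the patched rollWriter(): remember the file
  // being closed, then take the next file number from the clock instead of
  // an incrementing counter.
  static String rollWriter() {
    old_filenum = filenum;
    filenum = System.currentTimeMillis();
    return computeFilename(filenum);
  }

  public static void main(String[] args) {
    System.out.println(rollWriter());                 // e.g. /hbase/log/hlog.dat.1202420938000
    System.out.println(computeFilename(old_filenum)); // name of the file just closed
  }
}

Because the file number is now a millisecond timestamp, successive rolls produce increasing names instead of recycling hlog.dat.000 through hlog.dat.999, as long as the clock moves forward between rolls.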

View File

@@ -110,7 +110,7 @@ public class TestHLog extends HBaseTestCase implements HConstants {
       long logSeqId = log.startCacheFlush();
       log.completeCacheFlush(regionName, tableName, logSeqId);
       log.close();
-      Path filename = log.computeFilename(log.filenum - 1);
+      Path filename = log.computeFilename(log.filenum);
       log = null;
       // Now open a reader on the log and assert append worked.
       reader = new SequenceFile.Reader(fs, filename, conf);
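Relatedly, the test no longer subtracts one from filenum to find the file it just wrote: with old_filenum tracking the closed file, the latest appends live in the file named from the current filenum. A tiny hedged sketch of that lookup (not the actual TestHLog code; the directory and prefix are assumed values):

// Hedged sketch of the lookup the test now performs, not TestHLog itself.
public class HLogLookupSketch {
  public static void main(String[] args) {
    long filenum = System.currentTimeMillis();            // as assigned by rollWriter()
    String name = "/hbase/log/" + "hlog.dat." + Long.toString(filenum);
    // In the real test a SequenceFile.Reader is opened on this path and the
    // appended edits are asserted to be present.
    System.out.println("reader would open: " + name);
  }
}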