HBASE-2667 TestHLog.testSplit failing in trunk

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@951471 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2010-06-04 16:08:41 +00:00
parent e3c6b554ab
commit 0abcfa0ed7
4 changed files with 24 additions and 17 deletions

CHANGES.txt

@@ -372,6 +372,7 @@ Release 0.21.0 - Unreleased
               from web UI
   HBASE-2657  TestTableResource is broken in trunk
   HBASE-2662  TestScannerResource.testScannerResource broke in trunk
+  HBASE-2667  TestHLog.testSplit failing in trunk
  IMPROVEMENTS
   HBASE-1760  Cleanup TODOs in HTable

HLog.java

@@ -1552,10 +1552,11 @@ public class HLog implements HConstants, Syncable {
       try {
         int editsCount = 0;
         WriterAndPath wap = logWriters.get(region);
-        for (ListIterator<Entry> iterator = entries.listIterator();
-            iterator.hasNext();) {
-          Entry logEntry = iterator.next();
+        // We put edits onto the Stack ordered oldest sequence id to newest.
+        // Pop them off starting with the oldest.
+        for (ListIterator<Entry> iterator = entries.listIterator(entries.size());
+            iterator.hasPrevious();) {
+          Entry logEntry = iterator.previous();
           if (wap == null) {
             Path logFile = getRegionLogPath(logEntry, rootDir);
             if (fs.exists(logFile)) {
@@ -1570,7 +1571,6 @@ public class HLog implements HConstants, Syncable {
             LOG.debug("Creating writer path=" + logFile +
               " region=" + Bytes.toStringBinary(region));
           }
           wap.w.append(logEntry);
           editsCount++;
         }
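The new loop leans on java.util.ListIterator: opened at entries.size() and walked with previous(), it visits the list tail first. Below is a minimal standalone sketch of that pattern (illustrative only, not HBase code; the class and data are hypothetical), assuming the per-region edit list is built stack-style by pushing each new edit onto the front, so the oldest edit ends up at the tail:

import java.util.LinkedList;
import java.util.ListIterator;

public class ReverseReplaySketch {
  public static void main(String[] args) {
    // Hypothetical stand-in for the per-region edit list: push() adds to the
    // front, so the oldest edit sits at the tail of the list.
    LinkedList<String> entries = new LinkedList<String>();
    entries.push("edit-1 (oldest)");
    entries.push("edit-2");
    entries.push("edit-3 (newest)");

    // Walking backwards from entries.size() visits the tail first, i.e. the
    // edits come out oldest sequence id to newest -- the order the split
    // writer appends them in.
    for (ListIterator<String> iterator = entries.listIterator(entries.size());
        iterator.hasPrevious();) {
      String edit = iterator.previous();
      System.out.println(edit); // prints edit-1, then edit-2, then edit-3
    }
  }
}

Under that same assumption, the old forward iteration (listIterator()/next()) would emit edit-3 first, a descending-sequence order that the assertTrue(seqno < key.getLogSeqNum()) check in TestHLog rejects.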

HLogKey.java

@@ -19,13 +19,14 @@
  */
 package org.apache.hadoop.hbase.regionserver.wal;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.io.HeapSize;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.ClassSize;
-import org.apache.hadoop.io.*;
-import java.io.*;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.EOFException;
+import java.io.IOException;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.WritableComparable;
 /**
  * A Key for an entry in the change log.
@@ -46,8 +47,6 @@ public class HLogKey implements WritableComparable<HLogKey> {
   private byte clusterId;
   private int scope;
-  private int HEAP_TAX = ClassSize.OBJECT + (2 * ClassSize.ARRAY) +
-      (2 * Bytes.SIZEOF_LONG) + Bytes.SIZEOF_BYTE + Bytes.SIZEOF_INT;
   /** Writable Consructor -- Do not use. */
   public HLogKey() {

TestHLog.java

@@ -24,6 +24,8 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
@@ -38,6 +40,7 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 /** JUnit test case for HLog */
 public class TestHLog extends HBaseTestCase implements HConstants {
+  static final Log LOG = LogFactory.getLog(TestHLog.class);
   private Path dir;
   private Path oldLogDir;
   private MiniDFSCluster cluster;
@@ -56,7 +59,7 @@ public class TestHLog extends HBaseTestCase implements HConstants {
     if (fs.exists(dir)) {
       fs.delete(dir, true);
     }
-    this.oldLogDir = new Path("/hbase", HConstants.HREGION_OLDLOGDIR_NAME);
+    this.oldLogDir = new Path(this.dir, HConstants.HREGION_OLDLOGDIR_NAME);
   }
@@ -78,7 +81,8 @@ public class TestHLog extends HBaseTestCase implements HConstants {
     final byte [] tableName = Bytes.toBytes(getName());
     final byte [] rowName = tableName;
-    HLog log = new HLog(this.fs, this.dir, this.oldLogDir, this.conf, null);
+    Path logdir = new Path(this.dir, HConstants.HREGION_LOGDIR_NAME);
+    HLog log = new HLog(this.fs, logdir, this.oldLogDir, this.conf, null);
     final int howmany = 3;
     HRegionInfo[] infos = new HRegionInfo[3];
     for(int i = 0; i < howmany; i++) {
@@ -97,7 +101,7 @@ public class TestHLog extends HBaseTestCase implements HConstants {
         byte [] column = Bytes.toBytes("column:" + Integer.toString(j));
         edit.add(new KeyValue(rowName, family, qualifier,
           System.currentTimeMillis(), column));
-        System.out.println("Region " + i + ": " + edit);
+        LOG.info("Region " + i + ": " + edit);
         log.append(infos[i], tableName, edit,
           System.currentTimeMillis());
       }
@@ -105,8 +109,9 @@ public class TestHLog extends HBaseTestCase implements HConstants {
         log.hflush();
         log.rollWriter();
       }
+      Path splitsdir = new Path(this.dir, "splits");
       List<Path> splits =
-        HLog.splitLog(this.testDir, this.dir, this.oldLogDir, this.fs, this.conf);
+        HLog.splitLog(splitsdir, logdir, this.oldLogDir, this.fs, this.conf);
       verifySplits(splits, howmany);
       log = null;
     } finally {
@@ -228,6 +233,7 @@ public class TestHLog extends HBaseTestCase implements HConstants {
       throws IOException {
     assertEquals(howmany, splits.size());
     for (int i = 0; i < splits.size(); i++) {
+      LOG.info("Verifying=" + splits.get(i));
       HLog.Reader reader = HLog.getReader(this.fs, splits.get(i), conf);
       try {
         int count = 0;
@@ -242,6 +248,7 @@ public class TestHLog extends HBaseTestCase implements HConstants {
         if (previousRegion != null) {
           assertEquals(previousRegion, region);
         }
+        LOG.info("oldseqno=" + seqno + ", newseqno=" + key.getLogSeqNum());
         assertTrue(seqno < key.getLogSeqNum());
         seqno = key.getLogSeqNum();
         previousRegion = region;