From f9fc02ba8b484c904a2171c19595f91dd21d9219 Mon Sep 17 00:00:00 2001
From: Michael Stack
Date: Mon, 10 Dec 2007 22:36:03 +0000
Subject: [PATCH] HADOOP-2283 AlreadyBeingCreatedException (Was: Stuck replay
 of failed regionserver edits)

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk/src/contrib/hbase@603077 13f79535-47bb-0310-9956-ffa450edef68
---
 CHANGES.txt                                  |  2 +
 src/java/org/apache/hadoop/hbase/HLog.java   | 16 ++---
 src/java/org/apache/hadoop/hbase/HStore.java |  8 ++-
 .../org/apache/hadoop/hbase/HStoreFile.java  | 15 ++---
 .../org/apache/hadoop/hbase/TestHLog.java    | 60 ++++++++++++++++---
 5 files changed, 76 insertions(+), 25 deletions(-)

diff --git a/CHANGES.txt b/CHANGES.txt
index 6d23ad2584b..059a3711863 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -68,6 +68,8 @@ Trunk (unreleased changes)
                (Bryan Duxbury via Stack)
    HADOOP-2350 Scanner api returns null row names, or skips row names if
                different column families do not have entries for some rows
+   HADOOP-2283 AlreadyBeingCreatedException (Was: Stuck replay of failed
+               regionserver edits)
 
   IMPROVEMENTS
    HADOOP-2401 Add convenience put method that takes writable
diff --git a/src/java/org/apache/hadoop/hbase/HLog.java b/src/java/org/apache/hadoop/hbase/HLog.java
index ec072a8b2db..327bafe818b 100644
--- a/src/java/org/apache/hadoop/hbase/HLog.java
+++ b/src/java/org/apache/hadoop/hbase/HLog.java
@@ -160,11 +160,10 @@ public class HLog implements HConstants {
             " because zero length");
           continue;
         }
-        SequenceFile.Reader in =
-          new SequenceFile.Reader(fs, logfiles[i], conf);
+        HLogKey key = new HLogKey();
+        HLogEdit val = new HLogEdit();
+        SequenceFile.Reader in = new SequenceFile.Reader(fs, logfiles[i], conf);
         try {
-          HLogKey key = new HLogKey();
-          HLogEdit val = new HLogEdit();
           int count = 0;
           for (; in.next(key, val); count++) {
             Text regionName = key.getRegionName();
@@ -174,13 +173,16 @@ public class HLog implements HConstants {
                   HRegionInfo.encodeRegionName(regionName)),
                 HREGION_OLDLOGFILE_NAME);
               if (LOG.isDebugEnabled()) {
-                LOG.debug("Creating new log file writer for path " + logfile);
+                LOG.debug("Creating new log file writer for path " + logfile +
+                  "; map content " + logWriters.toString());
              }
               w = SequenceFile.createWriter(fs, conf, logfile, HLogKey.class,
                 HLogEdit.class);
-              logWriters.put(regionName, w);
+              // Use copy of regionName; the regionName object is reused inside
+              // HLogKey.getRegionName so its content changes as we iterate.
+              logWriters.put(new Text(regionName), w);
             }
-            if (count % 100 == 0 && count > 0 && LOG.isDebugEnabled()) {
+            if (count % 10000 == 0 && count > 0 && LOG.isDebugEnabled()) {
               LOG.debug("Applied " + count + " edits");
             }
             w.append(key, val);
diff --git a/src/java/org/apache/hadoop/hbase/HStore.java b/src/java/org/apache/hadoop/hbase/HStore.java
index 3a498604e9f..7ee93598b0b 100644
--- a/src/java/org/apache/hadoop/hbase/HStore.java
+++ b/src/java/org/apache/hadoop/hbase/HStore.java
@@ -823,9 +823,11 @@ class HStore implements HConstants {
     }
     FSDataOutputStream out = fs.create(new Path(filterDir,
         BLOOMFILTER_FILE_NAME));
-
-    bloomFilter.write(out);
-    out.close();
+    try {
+      bloomFilter.write(out);
+    } finally {
+      out.close();
+    }
     if (LOG.isDebugEnabled()) {
       LOG.debug("flushed bloom filter for " + this.storeName);
     }
diff --git a/src/java/org/apache/hadoop/hbase/HStoreFile.java b/src/java/org/apache/hadoop/hbase/HStoreFile.java
index 137a611ce1b..5b465572970 100644
--- a/src/java/org/apache/hadoop/hbase/HStoreFile.java
+++ b/src/java/org/apache/hadoop/hbase/HStoreFile.java
@@ -492,11 +492,14 @@ public class HStoreFile implements HConstants, WritableComparable {
       throw new IOException("File already exists " + p.toString());
     }
     FSDataOutputStream out = fs.create(p);
-    out.writeUTF(getReference().getEncodedRegionName());
-    getReference().getMidkey().write(out);
-    out.writeLong(getReference().getFileId());
-    out.writeBoolean(isTopFileRegion(getReference().getFileRegion()));
-    out.close();
+    try {
+      out.writeUTF(getReference().getEncodedRegionName());
+      getReference().getMidkey().write(out);
+      out.writeLong(getReference().getFileId());
+      out.writeBoolean(isTopFileRegion(getReference().getFileRegion()));
+    } finally {
+      out.close();
+    }
   }
 
   /*
@@ -559,7 +562,6 @@ public class HStoreFile implements HConstants, WritableComparable {
           in.close();
         }
       }
-
     } finally {
       out.close();
     }
@@ -867,7 +869,6 @@ public class HStoreFile implements HConstants, WritableComparable {
 
   static class Writer extends MapFile.Writer {
     private final Filter bloomFilter;
-
     /**
      * Constructor
      *
diff --git a/src/test/org/apache/hadoop/hbase/TestHLog.java b/src/test/org/apache/hadoop/hbase/TestHLog.java
index ac200287ca0..549df3ba3a7 100644
--- a/src/test/org/apache/hadoop/hbase/TestHLog.java
+++ b/src/test/org/apache/hadoop/hbase/TestHLog.java
@@ -30,16 +30,63 @@ import org.apache.hadoop.io.SequenceFile.Reader;
 
 /** JUnit test case for HLog */
 public class TestHLog extends HBaseTestCase implements HConstants {
+  private Path dir;
+  private FileSystem fs;
+
+  @Override
+  protected void setUp() throws Exception {
+    super.setUp();
+    this.dir = getUnitTestdir(getName());
+    this.fs = FileSystem.get(this.conf);
+    if (fs.exists(dir)) {
+      fs.delete(dir);
+    }
+  }
+
+  @Override
+  protected void tearDown() throws Exception {
+    if (this.fs.exists(this.dir)) {
+      this.fs.delete(this.dir);
+    }
+    super.tearDown();
+  }
+
+  /**
+   * Just write multiple logs then split. Before fix for HADOOP-2283, this
+   * would fail.
+   * @throws IOException
+   */
+  public void testSplit() throws IOException {
+    final Text tableName = new Text(getName());
+    final Text rowName = tableName;
+    HLog log = new HLog(this.fs, this.dir, this.conf, null);
+    // Add edits for three regions.
+    try {
+      for (int ii = 0; ii < 3; ii++) {
+        for (int i = 0; i < 3; i++) {
+          for (int j = 0; j < 3; j++) {
+            TreeMap<HStoreKey, byte []> edit = new TreeMap<HStoreKey, byte []>();
+            Text column = new Text(Integer.toString(j));
+            edit.put(
+              new HStoreKey(rowName, column, System.currentTimeMillis()),
+              column.getBytes());
+            log.append(new Text(Integer.toString(i)), tableName, edit);
+          }
+        }
+        log.rollWriter();
+      }
+      HLog.splitLog(this.testDir, this.dir, this.fs, this.conf);
+    } finally {
+      if (log != null) {
+        log.closeAndDelete();
+      }
+    }
+  }
 
   /**
    * @throws IOException
    */
   public void testAppend() throws IOException {
-    Path dir = getUnitTestdir(getName());
-    FileSystem fs = FileSystem.get(this.conf);
-    if (fs.exists(dir)) {
-      fs.delete(dir);
-    }
     final int COL_COUNT = 10;
     final Text regionName = new Text("regionname");
     final Text tableName = new Text("tablename");
@@ -89,9 +136,6 @@ public class TestHLog extends HBaseTestCase implements HConstants {
       if (reader != null) {
         reader.close();
       }
-      if (fs.exists(dir)) {
-        fs.delete(dir);
-      }
     }
   }
 }
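
A note on the new Text(regionName) copy in HLog.splitLog: SequenceFile.Reader.next(key, val) reuses the same HLogKey instance on every call, so the Text returned by key.getRegionName() is rewritten in place as the loop advances. If that shared Text is stored directly as a logWriters key, the stored key mutates along with the iterator; a lookup can then return another region's writer or miss entirely, and splitLog ends up re-creating a writer for an oldlogfile path that is already open, which is one way the AlreadyBeingCreatedException of HADOOP-2283 surfaces. The standalone sketch below (a hypothetical ReusedKeyPitfall class, not taken from the patch; it assumes only the JDK plus org.apache.hadoop.io.Text) reproduces the pitfall in miniature and shows the defensive copy the patch uses:

import java.util.TreeMap;

import org.apache.hadoop.io.Text;

// Hypothetical illustration, not part of the patch: why HLog.splitLog must
// key its writer map with a copy of the region name rather than the shared
// Text that the reader rewrites on every next() call.
public class ReusedKeyPitfall {
  public static void main(String[] args) {
    TreeMap<Text, String> writers = new TreeMap<Text, String>();
    // One Text instance, reused every iteration, just as SequenceFile.Reader
    // reuses the HLogKey (and the Text inside it) across next() calls.
    Text regionName = new Text();

    for (String region : new String[] { "region-0", "region-1", "region-2" }) {
      regionName.set(region);  // mutates the shared Text in place
      if (!writers.containsKey(regionName)) {
        // Broken variant: writers.put(regionName, ...) would store the shared
        // object, whose content changes on the next set(); the map's keys then
        // no longer identify the regions they were inserted for.
        writers.put(new Text(regionName), "writer for " + region);  // copy
      }
    }
    // With the copy, all three regions keep distinct, stable keys.
    System.out.println(writers.keySet());
  }
}

The same reasoning applies to any mutable Writable handed out by a reader that recycles its key and value objects.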
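
On the HStore and HStoreFile hunks: moving the close() calls into finally blocks guarantees the FSDataOutputStream is released even when one of the writes throws. A stream left open keeps its HDFS lease, and a later attempt to create the same file can then fail with AlreadyBeingCreatedException, so these hunks complement the log-splitting fix. A minimal sketch of the idiom, assuming a hypothetical writeFileId helper (not code from the patch) and any FileSystem/Path:

import java.io.IOException;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Hypothetical helper showing the close-in-finally idiom the patch applies to
// the bloom filter flush in HStore and the reference writes in HStoreFile.
class InfoWriter {
  static void writeFileId(FileSystem fs, Path p, long fileId) throws IOException {
    FSDataOutputStream out = fs.create(p);
    try {
      out.writeLong(fileId);  // any write here may throw
    } finally {
      out.close();            // always runs, releasing the file and its lease
    }
  }
}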