From 9b93724dd8a9acade0ffef90bc60fbb35f15cc08 Mon Sep 17 00:00:00 2001
From: jxiang
Date: Tue, 22 Oct 2013 20:57:33 +0000
Subject: [PATCH] HBASE-9813 Log splitting doesn't prevent RS creating new hlog file

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1534785 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hbase/regionserver/wal/Compressor.java      | 19 ++++++++-----------
 .../hadoop/hbase/regionserver/wal/FSHLog.java   |  2 +-
 .../hadoop/hbase/regionserver/wal/HLog.java     |  6 +++---
 .../hbase/regionserver/wal/HLogFactory.java     | 17 +++++++++++++----
 .../hbase/regionserver/wal/HLogSplitter.java    |  2 +-
 .../regionserver/wal/ProtobufLogWriter.java     |  8 ++++----
 .../hbase/snapshot/SnapshotLogSplitter.java     |  2 +-
 .../hbase/regionserver/TestHRegion.java         |  6 +++---
 .../wal/SequenceFileLogWriter.java              |  7 +++----
 .../hbase/regionserver/wal/TestHLog.java        |  6 ++++--
 .../hbase/regionserver/wal/TestHLogSplit.java   | 12 ++++++------
 .../wal/TestReadOldRootAndMetaEdits.java        |  2 +-
 .../replication/TestReplicationSource.java      |  4 ++--
 .../snapshot/TestSnapshotLogSplitter.java       |  2 +-
 14 files changed, 51 insertions(+), 44 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/Compressor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/Compressor.java
index 04046f1a797..47b55deebbd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/Compressor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/Compressor.java
@@ -17,24 +17,21 @@
  */
 package org.apache.hadoop.hbase.regionserver.wal;
 
-import org.apache.hadoop.classification.InterfaceAudience;
-
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.io.util.Dictionary;
-import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.regionserver.wal.HLog;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.io.util.Dictionary;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.WritableUtils;
+
 import com.google.common.base.Preconditions;
-import com.google.protobuf.ByteString;
 
 /**
  * A set of static functions for running our custom WAL compression/decompression.
@@ -81,7 +78,7 @@ public class Compressor {
     }
     boolean compress = ((ReaderBase)in).hasCompression();
     conf.setBoolean(HConstants.ENABLE_WAL_COMPRESSION, !compress);
-    out = HLogFactory.createWriter(outFS, output, conf);
+    out = HLogFactory.createWALWriter(outFS, output, conf);
 
     HLog.Entry e = null;
     while ((e = in.next()) != null) out.append(e);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
index 78d03adb120..d7839bdab96 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
@@ -563,7 +563,7 @@ class FSHLog implements HLog, Syncable {
     if (forMeta) {
       //TODO: set a higher replication for the hlog files (HBASE-6773)
     }
-    return HLogFactory.createWriter(fs, path, conf);
+    return HLogFactory.createWALWriter(fs, path, conf);
   }
 
   /*
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
index 262fb020f19..9fff26b4816 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
@@ -33,11 +33,10 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer;
-import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
 import org.apache.hadoop.io.Writable;
 
@@ -95,7 +94,7 @@ public interface HLog {
   }
 
   interface Writer {
-    void init(FileSystem fs, Path path, Configuration c) throws IOException;
+    void init(FileSystem fs, Path path, Configuration c, boolean overwritable) throws IOException;
 
     void close() throws IOException;
 
@@ -173,6 +172,7 @@ public interface HLog {
     }
 
     @Override
+    @SuppressWarnings("deprecation")
     public void write(DataOutput dataOutput) throws IOException {
       this.key.write(dataOutput);
       this.edit.write(dataOutput);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogFactory.java
index fb97f221dc5..2b9130cde0f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogFactory.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogFactory.java
@@ -172,8 +172,18 @@ public class HLogFactory {
    * @return A WAL writer. Close when done with it.
   * @throws IOException
   */
-  public static HLog.Writer createWriter(final FileSystem fs,
-      final Path path, Configuration conf)
+  public static HLog.Writer createWALWriter(final FileSystem fs,
+      final Path path, Configuration conf) throws IOException {
+    return createWriter(fs, path, conf, false);
+  }
+
+  public static HLog.Writer createRecoveredEditsWriter(final FileSystem fs,
+      final Path path, Configuration conf) throws IOException {
+    return createWriter(fs, path, conf, true);
+  }
+
+  private static HLog.Writer createWriter(final FileSystem fs,
+      final Path path, Configuration conf, boolean overwritable)
   throws IOException {
     try {
       if (logWriterClass == null) {
@@ -181,11 +191,10 @@ public class HLogFactory {
             ProtobufLogWriter.class, Writer.class);
       }
       HLog.Writer writer = (HLog.Writer)logWriterClass.newInstance();
-      writer.init(fs, path, conf);
+      writer.init(fs, path, conf, overwritable);
       return writer;
     } catch (Exception e) {
       throw new IOException("cannot get log writer", e);
     }
   }
-
 }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
index 5ff65759a03..0009abdff86 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
@@ -634,7 +634,7 @@ public class HLogSplitter {
    */
   protected Writer createWriter(FileSystem fs, Path logfile, Configuration conf)
       throws IOException {
-    return HLogFactory.createWriter(fs, logfile, conf);
+    return HLogFactory.createRecoveredEditsWriter(fs, logfile, conf);
   }
 
   /**
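The HLogFactory change above splits the old createWriter() into two entry points: createWALWriter() delegates to createWriter(fs, path, conf, false) and createRecoveredEditsWriter() delegates to createWriter(fs, path, conf, true), with the flag handed down to Writer.init() as overwritable. Below is a minimal caller-side sketch of how the two methods might be chosen; the helper class and its comments are illustrative only and the rationale for each flag value is an inference from the patch, not stated in it.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogFactory;

// Hypothetical helper, not part of this patch: pick the factory method that
// matches the kind of file being written.
public class WalWriterChooser {
  static HLog.Writer open(FileSystem fs, Path path, Configuration conf,
      boolean forRecoveredEdits) throws IOException {
    if (forRecoveredEdits) {
      // Recovered-edits files may be rewritten by a retried split task,
      // so they map to overwritable = true.
      return HLogFactory.createRecoveredEditsWriter(fs, path, conf);
    }
    // A live WAL must never replace an existing file, so the WAL path maps
    // to overwritable = false.
    return HLogFactory.createWALWriter(fs, path, conf);
  }
}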
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
index 1174847e9a0..64ea9ef01b2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
@@ -28,12 +28,11 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.codec.Codec;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader;
-import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer;
+import org.apache.hadoop.hbase.util.FSUtils;
 
 /**
  * Writer for protobuf-based WAL.
@@ -55,7 +54,8 @@ public class ProtobufLogWriter extends WriterBase {
   }
 
   @Override
-  public void init(FileSystem fs, Path path, Configuration conf) throws IOException {
+  @SuppressWarnings("deprecation")
+  public void init(FileSystem fs, Path path, Configuration conf, boolean overwritable) throws IOException {
     assert this.output == null;
     boolean doCompress = initializeCompressionContext(conf, path);
     this.trailerWarnSize = conf.getInt(HLog.WAL_TRAILER_WARN_SIZE,
@@ -65,7 +65,7 @@ public class ProtobufLogWriter extends WriterBase {
         "hbase.regionserver.hlog.replication", FSUtils.getDefaultReplication(fs, path));
     long blockSize = conf.getLong("hbase.regionserver.hlog.blocksize",
         FSUtils.getDefaultBlockSize(fs, path));
-    output = fs.create(path, true, bufferSize, replication, blockSize);
+    output = fs.createNonRecursive(path, overwritable, bufferSize, replication, blockSize, null);
     output.write(ProtobufLogReader.PB_WAL_MAGIC);
     WALHeader.newBuilder().setHasCompression(doCompress).build().writeDelimitedTo(output);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotLogSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotLogSplitter.java
index 8cac45cf0c1..155e3d00766 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotLogSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotLogSplitter.java
@@ -66,7 +66,7 @@ class SnapshotLogSplitter implements Closeable {
     public LogWriter(final Configuration conf, final FileSystem fs,
         final Path logDir, long seqId) throws IOException {
       logFile = new Path(logDir, logFileName(seqId, true));
-      this.writer = HLogFactory.createWriter(fs, logFile, conf);
+      this.writer = HLogFactory.createRecoveredEditsWriter(fs, logFile, conf);
       this.seqId = seqId;
     }
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 9525d8bb38f..ebd5f15f838 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -293,7 +293,7 @@ public class TestHRegion {
     for (long i = minSeqId; i <= maxSeqId; i += 10) {
       Path recoveredEdits = new Path(recoveredEditsDir, String.format("%019d", i));
       fs.create(recoveredEdits);
-      HLog.Writer writer = HLogFactory.createWriter(fs, recoveredEdits, conf);
+      HLog.Writer writer = HLogFactory.createRecoveredEditsWriter(fs, recoveredEdits, conf);
 
       long time = System.nanoTime();
       WALEdit edit = new WALEdit();
@@ -343,7 +343,7 @@ public class TestHRegion {
     for (long i = minSeqId; i <= maxSeqId; i += 10) {
       Path recoveredEdits = new Path(recoveredEditsDir, String.format("%019d", i));
       fs.create(recoveredEdits);
-      HLog.Writer writer = HLogFactory.createWriter(fs, recoveredEdits, conf);
+      HLog.Writer writer = HLogFactory.createRecoveredEditsWriter(fs, recoveredEdits, conf);
 
       long time = System.nanoTime();
       WALEdit edit = new WALEdit();
@@ -468,7 +468,7 @@ public class TestHRegion {
     Path recoveredEdits = new Path(recoveredEditsDir, String.format("%019d", 1000));
     fs.create(recoveredEdits);
-    HLog.Writer writer = HLogFactory.createWriter(fs, recoveredEdits, conf);
+    HLog.Writer writer = HLogFactory.createRecoveredEditsWriter(fs, recoveredEdits, conf);
 
     long time = System.nanoTime();
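The key behavioral change in ProtobufLogWriter above is the switch from fs.create(path, true, ...) to fs.createNonRecursive(path, overwritable, ...). The sketch below illustrates the difference, under the assumption (consistent with the commit subject, but not spelled out in the diff) that the point is to stop a region server from quietly recreating an hlog once log splitting has renamed its log directory; the class and variable names here are illustrative only.

import java.io.IOException;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class NonRecursiveCreateSketch {
  // createNonRecursive is deprecated in Hadoop, which is presumably why the
  // patch adds @SuppressWarnings("deprecation") to ProtobufLogWriter.init().
  @SuppressWarnings("deprecation")
  static FSDataOutputStream openWalFile(FileSystem fs, Path walPath,
      int bufferSize, short replication, long blockSize) throws IOException {
    // Old call: creates any missing parent directories and overwrites an
    // existing file, so a writer could bring a split-and-renamed log
    // directory back into existence.
    //   fs.create(walPath, true, bufferSize, replication, blockSize);

    // New call: fails if the parent directory no longer exists, and (with
    // overwrite = false, the WAL case) if the file is already there.
    return fs.createNonRecursive(walPath, false, bufferSize, replication, blockSize, null);
  }
}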
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java
index d7701b77c5c..0c36795f258 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java
@@ -31,13 +31,12 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer;
+import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.io.SequenceFile.Metadata;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.DefaultCodec;
 
@@ -84,7 +83,7 @@ public class SequenceFileLogWriter extends WriterBase {
   }
 
   @Override
-  public void init(FileSystem fs, Path path, Configuration conf)
+  public void init(FileSystem fs, Path path, Configuration conf, boolean overwritable)
   throws IOException {
     boolean compress = initializeCompressionContext(conf, path);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
index c130ffbc81d..e295a42ccd9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
@@ -795,7 +795,7 @@ public class TestHLog {
       fs.mkdirs(dir);
       // Write log in pre-PB format.
       sflw = new SequenceFileLogWriter();
-      sflw.init(fs, path, conf);
+      sflw.init(fs, path, conf, false);
       for (int i = 0; i < recordCount; ++i) {
         HLogKey key = new HLogKey(
             hri.getEncodedNameAsBytes(), tableName, i, timestamp, HConstants.DEFAULT_CLUSTER_ID);
@@ -870,6 +870,8 @@ public class TestHLog {
     final byte[] row = Bytes.toBytes("row");
     long timestamp = System.currentTimeMillis();
     Path path = new Path(dir, "temphlog");
+    // delete the log if already exists, for test only
+    fs.delete(path, true);
     HLog.Writer writer = null;
     HLog.Reader reader = null;
     try {
@@ -878,7 +880,7 @@ public class TestHLog {
       HTableDescriptor htd = new HTableDescriptor(tableName);
       fs.mkdirs(dir);
       // Write log in pb format.
-      writer = HLogFactory.createWriter(fs, path, conf);
+      writer = HLogFactory.createWALWriter(fs, path, conf);
       for (int i = 0; i < recordCount; ++i) {
         HLogKey key = new HLogKey(
             hri.getEncodedNameAsBytes(), tableName, i, timestamp, HConstants.DEFAULT_CLUSTER_ID);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java
index 9a6da6138dc..05ca1fd07de 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java
@@ -376,7 +376,7 @@ public class TestHLogSplit {
     Path p = HLogSplitter.getRegionSplitEditsPath(fs, entry, HBASEDIR, true);
     String parentOfParent = p.getParent().getParent().getName();
     assertEquals(parentOfParent, HRegionInfo.FIRST_META_REGIONINFO.getEncodedName());
-    HLogFactory.createWriter(fs, p, conf).close();
+    HLogFactory.createRecoveredEditsWriter(fs, p, conf).close();
   }
 
   @Test (timeout=300000)
@@ -1168,8 +1168,8 @@ public class TestHLogSplit {
     }
     fs.mkdirs(new Path(tableDir, region));
-    HLog.Writer writer = HLogFactory.createWriter(fs,
-        julietLog, conf);
+    HLog.Writer writer = HLogFactory.createWALWriter(fs,
+        julietLog, conf);
     appendEntry(writer, TableName.valueOf("juliet"), ("juliet").getBytes(),
         ("r").getBytes(), FAMILY, QUALIFIER, VALUE, 0);
     writer.close();
 
@@ -1289,7 +1289,7 @@ public class TestHLogSplit {
         conf, HBASEDIR, fs, null, null) {
       protected HLog.Writer createWriter(FileSystem fs, Path logfile, Configuration conf)
           throws IOException {
-        HLog.Writer writer = HLogFactory.createWriter(fs, logfile, conf);
+        HLog.Writer writer = HLogFactory.createRecoveredEditsWriter(fs, logfile, conf);
         // After creating writer, simulate region's
         // replayRecoveredEditsIfAny() which gets SplitEditFiles of this
         // region and delete them, excluding files with '.temp' suffix.
@@ -1350,7 +1350,7 @@ public class TestHLogSplit {
     HLog.Writer [] ws = new HLog.Writer[writers];
     int seq = 0;
     for (int i = 0; i < writers; i++) {
-      ws[i] = HLogFactory.createWriter(dfs, new Path(HLOGDIR, HLOG_FILE_PREFIX + i), dfs.getConf());
+      ws[i] = HLogFactory.createWALWriter(dfs, new Path(HLOGDIR, HLOG_FILE_PREFIX + i), dfs.getConf());
       for (int j = 0; j < entries; j++) {
         int prefix = 0;
         for (String region : REGIONS) {
@@ -1505,7 +1505,7 @@ public class TestHLogSplit {
 
   private void injectEmptyFile(String suffix, boolean closeFile)
       throws IOException {
-    HLog.Writer writer = HLogFactory.createWriter(
+    HLog.Writer writer = HLogFactory.createWALWriter(
         fs, new Path(HLOGDIR, HLOG_FILE_PREFIX + suffix), conf);
     if (closeFile) writer.close();
   }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestReadOldRootAndMetaEdits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestReadOldRootAndMetaEdits.java
index 268a409a61e..8e88f7bd1c9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestReadOldRootAndMetaEdits.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestReadOldRootAndMetaEdits.java
@@ -112,7 +112,7 @@ public class TestReadOldRootAndMetaEdits {
         HConstants.DEFAULT_CLUSTER_ID), kvs);
 
     // write above entries
-    writer = HLogFactory.createWriter(fs, path, conf);
+    writer = HLogFactory.createWALWriter(fs, path, conf);
     writer.append(tEntry);
     writer.append(rootEntry);
     writer.append(oldMetaEntry);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java
index 5866a195ae1..4c5ece5e269 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java
@@ -79,8 +79,8 @@ public class TestReplicationSource {
     Path logPath = new Path(logDir, "log");
     if (!FS.exists(logDir)) FS.mkdirs(logDir);
     if (!FS.exists(oldLogDir)) FS.mkdirs(oldLogDir);
-    HLog.Writer writer = HLogFactory.createWriter(FS,
-        logPath, conf);
+    HLog.Writer writer = HLogFactory.createWALWriter(FS,
+        logPath, conf);
     for(int i = 0; i < 3; i++) {
       byte[] b = Bytes.toBytes(Integer.toString(i));
       KeyValue kv = new KeyValue(b,b,b);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotLogSplitter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotLogSplitter.java
index 99715f524c7..d66623cacc9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotLogSplitter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotLogSplitter.java
@@ -144,7 +144,7 @@ public class TestSnapshotLogSplitter {
    */
   private void writeTestLog(final Path logFile) throws IOException {
     fs.mkdirs(logFile.getParent());
-    HLog.Writer writer = HLogFactory.createWriter(fs, logFile, conf);
+    HLog.Writer writer = HLogFactory.createWALWriter(fs, logFile, conf);
     try {
       for (int i = 0; i < 7; ++i) {
         TableName tableName = getTableName(i);