HBASE-5995 Fix and reenable TestLogRolling.testLogRollOnPipelineRestart

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1483004 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Enis Soztutar 2013-05-15 18:52:20 +00:00
parent f729fcd4ec
commit a1aa8537a2
2 changed files with 56 additions and 17 deletions

View File

@@ -475,11 +475,14 @@ class FSHLog implements HLog, Syncable {
return null; return null;
} }
byte [][] regionsToFlush = null; byte [][] regionsToFlush = null;
if (closed) {
LOG.debug("HLog closed. Skipping rolling of writer");
return null;
}
try { try {
this.logRollRunning = true; this.logRollRunning = true;
boolean isClosed = closed; if (!closeBarrier.beginOp()) {
if (isClosed || !closeBarrier.beginOp()) { LOG.debug("HLog closing. Skipping rolling of writer");
LOG.debug("HLog " + (isClosed ? "closed" : "closing") + ". Skipping rolling of writer");
return regionsToFlush; return regionsToFlush;
} }
// Do all the preparation outside of the updateLock to block // Do all the preparation outside of the updateLock to block
@@ -955,6 +958,7 @@ class FSHLog implements HLog, Syncable {
} catch (IOException e) { } catch (IOException e) {
LOG.error("Error while syncing, requesting close of hlog ", e); LOG.error("Error while syncing, requesting close of hlog ", e);
requestLogRoll(); requestLogRoll();
Threads.sleep(this.optionalFlushInterval);
} }
} }
} catch (InterruptedException e) { } catch (InterruptedException e) {
@@ -1081,7 +1085,7 @@ class FSHLog implements HLog, Syncable {
} }
} }
} catch (IOException e) { } catch (IOException e) {
LOG.fatal("Could not sync. Requesting close of hlog", e); LOG.fatal("Could not sync. Requesting roll of hlog", e);
requestLogRoll(); requestLogRoll();
throw e; throw e;
} }
@@ -1160,18 +1164,22 @@ class FSHLog implements HLog, Syncable {
return this.getNumCurrentReplicas != null; return this.getNumCurrentReplicas != null;
} }
@Override
public void hsync() throws IOException { public void hsync() throws IOException {
syncer(); syncer();
} }
@Override
public void hflush() throws IOException { public void hflush() throws IOException {
syncer(); syncer();
} }
@Override
public void sync() throws IOException { public void sync() throws IOException {
syncer(); syncer();
} }
@Override
public void sync(long txid) throws IOException { public void sync(long txid) throws IOException {
syncer(txid); syncer(txid);
} }

View File

@@ -18,33 +18,60 @@
*/ */
package org.apache.hadoop.hbase.regionserver.wal; package org.apache.hadoop.hbase.regionserver.wal;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.EOFException;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger; import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.client.*; import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.LargeTests;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.exceptions.FailedLogCloseException; import org.apache.hadoop.hbase.exceptions.FailedLogCloseException;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.util.*; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.JVMClusterUtil;
import org.apache.hadoop.hdfs.DFSClient; import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.server.datanode.DataNode; import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.namenode.LeaseManager; import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
import org.apache.log4j.Level; import org.apache.log4j.Level;
import org.junit.*; import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
import java.io.*;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
import static org.junit.Assert.*;
/** /**
* Test log deletion as logs are rolled. * Test log deletion as logs are rolled.
*/ */
@@ -403,7 +430,7 @@ public class TestLogRolling {
* restarted. * restarted.
* @throws Exception * @throws Exception
*/ */
//DISABLED BECAUSE FLAKEY @Test @Test
public void testLogRollOnPipelineRestart() throws Exception { public void testLogRollOnPipelineRestart() throws Exception {
LOG.info("Starting testLogRollOnPipelineRestart"); LOG.info("Starting testLogRollOnPipelineRestart");
assertTrue("This test requires HLog file replication.", assertTrue("This test requires HLog file replication.",
@@ -504,11 +531,15 @@ public class TestLogRolling {
// read back the data written // read back the data written
Set<String> loggedRows = new HashSet<String>(); Set<String> loggedRows = new HashSet<String>();
FSUtils fsUtils = FSUtils.getInstance(fs, TEST_UTIL.getConfiguration());
for (Path p : paths) { for (Path p : paths) {
LOG.debug("recovering lease for " + p);
fsUtils.recoverFileLease(((HFileSystem)fs).getBackingFs(), p, TEST_UTIL.getConfiguration(), null);
LOG.debug("Reading HLog "+FSUtils.getPath(p)); LOG.debug("Reading HLog "+FSUtils.getPath(p));
HLog.Reader reader = null; HLog.Reader reader = null;
try { try {
reader = HLogFactory.createReader(fs, p, reader = HLogFactory.createReader(fs, p,
TEST_UTIL.getConfiguration()); TEST_UTIL.getConfiguration());
HLog.Entry entry; HLog.Entry entry;
while ((entry = reader.next()) != null) { while ((entry = reader.next()) != null) {