HBASE-25888 Backup tests are categorically flakey (#3279)

cleanup backup tests setup and teardown

Signed-off-by: Duo Zhang <zhangduo@apache.org>
This commit is contained in:
Mallikarjun 2021-05-24 07:05:40 +05:30 committed by GitHub
parent 7f6b778c14
commit fe70fced07
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 70 additions and 37 deletions

View File

@@ -18,15 +18,15 @@
package org.apache.hadoop.hbase.backup.mapreduce; package org.apache.hadoop.hbase.backup.mapreduce;
import static org.apache.hadoop.hbase.backup.util.BackupUtils.succeeded; import static org.apache.hadoop.hbase.backup.util.BackupUtils.succeeded;
import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import java.util.Stack; import java.util.Stack;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileStatus;
@@ -240,10 +240,9 @@ public class MapReduceBackupMergeJob implements BackupMergeJob {
* @throws IOException exception * @throws IOException exception
*/ */
protected void copyFile(FileSystem fs, Path p, Path newPath) throws IOException { protected void copyFile(FileSystem fs, Path p, Path newPath) throws IOException {
File f = File.createTempFile("data", "meta"); try (InputStream in = fs.open(p); OutputStream out = fs.create(newPath, true)) {
Path localPath = new Path(f.getAbsolutePath()); IOUtils.copy(in, out);
fs.copyToLocalFile(p, localPath); }
fs.copyFromLocalFile(localPath, newPath);
boolean exists = fs.exists(newPath); boolean exists = fs.exists(newPath);
if (!exists) { if (!exists) {
throw new IOException("Failed to copy meta file to: " + newPath); throw new IOException("Failed to copy meta file to: " + newPath);

View File

@@ -66,7 +66,7 @@ import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.wal.WALFactory; import org.apache.hadoop.hbase.wal.WALFactory;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.Before; import org.junit.BeforeClass;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -78,9 +78,9 @@ import org.slf4j.LoggerFactory;
public class TestBackupBase { public class TestBackupBase {
private static final Logger LOG = LoggerFactory.getLogger(TestBackupBase.class); private static final Logger LOG = LoggerFactory.getLogger(TestBackupBase.class);
protected static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); protected static HBaseTestingUtility TEST_UTIL;
protected static HBaseTestingUtility TEST_UTIL2; protected static HBaseTestingUtility TEST_UTIL2;
protected static Configuration conf1 = TEST_UTIL.getConfiguration(); protected static Configuration conf1;
protected static Configuration conf2; protected static Configuration conf2;
protected static TableName table1 = TableName.valueOf("table1"); protected static TableName table1 = TableName.valueOf("table1");
@@ -98,14 +98,13 @@ public class TestBackupBase {
protected static final byte[] qualName = Bytes.toBytes("q1"); protected static final byte[] qualName = Bytes.toBytes("q1");
protected static final byte[] famName = Bytes.toBytes("f"); protected static final byte[] famName = Bytes.toBytes("f");
protected static String BACKUP_ROOT_DIR = Path.SEPARATOR +"backupUT"; protected static String BACKUP_ROOT_DIR;
protected static String BACKUP_REMOTE_ROOT_DIR = Path.SEPARATOR + "backupUT"; protected static String BACKUP_REMOTE_ROOT_DIR;
protected static String provider = "defaultProvider"; protected static String provider = "defaultProvider";
protected static boolean secure = false; protected static boolean secure = false;
protected static boolean autoRestoreOnFailure = true; protected static boolean autoRestoreOnFailure;
protected static boolean setupIsDone = false; protected static boolean useSecondCluster;
protected static boolean useSecondCluster = false;
static class IncrementalTableBackupClientForTest extends IncrementalTableBackupClient { static class IncrementalTableBackupClientForTest extends IncrementalTableBackupClient {
public IncrementalTableBackupClientForTest() { public IncrementalTableBackupClientForTest() {
@@ -270,14 +269,10 @@ public class TestBackupBase {
} }
} }
/** public static void setUpHelper() throws Exception {
* @throws Exception if starting the mini cluster or setting up the tables fails BACKUP_ROOT_DIR = Path.SEPARATOR +"backupUT";
*/ BACKUP_REMOTE_ROOT_DIR = Path.SEPARATOR + "backupUT";
@Before
public void setUp() throws Exception {
if (setupIsDone) {
return;
}
if (secure) { if (secure) {
// set the always on security provider // set the always on security provider
UserProvider.setUserProviderForTesting(TEST_UTIL.getConfiguration(), UserProvider.setUserProviderForTesting(TEST_UTIL.getConfiguration(),
@@ -324,9 +319,24 @@
} }
createTables(); createTables();
populateFromMasterConfig(TEST_UTIL.getHBaseCluster().getMaster().getConfiguration(), conf1); populateFromMasterConfig(TEST_UTIL.getHBaseCluster().getMaster().getConfiguration(), conf1);
setupIsDone = true;
} }
/**
* Setup Cluster with appropriate configurations before running tests.
*
* @throws Exception if starting the mini cluster or setting up the tables fails
*/
@BeforeClass
public static void setUp() throws Exception {
TEST_UTIL = new HBaseTestingUtility();
conf1 = TEST_UTIL.getConfiguration();
autoRestoreOnFailure = true;
useSecondCluster = false;
setUpHelper();
}
private static void populateFromMasterConfig(Configuration masterConf, Configuration conf) { private static void populateFromMasterConfig(Configuration masterConf, Configuration conf) {
Iterator<Entry<String, String>> it = masterConf.iterator(); Iterator<Entry<String, String>> it = masterConf.iterator();
while (it.hasNext()) { while (it.hasNext()) {
@@ -350,6 +360,8 @@
} }
TEST_UTIL.shutdownMiniCluster(); TEST_UTIL.shutdownMiniCluster();
TEST_UTIL.shutdownMiniMapReduceCluster(); TEST_UTIL.shutdownMiniMapReduceCluster();
autoRestoreOnFailure = true;
useSecondCluster = false;
} }
Table insertIntoTable(Connection conn, TableName table, byte[] family, int id, int numRows) Table insertIntoTable(Connection conn, TableName table, byte[] family, int id, int numRows)

View File

@@ -27,6 +27,7 @@ import java.util.Optional;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.impl.BackupSystemTable; import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
@@ -41,7 +42,7 @@ import org.apache.hadoop.hbase.coprocessor.MasterObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext; import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.util.ToolRunner;
import org.junit.Before; import org.junit.BeforeClass;
import org.junit.ClassRule; import org.junit.ClassRule;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@@ -113,15 +114,18 @@ public class TestBackupDeleteWithFailures extends TestBackupBase{
} }
/** /**
* Setup Cluster with appropriate configurations before running tests.
*
* @throws Exception if starting the mini cluster or setting up the tables fails * @throws Exception if starting the mini cluster or setting up the tables fails
*/ */
@Override @BeforeClass
@Before public static void setUp() throws Exception {
public void setUp() throws Exception { TEST_UTIL = new HBaseTestingUtility();
conf1 = TEST_UTIL.getConfiguration();
conf1.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, conf1.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY,
MasterSnapshotObserver.class.getName()); MasterSnapshotObserver.class.getName());
conf1.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 1); conf1.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 1);
super.setUp(); setUpHelper();
} }
private MasterSnapshotObserver getMasterSnapshotObserver() { private MasterSnapshotObserver getMasterSnapshotObserver() {

View File

@@ -22,6 +22,7 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException; import java.io.IOException;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.util.BackupUtils; import org.apache.hadoop.hbase.backup.util.BackupUtils;
@@ -38,6 +39,7 @@ import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils;
import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert; import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.ClassRule; import org.junit.ClassRule;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@@ -55,11 +57,18 @@ public class TestRemoteBackup extends TestBackupBase {
private static final Logger LOG = LoggerFactory.getLogger(TestRemoteBackup.class); private static final Logger LOG = LoggerFactory.getLogger(TestRemoteBackup.class);
@Override /**
public void setUp() throws Exception { * Setup Cluster with appropriate configurations before running tests.
useSecondCluster = true; *
* @throws Exception if starting the mini cluster or setting up the tables fails
*/
@BeforeClass
public static void setUp() throws Exception {
TEST_UTIL = new HBaseTestingUtility();
conf1 = TEST_UTIL.getConfiguration();
conf1.setInt(HConstants.REGION_SERVER_HANDLER_COUNT, 10); conf1.setInt(HConstants.REGION_SERVER_HANDLER_COUNT, 10);
super.setUp(); useSecondCluster = true;
setUpHelper();
} }
/** /**

View File

@@ -20,10 +20,12 @@ package org.apache.hadoop.hbase.backup;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.util.BackupUtils; import org.apache.hadoop.hbase.backup.util.BackupUtils;
import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.junit.BeforeClass;
import org.junit.ClassRule; import org.junit.ClassRule;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@@ -39,10 +41,17 @@ public class TestRemoteRestore extends TestBackupBase {
private static final Logger LOG = LoggerFactory.getLogger(TestRemoteRestore.class); private static final Logger LOG = LoggerFactory.getLogger(TestRemoteRestore.class);
@Override /**
public void setUp() throws Exception { * Setup Cluster with appropriate configurations before running tests.
*
* @throws Exception if starting the mini cluster or setting up the tables fails
*/
@BeforeClass
public static void setUp() throws Exception {
TEST_UTIL = new HBaseTestingUtility();
conf1 = TEST_UTIL.getConfiguration();
useSecondCluster = true; useSecondCluster = true;
super.setUp(); setUpHelper();
} }
/** /**