diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.java
index f71b62a1835..375f34b4830 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.java
@@ -18,15 +18,15 @@
 package org.apache.hadoop.hbase.backup.mapreduce;
 
 import static org.apache.hadoop.hbase.backup.util.BackupUtils.succeeded;
-
-import java.io.File;
 import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.Stack;
-
+import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
@@ -240,13 +240,12 @@ public class MapReduceBackupMergeJob implements BackupMergeJob {
    * @throws IOException exception
    */
   protected void copyFile(FileSystem fs, Path p, Path newPath) throws IOException {
-    File f = File.createTempFile("data", "meta");
-    Path localPath = new Path(f.getAbsolutePath());
-    fs.copyToLocalFile(p, localPath);
-    fs.copyFromLocalFile(localPath, newPath);
+    try (InputStream in = fs.open(p); OutputStream out = fs.create(newPath, true)) {
+      IOUtils.copy(in, out);
+    }
     boolean exists = fs.exists(newPath);
     if (!exists) {
-      throw new IOException("Failed to copy meta file to: "+ newPath);
+      throw new IOException("Failed to copy meta file to: " + newPath);
     }
   }
 
diff --git a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
index 12060dcc1b8..692c21be26e 100644
--- a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
+++ b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
@@ -66,7 +66,7 @@ import org.apache.hadoop.hbase.util.CommonFSUtils;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.wal.WALFactory;
 import org.junit.AfterClass;
-import org.junit.Before;
+import org.junit.BeforeClass;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -78,9 +78,9 @@ import org.slf4j.LoggerFactory;
 public class TestBackupBase {
   private static final Logger LOG = LoggerFactory.getLogger(TestBackupBase.class);
 
-  protected static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+  protected static HBaseTestingUtility TEST_UTIL;
   protected static HBaseTestingUtility TEST_UTIL2;
-  protected static Configuration conf1 = TEST_UTIL.getConfiguration();
+  protected static Configuration conf1;
   protected static Configuration conf2;
 
   protected static TableName table1 = TableName.valueOf("table1");
@@ -98,14 +98,13 @@ public class TestBackupBase {
   protected static final byte[] qualName = Bytes.toBytes("q1");
   protected static final byte[] famName = Bytes.toBytes("f");
 
-  protected static String BACKUP_ROOT_DIR = Path.SEPARATOR +"backupUT";
-  protected static String BACKUP_REMOTE_ROOT_DIR = Path.SEPARATOR + "backupUT";
+  protected static String BACKUP_ROOT_DIR;
+  protected static String BACKUP_REMOTE_ROOT_DIR;
   protected static String provider = "defaultProvider";
   protected static boolean secure = false;
 
-  protected static boolean autoRestoreOnFailure = true;
-  protected static boolean setupIsDone = false;
-  protected static boolean useSecondCluster = false;
+  protected static boolean autoRestoreOnFailure;
+  protected static boolean useSecondCluster;
 
   static class IncrementalTableBackupClientForTest extends IncrementalTableBackupClient {
     public IncrementalTableBackupClientForTest() {
@@ -270,14 +269,10 @@ public class TestBackupBase {
     }
   }
 
-  /**
-   * @throws Exception if starting the mini cluster or setting up the tables fails
-   */
-  @Before
-  public void setUp() throws Exception {
-    if (setupIsDone) {
-      return;
-    }
+  public static void setUpHelper() throws Exception {
+    BACKUP_ROOT_DIR = Path.SEPARATOR +"backupUT";
+    BACKUP_REMOTE_ROOT_DIR = Path.SEPARATOR + "backupUT";
+
     if (secure) {
       // set the always on security provider
       UserProvider.setUserProviderForTesting(TEST_UTIL.getConfiguration(),
@@ -324,9 +319,24 @@ public class TestBackupBase {
     }
     createTables();
     populateFromMasterConfig(TEST_UTIL.getHBaseCluster().getMaster().getConfiguration(), conf1);
-    setupIsDone = true;
   }
 
+
+  /**
+   * Setup Cluster with appropriate configurations before running tests.
+   *
+   * @throws Exception if starting the mini cluster or setting up the tables fails
+   */
+  @BeforeClass
+  public static void setUp() throws Exception {
+    TEST_UTIL = new HBaseTestingUtility();
+    conf1 = TEST_UTIL.getConfiguration();
+    autoRestoreOnFailure = true;
+    useSecondCluster = false;
+    setUpHelper();
+  }
+
+
   private static void populateFromMasterConfig(Configuration masterConf, Configuration conf) {
     Iterator<Entry<String, String>> it = masterConf.iterator();
     while (it.hasNext()) {
@@ -350,6 +360,8 @@ public class TestBackupBase {
     }
     TEST_UTIL.shutdownMiniCluster();
     TEST_UTIL.shutdownMiniMapReduceCluster();
+    autoRestoreOnFailure = true;
+    useSecondCluster = false;
   }
 
   Table insertIntoTable(Connection conn, TableName table, byte[] family, int id, int numRows)
diff --git a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupDeleteWithFailures.java b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupDeleteWithFailures.java
index 27cf07a83a8..04650330b7a 100644
--- a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupDeleteWithFailures.java
+++ b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupDeleteWithFailures.java
@@ -27,6 +27,7 @@ import java.util.Optional;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
@@ -41,7 +42,7 @@ import org.apache.hadoop.hbase.coprocessor.MasterObserver;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.util.ToolRunner;
-import org.junit.Before;
+import org.junit.BeforeClass;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -113,15 +114,18 @@ public class TestBackupDeleteWithFailures extends TestBackupBase{
   }
 
   /**
+   * Setup Cluster with appropriate configurations before running tests.
+   *
    * @throws Exception if starting the mini cluster or setting up the tables fails
    */
-  @Override
-  @Before
-  public void setUp() throws Exception {
+  @BeforeClass
+  public static void setUp() throws Exception {
+    TEST_UTIL = new HBaseTestingUtility();
+    conf1 = TEST_UTIL.getConfiguration();
     conf1.set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY,
       MasterSnapshotObserver.class.getName());
     conf1.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 1);
-    super.setUp();
+    setUpHelper();
   }
 
   private MasterSnapshotObserver getMasterSnapshotObserver() {
diff --git a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestRemoteBackup.java b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestRemoteBackup.java
index 136c4b26973..6db63495e09 100644
--- a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestRemoteBackup.java
+++ b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestRemoteBackup.java
@@ -22,6 +22,7 @@ import static org.junit.Assert.assertTrue;
 import java.io.IOException;
 import java.util.concurrent.CountDownLatch;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.util.BackupUtils;
@@ -38,6 +39,7 @@ import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Assert;
+import org.junit.BeforeClass;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -55,11 +57,18 @@ public class TestRemoteBackup extends TestBackupBase {
 
   private static final Logger LOG = LoggerFactory.getLogger(TestRemoteBackup.class);
 
-  @Override
-  public void setUp() throws Exception {
-    useSecondCluster = true;
+  /**
+   * Setup Cluster with appropriate configurations before running tests.
+   *
+   * @throws Exception if starting the mini cluster or setting up the tables fails
+   */
+  @BeforeClass
+  public static void setUp() throws Exception {
+    TEST_UTIL = new HBaseTestingUtility();
+    conf1 = TEST_UTIL.getConfiguration();
     conf1.setInt(HConstants.REGION_SERVER_HANDLER_COUNT, 10);
-    super.setUp();
+    useSecondCluster = true;
+    setUpHelper();
   }
 
   /**
diff --git a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestRemoteRestore.java b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestRemoteRestore.java
index d6701449fb2..9b08d2549cc 100644
--- a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestRemoteRestore.java
+++ b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestRemoteRestore.java
@@ -20,10 +20,12 @@ package org.apache.hadoop.hbase.backup;
 import static org.junit.Assert.assertTrue;
 
 import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.util.BackupUtils;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.junit.BeforeClass;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -39,10 +41,17 @@ public class TestRemoteRestore extends TestBackupBase {
 
   private static final Logger LOG = LoggerFactory.getLogger(TestRemoteRestore.class);
 
-  @Override
-  public void setUp() throws Exception {
+  /**
+   * Setup Cluster with appropriate configurations before running tests.
+   *
+   * @throws Exception if starting the mini cluster or setting up the tables fails
+   */
+  @BeforeClass
+  public static void setUp() throws Exception {
+    TEST_UTIL = new HBaseTestingUtility();
+    conf1 = TEST_UTIL.getConfiguration();
     useSecondCluster = true;
-    super.setUp();
+    setUpHelper();
   }
 
   /**