HBASE-5760 Unit tests should write only under /target

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1312043 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael Stack 2012-04-10 23:33:05 +00:00
parent a31cb26e5a
commit 135478ca21
5 changed files with 83 additions and 33 deletions

View File

@@ -39,7 +39,6 @@ import java.util.Map;
import java.util.NavigableSet;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -85,6 +84,7 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.NodeExistsException;
@@ -1277,10 +1277,16 @@ public class HBaseTestingUtility {
*/
public void startMiniMapReduceCluster(final int servers) throws IOException {
LOG.info("Starting mini mapreduce cluster...");
if (dataTestDir == null) {
setupDataTestDir();
}
// These are needed for the new and improved Map/Reduce framework
conf.set("mapred.output.dir", conf.get("hadoop.tmp.dir"));
mrCluster = new MiniMRCluster(servers,
FileSystem.get(conf).getUri().toString(), 1);
mrCluster = new MiniMRCluster(0, 0, servers,
FileSystem.get(conf).getUri().toString(), 1, null, null, null, new JobConf(conf));
mrCluster.getJobTrackerRunner().getJobTracker().getConf().set("mapred.local.dir",
conf.get("mapred.local.dir")); //Hadoop MiniMR overwrites this while it should not
LOG.info("Mini mapreduce cluster started");
conf.set("mapred.job.tracker",
mrCluster.createJobConf().get("mapred.job.tracker"));

View File

@@ -20,6 +20,8 @@
package org.apache.hadoop.hbase.coprocessor;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@@ -28,11 +30,21 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.regionserver.SplitTransaction;
import org.apache.hadoop.hbase.regionserver.Store;
@@ -42,14 +54,12 @@ import org.apache.hadoop.hbase.util.PairOfSameType;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import static org.mockito.Mockito.when;
@Category(SmallTests.class)
public class TestCoprocessorInterface extends HBaseTestCase {
static final Log LOG = LogFactory.getLog(TestCoprocessorInterface.class);
static final String DIR = "test/build/data/TestCoprocessorInterface/";
private static final HBaseTestingUtility TEST_UTIL =
new HBaseTestingUtility();
static final Path DIR = TEST_UTIL.getDataTestDir();
private static class CustomScanner implements RegionScanner {
@@ -63,9 +73,9 @@ public class TestCoprocessorInterface extends HBaseTestCase {
public boolean next(List<KeyValue> results) throws IOException {
return delegate.next(results);
}
@Override
public boolean next(List<KeyValue> results, String metric)
public boolean next(List<KeyValue> results, String metric)
throws IOException {
return delegate.next(results, metric);
}
@@ -74,9 +84,9 @@ public class TestCoprocessorInterface extends HBaseTestCase {
public boolean next(List<KeyValue> result, int limit) throws IOException {
return delegate.next(result, limit);
}
@Override
public boolean next(List<KeyValue> result, int limit, String metric)
public boolean next(List<KeyValue> result, int limit, String metric)
throws IOException {
return delegate.next(result, limit, metric);
}

View File

@@ -20,6 +20,11 @@
package org.apache.hadoop.hbase.coprocessor;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.ArrayList;
@@ -29,8 +34,24 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
@@ -39,18 +60,14 @@ import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.JVMClusterUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.*;
@Category(MediumTests.class)
public class TestRegionObserverInterface {
static final Log LOG = LogFactory.getLog(TestRegionObserverInterface.class);
static final String DIR = "test/build/data/TestRegionObserver/";
public static final byte[] TEST_TABLE = Bytes.toBytes("TestTable");
public final static byte[] A = Bytes.toBytes("a");
@@ -299,21 +316,21 @@ public class TestRegionObserverInterface {
public boolean next(List<KeyValue> results) throws IOException {
return next(results, -1);
}
@Override
public boolean next(List<KeyValue> results, String metric)
public boolean next(List<KeyValue> results, String metric)
throws IOException {
return next(results, -1, metric);
}
@Override
public boolean next(List<KeyValue> results, int limit)
public boolean next(List<KeyValue> results, int limit)
throws IOException{
return next(results, limit, null);
}
@Override
public boolean next(List<KeyValue> results, int limit, String metric)
public boolean next(List<KeyValue> results, int limit, String metric)
throws IOException {
List<KeyValue> internalResults = new ArrayList<KeyValue>();
boolean hasMore;

View File

@@ -21,24 +21,30 @@
package org.apache.hadoop.hbase.coprocessor;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.util.Bytes;
import junit.framework.TestCase;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestRegionObserverStacking extends TestCase {
static final String DIR = "test/build/data/TestRegionObserverStacking/";
private static HBaseTestingUtility TEST_UTIL
= new HBaseTestingUtility();
static final Path DIR = TEST_UTIL.getDataTestDir();
public static class ObserverA extends BaseRegionObserver {
long id;

View File

@@ -33,17 +33,25 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.Callable;
import java.util.Random;
import java.util.concurrent.Callable;
import junit.framework.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.LargeTests;
import org.apache.hadoop.hbase.PerformanceEvaluation;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
@@ -272,7 +280,7 @@ public class TestHFileOutputFormat {
// verify that the file has the proper FileInfo.
writer.close(context);
// the generated file lives 1 directory down from the attempt directory
// the generated file lives 1 directory down from the attempt directory
// and is the only file, e.g.
// _attempt__0000_r_000000_0/b/1979617994050536795
FileSystem fs = FileSystem.get(conf);
@@ -341,7 +349,8 @@ public class TestHFileOutputFormat {
@Test
public void testJobConfiguration() throws Exception {
Job job = new Job();
Job job = new Job(util.getConfiguration());
job.setWorkingDirectory(util.getDataTestDir("testJobConfiguration"));
HTable table = Mockito.mock(HTable.class);
setupMockStartKeys(table);
HFileOutputFormat.configureIncrementalLoad(job, table);
@@ -466,6 +475,7 @@ public class TestHFileOutputFormat {
Configuration conf, HTable table, Path outDir)
throws Exception {
Job job = new Job(conf, "testLocalMRIncrementalLoad");
job.setWorkingDirectory(util.getDataTestDir("runIncrementalPELoad"));
setupRandomGeneratorMapper(job);
HFileOutputFormat.configureIncrementalLoad(job, table);
FileOutputFormat.setOutputPath(job, outDir);
@@ -586,6 +596,7 @@ public class TestHFileOutputFormat {
// pollutes the GZip codec pool with an incompatible compressor.
conf.set("io.seqfile.compression.type", "NONE");
Job job = new Job(conf, "testLocalMRIncrementalLoad");
job.setWorkingDirectory(util.getDataTestDir("testColumnFamilyCompression"));
setupRandomGeneratorMapper(job);
HFileOutputFormat.configureIncrementalLoad(job, table);
FileOutputFormat.setOutputPath(job, dir);