HADOOP-9470. eliminate duplicate FQN tests in different Hadoop modules (Ivan A. Veselovsky via daryn)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1530667 13f79535-47bb-0310-9956-ffa450edef68
Daryn Sharp 2013-10-09 15:09:37 +00:00
parent 643b1a4019
commit b858b812b7
13 changed files with 16 additions and 13 deletions

@@ -351,6 +351,9 @@ Release 2.3.0 - UNRELEASED
     HADOOP-9199. Cover package org.apache.hadoop.io with unit tests (Andrey
     Klochkov via jeagles)

+    HADOOP-9470. eliminate duplicate FQN tests in different Hadoop modules
+    (Ivan A. Veselovsky via daryn)
+
   OPTIMIZATIONS

     HADOOP-9748. Reduce blocking on UGI.ensureInitialized (daryn)

@@ -42,7 +42,7 @@ import org.junit.Test;
  * This class tests the logic for displaying the binary formats supported
  * by the Text command.
  */
-public class TestTextCommand {
+public class TestHdfsTextCommand {
   private static final String TEST_ROOT_DIR =
     System.getProperty("test.build.data", "build/test/data/") + "/testText";
   private static final Path AVRO_FILENAME = new Path(TEST_ROOT_DIR, "weather.avro");

@@ -39,7 +39,7 @@ import org.junit.Test;
 /**
  * This class checks that RPCs can use specialized socket factories.
  */
-public class TestSocketFactory {
+public class TestMRCJCSocketFactory {

   /**
    * Check that we can reach a NameNode or Resource Manager using a specific

@@ -38,7 +38,7 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.Text;

 @SuppressWarnings("deprecation")
-public class TestFileInputFormat extends TestCase {
+public class TestMRCJCFileInputFormat extends TestCase {
   Configuration conf = new Configuration();
   MiniDFSCluster dfs = null;

@@ -29,7 +29,7 @@ import org.apache.hadoop.mapred.JobContextImpl;
 import org.apache.hadoop.mapred.TaskAttemptContextImpl;
 import org.apache.hadoop.mapreduce.JobStatus;

-public class TestFileOutputCommitter extends TestCase {
+public class TestMRCJCFileOutputCommitter extends TestCase {
   private static Path outDir = new Path(
       System.getProperty("test.build.data", "/tmp"), "output");

@@ -31,7 +31,7 @@ import org.apache.hadoop.mapreduce.tools.CLI;
 import org.apache.hadoop.util.Tool;
 import org.junit.Ignore;

 @Ignore
-public class TestJobClient extends TestMRJobClient {
+public class TestMRCJCJobClient extends TestMRJobClient {
   private String runJob() throws Exception {
     OutputStream os = getFileSystem().create(new Path(getInputDir(),
@@ -71,7 +71,7 @@ public class TestJobClient extends TestMRJobClient {
   static void verifyJobPriority(String jobId, String priority,
       JobConf conf) throws Exception {
-    TestJobClient test = new TestJobClient();
+    TestMRCJCJobClient test = new TestMRCJCJobClient();
     test.verifyJobPriority(jobId, priority, conf, test.createJobClient());
   }

@@ -32,7 +32,7 @@ import org.apache.hadoop.util.ClassUtil;
 import static org.junit.Assert.*;

 @Ignore
-public class TestJobConf {
+public class TestMRCJCJobConf {
   private static final String JAR_RELATIVE_PATH =
       "build/test/mapred/testjar/testjob.jar";
   private static final String CLASSNAME = "testjar.ClassWordCount";

@@ -42,7 +42,7 @@ import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;

-public class TestFileInputFormat {
+public class TestMRCJCFileInputFormat {

   @Test
   public void testAddInputPath() throws IOException {

@@ -38,7 +38,7 @@ import org.apache.hadoop.mapreduce.task.JobContextImpl;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

-public class TestFileOutputCommitter extends TestCase {
+public class TestMRCJCFileOutputCommitter extends TestCase {
   private static Path outDir = new Path(System.getProperty("test.build.data",
       "/tmp"), "output");

@@ -30,7 +30,7 @@ import org.junit.Test;
  * Test for the JobConf-related parts of common's ReflectionUtils
  * class.
  */
-public class TestReflectionUtils {
+public class TestMRCJCReflectionUtils {
   @Before
   public void setUp() {
     ReflectionUtils.clearCache();

@@ -33,7 +33,7 @@ import org.junit.Test;
 /**
  * A test to rest the RunJar class.
  */
-public class TestRunJar {
+public class TestMRCJCRunJar {
   private static String TEST_ROOT_DIR = new Path(System.getProperty(
       "test.build.data", "/tmp")).toString();

@@ -37,7 +37,7 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerRequ
 import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerResponse;
 import org.junit.Test;

-public class TestRPCFactories {
+public class TestYSCRPCFactories {

@@ -27,7 +27,7 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest;
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.NodeHeartbeatRequestPBImpl;
 import org.junit.Test;

-public class TestRecordFactory {
+public class TestYSCRecordFactory {

   @Test
   public void testPbRecordFactory() {