HADOOP-9470. eliminate duplicate FQN tests in different Hadoop modules (Ivan A. Veselovsky via daryn)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1530667 13f79535-47bb-0310-9956-ffa450edef68
parent 643b1a4019
commit b858b812b7
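For context on why the renames below matter (an illustrative sketch, not part of the commit): when two Hadoop modules each declare a test class with the same fully-qualified name, a merged test classpath can only ever resolve that name to one of the copies, so the other is silently shadowed. The hypothetical probe below shows one way to see which copy wins; the class name is taken from the diff, everything else (class name of the probe, main method harness) is assumed.

// Illustrative sketch only: probe which jar a duplicated FQN resolves to.
// Assumes the relevant Hadoop test jars are already on the classpath.
public class FqnCollisionProbe {
  public static void main(String[] args) throws Exception {
    // A fully-qualified name maps to exactly one class per class loader, so a
    // second class with the same FQN in another module cannot be loaded alongside it.
    Class<?> clazz = Class.forName("org.apache.hadoop.mapred.TestFileInputFormat");
    System.out.println("Loaded from: "
        + clazz.getProtectionDomain().getCodeSource().getLocation());
  }
}

Renaming one of the duplicates (as this commit does, e.g. TestFileInputFormat to TestMRCJCFileInputFormat) gives each test class a unique FQN, so both can run in the same JVM.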
@@ -351,6 +351,9 @@ Release 2.3.0 - UNRELEASED
     HADOOP-9199. Cover package org.apache.hadoop.io with unit tests (Andrey
     Klochkov via jeagles)
 
+    HADOOP-9470. eliminate duplicate FQN tests in different Hadoop modules
+    (Ivan A. Veselovsky via daryn)
+
   OPTIMIZATIONS
 
     HADOOP-9748. Reduce blocking on UGI.ensureInitialized (daryn)
@@ -42,7 +42,7 @@ import org.junit.Test;
  * This class tests the logic for displaying the binary formats supported
  * by the Text command.
  */
-public class TestTextCommand {
+public class TestHdfsTextCommand {
   private static final String TEST_ROOT_DIR =
     System.getProperty("test.build.data", "build/test/data/") + "/testText";
   private static final Path AVRO_FILENAME = new Path(TEST_ROOT_DIR, "weather.avro");
@@ -39,7 +39,7 @@ import org.junit.Test;
 /**
  * This class checks that RPCs can use specialized socket factories.
  */
-public class TestSocketFactory {
+public class TestMRCJCSocketFactory {
 
   /**
    * Check that we can reach a NameNode or Resource Manager using a specific
@@ -38,7 +38,7 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.Text;
 
 @SuppressWarnings("deprecation")
-public class TestFileInputFormat extends TestCase {
+public class TestMRCJCFileInputFormat extends TestCase {
 
   Configuration conf = new Configuration();
   MiniDFSCluster dfs = null;
@@ -29,7 +29,7 @@ import org.apache.hadoop.mapred.JobContextImpl;
 import org.apache.hadoop.mapred.TaskAttemptContextImpl;
 import org.apache.hadoop.mapreduce.JobStatus;
 
-public class TestFileOutputCommitter extends TestCase {
+public class TestMRCJCFileOutputCommitter extends TestCase {
   private static Path outDir = new Path(
       System.getProperty("test.build.data", "/tmp"), "output");
 
@@ -31,7 +31,7 @@ import org.apache.hadoop.mapreduce.tools.CLI;
 import org.apache.hadoop.util.Tool;
 import org.junit.Ignore;
 @Ignore
-public class TestJobClient extends TestMRJobClient {
+public class TestMRCJCJobClient extends TestMRJobClient {
 
   private String runJob() throws Exception {
     OutputStream os = getFileSystem().create(new Path(getInputDir(),
@@ -71,7 +71,7 @@ public class TestJobClient extends TestMRJobClient {
 
   static void verifyJobPriority(String jobId, String priority,
       JobConf conf) throws Exception {
-    TestJobClient test = new TestJobClient();
+    TestMRCJCJobClient test = new TestMRCJCJobClient();
     test.verifyJobPriority(jobId, priority, conf, test.createJobClient());
   }
 
@@ -32,7 +32,7 @@ import org.apache.hadoop.util.ClassUtil;
 
 import static org.junit.Assert.*;
 @Ignore
-public class TestJobConf {
+public class TestMRCJCJobConf {
   private static final String JAR_RELATIVE_PATH =
       "build/test/mapred/testjar/testjob.jar";
   private static final String CLASSNAME = "testjar.ClassWordCount";
@@ -42,7 +42,7 @@ import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 
-public class TestFileInputFormat {
+public class TestMRCJCFileInputFormat {
 
   @Test
   public void testAddInputPath() throws IOException {
@@ -38,7 +38,7 @@ import org.apache.hadoop.mapreduce.task.JobContextImpl;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
 
 
-public class TestFileOutputCommitter extends TestCase {
+public class TestMRCJCFileOutputCommitter extends TestCase {
   private static Path outDir = new Path(System.getProperty("test.build.data",
       "/tmp"), "output");
 
@@ -30,7 +30,7 @@ import org.junit.Test;
  * Test for the JobConf-related parts of common's ReflectionUtils
  * class.
  */
-public class TestReflectionUtils {
+public class TestMRCJCReflectionUtils {
   @Before
   public void setUp() {
     ReflectionUtils.clearCache();
@@ -33,7 +33,7 @@ import org.junit.Test;
 /**
  * A test to rest the RunJar class.
  */
-public class TestRunJar {
+public class TestMRCJCRunJar {
 
   private static String TEST_ROOT_DIR = new Path(System.getProperty(
       "test.build.data", "/tmp")).toString();
@@ -37,7 +37,7 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerRequ
 import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerResponse;
 import org.junit.Test;
 
-public class TestRPCFactories {
+public class TestYSCRPCFactories {
 
 
 
@@ -27,7 +27,7 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest;
 import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.NodeHeartbeatRequestPBImpl;
 import org.junit.Test;
 
-public class TestRecordFactory {
+public class TestYSCRecordFactory {
 
   @Test
   public void testPbRecordFactory() {