diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/pom.xml
index 727797aa660..1ae481d283b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/pom.xml
@@ -113,6 +113,11 @@
      <artifactId>system-rules</artifactId>
      <scope>test</scope>
+    <dependency>
+      <groupId>org.assertj</groupId>
+      <artifactId>assertj-core</artifactId>
+      <scope>test</scope>
+    </dependency>
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java
index 29d0952a081..150ef1fff25 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapred/TestTaskAttemptListenerImpl.java
@@ -68,6 +68,7 @@ import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
@@ -219,7 +220,7 @@ public class TestTaskAttemptListenerImpl {
JVMId.forName("jvm_001_002_m_004_006");
fail();
} catch (IllegalArgumentException e) {
- assertEquals(e.getMessage(),
+ assertThat(e.getMessage()).isEqualTo(
"TaskId string : jvm_001_002_m_004_006 is not properly formed");
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestEvents.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestEvents.java
index e2713191ac7..08896b7b2cc 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestEvents.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestEvents.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.mapreduce.jobhistory;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@@ -59,16 +60,16 @@ public class TestEvents {
TaskAttemptFinishedEvent test = new TaskAttemptFinishedEvent(taskAttemptId,
TaskType.REDUCE, "TEST", 123L, "RAKNAME", "HOSTNAME", "STATUS",
counters, 234);
- assertEquals(test.getAttemptId().toString(), taskAttemptId.toString());
-
- assertEquals(test.getCounters(), counters);
- assertEquals(test.getFinishTime(), 123L);
- assertEquals(test.getHostname(), "HOSTNAME");
- assertEquals(test.getRackName(), "RAKNAME");
- assertEquals(test.getState(), "STATUS");
- assertEquals(test.getTaskId(), tid);
- assertEquals(test.getTaskStatus(), "TEST");
- assertEquals(test.getTaskType(), TaskType.REDUCE);
+ assertThat(test.getAttemptId().toString())
+ .isEqualTo(taskAttemptId.toString());
+ assertThat(test.getCounters()).isEqualTo(counters);
+ assertThat(test.getFinishTime()).isEqualTo(123L);
+ assertThat(test.getHostname()).isEqualTo("HOSTNAME");
+ assertThat(test.getRackName()).isEqualTo("RAKNAME");
+ assertThat(test.getState()).isEqualTo("STATUS");
+ assertThat(test.getTaskId()).isEqualTo(tid);
+ assertThat(test.getTaskStatus()).isEqualTo("TEST");
+ assertThat(test.getTaskType()).isEqualTo(TaskType.REDUCE);
assertEquals(234, test.getStartTime());
}
@@ -83,8 +84,8 @@ public class TestEvents {
org.apache.hadoop.mapreduce.JobID jid = new JobID("001", 1);
JobPriorityChangeEvent test = new JobPriorityChangeEvent(jid,
JobPriority.LOW);
- assertEquals(test.getJobId().toString(), jid.toString());
- assertEquals(test.getPriority(), JobPriority.LOW);
+ assertThat(test.getJobId().toString()).isEqualTo(jid.toString());
+ assertThat(test.getPriority()).isEqualTo(JobPriority.LOW);
}
@@ -93,8 +94,8 @@ public class TestEvents {
org.apache.hadoop.mapreduce.JobID jid = new JobID("001", 1);
JobQueueChangeEvent test = new JobQueueChangeEvent(jid,
"newqueue");
- assertEquals(test.getJobId().toString(), jid.toString());
- assertEquals(test.getJobQueueName(), "newqueue");
+ assertThat(test.getJobId().toString()).isEqualTo(jid.toString());
+ assertThat(test.getJobQueueName()).isEqualTo("newqueue");
}
/**
@@ -107,8 +108,8 @@ public class TestEvents {
JobID jid = new JobID("001", 1);
TaskID tid = new TaskID(jid, TaskType.REDUCE, 2);
TaskUpdatedEvent test = new TaskUpdatedEvent(tid, 1234L);
- assertEquals(test.getTaskId().toString(), tid.toString());
- assertEquals(test.getFinishTime(), 1234L);
+ assertThat(test.getTaskId().toString()).isEqualTo(tid.toString());
+ assertThat(test.getFinishTime()).isEqualTo(1234L);
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java
index b9cb8046f94..c9a7d2d0831 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.mapreduce.jobhistory;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
@@ -412,8 +413,9 @@ public class TestJobHistoryEventHandler {
JobStateInternal.FAILED.toString())));
// verify the value of the sensitive property in job.xml is restored.
- Assert.assertEquals(sensitivePropertyName + " is modified.",
- conf.get(sensitivePropertyName), sensitivePropertyValue);
+ assertThat(conf.get(sensitivePropertyName))
+ .withFailMessage(sensitivePropertyName + " is modified.")
+ .isEqualTo(sensitivePropertyValue);
// load the job_conf.xml in JHS directory and verify property redaction.
Path jhsJobConfFile = getJobConfInIntermediateDoneDir(conf, params.jobId);
@@ -543,19 +545,19 @@ public class TestJobHistoryEventHandler {
JobHistoryEventHandler.MetaInfo mi =
JobHistoryEventHandler.fileMap.get(t.jobId);
- Assert.assertEquals(mi.getJobIndexInfo().getSubmitTime(), 100);
- Assert.assertEquals(mi.getJobIndexInfo().getJobStartTime(), 200);
- Assert.assertEquals(mi.getJobSummary().getJobSubmitTime(), 100);
- Assert.assertEquals(mi.getJobSummary().getJobLaunchTime(), 200);
+ assertThat(mi.getJobIndexInfo().getSubmitTime()).isEqualTo(100);
+ assertThat(mi.getJobIndexInfo().getJobStartTime()).isEqualTo(200);
+ assertThat(mi.getJobSummary().getJobSubmitTime()).isEqualTo(100);
+ assertThat(mi.getJobSummary().getJobLaunchTime()).isEqualTo(200);
handleEvent(jheh, new JobHistoryEvent(t.jobId,
new JobUnsuccessfulCompletionEvent(TypeConverter.fromYarn(t.jobId), 0,
0, 0, 0, 0, 0, 0, JobStateInternal.FAILED.toString())));
- Assert.assertEquals(mi.getJobIndexInfo().getSubmitTime(), 100);
- Assert.assertEquals(mi.getJobIndexInfo().getJobStartTime(), 200);
- Assert.assertEquals(mi.getJobSummary().getJobSubmitTime(), 100);
- Assert.assertEquals(mi.getJobSummary().getJobLaunchTime(), 200);
+ assertThat(mi.getJobIndexInfo().getSubmitTime()).isEqualTo(100);
+ assertThat(mi.getJobIndexInfo().getJobStartTime()).isEqualTo(200);
+ assertThat(mi.getJobSummary().getJobSubmitTime()).isEqualTo(100);
+ assertThat(mi.getJobSummary().getJobLaunchTime()).isEqualTo(200);
verify(jheh, times(1)).processDoneFiles(t.jobId);
mockWriter = jheh.getEventWriter();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestTaskAttemptReport.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestTaskAttemptReport.java
index cd7f7583b58..c8d81aea99b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestTaskAttemptReport.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestTaskAttemptReport.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.yarn.util.Records;
import org.junit.Test;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
@@ -69,8 +70,8 @@ public class TestTaskAttemptReport {
// Create basic class
TaskAttemptReport report = Records.newRecord(TaskAttemptReport.class);
// Verify properties initialized to null
- assertEquals(null, report.getCounters());
- assertEquals(null, report.getRawCounters());
+ assertThat(report.getCounters()).isNull();
+ assertThat(report.getRawCounters()).isNull();
}
@Test
@@ -80,8 +81,8 @@ public class TestTaskAttemptReport {
// Set raw counters to null
report.setRawCounters(null);
// Verify properties still null
- assertEquals(null, report.getCounters());
- assertEquals(null, report.getRawCounters());
+ assertThat(report.getCounters()).isNull();
+ assertThat(report.getRawCounters()).isNull();
}
@@ -92,8 +93,8 @@ public class TestTaskAttemptReport {
// Set raw counters to null
report.setCounters(null);
// Verify properties still null
- assertEquals(null, report.getCounters());
- assertEquals(null, report.getRawCounters());
+ assertThat(report.getCounters()).isNull();
+ assertThat(report.getRawCounters()).isNull();
}
@Test
@@ -108,8 +109,8 @@ public class TestTaskAttemptReport {
assertNotEquals(null, counters);
// Clear counters to null and then verify
report.setCounters(null);
- assertEquals(null, report.getCounters());
- assertEquals(null, report.getRawCounters());
+ assertThat(report.getCounters()).isNull();
+ assertThat(report.getRawCounters()).isNull();
}
@Test
@@ -124,8 +125,8 @@ public class TestTaskAttemptReport {
assertNotEquals(null, counters);
// Clear counters to null and then verify
report.setRawCounters(null);
- assertEquals(null, report.getCounters());
- assertEquals(null, report.getRawCounters());
+ assertThat(report.getCounters()).isNull();
+ assertThat(report.getRawCounters()).isNull();
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestTaskReport.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestTaskReport.java
index 68016612695..a9b34eea7cf 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestTaskReport.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestTaskReport.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.yarn.util.Records;
import org.junit.Test;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
@@ -68,7 +69,7 @@ public class TestTaskReport {
report.setCounters(altCounters);
// Verify real counters has priority over raw
Counters counters = report.getCounters();
- assertNotEquals(null, counters);
+ assertThat(counters).isNotNull();
assertNotEquals(rCounters, altCounters);
assertEquals(counters, altCounters);
}
@@ -78,8 +79,8 @@ public class TestTaskReport {
// Create basic class
TaskReport report = Records.newRecord(TaskReport.class);
// Verify properties initialized to null
- assertEquals(null, report.getCounters());
- assertEquals(null, report.getRawCounters());
+ assertThat(report.getCounters()).isNull();
+ assertThat(report.getRawCounters()).isNull();
}
@Test
@@ -89,8 +90,8 @@ public class TestTaskReport {
// Set raw counters to null
report.setRawCounters(null);
// Verify properties still null
- assertEquals(null, report.getCounters());
- assertEquals(null, report.getRawCounters());
+ assertThat(report.getCounters()).isNull();
+ assertThat(report.getRawCounters()).isNull();
}
@@ -101,8 +102,8 @@ public class TestTaskReport {
// Set raw counters to null
report.setCounters(null);
// Verify properties still null
- assertEquals(null, report.getCounters());
- assertEquals(null, report.getRawCounters());
+ assertThat(report.getCounters()).isNull();
+ assertThat(report.getRawCounters()).isNull();
}
@Test
@@ -117,8 +118,8 @@ public class TestTaskReport {
assertNotEquals(null, counters);
// Clear counters to null and then verify
report.setCounters(null);
- assertEquals(null, report.getCounters());
- assertEquals(null, report.getRawCounters());
+ assertThat(report.getCounters()).isNull();
+ assertThat(report.getRawCounters()).isNull();
}
@Test
@@ -133,7 +134,7 @@ public class TestTaskReport {
assertNotEquals(null, counters);
// Clear counters to null and then verify
report.setRawCounters(null);
- assertEquals(null, report.getCounters());
- assertEquals(null, report.getRawCounters());
+ assertThat(report.getCounters()).isNull();
+ assertThat(report.getRawCounters()).isNull();
}
}
\ No newline at end of file
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFetchFailure.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFetchFailure.java
index 67a89014eab..dd6f810b7ed 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFetchFailure.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFetchFailure.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.mapreduce.v2.app;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
@@ -356,8 +357,8 @@ public class TestFetchFailure {
Assert.assertEquals("Map TaskAttempt state not correct",
TaskAttemptState.FAILED, mapAttempt1.getState());
- Assert.assertEquals(mapAttempt1.getDiagnostics().get(0),
- "Too many fetch failures. Failing the attempt. "
+ assertThat(mapAttempt1.getDiagnostics().get(0))
+ .isEqualTo("Too many fetch failures. Failing the attempt. "
+ "Last failure reported by "
+ reduceAttempt3.getID().toString() + " from host host3");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java
index cb1963cdfa6..5bf51022327 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.mapreduce.v2.app;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@@ -550,8 +551,8 @@ public class TestMRAppMaster {
.handleEvent(captor.capture());
HistoryEvent event = captor.getValue().getHistoryEvent();
assertTrue(event instanceof JobUnsuccessfulCompletionEvent);
- assertEquals(((JobUnsuccessfulCompletionEvent) event).getStatus()
- , expectedJobState);
+ assertThat(((JobUnsuccessfulCompletionEvent) event).getStatus())
+ .isEqualTo(expectedJobState);
}
}
class MRAppMasterTest extends MRAppMaster {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRClientService.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRClientService.java
index ca3c28cbaf5..9906def3ac9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRClientService.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRClientService.java
@@ -285,7 +285,7 @@ public class TestMRClientService {
Assert.assertEquals(1, amInfo.getContainerId().getApplicationAttemptId()
.getAttemptId());
Assert.assertTrue(amInfo.getStartTime() > 0);
- Assert.assertEquals(false, jr.isUber());
+ Assert.assertFalse(jr.isUber());
}
private void verifyTaskAttemptReport(TaskAttemptReport tar) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java
index b2807c1f4e3..dce69e41a1d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.mapreduce.v2.app;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@@ -2075,7 +2076,7 @@ public class TestRecovery {
expectedOutput.append(key1).append("\n");
expectedOutput.append(key2).append('\t').append(val2).append("\n");
String output = slurp(expectedFile);
- Assert.assertEquals(output, expectedOutput.toString());
+ assertThat(output).isEqualTo(expectedOutput.toString());
}
public static String slurp(File f) throws IOException {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java
index ed70a986976..d20b01212a5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java
@@ -82,6 +82,9 @@ import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.offset;
+
@SuppressWarnings({"unchecked", "rawtypes"})
public class TestRuntimeEstimators {
@@ -151,10 +154,10 @@ public class TestRuntimeEstimators {
500L, speculator.getSoonestRetryAfterNoSpeculate());
Assert.assertEquals("wrong SPECULATIVE_RETRY_AFTER_SPECULATE value",
5000L, speculator.getSoonestRetryAfterSpeculate());
- Assert.assertEquals(speculator.getProportionRunningTasksSpeculatable(),
- 0.1, 0.00001);
- Assert.assertEquals(speculator.getProportionTotalTasksSpeculatable(),
- 0.001, 0.00001);
+ assertThat(speculator.getProportionRunningTasksSpeculatable())
+ .isCloseTo(0.1, offset(0.00001));
+ assertThat(speculator.getProportionTotalTasksSpeculatable())
+ .isCloseTo(0.001, offset(0.00001));
Assert.assertEquals("wrong SPECULATIVE_MINIMUM_ALLOWED_TASKS value",
5, speculator.getMinimumAllowedSpeculativeTasks());
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java
index 2efa9103f16..1f0ce2309e2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.mapreduce.v2.app;
+import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
@@ -111,10 +112,10 @@ import org.junit.Test;
appMaster.shutDownJob();
((RunningAppContext) appMaster.getContext()).resetIsLastAMRetry();
if (shouldHaveDeleted) {
- Assert.assertEquals(new Boolean(true), appMaster.isLastAMRetry());
+ assertTrue(appMaster.isLastAMRetry());
verify(fs).delete(stagingJobPath, true);
} else {
- Assert.assertEquals(new Boolean(false), appMaster.isLastAMRetry());
+ assertFalse(appMaster.isLastAMRetry());
verify(fs, never()).delete(stagingJobPath, true);
}
}
@@ -141,7 +142,7 @@ import org.junit.Test;
appMaster.start();
appMaster.shutDownJob();
//test whether notifyIsLastAMRetry called
- Assert.assertEquals(true, ((TestMRApp)appMaster).getTestIsLastAMRetry());
+ assertTrue(((TestMRApp)appMaster).getTestIsLastAMRetry());
verify(fs).delete(stagingJobPath, true);
}
@@ -165,7 +166,7 @@ import org.junit.Test;
//shutdown the job, not the lastRetry
appMaster.shutDownJob();
//test whether notifyIsLastAMRetry called
- Assert.assertEquals(false, ((TestMRApp)appMaster).getTestIsLastAMRetry());
+ assertFalse(((TestMRApp)appMaster).getTestIsLastAMRetry());
verify(fs, times(0)).delete(stagingJobPath, true);
}
@@ -192,7 +193,7 @@ import org.junit.Test;
//shutdown the job, is lastRetry
appMaster.shutDownJob();
//test whether notifyIsLastAMRetry called
- Assert.assertEquals(true, ((TestMRApp)appMaster).getTestIsLastAMRetry());
+ assertTrue(((TestMRApp)appMaster).getTestIsLastAMRetry());
verify(fs).delete(stagingJobPath, true);
}
@@ -276,7 +277,7 @@ import org.junit.Test;
appMaster.start();
appMaster.shutDownJob();
//test whether notifyIsLastAMRetry called
- Assert.assertEquals(true, ((TestMRApp) appMaster).getTestIsLastAMRetry());
+ assertTrue(((TestMRApp) appMaster).getTestIsLastAMRetry());
verify(fs, times(0)).delete(stagingJobPath, true);
}
@@ -304,7 +305,7 @@ import org.junit.Test;
appMaster.start();
appMaster.shutDownJob();
//test whether notifyIsLastAMRetry called
- Assert.assertEquals(true, ((TestMRApp) appMaster).getTestIsLastAMRetry());
+ assertTrue(((TestMRApp) appMaster).getTestIsLastAMRetry());
verify(fs, times(0)).delete(stagingJobPath, true);
}
@@ -330,7 +331,7 @@ import org.junit.Test;
appMaster.start();
appMaster.shutDownJob();
//test whether notifyIsLastAMRetry called
- Assert.assertEquals(true, ((TestMRApp) appMaster).getTestIsLastAMRetry());
+ assertTrue(((TestMRApp) appMaster).getTestIsLastAMRetry());
//Staging dir should be deleted because it is not matched with
//PRESERVE_FILES_PATTERN
verify(fs, times(1)).delete(stagingJobPath, true);
@@ -361,7 +362,7 @@ import org.junit.Test;
appMaster.start();
appMaster.shutDownJob();
//test whether notifyIsLastAMRetry called
- Assert.assertEquals(true, ((TestMRApp) appMaster).getTestIsLastAMRetry());
+ assertTrue(((TestMRApp) appMaster).getTestIsLastAMRetry());
verify(fs, times(0)).delete(stagingJobPath, true);
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskAttempt.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskAttempt.java
index 11f16a8046a..d09531c641c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskAttempt.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskAttempt.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.mapreduce.v2.app.job.impl;
import static org.apache.hadoop.test.GenericTestUtils.waitFor;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@@ -377,10 +378,10 @@ public class TestTaskAttempt{
.getEventHandler()
.handle(new TaskAttemptEvent(rta.getID(), TaskAttemptEventType.TA_DONE));
app.waitForState(job, JobState.SUCCEEDED);
- Assert.assertEquals(mta.getFinishTime(), 11);
- Assert.assertEquals(mta.getLaunchTime(), 10);
- Assert.assertEquals(rta.getFinishTime(), 11);
- Assert.assertEquals(rta.getLaunchTime(), 10);
+ assertThat(mta.getFinishTime()).isEqualTo(11);
+ assertThat(mta.getLaunchTime()).isEqualTo(10);
+ assertThat(rta.getFinishTime()).isEqualTo(11);
+ assertThat(rta.getLaunchTime()).isEqualTo(10);
Counters counters = job.getAllCounters();
int memoryMb = (int) containerResource.getMemorySize();
@@ -683,8 +684,9 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptContainerAssignedEvent(attemptId,
container, mock(Map.class)));
taImpl.handle(new TaskAttemptContainerLaunchedEvent(attemptId, 0));
- assertEquals("Task attempt is not in running state", taImpl.getState(),
- TaskAttemptState.RUNNING);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in RUNNING state")
+ .isEqualTo(TaskAttemptState.RUNNING);
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_CONTAINER_CLEANED));
assertFalse("InternalError occurred trying to handle TA_CONTAINER_CLEANED",
@@ -744,8 +746,9 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_COMMIT_PENDING));
- assertEquals("Task attempt is not in commit pending state", taImpl.getState(),
- TaskAttemptState.COMMIT_PENDING);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in COMMIT_PENDING state")
+ .isEqualTo(TaskAttemptState.COMMIT_PENDING);
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_CONTAINER_CLEANED));
assertFalse("InternalError occurred trying to handle TA_CONTAINER_CLEANED",
@@ -810,16 +813,19 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_CONTAINER_COMPLETED));
- assertEquals("Task attempt is not in succeeded state", taImpl.getState(),
- TaskAttemptState.SUCCEEDED);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in SUCCEEDED state")
+ .isEqualTo(TaskAttemptState.SUCCEEDED);
taImpl.handle(new TaskAttemptTooManyFetchFailureEvent(attemptId,
reduceTAId, "Host"));
- assertEquals("Task attempt is not in FAILED state", taImpl.getState(),
- TaskAttemptState.FAILED);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in FAILED state")
+ .isEqualTo(TaskAttemptState.FAILED);
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_TOO_MANY_FETCH_FAILURE));
- assertEquals("Task attempt is not in FAILED state, still", taImpl.getState(),
- TaskAttemptState.FAILED);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in FAILED state, still")
+ .isEqualTo(TaskAttemptState.FAILED);
assertFalse("InternalError occurred trying to handle TA_CONTAINER_CLEANED",
eventHandler.internalError);
}
@@ -937,16 +943,19 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_CONTAINER_COMPLETED));
- assertEquals("Task attempt is not in succeeded state", taImpl.getState(),
- TaskAttemptState.SUCCEEDED);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in SUCCEEDED state")
+ .isEqualTo(TaskAttemptState.SUCCEEDED);
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_KILL));
- assertEquals("Task attempt is not in KILLED state", taImpl.getState(),
- TaskAttemptState.KILLED);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in KILLED state")
+ .isEqualTo(TaskAttemptState.KILLED);
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_TOO_MANY_FETCH_FAILURE));
- assertEquals("Task attempt is not in KILLED state, still", taImpl.getState(),
- TaskAttemptState.KILLED);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in KILLED state, still")
+ .isEqualTo(TaskAttemptState.KILLED);
assertFalse("InternalError occurred trying to handle TA_CONTAINER_CLEANED",
eventHandler.internalError);
}
@@ -1053,8 +1062,9 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_CONTAINER_COMPLETED));
- assertEquals("Task attempt is not in succeeded state", taImpl.getState(),
- TaskAttemptState.SUCCEEDED);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in SUCCEEDED state")
+ .isEqualTo(TaskAttemptState.SUCCEEDED);
assertTrue("Task Attempt finish time is not greater than 0",
taImpl.getFinishTime() > 0);
@@ -1064,8 +1074,9 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptTooManyFetchFailureEvent(attemptId,
reduceTAId, "Host"));
- assertEquals("Task attempt is not in Too Many Fetch Failure state",
- taImpl.getState(), TaskAttemptState.FAILED);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in FAILED state")
+ .isEqualTo(TaskAttemptState.FAILED);
assertEquals("After TA_TOO_MANY_FETCH_FAILURE,"
+ " Task attempt finish time is not the same ",
@@ -1090,10 +1101,13 @@ public class TestTaskAttempt{
TaskAttemptEventType.TA_SCHEDULE));
}
taImpl.handle(new TaskAttemptKillEvent(taImpl.getID(),"", true));
- assertEquals("Task attempt is not in KILLED state", taImpl.getState(),
- TaskAttemptState.KILLED);
- assertEquals("Task attempt's internal state is not KILLED",
- taImpl.getInternalState(), TaskAttemptStateInternal.KILLED);
+
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in KILLED state")
+ .isEqualTo(TaskAttemptState.KILLED);
+ assertThat(taImpl.getInternalState())
+ .withFailMessage("Task attempt's internal state is not KILLED")
+ .isEqualTo(TaskAttemptStateInternal.KILLED);
assertFalse("InternalError occurred", eventHandler.internalError);
TaskEvent event = eventHandler.lastTaskEvent;
assertEquals(TaskEventType.T_ATTEMPT_KILLED, event.getType());
@@ -1156,13 +1170,15 @@ public class TestTaskAttempt{
TaskAttemptEventType.TA_SCHEDULE));
taImpl.handle(new TaskAttemptContainerAssignedEvent(attemptId, container,
mock(Map.class)));
- assertEquals("Task attempt is not in assinged state",
- taImpl.getInternalState(), TaskAttemptStateInternal.ASSIGNED);
+
+ assertThat(taImpl.getInternalState())
+ .withFailMessage("Task attempt is not in ASSIGNED state")
+ .isEqualTo(TaskAttemptStateInternal.ASSIGNED);
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_KILL));
- assertEquals("Task should be in KILL_CONTAINER_CLEANUP state",
- TaskAttemptStateInternal.KILL_CONTAINER_CLEANUP,
- taImpl.getInternalState());
+ assertThat(taImpl.getInternalState())
+ .withFailMessage("Task should be in KILL_CONTAINER_CLEANUP state")
+ .isEqualTo(TaskAttemptStateInternal.KILL_CONTAINER_CLEANUP);
}
@Test
@@ -1211,15 +1227,16 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptContainerAssignedEvent(attemptId, container,
mock(Map.class)));
taImpl.handle(new TaskAttemptContainerLaunchedEvent(attemptId, 0));
- assertEquals("Task attempt is not in running state", taImpl.getState(),
- TaskAttemptState.RUNNING);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in RUNNING state")
+ .isEqualTo(TaskAttemptState.RUNNING);
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_KILL));
assertFalse("InternalError occurred trying to handle TA_KILL",
eventHandler.internalError);
- assertEquals("Task should be in KILL_CONTAINER_CLEANUP state",
- TaskAttemptStateInternal.KILL_CONTAINER_CLEANUP,
- taImpl.getInternalState());
+ assertThat(taImpl.getInternalState())
+ .withFailMessage("Task should be in KILL_CONTAINER_CLEANUP state")
+ .isEqualTo(TaskAttemptStateInternal.KILL_CONTAINER_CLEANUP);
}
@Test
@@ -1268,19 +1285,21 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptContainerAssignedEvent(attemptId, container,
mock(Map.class)));
taImpl.handle(new TaskAttemptContainerLaunchedEvent(attemptId, 0));
- assertEquals("Task attempt is not in running state", taImpl.getState(),
- TaskAttemptState.RUNNING);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in RUNNING state")
+ .isEqualTo(TaskAttemptState.RUNNING);
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_COMMIT_PENDING));
- assertEquals("Task should be in COMMIT_PENDING state",
- TaskAttemptStateInternal.COMMIT_PENDING, taImpl.getInternalState());
+ assertThat(taImpl.getInternalState())
+ .withFailMessage("Task should be in COMMIT_PENDING state")
+ .isEqualTo(TaskAttemptStateInternal.COMMIT_PENDING);
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_KILL));
assertFalse("InternalError occurred trying to handle TA_KILL",
eventHandler.internalError);
- assertEquals("Task should be in KILL_CONTAINER_CLEANUP state",
- TaskAttemptStateInternal.KILL_CONTAINER_CLEANUP,
- taImpl.getInternalState());
+ assertThat(taImpl.getInternalState())
+ .withFailMessage("Task should be in KILL_CONTAINER_CLEANUP state")
+ .isEqualTo(TaskAttemptStateInternal.KILL_CONTAINER_CLEANUP);
}
@Test
@@ -1291,33 +1310,37 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_DONE));
- assertEquals("Task attempt is not in SUCCEEDED state", taImpl.getState(),
- TaskAttemptState.SUCCEEDED);
- assertEquals("Task attempt's internal state is not " +
- "SUCCESS_FINISHING_CONTAINER", taImpl.getInternalState(),
- TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in SUCCEEDED state")
+ .isEqualTo(TaskAttemptState.SUCCEEDED);
+ assertThat(taImpl.getInternalState()).withFailMessage(
+ "Task attempt's internal state is not SUCCESS_FINISHING_CONTAINER")
+ .isEqualTo(TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
// If the map task is killed when it is in SUCCESS_FINISHING_CONTAINER
// state, the state will move to KILL_CONTAINER_CLEANUP
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_KILL));
- assertEquals("Task attempt is not in KILLED state", taImpl.getState(),
- TaskAttemptState.KILLED);
- assertEquals("Task attempt's internal state is not KILL_CONTAINER_CLEANUP",
- taImpl.getInternalState(),
- TaskAttemptStateInternal.KILL_CONTAINER_CLEANUP);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in KILLED state")
+ .isEqualTo(TaskAttemptState.KILLED);
+ assertThat(taImpl.getInternalState()).withFailMessage(
+ "Task attempt's internal state is not KILL_CONTAINER_CLEANUP")
+ .isEqualTo(TaskAttemptStateInternal.KILL_CONTAINER_CLEANUP);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_CONTAINER_CLEANED));
- assertEquals("Task attempt's internal state is not KILL_TASK_CLEANUP",
- taImpl.getInternalState(),
- TaskAttemptStateInternal.KILL_TASK_CLEANUP);
+ assertThat(taImpl.getInternalState()).withFailMessage(
+ "Task attempt's internal state is not KILL_TASK_CLEANUP")
+ .isEqualTo(TaskAttemptStateInternal.KILL_TASK_CLEANUP);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_CLEANUP_DONE));
- assertEquals("Task attempt is not in KILLED state", taImpl.getState(),
- TaskAttemptState.KILLED);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in KILLED state")
+ .isEqualTo(TaskAttemptState.KILLED);
assertFalse("InternalError occurred", eventHandler.internalError);
}
@@ -1366,21 +1389,25 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_DONE));
- assertEquals("Task attempt is not in SUCCEEDED state", taImpl.getState(),
- TaskAttemptState.SUCCEEDED);
- assertEquals("Task attempt's internal state is not " +
- "SUCCESS_FINISHING_CONTAINER", taImpl.getInternalState(),
- TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in SUCCEEDED state")
+ .isEqualTo(TaskAttemptState.SUCCEEDED);
+ assertThat(taImpl.getInternalState())
+ .withFailMessage("Task attempt's internal state is not " +
+ "SUCCESS_FINISHING_CONTAINER")
+ .isEqualTo(TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_CONTAINER_CLEANED));
// Send a map task attempt kill event indicating next map attempt has to be
// reschedule
taImpl.handle(new TaskAttemptKillEvent(taImpl.getID(), "", true));
- assertEquals("Task attempt is not in KILLED state", taImpl.getState(),
- TaskAttemptState.KILLED);
- assertEquals("Task attempt's internal state is not KILLED",
- taImpl.getInternalState(), TaskAttemptStateInternal.KILLED);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in KILLED state")
+ .isEqualTo(TaskAttemptState.KILLED);
+ assertThat(taImpl.getInternalState())
+ .withFailMessage("Task attempt's internal state is not KILLED")
+ .isEqualTo(TaskAttemptStateInternal.KILLED);
assertFalse("InternalError occurred", eventHandler.internalError);
TaskEvent event = eventHandler.lastTaskEvent;
assertEquals(TaskEventType.T_ATTEMPT_KILLED, event.getType());
@@ -1424,39 +1451,46 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptFailEvent(taImpl.getID()));
- assertEquals("Task attempt is not in FAILED state", taImpl.getState(),
- TaskAttemptState.FAILED);
- assertEquals("Task attempt's internal state is not " +
- "FAIL_FINISHING_CONTAINER", taImpl.getInternalState(),
- TaskAttemptStateInternal.FAIL_FINISHING_CONTAINER);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in FAILED state")
+ .isEqualTo(TaskAttemptState.FAILED);
+ assertThat(taImpl.getInternalState())
+ .withFailMessage("Task attempt's internal state is not " +
+ "FAIL_FINISHING_CONTAINER")
+ .isEqualTo(TaskAttemptStateInternal.FAIL_FINISHING_CONTAINER);
// If the map task is killed when it is in FAIL_FINISHING_CONTAINER state,
// the state will stay in FAIL_FINISHING_CONTAINER.
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_KILL));
- assertEquals("Task attempt is not in RUNNING state", taImpl.getState(),
- TaskAttemptState.FAILED);
- assertEquals("Task attempt's internal state is not " +
- "FAIL_FINISHING_CONTAINER", taImpl.getInternalState(),
- TaskAttemptStateInternal.FAIL_FINISHING_CONTAINER);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in FAILED state")
+ .isEqualTo(TaskAttemptState.FAILED);
+ assertThat(taImpl.getInternalState())
+ .withFailMessage("Task attempt's internal state is not " +
+ "FAIL_FINISHING_CONTAINER")
+ .isEqualTo(TaskAttemptStateInternal.FAIL_FINISHING_CONTAINER);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_TIMED_OUT));
- assertEquals("Task attempt's internal state is not FAIL_CONTAINER_CLEANUP",
- taImpl.getInternalState(),
- TaskAttemptStateInternal.FAIL_CONTAINER_CLEANUP);
+ assertThat(taImpl.getInternalState())
+ .withFailMessage("Task attempt's internal state is not " +
+ "FAIL_CONTAINER_CLEANUP")
+ .isEqualTo(TaskAttemptStateInternal.FAIL_CONTAINER_CLEANUP);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_CONTAINER_CLEANED));
- assertEquals("Task attempt's internal state is not FAIL_TASK_CLEANUP",
- taImpl.getInternalState(),
- TaskAttemptStateInternal.FAIL_TASK_CLEANUP);
+ assertThat(taImpl.getInternalState())
+ .withFailMessage("Task attempt's internal state is not " +
+ "FAIL_TASK_CLEANUP")
+ .isEqualTo(TaskAttemptStateInternal.FAIL_TASK_CLEANUP);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_CLEANUP_DONE));
- assertEquals("Task attempt is not in KILLED state", taImpl.getState(),
- TaskAttemptState.FAILED);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in FAILED state")
+ .isEqualTo(TaskAttemptState.FAILED);
assertFalse("InternalError occurred", eventHandler.internalError);
}
@@ -1469,23 +1503,27 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_FAILMSG_BY_CLIENT));
- assertEquals("Task attempt is not in RUNNING state", taImpl.getState(),
- TaskAttemptState.FAILED);
- assertEquals("Task attempt's internal state is not " +
- "FAIL_CONTAINER_CLEANUP", taImpl.getInternalState(),
- TaskAttemptStateInternal.FAIL_CONTAINER_CLEANUP);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in FAILED state")
+ .isEqualTo(TaskAttemptState.FAILED);
+ assertThat(taImpl.getInternalState())
+ .withFailMessage("Task attempt's internal state is not " +
+ "FAIL_CONTAINER_CLEANUP")
+ .isEqualTo(TaskAttemptStateInternal.FAIL_CONTAINER_CLEANUP);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_CONTAINER_CLEANED));
- assertEquals("Task attempt's internal state is not FAIL_TASK_CLEANUP",
- taImpl.getInternalState(),
- TaskAttemptStateInternal.FAIL_TASK_CLEANUP);
+ assertThat(taImpl.getInternalState())
+ .withFailMessage("Task attempt's internal state is not " +
+ "FAIL_TASK_CLEANUP")
+ .isEqualTo(TaskAttemptStateInternal.FAIL_TASK_CLEANUP);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_CLEANUP_DONE));
- assertEquals("Task attempt is not in KILLED state", taImpl.getState(),
- TaskAttemptState.FAILED);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in FAILED state")
+ .isEqualTo(TaskAttemptState.FAILED);
assertFalse("InternalError occurred", eventHandler.internalError);
}
@@ -1498,20 +1536,24 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_DONE));
- assertEquals("Task attempt is not in RUNNING state", taImpl.getState(),
- TaskAttemptState.SUCCEEDED);
- assertEquals("Task attempt's internal state is not " +
- "SUCCESS_FINISHING_CONTAINER", taImpl.getInternalState(),
- TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in SUCCEEDED state")
+ .isEqualTo(TaskAttemptState.SUCCEEDED);
+ assertThat(taImpl.getInternalState())
+ .withFailMessage("Task attempt's internal state is not " +
+ "SUCCESS_FINISHING_CONTAINER")
+ .isEqualTo(TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
// TA_DIAGNOSTICS_UPDATE doesn't change state
taImpl.handle(new TaskAttemptDiagnosticsUpdateEvent(taImpl.getID(),
"Task got updated"));
- assertEquals("Task attempt is not in RUNNING state", taImpl.getState(),
- TaskAttemptState.SUCCEEDED);
- assertEquals("Task attempt's internal state is not " +
- "SUCCESS_FINISHING_CONTAINER", taImpl.getInternalState(),
- TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in SUCCEEDED state")
+ .isEqualTo(TaskAttemptState.SUCCEEDED);
+ assertThat(taImpl.getInternalState())
+ .withFailMessage("Task attempt's internal state is not " +
+ "SUCCESS_FINISHING_CONTAINER")
+ .isEqualTo(TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
assertFalse("InternalError occurred", eventHandler.internalError);
}
@@ -1524,21 +1566,25 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_DONE));
- assertEquals("Task attempt is not in RUNNING state", taImpl.getState(),
- TaskAttemptState.SUCCEEDED);
- assertEquals("Task attempt's internal state is not " +
- "SUCCESS_FINISHING_CONTAINER", taImpl.getInternalState(),
- TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in SUCCEEDED state")
+ .isEqualTo(TaskAttemptState.SUCCEEDED);
+ assertThat(taImpl.getInternalState())
+ .withFailMessage("Task attempt's internal state is not " +
+ "SUCCESS_FINISHING_CONTAINER")
+ .isEqualTo(TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
// If the task stays in SUCCESS_FINISHING_CONTAINER for too long,
// TaskAttemptListenerImpl will time out the attempt.
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_TIMED_OUT));
- assertEquals("Task attempt is not in RUNNING state", taImpl.getState(),
- TaskAttemptState.SUCCEEDED);
- assertEquals("Task attempt's internal state is not " +
- "SUCCESS_CONTAINER_CLEANUP", taImpl.getInternalState(),
- TaskAttemptStateInternal.SUCCESS_CONTAINER_CLEANUP);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in SUCCEEDED state")
+ .isEqualTo(TaskAttemptState.SUCCEEDED);
+ assertThat(taImpl.getInternalState())
+ .withFailMessage("Task attempt's internal state is not " +
+ "SUCCESS_CONTAINER_CLEANUP")
+ .isEqualTo(TaskAttemptStateInternal.SUCCESS_CONTAINER_CLEANUP);
assertFalse("InternalError occurred", eventHandler.internalError);
}
@@ -1550,19 +1596,22 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptFailEvent(taImpl.getID()));
- assertEquals("Task attempt is not in RUNNING state", taImpl.getState(),
- TaskAttemptState.FAILED);
- assertEquals("Task attempt's internal state is not " +
- "FAIL_FINISHING_CONTAINER", taImpl.getInternalState(),
- TaskAttemptStateInternal.FAIL_FINISHING_CONTAINER);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in FAILED state")
+ .isEqualTo(TaskAttemptState.FAILED);
+ assertThat(taImpl.getInternalState())
+ .withFailMessage("Task attempt's internal state is not " +
+ "FAIL_FINISHING_CONTAINER")
+ .isEqualTo(TaskAttemptStateInternal.FAIL_FINISHING_CONTAINER);
// If the task stays in FAIL_FINISHING_CONTAINER for too long,
// TaskAttemptListenerImpl will time out the attempt.
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_TIMED_OUT));
- assertEquals("Task attempt's internal state is not FAIL_CONTAINER_CLEANUP",
- taImpl.getInternalState(),
- TaskAttemptStateInternal.FAIL_CONTAINER_CLEANUP);
+ assertThat(taImpl.getInternalState())
+ .withFailMessage("Task attempt's internal state is not " +
+ "FAIL_CONTAINER_CLEANUP")
+ .isEqualTo(TaskAttemptStateInternal.FAIL_CONTAINER_CLEANUP);
assertFalse("InternalError occurred", eventHandler.internalError);
}
@@ -1784,8 +1833,9 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_SCHEDULE));
- assertEquals("Task attempt is not in STARTING state", taImpl.getState(),
- TaskAttemptState.STARTING);
+ assertThat(taImpl.getState())
+ .withFailMessage("Task attempt is not in STARTING state")
+ .isEqualTo(TaskAttemptState.STARTING);
ArgumentCaptor captor = ArgumentCaptor.forClass(Event.class);
verify(eventHandler, times(2)).handle(captor.capture());
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncher.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncher.java
index 222c2ae39b8..dda93b682b3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncher.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncher.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.mapreduce.v2.app.launcher;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import java.io.IOException;
@@ -124,7 +125,7 @@ public class TestContainerLauncher {
ThreadPoolExecutor threadPool = containerLauncher.getThreadPool();
// No events yet
- Assert.assertEquals(containerLauncher.initialPoolSize,
+ assertThat(containerLauncher.initialPoolSize).isEqualTo(
MRJobConfig.DEFAULT_MR_AM_CONTAINERLAUNCHER_THREADPOOL_INITIAL_SIZE);
Assert.assertEquals(0, threadPool.getPoolSize());
Assert.assertEquals(containerLauncher.initialPoolSize,
@@ -190,7 +191,7 @@ public class TestContainerLauncher {
20);
containerLauncher = new CustomContainerLauncher(context);
containerLauncher.init(conf);
- Assert.assertEquals(containerLauncher.initialPoolSize, 20);
+ assertThat(containerLauncher.initialPoolSize).isEqualTo(20);
}
@Test(timeout = 5000)
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java
index a2869941618..de4977205b0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.mapreduce.v2.app.local;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.isA;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
@@ -202,8 +203,8 @@ public class TestLocalContainerAllocator {
Container container = containerAssignedCaptor.getValue().getContainer();
Resource containerResource = container.getResource();
Assert.assertNotNull(containerResource);
- Assert.assertEquals(containerResource.getMemorySize(), 0);
- Assert.assertEquals(containerResource.getVirtualCores(), 0);
+ assertThat(containerResource.getMemorySize()).isEqualTo(0);
+ assertThat(containerResource.getVirtualCores()).isEqualTo(0);
}
private static ContainerAllocatorEvent createContainerRequestEvent() {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/rm/TestRMContainerAllocator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/rm/TestRMContainerAllocator.java
index 439be485a9f..4b5fa0adee3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/rm/TestRMContainerAllocator.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/rm/TestRMContainerAllocator.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.mapreduce.v2.app.rm;
import static org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestCreator.createRequest;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyFloat;
@@ -686,7 +687,7 @@ public class TestRMContainerAllocator {
rm.drainEvents();
}
// only 1 allocated container should be assigned
- Assert.assertEquals(assignedContainer, 1);
+ assertThat(assignedContainer).isEqualTo(1);
}
@Test
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
index 430b09db797..582022669d0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
@@ -46,6 +46,11 @@
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-core</artifactId>
+    <dependency>
+      <groupId>org.assertj</groupId>
+      <artifactId>assertj-core</artifactId>
+      <scope>test</scope>
+    </dependency>
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java
index d047b6b6428..d8ca74aec2f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java
@@ -90,7 +90,7 @@ public class TestLocalModeWithNewApis {
job.setOutputValueClass(IntWritable.class);
FileInputFormat.addInputPath(job, inDir);
FileOutputFormat.setOutputPath(job, outDir);
- assertEquals(job.waitForCompletion(true), true);
+ assertTrue(job.waitForCompletion(true));
String output = readOutput(outDir, conf);
assertEquals("The\t1\nbrown\t1\nfox\t2\nhas\t1\nmany\t1\n" +
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
index 629a2467653..737918473ee 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.mapreduce;
import org.apache.hadoop.util.StringUtils;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -189,8 +190,9 @@ public class TestTypeConverter {
TypeConverter.fromYarn(queueInfo, new Configuration());
//Verify that the converted queue has the 1 child we had added
- Assert.assertEquals("QueueInfo children weren't properly converted",
- returned.getQueueChildren().size(), 1);
+ assertThat(returned.getQueueChildren().size())
+ .withFailMessage("QueueInfo children weren't properly converted")
+ .isEqualTo(1);
}
@Test
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml
index 336829dcf8e..1bbd52058dc 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml
@@ -66,6 +66,11 @@
      <groupId>com.fasterxml.jackson.core</groupId>
      <artifactId>jackson-databind</artifactId>
+    <dependency>
+      <groupId>org.assertj</groupId>
+      <artifactId>assertj-core</artifactId>
+      <scope>test</scope>
+    </dependency>
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileOutputCommitter.java
index 999561aeb68..bb5c30e9511 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileOutputCommitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileOutputCommitter.java
@@ -24,6 +24,7 @@ import java.io.IOException;
import java.net.URI;
import org.junit.Test;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.*;
import org.junit.Assert;
@@ -181,7 +182,7 @@ public class TestFileOutputCommitter {
expectedOutput.append(key1).append("\n");
expectedOutput.append(key2).append('\t').append(val2).append("\n");
String output = slurp(expectedFile);
- assertEquals(output, expectedOutput.toString());
+ assertThat(output).isEqualTo(expectedOutput.toString());
}
private void validateMapFileOutputContent(
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestIndexCache.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestIndexCache.java
index 0cc3c662207..dabce770e82 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestIndexCache.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestIndexCache.java
@@ -244,7 +244,7 @@ public class TestIndexCache {
}
getInfoThread.join();
removeMapThread.join();
- assertEquals(true, cache.checkTotalMemoryUsed());
+ assertTrue(cache.checkTotalMemoryUsed());
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobConf.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobConf.java
index a493d167be9..40a8db4e4e7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobConf.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobConf.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.mapred;
import java.util.regex.Pattern;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.*;
import org.apache.hadoop.fs.Path;
@@ -99,9 +100,9 @@ public class TestJobConf {
assertEquals(70, conf.getMaxReduceTaskFailuresPercent());
// by default
- assertEquals(JobPriority.DEFAULT.name(), conf.getJobPriority().name());
+ assertThat(conf.getJobPriority()).isEqualTo(JobPriority.DEFAULT);
conf.setJobPriority(JobPriority.HIGH);
- assertEquals(JobPriority.HIGH.name(), conf.getJobPriority().name());
+ assertThat(conf.getJobPriority()).isEqualTo(JobPriority.HIGH);
assertNull(conf.getJobSubmitHostName());
conf.setJobSubmitHostName("hostname");
@@ -152,10 +153,10 @@ public class TestJobConf {
// make sure mapreduce.map|reduce.java.opts are not set by default
// so that they won't override mapred.child.java.opts
- assertEquals("mapreduce.map.java.opts should not be set by default",
- null, conf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS));
- assertEquals("mapreduce.reduce.java.opts should not be set by default",
- null, conf.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS));
+ assertNull("mapreduce.map.java.opts should not be set by default",
+ conf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS));
+ assertNull("mapreduce.reduce.java.opts should not be set by default",
+ conf.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS));
}
/**
@@ -225,44 +226,44 @@ public class TestJobConf {
JobConf configuration = new JobConf();
configuration.set(MRJobConfig.MAP_MEMORY_MB,String.valueOf(300));
configuration.set(MRJobConfig.REDUCE_MEMORY_MB,String.valueOf(300));
- Assert.assertEquals(configuration.getMemoryForMapTask(),300);
- Assert.assertEquals(configuration.getMemoryForReduceTask(),300);
+ assertThat(configuration.getMemoryForMapTask()).isEqualTo(300);
+ assertThat(configuration.getMemoryForReduceTask()).isEqualTo(300);
configuration.set("mapred.task.maxvmem" , String.valueOf(2*1024 * 1024));
configuration.set(MRJobConfig.MAP_MEMORY_MB,String.valueOf(300));
configuration.set(MRJobConfig.REDUCE_MEMORY_MB,String.valueOf(300));
- Assert.assertEquals(configuration.getMemoryForMapTask(),2);
- Assert.assertEquals(configuration.getMemoryForReduceTask(),2);
+ assertThat(configuration.getMemoryForMapTask()).isEqualTo(2);
+ assertThat(configuration.getMemoryForReduceTask()).isEqualTo(2);
configuration = new JobConf();
configuration.set("mapred.task.maxvmem" , "-1");
configuration.set(MRJobConfig.MAP_MEMORY_MB,String.valueOf(300));
configuration.set(MRJobConfig.REDUCE_MEMORY_MB,String.valueOf(400));
- Assert.assertEquals(configuration.getMemoryForMapTask(), 300);
- Assert.assertEquals(configuration.getMemoryForReduceTask(), 400);
+ assertThat(configuration.getMemoryForMapTask()).isEqualTo(300);
+ assertThat(configuration.getMemoryForReduceTask()).isEqualTo(400);
configuration = new JobConf();
configuration.set("mapred.task.maxvmem" , String.valueOf(2*1024 * 1024));
configuration.set(MRJobConfig.MAP_MEMORY_MB,"-1");
configuration.set(MRJobConfig.REDUCE_MEMORY_MB,"-1");
- Assert.assertEquals(configuration.getMemoryForMapTask(),2);
- Assert.assertEquals(configuration.getMemoryForReduceTask(),2);
+ assertThat(configuration.getMemoryForMapTask()).isEqualTo(2);
+ assertThat(configuration.getMemoryForReduceTask()).isEqualTo(2);
configuration = new JobConf();
configuration.set("mapred.task.maxvmem" , String.valueOf(-1));
configuration.set(MRJobConfig.MAP_MEMORY_MB,"-1");
configuration.set(MRJobConfig.REDUCE_MEMORY_MB,"-1");
- Assert.assertEquals(configuration.getMemoryForMapTask(),
+ assertThat(configuration.getMemoryForMapTask()).isEqualTo(
MRJobConfig.DEFAULT_MAP_MEMORY_MB);
- Assert.assertEquals(configuration.getMemoryForReduceTask(),
+ assertThat(configuration.getMemoryForReduceTask()).isEqualTo(
MRJobConfig.DEFAULT_REDUCE_MEMORY_MB);
configuration = new JobConf();
configuration.set("mapred.task.maxvmem" , String.valueOf(2*1024 * 1024));
configuration.set(MRJobConfig.MAP_MEMORY_MB, "3");
configuration.set(MRJobConfig.REDUCE_MEMORY_MB, "3");
- Assert.assertEquals(configuration.getMemoryForMapTask(),2);
- Assert.assertEquals(configuration.getMemoryForReduceTask(),2);
+ assertThat(configuration.getMemoryForMapTask()).isEqualTo(2);
+ assertThat(configuration.getMemoryForReduceTask()).isEqualTo(2);
}
/**
@@ -305,46 +306,47 @@ public class TestJobConf {
* Test deprecated accessor and mutator method for mapred.task.maxvmem
*/
@Test
+ @SuppressWarnings("deprecation")
public void testMaxVirtualMemoryForTask() {
JobConf configuration = new JobConf();
//get test case
configuration.set(MRJobConfig.MAP_MEMORY_MB, String.valueOf(300));
configuration.set(MRJobConfig.REDUCE_MEMORY_MB, String.valueOf(-1));
- Assert.assertEquals(
- configuration.getMaxVirtualMemoryForTask(), 1024 * 1024 * 1024);
+ assertThat(configuration.getMaxVirtualMemoryForTask())
+ .isEqualTo(1024 * 1024 * 1024);
configuration = new JobConf();
configuration.set(MRJobConfig.MAP_MEMORY_MB, String.valueOf(-1));
configuration.set(MRJobConfig.REDUCE_MEMORY_MB, String.valueOf(200));
- Assert.assertEquals(
- configuration.getMaxVirtualMemoryForTask(), 1024 * 1024 * 1024);
+ assertThat(configuration.getMaxVirtualMemoryForTask())
+ .isEqualTo(1024 * 1024 * 1024);
configuration = new JobConf();
configuration.set(MRJobConfig.MAP_MEMORY_MB, String.valueOf(-1));
configuration.set(MRJobConfig.REDUCE_MEMORY_MB, String.valueOf(-1));
configuration.set("mapred.task.maxvmem", String.valueOf(1 * 1024 * 1024));
- Assert.assertEquals(
- configuration.getMaxVirtualMemoryForTask(), 1 * 1024 * 1024);
+ assertThat(configuration.getMaxVirtualMemoryForTask())
+ .isEqualTo(1 * 1024 * 1024);
configuration = new JobConf();
configuration.set("mapred.task.maxvmem", String.valueOf(1 * 1024 * 1024));
- Assert.assertEquals(
- configuration.getMaxVirtualMemoryForTask(), 1 * 1024 * 1024);
+ assertThat(configuration.getMaxVirtualMemoryForTask())
+ .isEqualTo(1 * 1024 * 1024);
//set test case
configuration = new JobConf();
configuration.setMaxVirtualMemoryForTask(2 * 1024 * 1024);
- Assert.assertEquals(configuration.getMemoryForMapTask(), 2);
- Assert.assertEquals(configuration.getMemoryForReduceTask(), 2);
+ assertThat(configuration.getMemoryForMapTask()).isEqualTo(2);
+ assertThat(configuration.getMemoryForReduceTask()).isEqualTo(2);
configuration = new JobConf();
configuration.set(MRJobConfig.MAP_MEMORY_MB, String.valueOf(300));
configuration.set(MRJobConfig.REDUCE_MEMORY_MB, String.valueOf(400));
configuration.setMaxVirtualMemoryForTask(2 * 1024 * 1024);
- Assert.assertEquals(configuration.getMemoryForMapTask(), 2);
- Assert.assertEquals(configuration.getMemoryForReduceTask(), 2);
+ assertThat(configuration.getMemoryForMapTask()).isEqualTo(2);
+ assertThat(configuration.getMemoryForReduceTask()).isEqualTo(2);
}
/**
@@ -386,37 +388,35 @@ public class TestJobConf {
JobConf conf = new JobConf();
// by default
- assertEquals(JobPriority.DEFAULT.name(), conf.getJobPriority().name());
+ assertThat(conf.getJobPriority()).isEqualTo(JobPriority.DEFAULT);
assertEquals(0, conf.getJobPriorityAsInteger());
// Set JobPriority.LOW using old API, and verify output from both getter
conf.setJobPriority(JobPriority.LOW);
- assertEquals(JobPriority.LOW.name(), conf.getJobPriority().name());
+ assertThat(conf.getJobPriority()).isEqualTo(JobPriority.LOW);
assertEquals(2, conf.getJobPriorityAsInteger());
// Set JobPriority.VERY_HIGH using old API, and verify output
conf.setJobPriority(JobPriority.VERY_HIGH);
- assertEquals(JobPriority.VERY_HIGH.name(), conf.getJobPriority().name());
+ assertThat(conf.getJobPriority()).isEqualTo(JobPriority.VERY_HIGH);
assertEquals(5, conf.getJobPriorityAsInteger());
// Set 3 as priority using new API, and verify output from both getter
conf.setJobPriorityAsInteger(3);
- assertEquals(JobPriority.NORMAL.name(), conf.getJobPriority().name());
+ assertThat(conf.getJobPriority()).isEqualTo(JobPriority.NORMAL);
assertEquals(3, conf.getJobPriorityAsInteger());
// Set 4 as priority using new API, and verify output
conf.setJobPriorityAsInteger(4);
- assertEquals(JobPriority.HIGH.name(), conf.getJobPriority().name());
+ assertThat(conf.getJobPriority()).isEqualTo(JobPriority.HIGH);
assertEquals(4, conf.getJobPriorityAsInteger());
// Now set some high integer values and verify output from old api
conf.setJobPriorityAsInteger(57);
- assertEquals(JobPriority.UNDEFINED_PRIORITY.name(), conf.getJobPriority()
- .name());
+ assertThat(conf.getJobPriority()).isEqualTo(JobPriority.UNDEFINED_PRIORITY);
assertEquals(57, conf.getJobPriorityAsInteger());
// Error case where UNDEFINED_PRIORITY is set explicitly
conf.setJobPriority(JobPriority.UNDEFINED_PRIORITY);
- assertEquals(JobPriority.UNDEFINED_PRIORITY.name(), conf.getJobPriority()
- .name());
+ assertThat(conf.getJobPriority()).isEqualTo(JobPriority.UNDEFINED_PRIORITY);
// As UNDEFINED_PRIORITY cannot be mapped to any integer value, resetting
// to default as 0.
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobInfo.java
index 507b9782fe0..20edcbb426f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobInfo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestJobInfo.java
@@ -30,9 +30,10 @@ import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.JobStatus.State;
-import org.junit.Assert;
import org.junit.Test;
-import static org.junit.Assert.*;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.Assert.assertEquals;
/**
* test class JobInfo
@@ -69,9 +70,9 @@ public class TestJobInfo {
new org.apache.hadoop.mapred.TaskReport(tid1, 0.0f,
State.FAILED.toString(), null, TIPStatus.FAILED, 100, 100,
new org.apache.hadoop.mapred.Counters());
- Assert
- .assertEquals(treport.getTaskId(), "task_1014873536921_0006_m_000000");
- Assert.assertEquals(treport.getTaskID().toString(),
- "task_1014873536921_0006_m_000000");
+ assertThat(treport.getTaskId()).isEqualTo(
+ "task_1014873536921_0006_m_000000");
+ assertThat(treport.getTaskID().toString()).isEqualTo(
+ "task_1014873536921_0006_m_000000");
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestLineRecordReader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestLineRecordReader.java
index a84f6cc5f79..f4f2d18c382 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestLineRecordReader.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestLineRecordReader.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.mapred;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
@@ -565,7 +566,7 @@ public class TestLineRecordReader {
reader = new LineRecordReader(conf, split, recordDelimiterBytes);
// Get first record: "abcd|efgh" always possible
assertTrue("Expected record got nothing", reader.next(key, value));
- assertTrue("abcd|efgh".equals(value.toString()));
+ assertThat(value.toString()).isEqualTo("abcd|efgh");
assertEquals("Wrong position after record read", 9, value.getLength());
// Position should be 12 right after "|+|"
int recordPos = 12;
@@ -574,7 +575,7 @@ public class TestLineRecordReader {
// get the next record: "ij|kl" if the split/buffer allows it
if (reader.next(key, value)) {
// check the record info: "ij|kl"
- assertTrue("ij|kl".equals(value.toString()));
+ assertThat(value.toString()).isEqualTo("ij|kl");
// Position should be 20 right after "|+|"
recordPos = 20;
assertEquals("Wrong position after record read", recordPos,
@@ -583,7 +584,7 @@ public class TestLineRecordReader {
// get the third record: "mno|pqr" if the split/buffer allows it
if (reader.next(key, value)) {
// check the record info: "mno|pqr"
- assertTrue("mno|pqr".equals(value.toString()));
+ assertThat(value.toString()).isEqualTo("mno|pqr");
// Position should be 27 at the end of the string now
recordPos = inputData.length();
assertEquals("Wrong position after record read", recordPos,
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestMaster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestMaster.java
index 0f4ebeef896..37a56b08d6f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestMaster.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestMaster.java
@@ -20,7 +20,8 @@ package org.apache.hadoop.mapred;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import static org.junit.Assert.assertEquals;
+
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
import org.junit.Test;
@@ -46,6 +47,6 @@ public class TestMaster {
// Change master address to a valid value
conf.set(MRConfig.MASTER_ADDRESS, "bar.com:8042");
String masterHostname = Master.getMasterAddress(conf);
- assertEquals(masterHostname, "bar.com");
+ assertThat(masterHostname).isEqualTo("bar.com");
}
}
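For reviewers less familiar with AssertJ, a minimal sketch of the conversion pattern applied throughout this patch follows. It is illustrative only and not part of the diff; the class name and values are invented, and it assumes JUnit 4 plus the assertj-core test dependency added in this patch's pom changes.

    import static org.assertj.core.api.Assertions.assertThat;
    import static org.junit.Assert.assertEquals;

    import org.junit.Test;

    public class AssertStyleExample {
      @Test
      public void convertedAssertion() {
        String masterHostname = "bar.com";
        // Old JUnit style: many call sites passed the actual value first and
        // the expected value second, which garbles the failure message.
        assertEquals("bar.com", masterHostname);
        // AssertJ style: the actual value is always the assertThat() argument,
        // so expected and actual can no longer be swapped.
        assertThat(masterHostname).isEqualTo("bar.com");
      }
    }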
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestOldMethodsJobID.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestOldMethodsJobID.java
index 3b0311d336a..e13be2282b1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestOldMethodsJobID.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestOldMethodsJobID.java
@@ -27,6 +27,8 @@ import java.io.IOException;
import org.apache.hadoop.mapred.TaskCompletionEvent.Status;
import org.apache.hadoop.mapreduce.TaskType;
import org.junit.Test;
+
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.*;
/**
@@ -44,14 +46,14 @@ public class TestOldMethodsJobID {
public void testDepricatedMethods() throws IOException {
JobID jid = new JobID();
TaskID test = new TaskID(jid, true, 1);
- assertEquals(test.getTaskType(), TaskType.MAP);
+ assertThat(test.getTaskType()).isEqualTo(TaskType.MAP);
test = new TaskID(jid, false, 1);
- assertEquals(test.getTaskType(), TaskType.REDUCE);
+ assertThat(test.getTaskType()).isEqualTo(TaskType.REDUCE);
test = new TaskID("001", 1, false, 1);
- assertEquals(test.getTaskType(), TaskType.REDUCE);
+ assertThat(test.getTaskType()).isEqualTo(TaskType.REDUCE);
test = new TaskID("001", 1, true, 1);
- assertEquals(test.getTaskType(), TaskType.MAP);
+ assertThat(test.getTaskType()).isEqualTo(TaskType.MAP);
ByteArrayOutputStream out = new ByteArrayOutputStream();
test.write(new DataOutputStream(out));
@@ -100,9 +102,9 @@ public class TestOldMethodsJobID {
assertEquals(Status.OBSOLETE.toString(), testEl.getStatus().toString());
testEl.setTaskRunTime(20);
- assertEquals(testEl.getTaskRunTime(), 20);
+ assertThat(testEl.getTaskRunTime()).isEqualTo(20);
testEl.setEventId(16);
- assertEquals(testEl.getEventId(), 16);
+ assertThat(testEl.getEventId()).isEqualTo(16);
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestQueue.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestQueue.java
index b2908bf623b..56881c63c55 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestQueue.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestQueue.java
@@ -35,6 +35,7 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
@@ -71,7 +72,7 @@ public class TestQueue {
manager.setSchedulerInfo("first", "queueInfo");
manager.setSchedulerInfo("second", "queueInfoqueueInfo");
Queue root = manager.getRoot();
- assertTrue(root.getChildren().size() == 2);
+ assertThat(root.getChildren().size()).isEqualTo(2);
Iterator iterator = root.getChildren().iterator();
Queue firstSubQueue = iterator.next();
assertEquals("first", firstSubQueue.getName());
@@ -81,11 +82,15 @@ public class TestQueue {
"Users [user1, user2] and members of the groups [group1, group2] are allowed");
Queue secondSubQueue = iterator.next();
assertEquals("second", secondSubQueue.getName());
- assertEquals(secondSubQueue.getProperties().getProperty("key"), "value");
- assertEquals(secondSubQueue.getProperties().getProperty("key1"), "value1");
+ assertThat(secondSubQueue.getProperties().getProperty("key"))
+ .isEqualTo("value");
+ assertThat(secondSubQueue.getProperties().getProperty("key1"))
+ .isEqualTo("value1");
// test status
- assertEquals(firstSubQueue.getState().getStateName(), "running");
- assertEquals(secondSubQueue.getState().getStateName(), "stopped");
+ assertThat(firstSubQueue.getState().getStateName())
+ .isEqualTo("running");
+ assertThat(secondSubQueue.getState().getStateName())
+ .isEqualTo("stopped");
Set template = new HashSet();
template.add("first");
@@ -105,7 +110,7 @@ public class TestQueue {
assertTrue(manager.hasAccess("first", QueueACL.ADMINISTER_JOBS, mockUGI));
QueueAclsInfo[] qai = manager.getQueueAcls(mockUGI);
- assertEquals(qai.length, 1);
+ assertThat(qai.length).isEqualTo(1);
// test refresh queue
manager.refreshQueues(getConfiguration(), null);
@@ -113,21 +118,28 @@ public class TestQueue {
Queue firstSubQueue1 = iterator.next();
Queue secondSubQueue1 = iterator.next();
// test equals method
- assertTrue(firstSubQueue.equals(firstSubQueue1));
- assertEquals(firstSubQueue1.getState().getStateName(), "running");
- assertEquals(secondSubQueue1.getState().getStateName(), "stopped");
+ assertThat(firstSubQueue).isEqualTo(firstSubQueue1);
+ assertThat(firstSubQueue1.getState().getStateName())
+ .isEqualTo("running");
+ assertThat(secondSubQueue1.getState().getStateName())
+ .isEqualTo("stopped");
- assertEquals(firstSubQueue1.getSchedulingInfo(), "queueInfo");
- assertEquals(secondSubQueue1.getSchedulingInfo(), "queueInfoqueueInfo");
+ assertThat(firstSubQueue1.getSchedulingInfo())
+ .isEqualTo("queueInfo");
+ assertThat(secondSubQueue1.getSchedulingInfo())
+ .isEqualTo("queueInfoqueueInfo");
// test JobQueueInfo
- assertEquals(firstSubQueue.getJobQueueInfo().getQueueName(), "first");
- assertEquals(firstSubQueue.getJobQueueInfo().getQueueState(), "running");
- assertEquals(firstSubQueue.getJobQueueInfo().getSchedulingInfo(),
- "queueInfo");
- assertEquals(secondSubQueue.getJobQueueInfo().getChildren().size(), 0);
+ assertThat(firstSubQueue.getJobQueueInfo().getQueueName())
+ .isEqualTo("first");
+ assertThat(firstSubQueue.getJobQueueInfo().getState().toString())
+ .isEqualTo("running");
+ assertThat(firstSubQueue.getJobQueueInfo().getSchedulingInfo())
+ .isEqualTo("queueInfo");
+ assertThat(secondSubQueue.getJobQueueInfo().getChildren().size())
+ .isEqualTo(0);
// test
- assertEquals(manager.getSchedulerInfo("first"), "queueInfo");
+ assertThat(manager.getSchedulerInfo("first")).isEqualTo("queueInfo");
Set queueJobQueueInfos = new HashSet();
for(JobQueueInfo jobInfo : manager.getJobQueueInfos()){
queueJobQueueInfos.add(jobInfo.getQueueName());
@@ -138,8 +150,8 @@ public class TestQueue {
}
assertEquals(queueJobQueueInfos, rootJobQueueInfos);
// test getJobQueueInfoMapping
- assertEquals(
- manager.getJobQueueInfoMapping().get("first").getQueueName(), "first");
+ assertThat(manager.getJobQueueInfoMapping().get("first").getQueueName())
+ .isEqualTo("first");
// test dumpConfiguration
Writer writer = new StringWriter();
@@ -185,7 +197,7 @@ public class TestQueue {
@Test (timeout=5000)
public void testDefaultConfig() {
QueueManager manager = new QueueManager(true);
- assertEquals(manager.getRoot().getChildren().size(), 2);
+ assertThat(manager.getRoot().getChildren().size()).isEqualTo(2);
}
/**
@@ -209,27 +221,27 @@ public class TestQueue {
Iterator iterator = root.getChildren().iterator();
Queue firstSubQueue = iterator.next();
assertEquals("first", firstSubQueue.getName());
- assertEquals(
+ assertThat(
firstSubQueue.getAcls().get("mapred.queue.first.acl-submit-job")
- .toString(),
- "Users [user1, user2] and members of the groups [group1, group2] are allowed");
+ .toString()).isEqualTo(
+ "Users [user1, user2] and members of " +
+ "the groups [group1, group2] are allowed");
Queue secondSubQueue = iterator.next();
assertEquals("second", secondSubQueue.getName());
- assertEquals(firstSubQueue.getState().getStateName(), "running");
- assertEquals(secondSubQueue.getState().getStateName(), "stopped");
+ assertThat(firstSubQueue.getState().getStateName()).isEqualTo("running");
+ assertThat(secondSubQueue.getState().getStateName()).isEqualTo("stopped");
assertTrue(manager.isRunning("first"));
assertFalse(manager.isRunning("second"));
- assertEquals(firstSubQueue.getSchedulingInfo(), "queueInfo");
- assertEquals(secondSubQueue.getSchedulingInfo(), "queueInfoqueueInfo");
-// test leaf queue
+ assertThat(firstSubQueue.getSchedulingInfo()).isEqualTo("queueInfo");
+ assertThat(secondSubQueue.getSchedulingInfo())
+ .isEqualTo("queueInfoqueueInfo");
+ // test leaf queue
Set template = new HashSet();
template.add("first");
template.add("second");
assertEquals(manager.getLeafQueueNames(), template);
-
-
}
/**
* write configuration
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestTaskLog.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestTaskLog.java
index ee3bd81e9bd..7c4965cbdd6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestTaskLog.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestTaskLog.java
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.mapred;
-import static org.junit.Assert.assertEquals;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -58,7 +58,7 @@ public class TestTaskLog {
// test TaskLog
System.setProperty(
YarnConfiguration.YARN_APP_CONTAINER_LOG_DIR, "testString");
- assertEquals(TaskLog.getMRv2LogDir(), "testString");
+ assertThat(TaskLog.getMRv2LogDir()).isEqualTo("testString");
TaskAttemptID taid = mock(TaskAttemptID.class);
JobID jid = new JobID("job", 1);
@@ -137,7 +137,7 @@ public class TestTaskLog {
// test TaskLog
- assertEquals(TaskLog.getMRv2LogDir(), null);
+ assertThat(TaskLog.getMRv2LogDir()).isNull();
TaskAttemptID taid = mock(TaskAttemptID.class);
JobID jid = new JobID("job", 1);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestTaskLogAppender.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestTaskLogAppender.java
index 89cf07c6824..52e9d7be356 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestTaskLogAppender.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestTaskLogAppender.java
@@ -29,7 +29,10 @@ import org.apache.log4j.PatternLayout;
import org.apache.log4j.Priority;
import org.apache.log4j.spi.LoggingEvent;
import org.junit.Test;
-import static org.junit.Assert.*;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
public class TestTaskLogAppender {
/**
@@ -43,9 +46,9 @@ public class TestTaskLogAppender {
System.setProperty(TaskLogAppender.TASKID_PROPERTY,"attempt_01_02_m03_04_001");
System.setProperty(TaskLogAppender.LOGSIZE_PROPERTY, "1003");
appender.activateOptions();
- assertEquals(appender.getTaskId(), "attempt_01_02_m03_04_001");
- assertEquals(appender.getTotalLogFileSize(),1000);
- assertEquals(appender.getIsCleanup(),false);
+ assertThat(appender.getTaskId()).isEqualTo("attempt_01_02_m03_04_001");
+ assertThat(appender.getTotalLogFileSize()).isEqualTo(1000);
+ assertFalse(appender.getIsCleanup());
// test writer
Writer writer= new StringWriter();
@@ -63,7 +66,7 @@ public class TestTaskLogAppender {
appender= new TaskLogAppender();
appender.setIsCleanup(true);
appender.activateOptions();
- assertEquals(appender.getIsCleanup(),true);
+ assertTrue(appender.getIsCleanup());
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestTaskProgressReporter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestTaskProgressReporter.java
index e5ff64e3179..6d34270e12e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestTaskProgressReporter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestTaskProgressReporter.java
@@ -37,6 +37,8 @@ import org.apache.hadoop.util.ExitUtil;
import org.junit.Assert;
import org.junit.Test;
+import static org.assertj.core.api.Assertions.assertThat;
+
public class TestTaskProgressReporter {
private static int statusUpdateTimes = 0;
@@ -262,7 +264,7 @@ public class TestTaskProgressReporter {
task.setTaskDone();
reporter.resetDoneFlag();
t.join();
- Assert.assertEquals(statusUpdateTimes, 2);
+ assertThat(statusUpdateTimes).isEqualTo(2);
}
@Test(timeout=10000)
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
index dd717a66cd7..526485df934 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
@@ -58,6 +58,7 @@ import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
@@ -233,7 +234,7 @@ public class TestFileOutputCommitter {
expectedOutput.append(key1).append("\n");
expectedOutput.append(key2).append('\t').append(val2).append("\n");
String output = slurp(expectedFile);
- assertEquals(output, expectedOutput.toString());
+ assertThat(output).isEqualTo(expectedOutput.toString());
}
private void validateMapFileOutputContent(
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/security/TestTokenCache.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/security/TestTokenCache.java
index 9dc4f3e4c3b..c431c075e72 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/security/TestTokenCache.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/security/TestTokenCache.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.mapreduce.security;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.*;
@@ -115,7 +116,7 @@ public class TestTokenCache {
// get token for fs3, should only add token for fs3
TokenCache.obtainTokensForNamenodesInternal(fs3, creds, conf, renewer);
    Token<?> token3 = creds.getToken(new Text(fs3.getCanonicalServiceName()));
- assertTrue(token3 != null);
+ assertThat(token3).isNotNull();
checkToken(creds, newerToken1, token2, token3);
// be paranoid, check one last time that nothing changes
@@ -129,7 +130,7 @@ public class TestTokenCache {
assertEquals(tokens.length, creds.getAllTokens().size());
    for (Token<?> token : tokens) {
      Token<?> credsToken = creds.getToken(token.getService());
- assertTrue(credsToken != null);
+ assertThat(credsToken).isNotNull();
assertEquals(token, credsToken);
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestFetcher.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestFetcher.java
index 94e7b9a92a2..ed35ff6a2bd 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestFetcher.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestFetcher.java
@@ -27,11 +27,11 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.junit.After;
-import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.rules.TestName;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
@@ -182,12 +182,12 @@ public class TestFetcher {
except, key, connection);
fetcher.copyFromHost(host);
- Assert.assertEquals("No host failure is expected.",
- ss.hostFailureCount(host.getHostName()), 0);
- Assert.assertEquals("No fetch failure is expected.",
- ss.fetchFailureCount(map1ID), 0);
- Assert.assertEquals("No fetch failure is expected.",
- ss.fetchFailureCount(map2ID), 0);
+ assertThat(ss.hostFailureCount(host.getHostName()))
+ .withFailMessage("No host failure is expected.").isEqualTo(0);
+ assertThat(ss.fetchFailureCount(map1ID))
+ .withFailMessage("No fetch failure is expected.").isEqualTo(0);
+ assertThat(ss.fetchFailureCount(map2ID))
+ .withFailMessage("No fetch failure is expected.").isEqualTo(0);
verify(ss).penalize(eq(host), anyLong());
verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestMergeManager.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestMergeManager.java
index 494f3aa8241..4e718b85a82 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestMergeManager.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestMergeManager.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.mapreduce.task.reduce;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
@@ -78,7 +79,7 @@ public class TestMergeManager {
// next reservation should be a WAIT
MapOutput out3 = mgr.reserve(null, OUTPUT_SIZE, 0);
- Assert.assertEquals("Should be told to wait", null, out3);
+ assertThat(out3).withFailMessage("Should be told to wait").isNull();
// trigger the first merge and wait for merge thread to start merging
// and free enough output to reserve more
@@ -102,7 +103,7 @@ public class TestMergeManager {
// next reservation should be null
out3 = mgr.reserve(null, OUTPUT_SIZE, 0);
- Assert.assertEquals("Should be told to wait", null, out3);
+ assertThat(out3).withFailMessage("Should be told to wait").isNull();
// commit output *before* merge thread completes
mout1.commit();
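A note on the withFailMessage() chaining used in the hunks above: in AssertJ the custom message applies only to assertions chained after it, which is why it is placed before isNull() and isEqualTo() throughout this patch. A self-contained sketch, with an invented class name and not part of the diff:

    import static org.assertj.core.api.Assertions.assertThat;

    import org.junit.Test;

    public class FailMessageOrderExample {
      @Test
      public void failMessageBeforeAssertion() {
        Object out3 = null;
        // withFailMessage() must precede the assertion it describes;
        // chained after it, the message would not affect the failure text.
        assertThat(out3).withFailMessage("Should be told to wait").isNull();
      }
    }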
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestMerger.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestMerger.java
index c29d785ccfb..13cb6b32214 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestMerger.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/task/reduce/TestMerger.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.mapreduce.task.reduce;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
@@ -181,8 +182,10 @@ public class TestMerger {
readOnDiskMapOutput(conf, fs, next, keys, values);
paths.add(next);
}
- Assert.assertEquals(keys, Arrays.asList("apple", "banana", "carrot", "apple", "banana", "carrot"));
- Assert.assertEquals(values, Arrays.asList("awesome", "bla", "amazing", "disgusting", "pretty good", "delicious"));
+ assertThat(keys).isEqualTo(Arrays.asList("apple", "banana", "carrot",
+ "apple", "banana", "carrot"));
+ assertThat(values).isEqualTo(Arrays.asList("awesome", "bla", "amazing",
+ "disgusting", "pretty good", "delicious"));
mergeManager.close();
mergeManager = new MergeManagerImpl(
@@ -197,8 +200,10 @@ public class TestMerger {
keys = new ArrayList();
values = new ArrayList();
readOnDiskMapOutput(conf, fs, mergeManager.onDiskMapOutputs.iterator().next(), keys, values);
- Assert.assertEquals(keys, Arrays.asList("apple", "apple", "banana", "banana", "carrot", "carrot"));
- Assert.assertEquals(values, Arrays.asList("awesome", "disgusting", "pretty good", "bla", "amazing", "delicious"));
+ assertThat(keys).isEqualTo(Arrays.asList("apple", "apple", "banana",
+ "banana", "carrot", "carrot"));
+ assertThat(values).isEqualTo(Arrays.asList("awesome", "disgusting",
+ "pretty good", "bla", "amazing", "delicious"));
mergeManager.close();
Assert.assertEquals(0, mergeManager.inMemoryMapOutputs.size());
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/pom.xml
index d737e815cd2..eea95457459 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/pom.xml
@@ -70,6 +70,11 @@
org.fusesource.leveldbjni
leveldbjni-all
+
+ org.assertj
+ assertj-core
+ test
+
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJHSDelegationTokenSecretManager.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJHSDelegationTokenSecretManager.java
index 64715fbd694..1208bacb21f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJHSDelegationTokenSecretManager.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJHSDelegationTokenSecretManager.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.mapreduce.v2.hs;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@@ -181,10 +182,10 @@ public class TestJHSDelegationTokenSecretManager {
JHSDelegationTokenSecretManagerForTest mgr) {
mgr.stopThreads();
mgr.reset();
- Assert.assertEquals("Secret manager should not contain keys",
- mgr.getAllKeys().length, 0);
- Assert.assertEquals("Secret manager should not contain tokens",
- mgr.getAllTokens().size(), 0);
+ assertThat(mgr.getAllKeys().length)
+ .withFailMessage("Secret manager should not contain keys").isZero();
+ assertThat(mgr.getAllTokens().size())
+ .withFailMessage("Secret manager should not contain tokens").isZero();
}
private static class JHSDelegationTokenSecretManagerForTest
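The isZero() and isOne() calls introduced here are AssertJ shorthands for isEqualTo(0) and isEqualTo(1) on numeric actuals. A minimal sketch with invented names, not part of the patch:

    import static org.assertj.core.api.Assertions.assertThat;

    import org.junit.Test;

    public class NumericShorthandExample {
      @Test
      public void zeroAndOneShorthands() {
        int keyCount = 0;
        int cacheSize = 1;
        assertThat(keyCount).isZero();   // equivalent to isEqualTo(0)
        assertThat(cacheSize).isOne();   // equivalent to isEqualTo(1)
      }
    }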
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistory.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistory.java
index 1595e8b1f4d..e9f1ba6e305 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistory.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistory.java
@@ -42,6 +42,7 @@ import org.junit.Test;
import org.mockito.Mockito;
import static junit.framework.TestCase.assertEquals;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
@@ -139,7 +140,7 @@ public class TestJobHistory {
.getHistoryStorage());
assertTrue(storage.getUseLoadedTasksCache());
- assertEquals(storage.getLoadedTasksCacheSize(), 50);
+ assertThat(storage.getLoadedTasksCacheSize()).isEqualTo(50);
// Create a bunch of smaller jobs (<< 50 tasks)
Job[] jobs = new Job[10];
@@ -202,7 +203,7 @@ public class TestJobHistory {
.getHistoryStorage());
assertTrue(storage.getUseLoadedTasksCache());
- assertEquals(storage.getLoadedTasksCacheSize(), 500);
+ assertThat(storage.getLoadedTasksCacheSize()).isEqualTo(500);
// Create a bunch of large jobs (>> 50 tasks)
Job[] lgJobs = new Job[10];
@@ -263,7 +264,7 @@ public class TestJobHistory {
.getHistoryStorage());
assertTrue(storage.getUseLoadedTasksCache());
- assertEquals(storage.getLoadedTasksCacheSize(), 1);
+ assertThat(storage.getLoadedTasksCacheSize()).isOne();
}
@Test
@@ -281,7 +282,7 @@ public class TestJobHistory {
.getHistoryStorage());
assertTrue(storage.getUseLoadedTasksCache());
- assertEquals(storage.getLoadedTasksCacheSize(), 1);
+ assertThat(storage.getLoadedTasksCacheSize()).isOne();
}
@Test
@@ -300,7 +301,7 @@ public class TestJobHistory {
.getHistoryStorage());
assertTrue(storage.getUseLoadedTasksCache());
- assertEquals(storage.getLoadedTasksCacheSize(), 50);
+ assertThat(storage.getLoadedTasksCacheSize()).isEqualTo(50);
// Create jobs for bad fileInfo results
Job[] jobs = new Job[4];
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEntities.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEntities.java
index c6ddae52ecb..e0d7b2c7479 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEntities.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEntities.java
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.mapreduce.v2.hs;
-import static org.junit.Assert.assertEquals;
-
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
@@ -51,7 +49,10 @@ import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.apache.hadoop.mapred.TaskCompletionEvent;
-import static org.junit.Assert.*;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.*;
@RunWith(value = Parameterized.class)
@@ -106,7 +107,7 @@ public class TestJobHistoryEntities {
assertEquals(1, completedJob.getCompletedReduces());
assertEquals(12, completedJob.getTasks().size());
//Verify tasks loaded at this point.
- assertEquals(true, completedJob.tasksLoaded.get());
+ assertThat(completedJob.tasksLoaded.get()).isTrue();
assertEquals(10, completedJob.getTasks(TaskType.MAP).size());
assertEquals(2, completedJob.getTasks(TaskType.REDUCE).size());
assertEquals("user", completedJob.getUserName());
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java
index e7e912fa994..4ad457a8e86 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java
@@ -45,6 +45,8 @@ import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static org.assertj.core.api.Assertions.assertThat;
+
public class TestJobHistoryEvents {
private static final Logger LOG =
LoggerFactory.getLogger(TestJobHistoryEvents.class);
@@ -179,14 +181,16 @@ public class TestJobHistoryEvents {
((JobHistory)context).init(conf);
((JobHistory)context).start();
Assert.assertTrue( context.getStartTime()>0);
- Assert.assertEquals(((JobHistory)context).getServiceState(),Service.STATE.STARTED);
+ assertThat(((JobHistory)context).getServiceState())
+ .isEqualTo(Service.STATE.STARTED);
// get job before stopping JobHistory
Job parsedJob = context.getJob(jobId);
// stop JobHistory
((JobHistory)context).stop();
- Assert.assertEquals(((JobHistory)context).getServiceState(),Service.STATE.STOPPED);
+ assertThat(((JobHistory)context).getServiceState())
+ .isEqualTo(Service.STATE.STOPPED);
Assert.assertEquals("QueueName not correct", "assignedQueue",
parsedJob.getQueueName());
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java
index 7b70f980b47..f309ba957ce 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.mapreduce.v2.hs;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic
.NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
@@ -449,8 +450,8 @@ public class TestJobHistoryParsing {
TaskAttemptInfo taskAttemptInfo = taskInfo.getAllTaskAttempts().get(
TypeConverter.fromYarn((taskAttempt.getID())));
// Verify rack-name for all task attempts
- Assert.assertEquals("rack-name is incorrect",
- taskAttemptInfo.getRackname(), RACK_NAME);
+ assertThat(taskAttemptInfo.getRackname())
+ .withFailMessage("rack-name is incorrect").isEqualTo(RACK_NAME);
if (taskAttemptInfo.getTaskStatus().equals("FAILED")) {
noOffailedAttempts++;
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/pom.xml
index 3ebba795ef6..047951dbba9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/pom.xml
@@ -116,6 +116,11 @@
bcpkix-jdk15on
test
+
+ org.assertj
+ assertj-core
+ test
+
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
index c874a383956..075ab9a3f3c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java
@@ -50,6 +50,8 @@ import org.apache.hadoop.util.StringUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotSame;
@@ -103,7 +105,8 @@ public class TestFileSystem {
// This should go to TestFsShell.java when it is added.
CommandFormat cf;
cf= new CommandFormat("copyToLocal", 2,2,"crc","ignoreCrc");
- assertEquals(cf.parse(new String[] {"-get","file", "-"}, 1).get(1), "-");
+ assertThat(cf.parse(new String[] {"-get", "file", "-"}, 1).get(1))
+ .isEqualTo("-");
try {
cf.parse(new String[] {"-get","file","-ignoreCrc","/foo"}, 1);
fail("Expected parsing to fail as it should stop at first non-option");
@@ -112,12 +115,16 @@ public class TestFileSystem {
// Expected
}
cf = new CommandFormat("tail", 1, 1, "f");
- assertEquals(cf.parse(new String[] {"-tail","fileName"}, 1).get(0),"fileName");
- assertEquals(cf.parse(new String[] {"-tail","-f","fileName"}, 1).get(0),"fileName");
+ assertThat(cf.parse(new String[] {"-tail", "fileName"}, 1).get(0))
+ .isEqualTo("fileName");
+ assertThat(cf.parse(new String[] {"-tail", "-f", "fileName"}, 1).get(0))
+ .isEqualTo("fileName");
cf = new CommandFormat("setrep", 2, 2, "R", "w");
- assertEquals(cf.parse(new String[] {"-setrep","-R","2","/foo/bar"}, 1).get(1), "/foo/bar");
+ assertThat(cf.parse(new String[] {"-setrep", "-R", "2", "/foo/bar"}, 1)
+ .get(1)).isEqualTo("/foo/bar");
cf = new CommandFormat("put", 2, 10000);
- assertEquals(cf.parse(new String[] {"-put", "-", "dest"}, 1).get(1), "dest");
+ assertThat(cf.parse(new String[] {"-put", "-", "dest"}, 1).get(1))
+ .isEqualTo("dest");
}
public static void createControlFile(FileSystem fs,
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TestSlive.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TestSlive.java
index 575cd0bf60e..218cae8bf7b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TestSlive.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/TestSlive.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.fs.slive;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@@ -179,7 +180,7 @@ public class TestSlive {
op.run(fs);
types.add(op.getType());
}
- assertEquals(types.size(), expected);
+ assertThat(types.size()).isEqualTo(expected);
}
// gets the config merged with the arguments
@@ -231,24 +232,31 @@ public class TestSlive {
ConfigExtractor extractor = getTestConfig(true);
assertEquals(extractor.getOpCount().intValue(), Constants.OperationType
.values().length);
- assertEquals(extractor.getMapAmount().intValue(), 2);
- assertEquals(extractor.getReducerAmount().intValue(), 2);
+ assertThat(extractor.getMapAmount().intValue()).isEqualTo(2);
+ assertThat(extractor.getReducerAmount().intValue()).isEqualTo(2);
Range apRange = extractor.getAppendSize();
- assertEquals(apRange.getLower().intValue(), Constants.MEGABYTES * 1);
- assertEquals(apRange.getUpper().intValue(), Constants.MEGABYTES * 2);
+ assertThat(apRange.getLower().intValue()).isEqualTo(
+ Constants.MEGABYTES * 1);
+ assertThat(apRange.getUpper().intValue()).isEqualTo(
+ Constants.MEGABYTES * 2);
Range wRange = extractor.getWriteSize();
- assertEquals(wRange.getLower().intValue(), Constants.MEGABYTES * 1);
- assertEquals(wRange.getUpper().intValue(), Constants.MEGABYTES * 2);
+ assertThat(wRange.getLower().intValue()).isEqualTo(
+ Constants.MEGABYTES * 1);
+ assertThat(wRange.getUpper().intValue()).isEqualTo(
+ Constants.MEGABYTES * 2);
Range trRange = extractor.getTruncateSize();
- assertEquals(trRange.getLower().intValue(), 0);
- assertEquals(trRange.getUpper().intValue(), Constants.MEGABYTES * 1);
+ assertThat(trRange.getLower().intValue()).isZero();
+ assertThat(trRange.getUpper().intValue()).isEqualTo(
+ Constants.MEGABYTES * 1);
Range bRange = extractor.getBlockSize();
- assertEquals(bRange.getLower().intValue(), Constants.MEGABYTES * 1);
- assertEquals(bRange.getUpper().intValue(), Constants.MEGABYTES * 2);
+ assertThat(bRange.getLower().intValue()).isEqualTo(
+ Constants.MEGABYTES * 1);
+ assertThat(bRange.getUpper().intValue()).isEqualTo(
+ Constants.MEGABYTES * 2);
String resfile = extractor.getResultFile();
assertEquals(resfile, getResultFile().toString());
int durationMs = extractor.getDurationMilliseconds();
- assertEquals(durationMs, 10 * 1000);
+ assertThat(durationMs).isEqualTo(10 * 1000);
}
@Test
@@ -273,8 +281,8 @@ public class TestSlive {
@Test
public void testRange() {
Range r = new Range(10L, 20L);
- assertEquals(r.getLower().longValue(), 10L);
- assertEquals(r.getUpper().longValue(), 20L);
+ assertThat(r.getLower().longValue()).isEqualTo(10L);
+ assertThat(r.getUpper().longValue()).isEqualTo(20L);
}
@Test
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/JobClientUnitTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/JobClientUnitTest.java
index 04488cd1d21..c7ae57d5811 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/JobClientUnitTest.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/JobClientUnitTest.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.mapred;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertNotNull;
@@ -234,12 +235,12 @@ public class JobClientUnitTest {
//no retry
assertNotNull(client.getJob(id));
- assertEquals(client.getLastGetJobRetriesCounter(), 0);
+ assertThat(client.getLastGetJobRetriesCounter()).isEqualTo(0);
//2 retries
client.setGetJobRetries(2);
assertNotNull(client.getJob(id));
- assertEquals(client.getLastGetJobRetriesCounter(), 2);
+ assertThat(client.getLastGetJobRetriesCounter()).isEqualTo(2);
//beyond yarn.app.mapreduce.client.job.max-retries, will get null
client.setGetJobRetries(3);
@@ -260,8 +261,8 @@ public class JobClientUnitTest {
//3 retries (default)
client.setGetJobRetries(MRJobConfig.DEFAULT_MR_CLIENT_JOB_MAX_RETRIES);
assertNotNull(client.getJob(id));
- assertEquals(client.getLastGetJobRetriesCounter(),
- MRJobConfig.DEFAULT_MR_CLIENT_JOB_MAX_RETRIES);
+ assertThat(client.getLastGetJobRetriesCounter())
+ .isEqualTo(MRJobConfig.DEFAULT_MR_CLIENT_JOB_MAX_RETRIES);
//beyond yarn.app.mapreduce.client.job.max-retries, will get null
client.setGetJobRetries(MRJobConfig.DEFAULT_MR_CLIENT_JOB_MAX_RETRIES + 1);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java
index 190f728346f..bbfabe83542 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileInputFormat.java
@@ -33,6 +33,7 @@ import java.io.DataOutputStream;
import java.io.IOException;
import java.util.concurrent.TimeoutException;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
@@ -196,7 +197,7 @@ public class TestMRCJCFileInputFormat {
// Enable multi-level/recursive inputs
job.setBoolean(FileInputFormat.INPUT_DIR_RECURSIVE, true);
InputSplit[] splits = inFormat.getSplits(job, 1);
- assertEquals(splits.length, 2);
+ assertThat(splits.length).isEqualTo(2);
}
@SuppressWarnings("rawtypes")
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java
index 83796e8d85b..be7dcc5ec2b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java
@@ -33,6 +33,7 @@ import java.io.File;
import java.io.IOException;
import java.net.URI;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
@@ -112,7 +113,7 @@ public class TestMRCJCFileOutputCommitter {
expectedOutput.append(key1).append("\n");
expectedOutput.append(key2).append('\t').append(val2).append("\n");
String output = UtilsForTests.slurp(expectedFile);
- assertEquals(output, expectedOutput.toString());
+ assertThat(output).isEqualTo(expectedOutput.toString());
}
@Test
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRChildTask.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRChildTask.java
index 340cba84a8f..28376ebc89c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRChildTask.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRChildTask.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.mapred;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
@@ -171,9 +172,10 @@ public class TestMiniMRChildTask {
String javaOpts = job.get(JobConf.MAPRED_TASK_JAVA_OPTS);
assertNotNull(JobConf.MAPRED_TASK_JAVA_OPTS + " is null!",
javaOpts);
- assertEquals(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: " +
- javaOpts,
- javaOpts, TASK_OPTS_VAL);
+ assertThat(javaOpts)
+ .withFailMessage(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: "
+ + javaOpts)
+ .isEqualTo(TASK_OPTS_VAL);
} else {
String mapJavaOpts = job.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS);
assertNotNull(JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " is null!",
@@ -214,16 +216,18 @@ public class TestMiniMRChildTask {
String javaOpts = job.get(JobConf.MAPRED_TASK_JAVA_OPTS);
assertNotNull(JobConf.MAPRED_TASK_JAVA_OPTS + " is null!",
javaOpts);
- assertEquals(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: " +
- javaOpts,
- javaOpts, TASK_OPTS_VAL);
+ assertThat(javaOpts)
+ .withFailMessage(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: "
+ + javaOpts)
+ .isEqualTo(TASK_OPTS_VAL);
} else {
String reduceJavaOpts = job.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS);
assertNotNull(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " is null!",
reduceJavaOpts);
- assertEquals(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " has value of: " +
- reduceJavaOpts,
- reduceJavaOpts, REDUCE_OPTS_VAL);
+ assertThat(reduceJavaOpts)
+ .withFailMessage(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS +
+ " has value of: " + reduceJavaOpts)
+ .isEqualTo(REDUCE_OPTS_VAL);
}
// check if X=y works for an already existing parameter
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java
index 5bb336e4e81..d57be9d537a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultiFileSplit.java
@@ -25,11 +25,12 @@ import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
-import java.util.Arrays;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;
+
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@@ -64,8 +65,8 @@ public class TestMultiFileSplit {
assertTrue(split.getLength() != 0);
assertEquals(split.getLength(), readSplit.getLength());
- assertTrue(Arrays.equals(split.getPaths(), readSplit.getPaths()));
- assertTrue(Arrays.equals(split.getLengths(), readSplit.getLengths()));
+ assertThat(readSplit.getPaths()).containsExactly(split.getPaths());
+ assertThat(readSplit.getLengths()).containsExactly(split.getLengths());
System.out.println(split.toString());
}
@@ -88,7 +89,7 @@ public class TestMultiFileSplit {
MultiFileSplit split = new MultiFileSplit(job,path,lengths);
String [] locations= split.getLocations();
- assertTrue(locations.length==1);
- assertEquals(locations[0], "localhost");
+ assertThat(locations.length).isOne();
+ assertThat(locations[0]).isEqualTo("localhost");
}
}
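
Illustrative sketch (not part of the patch), assuming two hypothetical long arrays: AssertJ's containsExactly verifies both the elements and their order, which is exactly what the replaced assertTrue(Arrays.equals(...)) calls checked.

import static org.assertj.core.api.Assertions.assertThat;

public class ArrayAssertSketch {
  public static void main(String[] args) {
    long[] expected = {10L, 20L};
    long[] actual = {10L, 20L};
    // Passes only if the arrays hold the same values in the same order.
    assertThat(actual).containsExactly(expected);
  }
}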
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java
index b5047fc8331..43ead04b269 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java
@@ -27,7 +27,7 @@ import org.junit.Test;
import java.io.File;
import java.io.IOException;
-import static org.junit.Assert.assertEquals;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
public class TestMultipleTextOutputFormat {
@@ -112,7 +112,7 @@ public class TestMultipleTextOutputFormat {
}
String output = UtilsForTests.slurp(expectedFile_11);
//System.out.printf("File_2 output: %s\n", output);
- assertEquals(output, expectedOutput.toString());
+ assertThat(output).isEqualTo(expectedOutput.toString());
String file_12 = "2-part-00000";
@@ -124,7 +124,7 @@ public class TestMultipleTextOutputFormat {
}
output = UtilsForTests.slurp(expectedFile_12);
//System.out.printf("File_2 output: %s\n", output);
- assertEquals(output, expectedOutput.toString());
+ assertThat(output).isEqualTo(expectedOutput.toString());
String file_13 = "3-part-00000";
@@ -136,7 +136,7 @@ public class TestMultipleTextOutputFormat {
}
output = UtilsForTests.slurp(expectedFile_13);
//System.out.printf("File_2 output: %s\n", output);
- assertEquals(output, expectedOutput.toString());
+ assertThat(output).isEqualTo(expectedOutput.toString());
String file_2 = "2/3";
@@ -148,6 +148,6 @@ public class TestMultipleTextOutputFormat {
}
output = UtilsForTests.slurp(expectedFile_2);
//System.out.printf("File_2 output: %s\n", output);
- assertEquals(output, expectedOutput.toString());
+ assertThat(output).isEqualTo(expectedOutput.toString());
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java
index 65b9dbd880b..3223acca228 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.mapred;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
@@ -161,10 +162,10 @@ public class TestNetworkedJob {
assertTrue(runningJob.getJobFile().endsWith(
".staging/" + runningJob.getJobID() + "/job.xml"));
assertTrue(runningJob.getTrackingURL().length() > 0);
- assertTrue(runningJob.mapProgress() == 0.0f);
- assertTrue(runningJob.reduceProgress() == 0.0f);
- assertTrue(runningJob.cleanupProgress() == 0.0f);
- assertTrue(runningJob.setupProgress() == 0.0f);
+ assertThat(runningJob.mapProgress()).isEqualTo(0.0f);
+ assertThat(runningJob.reduceProgress()).isEqualTo(0.0f);
+ assertThat(runningJob.cleanupProgress()).isEqualTo(0.0f);
+ assertThat(runningJob.setupProgress()).isEqualTo(0.0f);
TaskCompletionEvent[] tce = runningJob.getTaskCompletionEvents(0);
assertEquals(tce.length, 0);
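
Illustrative sketch (not part of the patch), with a hypothetical progress value: isEqualTo on a float performs an exact comparison, matching the replaced "== 0.0f" checks, while isCloseTo with within(...) is the AssertJ idiom if a tolerance were ever needed.

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.within;

public class FloatAssertSketch {
  public static void main(String[] args) {
    float progress = 0.0f;
    assertThat(progress).isEqualTo(0.0f);                // exact comparison
    assertThat(progress).isCloseTo(0.0f, within(1e-6f)); // comparison with tolerance
  }
}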
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java
index ec538926432..92b15131bb2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileAsBinaryOutputFormat.java
@@ -34,8 +34,8 @@ import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class TestSequenceFileAsBinaryOutputFormat {
@@ -126,10 +126,10 @@ public class TestSequenceFileAsBinaryOutputFormat {
"Keys don't match: " + "*" + iwritable.get() + ":" +
sourceInt + "*",
sourceInt, iwritable.get());
- assertTrue(
+ assertThat(dwritable.get()).withFailMessage(
"Vals don't match: " + "*" + dwritable.get() + ":" +
- sourceDouble + "*",
- Double.compare(dwritable.get(), sourceDouble) == 0 );
+ sourceDouble + "*")
+ .isEqualTo(sourceDouble);
++count;
}
} finally {
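
Illustrative sketch (not part of the patch), with hypothetical values: withFailMessage replaces AssertJ's default error text and must be chained before the terminal assertion, mirroring the message-first form of the JUnit calls it replaces.

import static org.assertj.core.api.Assertions.assertThat;

public class FailMessageSketch {
  public static void main(String[] args) {
    double actual = 1.5;
    double expected = 1.5;
    assertThat(actual)
        .withFailMessage("Vals don't match: *" + actual + ":" + expected + "*")
        .isEqualTo(expected);
  }
}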
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java
index 4a98566cb43..b3a0b8b6cd7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java
@@ -30,6 +30,7 @@ import org.junit.Test;
import java.io.IOException;
import java.util.Random;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
public class TestSequenceFileInputFilter {
@@ -147,7 +148,7 @@ public class TestSequenceFileInputFilter {
int expectedCount = length/1000;
if (expectedCount*1000!=length)
expectedCount++;
- assertEquals(count, expectedCount);
+ assertThat(count).isEqualTo(expectedCount);
}
// clean up
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java
index 8a83e8153e3..d9b94e43268 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestStatisticsCollector.java
@@ -23,6 +23,7 @@ import org.apache.hadoop.mapred.StatisticsCollector.TimeWindow;
import org.apache.hadoop.mapred.StatisticsCollector.Stat;
import org.junit.Test;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
@@ -88,14 +89,14 @@ public class TestStatisticsCollector {
// test Stat class
Map updaters= collector.getUpdaters();
- assertEquals(updaters.size(),2);
+ assertThat(updaters.size()).isEqualTo(2);
Map ststistics=collector.getStatistics();
assertNotNull(ststistics.get("m1"));
Stat newStat= collector.createStat("m2");
- assertEquals(newStat.name, "m2");
+ assertThat(newStat.name).isEqualTo("m2");
Stat st=collector.removeStat("m1");
- assertEquals(st.name, "m1");
+ assertThat(st.name).isEqualTo("m1");
assertEquals((10+10+10+12+13+14), stat.getValues().get(window).getValue());
assertEquals(95, stat.getValues().get(sincStart).getValue());
st=collector.removeStat("m1");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
index 332a0e1a2a7..0c42b3daab4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.mapred;
import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
@@ -617,9 +618,9 @@ public class TestYARNRunner {
ApplicationSubmissionContext appSubCtx =
buildSubmitContext(yarnRunner, jobConf);
- assertEquals(appSubCtx.getNodeLabelExpression(), "GPU");
- assertEquals(appSubCtx.getAMContainerResourceRequests().get(0)
- .getNodeLabelExpression(), "highMem");
+ assertThat(appSubCtx.getNodeLabelExpression()).isEqualTo("GPU");
+ assertThat(appSubCtx.getAMContainerResourceRequests().get(0)
+ .getNodeLabelExpression()).isEqualTo("highMem");
}
@Test
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChain.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChain.java
index dc47c2e9348..ddefcd09d80 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChain.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/TestChain.java
@@ -17,12 +17,12 @@
*/
package org.apache.hadoop.mapred.lib;
-import org.junit.Assert;
-
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reducer;
import org.junit.Test;
+import static org.assertj.core.api.Assertions.assertThat;
+
public class TestChain {
@Test
public void testSetReducerWithReducerByValueAsTrue() throws Exception {
@@ -33,8 +33,9 @@ public class TestChain {
Object.class, Object.class, true, reducerConf);
boolean reduceByValue = reducerConf.getBoolean("chain.reducer.byValue",
false);
- Assert.assertEquals("It should set chain.reducer.byValue as true "
- + "in reducerConf when we give value as true", true, reduceByValue);
+ assertThat(reduceByValue).withFailMessage(
+ "It should set chain.reducer.byValue as true in "
+ + "reducerConf when we give value as true").isTrue();
}
@Test
@@ -46,8 +47,9 @@ public class TestChain {
Object.class, Object.class, false, reducerConf);
boolean reduceByValue = reducerConf.getBoolean("chain.reducer.byValue",
true);
- Assert.assertEquals("It should set chain.reducer.byValue as false "
- + "in reducerConf when we give value as false", false, reduceByValue);
+ assertThat(reduceByValue).withFailMessage(
+ "It should set chain.reducer.byValue as false "
+ + "in reducerConf when we give value as false").isFalse();
}
interface MyReducer extends Reducer