MAPREDUCE-7197. Fix order of actual and expected expression in assert statements. Contributed by Adam Antal

Szilard Nemeth 2019-08-12 13:54:13 +02:00
parent dfe772d234
commit ac6c4f0b29
83 changed files with 755 additions and 545 deletions
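The change is mechanical but easy to get wrong: JUnit's assertEquals(expected, actual) puts the expectation first, so calls written with the actual value first produce misleading failure messages. AssertJ's fluent form makes the roles explicit. A minimal sketch of the pattern being fixed (class and values are illustrative, not taken from the patch):

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;

import org.junit.Test;

public class AssertOrderExample {

  private String jobState() {
    return "SUCCEEDED";
  }

  @Test
  public void argumentOrder() {
    // JUnit's signature is assertEquals(expected, actual). Written with
    // the actual value first it still passes while the values match, but
    // on a mismatch the failure message reports the two roles swapped.
    assertEquals("SUCCEEDED", jobState());
    // AssertJ removes the ambiguity: the value under test always goes
    // into assertThat(), the expectation into isEqualTo().
    assertThat(jobState()).isEqualTo("SUCCEEDED");
  }
}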

View File

@ -113,6 +113,11 @@
<artifactId>system-rules</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>

View File

@ -68,6 +68,7 @@ import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
@ -219,7 +220,7 @@ public class TestTaskAttemptListenerImpl {
JVMId.forName("jvm_001_002_m_004_006");
fail();
} catch (IllegalArgumentException e) {
assertEquals(e.getMessage(),
assertThat(e.getMessage()).isEqualTo(
"TaskId string : jvm_001_002_m_004_006 is not properly formed");
}
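The patch keeps the try/fail/catch structure; for reference, AssertJ can also express this check in a single chain via assertThatThrownBy. A sketch under that assumption, reusing JVMId from the test above:

import static org.assertj.core.api.Assertions.assertThatThrownBy;

import org.apache.hadoop.mapred.JVMId;
import org.junit.Test;

public class ThrownByExample {

  @Test
  public void malformedJvmIdIsRejected() {
    // The lambda is expected to throw; exception type and message are
    // then asserted fluently, replacing the explicit fail() call.
    assertThatThrownBy(() -> JVMId.forName("jvm_001_002_m_004_006"))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage(
            "TaskId string : jvm_001_002_m_004_006 is not properly formed");
  }
}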

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.mapreduce.jobhistory;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@ -59,16 +60,16 @@ public class TestEvents {
TaskAttemptFinishedEvent test = new TaskAttemptFinishedEvent(taskAttemptId,
TaskType.REDUCE, "TEST", 123L, "RAKNAME", "HOSTNAME", "STATUS",
counters, 234);
assertEquals(test.getAttemptId().toString(), taskAttemptId.toString());
assertEquals(test.getCounters(), counters);
assertEquals(test.getFinishTime(), 123L);
assertEquals(test.getHostname(), "HOSTNAME");
assertEquals(test.getRackName(), "RAKNAME");
assertEquals(test.getState(), "STATUS");
assertEquals(test.getTaskId(), tid);
assertEquals(test.getTaskStatus(), "TEST");
assertEquals(test.getTaskType(), TaskType.REDUCE);
assertThat(test.getAttemptId().toString())
.isEqualTo(taskAttemptId.toString());
assertThat(test.getCounters()).isEqualTo(counters);
assertThat(test.getFinishTime()).isEqualTo(123L);
assertThat(test.getHostname()).isEqualTo("HOSTNAME");
assertThat(test.getRackName()).isEqualTo("RAKNAME");
assertThat(test.getState()).isEqualTo("STATUS");
assertThat(test.getTaskId()).isEqualTo(tid);
assertThat(test.getTaskStatus()).isEqualTo("TEST");
assertThat(test.getTaskType()).isEqualTo(TaskType.REDUCE);
assertEquals(234, test.getStartTime());
}
@ -83,8 +84,8 @@ public class TestEvents {
org.apache.hadoop.mapreduce.JobID jid = new JobID("001", 1);
JobPriorityChangeEvent test = new JobPriorityChangeEvent(jid,
JobPriority.LOW);
assertEquals(test.getJobId().toString(), jid.toString());
assertEquals(test.getPriority(), JobPriority.LOW);
assertThat(test.getJobId().toString()).isEqualTo(jid.toString());
assertThat(test.getPriority()).isEqualTo(JobPriority.LOW);
}
@ -93,8 +94,8 @@ public class TestEvents {
org.apache.hadoop.mapreduce.JobID jid = new JobID("001", 1);
JobQueueChangeEvent test = new JobQueueChangeEvent(jid,
"newqueue");
assertEquals(test.getJobId().toString(), jid.toString());
assertEquals(test.getJobQueueName(), "newqueue");
assertThat(test.getJobId().toString()).isEqualTo(jid.toString());
assertThat(test.getJobQueueName()).isEqualTo("newqueue");
}
/**
@ -107,8 +108,8 @@ public class TestEvents {
JobID jid = new JobID("001", 1);
TaskID tid = new TaskID(jid, TaskType.REDUCE, 2);
TaskUpdatedEvent test = new TaskUpdatedEvent(tid, 1234L);
assertEquals(test.getTaskId().toString(), tid.toString());
assertEquals(test.getFinishTime(), 1234L);
assertThat(test.getTaskId().toString()).isEqualTo(tid.toString());
assertThat(test.getFinishTime()).isEqualTo(1234L);
}

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.mapreduce.jobhistory;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
@ -412,8 +413,9 @@ public class TestJobHistoryEventHandler {
JobStateInternal.FAILED.toString())));
// verify the value of the sensitive property in job.xml is restored.
Assert.assertEquals(sensitivePropertyName + " is modified.",
conf.get(sensitivePropertyName), sensitivePropertyValue);
assertThat(conf.get(sensitivePropertyName))
.withFailMessage(sensitivePropertyName + " is modified.")
.isEqualTo(sensitivePropertyValue);
// load the job_conf.xml in JHS directory and verify property redaction.
Path jhsJobConfFile = getJobConfInIntermediateDoneDir(conf, params.jobId);
@ -543,19 +545,19 @@ public class TestJobHistoryEventHandler {
JobHistoryEventHandler.MetaInfo mi =
JobHistoryEventHandler.fileMap.get(t.jobId);
Assert.assertEquals(mi.getJobIndexInfo().getSubmitTime(), 100);
Assert.assertEquals(mi.getJobIndexInfo().getJobStartTime(), 200);
Assert.assertEquals(mi.getJobSummary().getJobSubmitTime(), 100);
Assert.assertEquals(mi.getJobSummary().getJobLaunchTime(), 200);
assertThat(mi.getJobIndexInfo().getSubmitTime()).isEqualTo(100);
assertThat(mi.getJobIndexInfo().getJobStartTime()).isEqualTo(200);
assertThat(mi.getJobSummary().getJobSubmitTime()).isEqualTo(100);
assertThat(mi.getJobSummary().getJobLaunchTime()).isEqualTo(200);
handleEvent(jheh, new JobHistoryEvent(t.jobId,
new JobUnsuccessfulCompletionEvent(TypeConverter.fromYarn(t.jobId), 0,
0, 0, 0, 0, 0, 0, JobStateInternal.FAILED.toString())));
Assert.assertEquals(mi.getJobIndexInfo().getSubmitTime(), 100);
Assert.assertEquals(mi.getJobIndexInfo().getJobStartTime(), 200);
Assert.assertEquals(mi.getJobSummary().getJobSubmitTime(), 100);
Assert.assertEquals(mi.getJobSummary().getJobLaunchTime(), 200);
assertThat(mi.getJobIndexInfo().getSubmitTime()).isEqualTo(100);
assertThat(mi.getJobIndexInfo().getJobStartTime()).isEqualTo(200);
assertThat(mi.getJobSummary().getJobSubmitTime()).isEqualTo(100);
assertThat(mi.getJobSummary().getJobLaunchTime()).isEqualTo(200);
verify(jheh, times(1)).processDoneFiles(t.jobId);
mockWriter = jheh.getEventWriter();
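One AssertJ subtlety in conversions that carry a custom message, as in the hunk above: withFailMessage() only applies to assertions chained after it, so it must come before isEqualTo(). A minimal sketch:

import static org.assertj.core.api.Assertions.assertThat;

public class FailMessageOrderExample {
  public static void main(String[] args) {
    String property = "original-value";
    // Correct: the custom message is registered before the comparison
    // runs, so a mismatch would be reported as "property is modified.".
    assertThat(property)
        .withFailMessage("property is modified.")
        .isEqualTo("original-value");
    // Ineffective: isEqualTo() has already passed (or thrown) by the
    // time withFailMessage() is evaluated, so the message is never used.
    assertThat(property)
        .isEqualTo("original-value")
        .withFailMessage("property is modified.");
  }
}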

View File

@ -26,6 +26,7 @@ import org.apache.hadoop.yarn.util.Records;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
@ -69,8 +70,8 @@ public class TestTaskAttemptReport {
// Create basic class
TaskAttemptReport report = Records.newRecord(TaskAttemptReport.class);
// Verify properties initialized to null
assertEquals(null, report.getCounters());
assertEquals(null, report.getRawCounters());
assertThat(report.getCounters()).isNull();
assertThat(report.getRawCounters()).isNull();
}
@Test
@ -80,8 +81,8 @@ public class TestTaskAttemptReport {
// Set raw counters to null
report.setRawCounters(null);
// Verify properties still null
assertEquals(null, report.getCounters());
assertEquals(null, report.getRawCounters());
assertThat(report.getCounters()).isNull();
assertThat(report.getRawCounters()).isNull();
}
@ -92,8 +93,8 @@ public class TestTaskAttemptReport {
// Set raw counters to null
report.setCounters(null);
// Verify properties still null
assertEquals(null, report.getCounters());
assertEquals(null, report.getRawCounters());
assertThat(report.getCounters()).isNull();
assertThat(report.getRawCounters()).isNull();
}
@Test
@ -108,8 +109,8 @@ public class TestTaskAttemptReport {
assertNotEquals(null, counters);
// Clear counters to null and then verify
report.setCounters(null);
assertEquals(null, report.getCounters());
assertEquals(null, report.getRawCounters());
assertThat(report.getCounters()).isNull();
assertThat(report.getRawCounters()).isNull();
}
@Test
@ -124,8 +125,8 @@ public class TestTaskAttemptReport {
assertNotEquals(null, counters);
// Clear counters to null and then verify
report.setRawCounters(null);
assertEquals(null, report.getCounters());
assertEquals(null, report.getRawCounters());
assertThat(report.getCounters()).isNull();
assertThat(report.getRawCounters()).isNull();
}
}
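For the null checks converted above, isNull() is the AssertJ equivalent of assertEquals(null, ...); a short sketch:

import static org.assertj.core.api.Assertions.assertThat;

public class NullCheckExample {
  public static void main(String[] args) {
    Object counters = null;
    // assertEquals(null, counters) also passes, but reads as a generic
    // equality check; isNull() states the intent directly.
    assertThat(counters).isNull();
  }
}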

View File

@ -26,6 +26,7 @@ import org.apache.hadoop.yarn.util.Records;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
@ -68,7 +69,7 @@ public class TestTaskReport {
report.setCounters(altCounters);
// Verify real counters has priority over raw
Counters counters = report.getCounters();
assertNotEquals(null, counters);
assertThat(counters).isNotNull();
assertNotEquals(rCounters, altCounters);
assertEquals(counters, altCounters);
}
@ -78,8 +79,8 @@ public class TestTaskReport {
// Create basic class
TaskReport report = Records.newRecord(TaskReport.class);
// Verify properties initialized to null
assertEquals(null, report.getCounters());
assertEquals(null, report.getRawCounters());
assertThat(report.getCounters()).isNull();
assertThat(report.getRawCounters()).isNull();
}
@Test
@ -89,8 +90,8 @@ public class TestTaskReport {
// Set raw counters to null
report.setRawCounters(null);
// Verify properties still null
assertEquals(null, report.getCounters());
assertEquals(null, report.getRawCounters());
assertThat(report.getCounters()).isNull();
assertThat(report.getRawCounters()).isNull();
}
@ -101,8 +102,8 @@ public class TestTaskReport {
// Set raw counters to null
report.setCounters(null);
// Verify properties still null
assertEquals(null, report.getCounters());
assertEquals(null, report.getRawCounters());
assertThat(report.getCounters()).isNull();
assertThat(report.getRawCounters()).isNull();
}
@Test
@ -117,8 +118,8 @@ public class TestTaskReport {
assertNotEquals(null, counters);
// Clear counters to null and then verify
report.setCounters(null);
assertEquals(null, report.getCounters());
assertEquals(null, report.getRawCounters());
assertThat(report.getCounters()).isNull();
assertThat(report.getRawCounters()).isNull();
}
@Test
@ -133,7 +134,7 @@ public class TestTaskReport {
assertNotEquals(null, counters);
// Clear counters to null and then verify
report.setRawCounters(null);
assertEquals(null, report.getCounters());
assertEquals(null, report.getRawCounters());
assertThat(report.getCounters()).isNull();
assertThat(report.getRawCounters()).isNull();
}
}

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.mapreduce.v2.app;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
@ -356,8 +357,8 @@ public class TestFetchFailure {
Assert.assertEquals("Map TaskAttempt state not correct",
TaskAttemptState.FAILED, mapAttempt1.getState());
Assert.assertEquals(mapAttempt1.getDiagnostics().get(0),
"Too many fetch failures. Failing the attempt. "
assertThat(mapAttempt1.getDiagnostics().get(0))
.isEqualTo("Too many fetch failures. Failing the attempt. "
+ "Last failure reported by "
+ reduceAttempt3.getID().toString() + " from host host3");

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.mapreduce.v2.app;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@ -550,8 +551,8 @@ public class TestMRAppMaster {
.handleEvent(captor.capture());
HistoryEvent event = captor.getValue().getHistoryEvent();
assertTrue(event instanceof JobUnsuccessfulCompletionEvent);
assertEquals(((JobUnsuccessfulCompletionEvent) event).getStatus()
, expectedJobState);
assertThat(((JobUnsuccessfulCompletionEvent) event).getStatus())
.isEqualTo(expectedJobState);
}
}
class MRAppMasterTest extends MRAppMaster {

View File

@ -285,7 +285,7 @@ public class TestMRClientService {
Assert.assertEquals(1, amInfo.getContainerId().getApplicationAttemptId()
.getAttemptId());
Assert.assertTrue(amInfo.getStartTime() > 0);
Assert.assertEquals(false, jr.isUber());
Assert.assertFalse(jr.isUber());
}
private void verifyTaskAttemptReport(TaskAttemptReport tar) {

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.mapreduce.v2.app;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@ -2075,7 +2076,7 @@ public class TestRecovery {
expectedOutput.append(key1).append("\n");
expectedOutput.append(key2).append('\t').append(val2).append("\n");
String output = slurp(expectedFile);
Assert.assertEquals(output, expectedOutput.toString());
assertThat(output).isEqualTo(expectedOutput.toString());
}
public static String slurp(File f) throws IOException {

View File

@ -82,6 +82,9 @@ import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.offset;
@SuppressWarnings({"unchecked", "rawtypes"})
public class TestRuntimeEstimators {
@ -151,10 +154,10 @@ public class TestRuntimeEstimators {
500L, speculator.getSoonestRetryAfterNoSpeculate());
Assert.assertEquals("wrong SPECULATIVE_RETRY_AFTER_SPECULATE value",
5000L, speculator.getSoonestRetryAfterSpeculate());
Assert.assertEquals(speculator.getProportionRunningTasksSpeculatable(),
0.1, 0.00001);
Assert.assertEquals(speculator.getProportionTotalTasksSpeculatable(),
0.001, 0.00001);
assertThat(speculator.getProportionRunningTasksSpeculatable())
.isCloseTo(0.1, offset(0.00001));
assertThat(speculator.getProportionTotalTasksSpeculatable())
.isCloseTo(0.001, offset(0.00001));
Assert.assertEquals("wrong SPECULATIVE_MINIMUM_ALLOWED_TASKS value",
5, speculator.getMinimumAllowedSpeculativeTasks());
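The floating-point conversion above maps JUnit's assertEquals(expected, actual, delta) onto isCloseTo(expected, offset(delta)); a sketch with illustrative values:

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.offset;

public class CloseToExample {
  public static void main(String[] args) {
    double proportion = 0.1000001;
    // Passes if the value lies within +/- 0.00001 of 0.1, mirroring
    // JUnit's three-argument assertEquals with a delta.
    assertThat(proportion).isCloseTo(0.1, offset(0.00001));
  }
}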

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.mapreduce.v2.app;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
@ -111,10 +112,10 @@ import org.junit.Test;
appMaster.shutDownJob();
((RunningAppContext) appMaster.getContext()).resetIsLastAMRetry();
if (shouldHaveDeleted) {
Assert.assertEquals(new Boolean(true), appMaster.isLastAMRetry());
assertTrue(appMaster.isLastAMRetry());
verify(fs).delete(stagingJobPath, true);
} else {
Assert.assertEquals(new Boolean(false), appMaster.isLastAMRetry());
assertFalse(appMaster.isLastAMRetry());
verify(fs, never()).delete(stagingJobPath, true);
}
}
@ -141,7 +142,7 @@ import org.junit.Test;
appMaster.start();
appMaster.shutDownJob();
//test whether notifyIsLastAMRetry called
Assert.assertEquals(true, ((TestMRApp)appMaster).getTestIsLastAMRetry());
assertTrue(((TestMRApp)appMaster).getTestIsLastAMRetry());
verify(fs).delete(stagingJobPath, true);
}
@ -165,7 +166,7 @@ import org.junit.Test;
//shutdown the job, not the lastRetry
appMaster.shutDownJob();
//test whether notifyIsLastAMRetry called
Assert.assertEquals(false, ((TestMRApp)appMaster).getTestIsLastAMRetry());
assertFalse(((TestMRApp)appMaster).getTestIsLastAMRetry());
verify(fs, times(0)).delete(stagingJobPath, true);
}
@ -192,7 +193,7 @@ import org.junit.Test;
//shutdown the job, is lastRetry
appMaster.shutDownJob();
//test whether notifyIsLastAMRetry called
Assert.assertEquals(true, ((TestMRApp)appMaster).getTestIsLastAMRetry());
assertTrue(((TestMRApp)appMaster).getTestIsLastAMRetry());
verify(fs).delete(stagingJobPath, true);
}
@ -276,7 +277,7 @@ import org.junit.Test;
appMaster.start();
appMaster.shutDownJob();
//test whether notifyIsLastAMRetry called
Assert.assertEquals(true, ((TestMRApp) appMaster).getTestIsLastAMRetry());
assertTrue(((TestMRApp) appMaster).getTestIsLastAMRetry());
verify(fs, times(0)).delete(stagingJobPath, true);
}
@ -304,7 +305,7 @@ import org.junit.Test;
appMaster.start();
appMaster.shutDownJob();
//test whether notifyIsLastAMRetry called
Assert.assertEquals(true, ((TestMRApp) appMaster).getTestIsLastAMRetry());
assertTrue(((TestMRApp) appMaster).getTestIsLastAMRetry());
verify(fs, times(0)).delete(stagingJobPath, true);
}
@ -330,7 +331,7 @@ import org.junit.Test;
appMaster.start();
appMaster.shutDownJob();
//test whether notifyIsLastAMRetry called
Assert.assertEquals(true, ((TestMRApp) appMaster).getTestIsLastAMRetry());
assertTrue(((TestMRApp) appMaster).getTestIsLastAMRetry());
//Staging dir should be deleted because it is not matched with
//PRESERVE_FILES_PATTERN
verify(fs, times(1)).delete(stagingJobPath, true);
@ -361,7 +362,7 @@ import org.junit.Test;
appMaster.start();
appMaster.shutDownJob();
//test whether notifyIsLastAMRetry called
Assert.assertEquals(true, ((TestMRApp) appMaster).getTestIsLastAMRetry());
assertTrue(((TestMRApp) appMaster).getTestIsLastAMRetry());
verify(fs, times(0)).delete(stagingJobPath, true);
}
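The boolean cleanups in this file drop both the boxing (the Boolean(boolean) constructor is deprecated) and the indirection of an equality check; a sketch with illustrative flags:

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

public class BooleanAssertExample {
  public static void main(String[] args) {
    boolean stagingDeleted = true;
    boolean stagingPreserved = false;
    // assertEquals(new Boolean(true), flag) boxes needlessly and buries
    // the intent; assertTrue/assertFalse state it directly.
    assertTrue(stagingDeleted);
    assertFalse(stagingPreserved);
  }
}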

View File

@ -19,6 +19,7 @@
package org.apache.hadoop.mapreduce.v2.app.job.impl;
import static org.apache.hadoop.test.GenericTestUtils.waitFor;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@ -377,10 +378,10 @@ public class TestTaskAttempt{
.getEventHandler()
.handle(new TaskAttemptEvent(rta.getID(), TaskAttemptEventType.TA_DONE));
app.waitForState(job, JobState.SUCCEEDED);
Assert.assertEquals(mta.getFinishTime(), 11);
Assert.assertEquals(mta.getLaunchTime(), 10);
Assert.assertEquals(rta.getFinishTime(), 11);
Assert.assertEquals(rta.getLaunchTime(), 10);
assertThat(mta.getFinishTime()).isEqualTo(11);
assertThat(mta.getLaunchTime()).isEqualTo(10);
assertThat(rta.getFinishTime()).isEqualTo(11);
assertThat(rta.getLaunchTime()).isEqualTo(10);
Counters counters = job.getAllCounters();
int memoryMb = (int) containerResource.getMemorySize();
@ -683,8 +684,9 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptContainerAssignedEvent(attemptId,
container, mock(Map.class)));
taImpl.handle(new TaskAttemptContainerLaunchedEvent(attemptId, 0));
assertEquals("Task attempt is not in running state", taImpl.getState(),
TaskAttemptState.RUNNING);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in RUNNING state")
.isEqualTo(TaskAttemptState.RUNNING);
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_CONTAINER_CLEANED));
assertFalse("InternalError occurred trying to handle TA_CONTAINER_CLEANED",
@ -744,8 +746,9 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_COMMIT_PENDING));
assertEquals("Task attempt is not in commit pending state", taImpl.getState(),
TaskAttemptState.COMMIT_PENDING);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in COMMIT_PENDING state")
.isEqualTo(TaskAttemptState.COMMIT_PENDING);
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_CONTAINER_CLEANED));
assertFalse("InternalError occurred trying to handle TA_CONTAINER_CLEANED",
@ -810,16 +813,19 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_CONTAINER_COMPLETED));
assertEquals("Task attempt is not in succeeded state", taImpl.getState(),
TaskAttemptState.SUCCEEDED);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in SUCCEEDED state")
.isEqualTo(TaskAttemptState.SUCCEEDED);
taImpl.handle(new TaskAttemptTooManyFetchFailureEvent(attemptId,
reduceTAId, "Host"));
assertEquals("Task attempt is not in FAILED state", taImpl.getState(),
TaskAttemptState.FAILED);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in FAILED state")
.isEqualTo(TaskAttemptState.FAILED);
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_TOO_MANY_FETCH_FAILURE));
assertEquals("Task attempt is not in FAILED state, still", taImpl.getState(),
TaskAttemptState.FAILED);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in FAILED state, still")
.isEqualTo(TaskAttemptState.FAILED);
assertFalse("InternalError occurred trying to handle TA_CONTAINER_CLEANED",
eventHandler.internalError);
}
@ -937,16 +943,19 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_CONTAINER_COMPLETED));
assertEquals("Task attempt is not in succeeded state", taImpl.getState(),
TaskAttemptState.SUCCEEDED);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in SUCCEEDED state")
.isEqualTo(TaskAttemptState.SUCCEEDED);
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_KILL));
assertEquals("Task attempt is not in KILLED state", taImpl.getState(),
TaskAttemptState.KILLED);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in KILLED state")
.isEqualTo(TaskAttemptState.KILLED);
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_TOO_MANY_FETCH_FAILURE));
assertEquals("Task attempt is not in KILLED state, still", taImpl.getState(),
TaskAttemptState.KILLED);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in KILLED state, still")
.isEqualTo(TaskAttemptState.KILLED);
assertFalse("InternalError occurred trying to handle TA_CONTAINER_CLEANED",
eventHandler.internalError);
}
@ -1053,8 +1062,9 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_CONTAINER_COMPLETED));
assertEquals("Task attempt is not in succeeded state", taImpl.getState(),
TaskAttemptState.SUCCEEDED);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in SUCCEEDED state")
.isEqualTo(TaskAttemptState.SUCCEEDED);
assertTrue("Task Attempt finish time is not greater than 0",
taImpl.getFinishTime() > 0);
@ -1064,8 +1074,9 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptTooManyFetchFailureEvent(attemptId,
reduceTAId, "Host"));
assertEquals("Task attempt is not in Too Many Fetch Failure state",
taImpl.getState(), TaskAttemptState.FAILED);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in FAILED state")
.isEqualTo(TaskAttemptState.FAILED);
assertEquals("After TA_TOO_MANY_FETCH_FAILURE,"
+ " Task attempt finish time is not the same ",
@ -1090,10 +1101,13 @@ public class TestTaskAttempt{
TaskAttemptEventType.TA_SCHEDULE));
}
taImpl.handle(new TaskAttemptKillEvent(taImpl.getID(),"", true));
assertEquals("Task attempt is not in KILLED state", taImpl.getState(),
TaskAttemptState.KILLED);
assertEquals("Task attempt's internal state is not KILLED",
taImpl.getInternalState(), TaskAttemptStateInternal.KILLED);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in KILLED state")
.isEqualTo(TaskAttemptState.KILLED);
assertThat(taImpl.getInternalState())
.withFailMessage("Task attempt's internal state is not KILLED")
.isEqualTo(TaskAttemptStateInternal.KILLED);
assertFalse("InternalError occurred", eventHandler.internalError);
TaskEvent event = eventHandler.lastTaskEvent;
assertEquals(TaskEventType.T_ATTEMPT_KILLED, event.getType());
@ -1156,13 +1170,15 @@ public class TestTaskAttempt{
TaskAttemptEventType.TA_SCHEDULE));
taImpl.handle(new TaskAttemptContainerAssignedEvent(attemptId, container,
mock(Map.class)));
assertEquals("Task attempt is not in assinged state",
taImpl.getInternalState(), TaskAttemptStateInternal.ASSIGNED);
assertThat(taImpl.getInternalState())
.withFailMessage("Task attempt is not in ASSIGNED state")
.isEqualTo(TaskAttemptStateInternal.ASSIGNED);
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_KILL));
assertEquals("Task should be in KILL_CONTAINER_CLEANUP state",
TaskAttemptStateInternal.KILL_CONTAINER_CLEANUP,
taImpl.getInternalState());
assertThat(taImpl.getInternalState())
.withFailMessage("Task should be in KILL_CONTAINER_CLEANUP state")
.isEqualTo(TaskAttemptStateInternal.KILL_CONTAINER_CLEANUP);
}
@Test
@ -1211,15 +1227,16 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptContainerAssignedEvent(attemptId, container,
mock(Map.class)));
taImpl.handle(new TaskAttemptContainerLaunchedEvent(attemptId, 0));
assertEquals("Task attempt is not in running state", taImpl.getState(),
TaskAttemptState.RUNNING);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in RUNNING state")
.isEqualTo(TaskAttemptState.RUNNING);
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_KILL));
assertFalse("InternalError occurred trying to handle TA_KILL",
eventHandler.internalError);
assertEquals("Task should be in KILL_CONTAINER_CLEANUP state",
TaskAttemptStateInternal.KILL_CONTAINER_CLEANUP,
taImpl.getInternalState());
assertThat(taImpl.getInternalState())
.withFailMessage("Task should be in KILL_CONTAINER_CLEANUP state")
.isEqualTo(TaskAttemptStateInternal.KILL_CONTAINER_CLEANUP);
}
@Test
@ -1268,19 +1285,21 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptContainerAssignedEvent(attemptId, container,
mock(Map.class)));
taImpl.handle(new TaskAttemptContainerLaunchedEvent(attemptId, 0));
assertEquals("Task attempt is not in running state", taImpl.getState(),
TaskAttemptState.RUNNING);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in RUNNING state")
.isEqualTo(TaskAttemptState.RUNNING);
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_COMMIT_PENDING));
assertEquals("Task should be in COMMIT_PENDING state",
TaskAttemptStateInternal.COMMIT_PENDING, taImpl.getInternalState());
assertThat(taImpl.getInternalState())
.withFailMessage("Task should be in COMMIT_PENDING state")
.isEqualTo(TaskAttemptStateInternal.COMMIT_PENDING);
taImpl.handle(new TaskAttemptEvent(attemptId,
TaskAttemptEventType.TA_KILL));
assertFalse("InternalError occurred trying to handle TA_KILL",
eventHandler.internalError);
assertEquals("Task should be in KILL_CONTAINER_CLEANUP state",
TaskAttemptStateInternal.KILL_CONTAINER_CLEANUP,
taImpl.getInternalState());
assertThat(taImpl.getInternalState())
.withFailMessage("Task should be in KILL_CONTAINER_CLEANUP state")
.isEqualTo(TaskAttemptStateInternal.KILL_CONTAINER_CLEANUP);
}
@Test
@ -1291,33 +1310,37 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_DONE));
assertEquals("Task attempt is not in SUCCEEDED state", taImpl.getState(),
TaskAttemptState.SUCCEEDED);
assertEquals("Task attempt's internal state is not " +
"SUCCESS_FINISHING_CONTAINER", taImpl.getInternalState(),
TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in SUCCEEDED state")
.isEqualTo(TaskAttemptState.SUCCEEDED);
assertThat(taImpl.getInternalState()).withFailMessage(
"Task attempt's internal state is not SUCCESS_FINISHING_CONTAINER")
.isEqualTo(TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
// If the map task is killed when it is in SUCCESS_FINISHING_CONTAINER
// state, the state will move to KILL_CONTAINER_CLEANUP
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_KILL));
assertEquals("Task attempt is not in KILLED state", taImpl.getState(),
TaskAttemptState.KILLED);
assertEquals("Task attempt's internal state is not KILL_CONTAINER_CLEANUP",
taImpl.getInternalState(),
TaskAttemptStateInternal.KILL_CONTAINER_CLEANUP);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in KILLED state")
.isEqualTo(TaskAttemptState.KILLED);
assertThat(taImpl.getInternalState()).withFailMessage(
"Task attempt's internal state is not KILL_CONTAINER_CLEANUP")
.isEqualTo(TaskAttemptStateInternal.KILL_CONTAINER_CLEANUP);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_CONTAINER_CLEANED));
assertEquals("Task attempt's internal state is not KILL_TASK_CLEANUP",
taImpl.getInternalState(),
TaskAttemptStateInternal.KILL_TASK_CLEANUP);
assertThat(taImpl.getInternalState()).withFailMessage(
"Task attempt's internal state is not KILL_TASK_CLEANUP")
.isEqualTo(TaskAttemptStateInternal.KILL_TASK_CLEANUP);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_CLEANUP_DONE));
assertEquals("Task attempt is not in KILLED state", taImpl.getState(),
TaskAttemptState.KILLED);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in KILLED state")
.isEqualTo(TaskAttemptState.KILLED);
assertFalse("InternalError occurred", eventHandler.internalError);
}
@ -1366,21 +1389,25 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_DONE));
assertEquals("Task attempt is not in SUCCEEDED state", taImpl.getState(),
TaskAttemptState.SUCCEEDED);
assertEquals("Task attempt's internal state is not " +
"SUCCESS_FINISHING_CONTAINER", taImpl.getInternalState(),
TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in SUCCEEDED state")
.isEqualTo(TaskAttemptState.SUCCEEDED);
assertThat(taImpl.getInternalState())
.withFailMessage("Task attempt's internal state is not " +
"SUCCESS_FINISHING_CONTAINER")
.isEqualTo(TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_CONTAINER_CLEANED));
// Send a map task attempt kill event indicating next map attempt has to be
// reschedule
taImpl.handle(new TaskAttemptKillEvent(taImpl.getID(), "", true));
assertEquals("Task attempt is not in KILLED state", taImpl.getState(),
TaskAttemptState.KILLED);
assertEquals("Task attempt's internal state is not KILLED",
taImpl.getInternalState(), TaskAttemptStateInternal.KILLED);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in KILLED state")
.isEqualTo(TaskAttemptState.KILLED);
assertThat(taImpl.getInternalState())
.withFailMessage("Task attempt's internal state is not KILLED")
.isEqualTo(TaskAttemptStateInternal.KILLED);
assertFalse("InternalError occurred", eventHandler.internalError);
TaskEvent event = eventHandler.lastTaskEvent;
assertEquals(TaskEventType.T_ATTEMPT_KILLED, event.getType());
@ -1424,39 +1451,46 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptFailEvent(taImpl.getID()));
assertEquals("Task attempt is not in FAILED state", taImpl.getState(),
TaskAttemptState.FAILED);
assertEquals("Task attempt's internal state is not " +
"FAIL_FINISHING_CONTAINER", taImpl.getInternalState(),
TaskAttemptStateInternal.FAIL_FINISHING_CONTAINER);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in FAILED state")
.isEqualTo(TaskAttemptState.FAILED);
assertThat(taImpl.getInternalState())
.withFailMessage("Task attempt's internal state is not " +
"FAIL_FINISHING_CONTAINER")
.isEqualTo(TaskAttemptStateInternal.FAIL_FINISHING_CONTAINER);
// If the map task is killed when it is in FAIL_FINISHING_CONTAINER state,
// the state will stay in FAIL_FINISHING_CONTAINER.
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_KILL));
assertEquals("Task attempt is not in RUNNING state", taImpl.getState(),
TaskAttemptState.FAILED);
assertEquals("Task attempt's internal state is not " +
"FAIL_FINISHING_CONTAINER", taImpl.getInternalState(),
TaskAttemptStateInternal.FAIL_FINISHING_CONTAINER);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in FAILED state")
.isEqualTo(TaskAttemptState.FAILED);
assertThat(taImpl.getInternalState())
.withFailMessage("Task attempt's internal state is not " +
"FAIL_FINISHING_CONTAINER")
.isEqualTo(TaskAttemptStateInternal.FAIL_FINISHING_CONTAINER);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_TIMED_OUT));
assertEquals("Task attempt's internal state is not FAIL_CONTAINER_CLEANUP",
taImpl.getInternalState(),
TaskAttemptStateInternal.FAIL_CONTAINER_CLEANUP);
assertThat(taImpl.getInternalState())
.withFailMessage("Task attempt's internal state is not " +
"FAIL_CONTAINER_CLEANUP")
.isEqualTo(TaskAttemptStateInternal.FAIL_CONTAINER_CLEANUP);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_CONTAINER_CLEANED));
assertEquals("Task attempt's internal state is not FAIL_TASK_CLEANUP",
taImpl.getInternalState(),
TaskAttemptStateInternal.FAIL_TASK_CLEANUP);
assertThat(taImpl.getInternalState())
.withFailMessage("Task attempt's internal state is not " +
"FAIL_TASK_CLEANUP")
.isEqualTo(TaskAttemptStateInternal.FAIL_TASK_CLEANUP);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_CLEANUP_DONE));
assertEquals("Task attempt is not in KILLED state", taImpl.getState(),
TaskAttemptState.FAILED);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in FAILED state")
.isEqualTo(TaskAttemptState.FAILED);
assertFalse("InternalError occurred", eventHandler.internalError);
}
@ -1469,23 +1503,27 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_FAILMSG_BY_CLIENT));
assertEquals("Task attempt is not in RUNNING state", taImpl.getState(),
TaskAttemptState.FAILED);
assertEquals("Task attempt's internal state is not " +
"FAIL_CONTAINER_CLEANUP", taImpl.getInternalState(),
TaskAttemptStateInternal.FAIL_CONTAINER_CLEANUP);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in FAILED state")
.isEqualTo(TaskAttemptState.FAILED);
assertThat(taImpl.getInternalState())
.withFailMessage("Task attempt's internal state is not " +
"FAIL_CONTAINER_CLEANUP")
.isEqualTo(TaskAttemptStateInternal.FAIL_CONTAINER_CLEANUP);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_CONTAINER_CLEANED));
assertEquals("Task attempt's internal state is not FAIL_TASK_CLEANUP",
taImpl.getInternalState(),
TaskAttemptStateInternal.FAIL_TASK_CLEANUP);
assertThat(taImpl.getInternalState())
.withFailMessage("Task attempt's internal state is not " +
"FAIL_TASK_CLEANUP")
.isEqualTo(TaskAttemptStateInternal.FAIL_TASK_CLEANUP);
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_CLEANUP_DONE));
assertEquals("Task attempt is not in KILLED state", taImpl.getState(),
TaskAttemptState.FAILED);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in FAILED state")
.isEqualTo(TaskAttemptState.FAILED);
assertFalse("InternalError occurred", eventHandler.internalError);
}
@ -1498,20 +1536,24 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_DONE));
assertEquals("Task attempt is not in RUNNING state", taImpl.getState(),
TaskAttemptState.SUCCEEDED);
assertEquals("Task attempt's internal state is not " +
"SUCCESS_FINISHING_CONTAINER", taImpl.getInternalState(),
TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in SUCCEEDED state")
.isEqualTo(TaskAttemptState.SUCCEEDED);
assertThat(taImpl.getInternalState())
.withFailMessage("Task attempt's internal state is not " +
"SUCCESS_FINISHING_CONTAINER")
.isEqualTo(TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
// TA_DIAGNOSTICS_UPDATE doesn't change state
taImpl.handle(new TaskAttemptDiagnosticsUpdateEvent(taImpl.getID(),
"Task got updated"));
assertEquals("Task attempt is not in RUNNING state", taImpl.getState(),
TaskAttemptState.SUCCEEDED);
assertEquals("Task attempt's internal state is not " +
"SUCCESS_FINISHING_CONTAINER", taImpl.getInternalState(),
TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in SUCCEEDED state")
.isEqualTo(TaskAttemptState.SUCCEEDED);
assertThat(taImpl.getInternalState())
.withFailMessage("Task attempt's internal state is not " +
"SUCCESS_FINISHING_CONTAINER")
.isEqualTo(TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
assertFalse("InternalError occurred", eventHandler.internalError);
}
@ -1524,21 +1566,25 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_DONE));
assertEquals("Task attempt is not in RUNNING state", taImpl.getState(),
TaskAttemptState.SUCCEEDED);
assertEquals("Task attempt's internal state is not " +
"SUCCESS_FINISHING_CONTAINER", taImpl.getInternalState(),
TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in SUCCEEDED state")
.isEqualTo(TaskAttemptState.SUCCEEDED);
assertThat(taImpl.getInternalState())
.withFailMessage("Task attempt's internal state is not " +
"SUCCESS_FINISHING_CONTAINER")
.isEqualTo(TaskAttemptStateInternal.SUCCESS_FINISHING_CONTAINER);
// If the task stays in SUCCESS_FINISHING_CONTAINER for too long,
// TaskAttemptListenerImpl will time out the attempt.
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_TIMED_OUT));
assertEquals("Task attempt is not in RUNNING state", taImpl.getState(),
TaskAttemptState.SUCCEEDED);
assertEquals("Task attempt's internal state is not " +
"SUCCESS_CONTAINER_CLEANUP", taImpl.getInternalState(),
TaskAttemptStateInternal.SUCCESS_CONTAINER_CLEANUP);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in SUCCEEDED state")
.isEqualTo(TaskAttemptState.SUCCEEDED);
assertThat(taImpl.getInternalState())
.withFailMessage("Task attempt's internal state is not " +
"SUCCESS_CONTAINER_CLEANUP")
.isEqualTo(TaskAttemptStateInternal.SUCCESS_CONTAINER_CLEANUP);
assertFalse("InternalError occurred", eventHandler.internalError);
}
@ -1550,19 +1596,22 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptFailEvent(taImpl.getID()));
assertEquals("Task attempt is not in RUNNING state", taImpl.getState(),
TaskAttemptState.FAILED);
assertEquals("Task attempt's internal state is not " +
"FAIL_FINISHING_CONTAINER", taImpl.getInternalState(),
TaskAttemptStateInternal.FAIL_FINISHING_CONTAINER);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in FAILED state")
.isEqualTo(TaskAttemptState.FAILED);
assertThat(taImpl.getInternalState())
.withFailMessage("Task attempt's internal state is not " +
"FAIL_FINISHING_CONTAINER")
.isEqualTo(TaskAttemptStateInternal.FAIL_FINISHING_CONTAINER);
// If the task stays in FAIL_FINISHING_CONTAINER for too long,
// TaskAttemptListenerImpl will time out the attempt.
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_TIMED_OUT));
assertEquals("Task attempt's internal state is not FAIL_CONTAINER_CLEANUP",
taImpl.getInternalState(),
TaskAttemptStateInternal.FAIL_CONTAINER_CLEANUP);
assertThat(taImpl.getInternalState())
.withFailMessage("Task attempt's internal state is not " +
"FAIL_CONTAINER_CLEANUP")
.isEqualTo(TaskAttemptStateInternal.FAIL_CONTAINER_CLEANUP);
assertFalse("InternalError occurred", eventHandler.internalError);
}
@ -1784,8 +1833,9 @@ public class TestTaskAttempt{
taImpl.handle(new TaskAttemptEvent(taImpl.getID(),
TaskAttemptEventType.TA_SCHEDULE));
assertEquals("Task attempt is not in STARTING state", taImpl.getState(),
TaskAttemptState.STARTING);
assertThat(taImpl.getState())
.withFailMessage("Task attempt is not in STARTING state")
.isEqualTo(TaskAttemptState.STARTING);
ArgumentCaptor<Event> captor = ArgumentCaptor.forClass(Event.class);
verify(eventHandler, times(2)).handle(captor.capture());

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.mapreduce.v2.app.launcher;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import java.io.IOException;
@ -124,7 +125,7 @@ public class TestContainerLauncher {
ThreadPoolExecutor threadPool = containerLauncher.getThreadPool();
// No events yet
Assert.assertEquals(containerLauncher.initialPoolSize,
assertThat(containerLauncher.initialPoolSize).isEqualTo(
MRJobConfig.DEFAULT_MR_AM_CONTAINERLAUNCHER_THREADPOOL_INITIAL_SIZE);
Assert.assertEquals(0, threadPool.getPoolSize());
Assert.assertEquals(containerLauncher.initialPoolSize,
@ -190,7 +191,7 @@ public class TestContainerLauncher {
20);
containerLauncher = new CustomContainerLauncher(context);
containerLauncher.init(conf);
Assert.assertEquals(containerLauncher.initialPoolSize, 20);
assertThat(containerLauncher.initialPoolSize).isEqualTo(20);
}
@Test(timeout = 5000)

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.mapreduce.v2.app.local;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.isA;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
@ -202,8 +203,8 @@ public class TestLocalContainerAllocator {
Container container = containerAssignedCaptor.getValue().getContainer();
Resource containerResource = container.getResource();
Assert.assertNotNull(containerResource);
Assert.assertEquals(containerResource.getMemorySize(), 0);
Assert.assertEquals(containerResource.getVirtualCores(), 0);
assertThat(containerResource.getMemorySize()).isEqualTo(0);
assertThat(containerResource.getVirtualCores()).isEqualTo(0);
}
private static ContainerAllocatorEvent createContainerRequestEvent() {

View File

@ -19,6 +19,7 @@
package org.apache.hadoop.mapreduce.v2.app.rm;
import static org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestCreator.createRequest;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyFloat;
@ -686,7 +687,7 @@ public class TestRMContainerAllocator {
rm.drainEvents();
}
// only 1 allocated container should be assigned
Assert.assertEquals(assignedContainer, 1);
assertThat(assignedContainer).isEqualTo(1);
}
@Test

View File

@ -46,6 +46,11 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-mapreduce-client-core</artifactId>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>

View File

@ -90,7 +90,7 @@ public class TestLocalModeWithNewApis {
job.setOutputValueClass(IntWritable.class);
FileInputFormat.addInputPath(job, inDir);
FileOutputFormat.setOutputPath(job, outDir);
assertEquals(job.waitForCompletion(true), true);
assertTrue(job.waitForCompletion(true));
String output = readOutput(outDir, conf);
assertEquals("The\t1\nbrown\t1\nfox\t2\nhas\t1\nmany\t1\n" +

View File

@ -19,6 +19,7 @@ package org.apache.hadoop.mapreduce;
import org.apache.hadoop.util.StringUtils;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@ -189,8 +190,9 @@ public class TestTypeConverter {
TypeConverter.fromYarn(queueInfo, new Configuration());
//Verify that the converted queue has the 1 child we had added
Assert.assertEquals("QueueInfo children weren't properly converted",
returned.getQueueChildren().size(), 1);
assertThat(returned.getQueueChildren().size())
.withFailMessage("QueueInfo children weren't properly converted")
.isEqualTo(1);
}
@Test
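A related AssertJ option for the size check above (not used by the patch) is the collection-aware hasSize(); a sketch with illustrative contents:

import static org.assertj.core.api.Assertions.assertThat;

import java.util.Collections;
import java.util.List;

public class SizeAssertExample {
  public static void main(String[] args) {
    List<String> children = Collections.singletonList("child-queue");
    // hasSize() prints the collection's contents on failure, whereas
    // asserting on size() only reports the mismatched count.
    assertThat(children).hasSize(1);
  }
}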

View File

@ -66,6 +66,11 @@
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>

View File

@ -24,6 +24,7 @@ import java.io.IOException;
import java.net.URI;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.*;
import org.junit.Assert;
@ -181,7 +182,7 @@ public class TestFileOutputCommitter {
expectedOutput.append(key1).append("\n");
expectedOutput.append(key2).append('\t').append(val2).append("\n");
String output = slurp(expectedFile);
assertEquals(output, expectedOutput.toString());
assertThat(output).isEqualTo(expectedOutput.toString());
}
private void validateMapFileOutputContent(

View File

@ -244,7 +244,7 @@ public class TestIndexCache {
}
getInfoThread.join();
removeMapThread.join();
assertEquals(true, cache.checkTotalMemoryUsed());
assertTrue(cache.checkTotalMemoryUsed());
}
}

View File

@ -19,6 +19,7 @@
package org.apache.hadoop.mapred;
import java.util.regex.Pattern;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.*;
import org.apache.hadoop.fs.Path;
@ -99,9 +100,9 @@ public class TestJobConf {
assertEquals(70, conf.getMaxReduceTaskFailuresPercent());
// by default
assertEquals(JobPriority.DEFAULT.name(), conf.getJobPriority().name());
assertThat(conf.getJobPriority()).isEqualTo(JobPriority.DEFAULT);
conf.setJobPriority(JobPriority.HIGH);
assertEquals(JobPriority.HIGH.name(), conf.getJobPriority().name());
assertThat(conf.getJobPriority()).isEqualTo(JobPriority.HIGH);
assertNull(conf.getJobSubmitHostName());
conf.setJobSubmitHostName("hostname");
@ -152,10 +153,10 @@ public class TestJobConf {
// make sure mapreduce.map|reduce.java.opts are not set by default
// so that they won't override mapred.child.java.opts
assertEquals("mapreduce.map.java.opts should not be set by default",
null, conf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS));
assertEquals("mapreduce.reduce.java.opts should not be set by default",
null, conf.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS));
assertNull("mapreduce.map.java.opts should not be set by default",
conf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS));
assertNull("mapreduce.reduce.java.opts should not be set by default",
conf.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS));
}
/**
@ -225,44 +226,44 @@ public class TestJobConf {
JobConf configuration = new JobConf();
configuration.set(MRJobConfig.MAP_MEMORY_MB,String.valueOf(300));
configuration.set(MRJobConfig.REDUCE_MEMORY_MB,String.valueOf(300));
Assert.assertEquals(configuration.getMemoryForMapTask(),300);
Assert.assertEquals(configuration.getMemoryForReduceTask(),300);
assertThat(configuration.getMemoryForMapTask()).isEqualTo(300);
assertThat(configuration.getMemoryForReduceTask()).isEqualTo(300);
configuration.set("mapred.task.maxvmem" , String.valueOf(2*1024 * 1024));
configuration.set(MRJobConfig.MAP_MEMORY_MB,String.valueOf(300));
configuration.set(MRJobConfig.REDUCE_MEMORY_MB,String.valueOf(300));
Assert.assertEquals(configuration.getMemoryForMapTask(),2);
Assert.assertEquals(configuration.getMemoryForReduceTask(),2);
assertThat(configuration.getMemoryForMapTask()).isEqualTo(2);
assertThat(configuration.getMemoryForReduceTask()).isEqualTo(2);
configuration = new JobConf();
configuration.set("mapred.task.maxvmem" , "-1");
configuration.set(MRJobConfig.MAP_MEMORY_MB,String.valueOf(300));
configuration.set(MRJobConfig.REDUCE_MEMORY_MB,String.valueOf(400));
Assert.assertEquals(configuration.getMemoryForMapTask(), 300);
Assert.assertEquals(configuration.getMemoryForReduceTask(), 400);
assertThat(configuration.getMemoryForMapTask()).isEqualTo(300);
assertThat(configuration.getMemoryForReduceTask()).isEqualTo(400);
configuration = new JobConf();
configuration.set("mapred.task.maxvmem" , String.valueOf(2*1024 * 1024));
configuration.set(MRJobConfig.MAP_MEMORY_MB,"-1");
configuration.set(MRJobConfig.REDUCE_MEMORY_MB,"-1");
Assert.assertEquals(configuration.getMemoryForMapTask(),2);
Assert.assertEquals(configuration.getMemoryForReduceTask(),2);
assertThat(configuration.getMemoryForMapTask()).isEqualTo(2);
assertThat(configuration.getMemoryForReduceTask()).isEqualTo(2);
configuration = new JobConf();
configuration.set("mapred.task.maxvmem" , String.valueOf(-1));
configuration.set(MRJobConfig.MAP_MEMORY_MB,"-1");
configuration.set(MRJobConfig.REDUCE_MEMORY_MB,"-1");
Assert.assertEquals(configuration.getMemoryForMapTask(),
assertThat(configuration.getMemoryForMapTask()).isEqualTo(
MRJobConfig.DEFAULT_MAP_MEMORY_MB);
Assert.assertEquals(configuration.getMemoryForReduceTask(),
assertThat(configuration.getMemoryForReduceTask()).isEqualTo(
MRJobConfig.DEFAULT_REDUCE_MEMORY_MB);
configuration = new JobConf();
configuration.set("mapred.task.maxvmem" , String.valueOf(2*1024 * 1024));
configuration.set(MRJobConfig.MAP_MEMORY_MB, "3");
configuration.set(MRJobConfig.REDUCE_MEMORY_MB, "3");
Assert.assertEquals(configuration.getMemoryForMapTask(),2);
Assert.assertEquals(configuration.getMemoryForReduceTask(),2);
assertThat(configuration.getMemoryForMapTask()).isEqualTo(2);
assertThat(configuration.getMemoryForReduceTask()).isEqualTo(2);
}
/**
@ -305,46 +306,47 @@ public class TestJobConf {
* Test deprecated accessor and mutator method for mapred.task.maxvmem
*/
@Test
@SuppressWarnings("deprecation")
public void testMaxVirtualMemoryForTask() {
JobConf configuration = new JobConf();
//get test case
configuration.set(MRJobConfig.MAP_MEMORY_MB, String.valueOf(300));
configuration.set(MRJobConfig.REDUCE_MEMORY_MB, String.valueOf(-1));
Assert.assertEquals(
configuration.getMaxVirtualMemoryForTask(), 1024 * 1024 * 1024);
assertThat(configuration.getMaxVirtualMemoryForTask())
.isEqualTo(1024 * 1024 * 1024);
configuration = new JobConf();
configuration.set(MRJobConfig.MAP_MEMORY_MB, String.valueOf(-1));
configuration.set(MRJobConfig.REDUCE_MEMORY_MB, String.valueOf(200));
Assert.assertEquals(
configuration.getMaxVirtualMemoryForTask(), 1024 * 1024 * 1024);
assertThat(configuration.getMaxVirtualMemoryForTask())
.isEqualTo(1024 * 1024 * 1024);
configuration = new JobConf();
configuration.set(MRJobConfig.MAP_MEMORY_MB, String.valueOf(-1));
configuration.set(MRJobConfig.REDUCE_MEMORY_MB, String.valueOf(-1));
configuration.set("mapred.task.maxvmem", String.valueOf(1 * 1024 * 1024));
Assert.assertEquals(
configuration.getMaxVirtualMemoryForTask(), 1 * 1024 * 1024);
assertThat(configuration.getMaxVirtualMemoryForTask())
.isEqualTo(1 * 1024 * 1024);
configuration = new JobConf();
configuration.set("mapred.task.maxvmem", String.valueOf(1 * 1024 * 1024));
Assert.assertEquals(
configuration.getMaxVirtualMemoryForTask(), 1 * 1024 * 1024);
assertThat(configuration.getMaxVirtualMemoryForTask())
.isEqualTo(1 * 1024 * 1024);
//set test case
configuration = new JobConf();
configuration.setMaxVirtualMemoryForTask(2 * 1024 * 1024);
Assert.assertEquals(configuration.getMemoryForMapTask(), 2);
Assert.assertEquals(configuration.getMemoryForReduceTask(), 2);
assertThat(configuration.getMemoryForMapTask()).isEqualTo(2);
assertThat(configuration.getMemoryForReduceTask()).isEqualTo(2);
configuration = new JobConf();
configuration.set(MRJobConfig.MAP_MEMORY_MB, String.valueOf(300));
configuration.set(MRJobConfig.REDUCE_MEMORY_MB, String.valueOf(400));
configuration.setMaxVirtualMemoryForTask(2 * 1024 * 1024);
Assert.assertEquals(configuration.getMemoryForMapTask(), 2);
Assert.assertEquals(configuration.getMemoryForReduceTask(), 2);
assertThat(configuration.getMemoryForMapTask()).isEqualTo(2);
assertThat(configuration.getMemoryForReduceTask()).isEqualTo(2);
}
/**
@ -386,37 +388,35 @@ public class TestJobConf {
JobConf conf = new JobConf();
// by default
assertEquals(JobPriority.DEFAULT.name(), conf.getJobPriority().name());
assertThat(conf.getJobPriority()).isEqualTo(JobPriority.DEFAULT);
assertEquals(0, conf.getJobPriorityAsInteger());
// Set JobPriority.LOW using old API, and verify output from both getter
conf.setJobPriority(JobPriority.LOW);
assertEquals(JobPriority.LOW.name(), conf.getJobPriority().name());
assertThat(conf.getJobPriority()).isEqualTo(JobPriority.LOW);
assertEquals(2, conf.getJobPriorityAsInteger());
// Set JobPriority.VERY_HIGH using old API, and verify output
conf.setJobPriority(JobPriority.VERY_HIGH);
assertEquals(JobPriority.VERY_HIGH.name(), conf.getJobPriority().name());
assertThat(conf.getJobPriority()).isEqualTo(JobPriority.VERY_HIGH);
assertEquals(5, conf.getJobPriorityAsInteger());
// Set 3 as priority using new API, and verify output from both getter
conf.setJobPriorityAsInteger(3);
assertEquals(JobPriority.NORMAL.name(), conf.getJobPriority().name());
assertThat(conf.getJobPriority()).isEqualTo(JobPriority.NORMAL);
assertEquals(3, conf.getJobPriorityAsInteger());
// Set 4 as priority using new API, and verify output
conf.setJobPriorityAsInteger(4);
assertEquals(JobPriority.HIGH.name(), conf.getJobPriority().name());
assertThat(conf.getJobPriority()).isEqualTo(JobPriority.HIGH);
assertEquals(4, conf.getJobPriorityAsInteger());
// Now set some high integer values and verify output from old api
conf.setJobPriorityAsInteger(57);
assertEquals(JobPriority.UNDEFINED_PRIORITY.name(), conf.getJobPriority()
.name());
assertThat(conf.getJobPriority()).isEqualTo(JobPriority.UNDEFINED_PRIORITY);
assertEquals(57, conf.getJobPriorityAsInteger());
// Error case where UNDEFINED_PRIORITY is set explicitly
conf.setJobPriority(JobPriority.UNDEFINED_PRIORITY);
assertEquals(JobPriority.UNDEFINED_PRIORITY.name(), conf.getJobPriority()
.name());
assertThat(conf.getJobPriority()).isEqualTo(JobPriority.UNDEFINED_PRIORITY);
// As UNDEFINED_PRIORITY cannot be mapped to any integer value, resetting
// to default as 0.

View File

@ -30,9 +30,10 @@ import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.JobStatus.State;
import org.junit.Assert;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
/**
* test class JobInfo
@ -69,9 +70,9 @@ public class TestJobInfo {
new org.apache.hadoop.mapred.TaskReport(tid1, 0.0f,
State.FAILED.toString(), null, TIPStatus.FAILED, 100, 100,
new org.apache.hadoop.mapred.Counters());
Assert
.assertEquals(treport.getTaskId(), "task_1014873536921_0006_m_000000");
Assert.assertEquals(treport.getTaskID().toString(),
"task_1014873536921_0006_m_000000");
assertThat(treport.getTaskId()).isEqualTo(
"task_1014873536921_0006_m_000000");
assertThat(treport.getTaskID().toString()).isEqualTo(
"task_1014873536921_0006_m_000000");
}
}

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.mapred;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
@ -565,7 +566,7 @@ public class TestLineRecordReader {
reader = new LineRecordReader(conf, split, recordDelimiterBytes);
// Get first record: "abcd|efgh" always possible
assertTrue("Expected record got nothing", reader.next(key, value));
assertTrue("abcd|efgh".equals(value.toString()));
assertThat(value.toString()).isEqualTo("abcd|efgh");
assertEquals("Wrong position after record read", 9, value.getLength());
// Position should be 12 right after "|+|"
int recordPos = 12;
@ -574,7 +575,7 @@ public class TestLineRecordReader {
// get the next record: "ij|kl" if the split/buffer allows it
if (reader.next(key, value)) {
// check the record info: "ij|kl"
assertTrue("ij|kl".equals(value.toString()));
assertThat(value.toString()).isEqualTo("ij|kl");
// Position should be 20 right after "|+|"
recordPos = 20;
assertEquals("Wrong position after record read", recordPos,
@ -583,7 +584,7 @@ public class TestLineRecordReader {
// get the third record: "mno|pqr" if the split/buffer allows it
if (reader.next(key, value)) {
// check the record info: "mno|pqr"
assertTrue("mno|pqr".equals(value.toString()));
assertThat(value.toString()).isEqualTo("mno|pqr");
// Position should be 27 at the end of the string now
recordPos = inputData.length();
assertEquals("Wrong position after record read", recordPos,

View File

@ -20,7 +20,8 @@ package org.apache.hadoop.mapred;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import static org.junit.Assert.assertEquals;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
import org.junit.Test;
@ -46,6 +47,6 @@ public class TestMaster {
// Change master address to a valid value
conf.set(MRConfig.MASTER_ADDRESS, "bar.com:8042");
String masterHostname = Master.getMasterAddress(conf);
assertEquals(masterHostname, "bar.com");
assertThat(masterHostname).isEqualTo("bar.com");
}
}

View File

@ -27,6 +27,8 @@ import java.io.IOException;
import org.apache.hadoop.mapred.TaskCompletionEvent.Status;
import org.apache.hadoop.mapreduce.TaskType;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.*;
/**
@ -44,14 +46,14 @@ public class TestOldMethodsJobID {
public void testDepricatedMethods() throws IOException {
JobID jid = new JobID();
TaskID test = new TaskID(jid, true, 1);
assertEquals(test.getTaskType(), TaskType.MAP);
assertThat(test.getTaskType()).isEqualTo(TaskType.MAP);
test = new TaskID(jid, false, 1);
assertEquals(test.getTaskType(), TaskType.REDUCE);
assertThat(test.getTaskType()).isEqualTo(TaskType.REDUCE);
test = new TaskID("001", 1, false, 1);
assertEquals(test.getTaskType(), TaskType.REDUCE);
assertThat(test.getTaskType()).isEqualTo(TaskType.REDUCE);
test = new TaskID("001", 1, true, 1);
assertEquals(test.getTaskType(), TaskType.MAP);
assertThat(test.getTaskType()).isEqualTo(TaskType.MAP);
ByteArrayOutputStream out = new ByteArrayOutputStream();
test.write(new DataOutputStream(out));
@ -100,9 +102,9 @@ public class TestOldMethodsJobID {
assertEquals(Status.OBSOLETE.toString(), testEl.getStatus().toString());
testEl.setTaskRunTime(20);
assertEquals(testEl.getTaskRunTime(), 20);
assertThat(testEl.getTaskRunTime()).isEqualTo(20);
testEl.setEventId(16);
assertEquals(testEl.getEventId(), 16);
assertThat(testEl.getEventId()).isEqualTo(16);
}

View File

@ -35,6 +35,7 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
@ -71,7 +72,7 @@ public class TestQueue {
manager.setSchedulerInfo("first", "queueInfo");
manager.setSchedulerInfo("second", "queueInfoqueueInfo");
Queue root = manager.getRoot();
assertTrue(root.getChildren().size() == 2);
assertThat(root.getChildren().size()).isEqualTo(2);
Iterator<Queue> iterator = root.getChildren().iterator();
Queue firstSubQueue = iterator.next();
assertEquals("first", firstSubQueue.getName());
@ -81,11 +82,15 @@ public class TestQueue {
"Users [user1, user2] and members of the groups [group1, group2] are allowed");
Queue secondSubQueue = iterator.next();
assertEquals("second", secondSubQueue.getName());
assertEquals(secondSubQueue.getProperties().getProperty("key"), "value");
assertEquals(secondSubQueue.getProperties().getProperty("key1"), "value1");
assertThat(secondSubQueue.getProperties().getProperty("key"))
.isEqualTo("value");
assertThat(secondSubQueue.getProperties().getProperty("key1"))
.isEqualTo("value1");
// test status
assertEquals(firstSubQueue.getState().getStateName(), "running");
assertEquals(secondSubQueue.getState().getStateName(), "stopped");
assertThat(firstSubQueue.getState().getStateName())
.isEqualTo("running");
assertThat(secondSubQueue.getState().getStateName())
.isEqualTo("stopped");
Set<String> template = new HashSet<String>();
template.add("first");
@ -105,7 +110,7 @@ public class TestQueue {
assertTrue(manager.hasAccess("first", QueueACL.ADMINISTER_JOBS, mockUGI));
QueueAclsInfo[] qai = manager.getQueueAcls(mockUGI);
assertEquals(qai.length, 1);
assertThat(qai.length).isEqualTo(1);
// test refresh queue
manager.refreshQueues(getConfiguration(), null);
@ -113,21 +118,28 @@ public class TestQueue {
Queue firstSubQueue1 = iterator.next();
Queue secondSubQueue1 = iterator.next();
// test equals method
assertTrue(firstSubQueue.equals(firstSubQueue1));
assertEquals(firstSubQueue1.getState().getStateName(), "running");
assertEquals(secondSubQueue1.getState().getStateName(), "stopped");
assertThat(firstSubQueue).isEqualTo(firstSubQueue1);
assertThat(firstSubQueue1.getState().getStateName())
.isEqualTo("running");
assertThat(secondSubQueue1.getState().getStateName())
.isEqualTo("stopped");
assertEquals(firstSubQueue1.getSchedulingInfo(), "queueInfo");
assertEquals(secondSubQueue1.getSchedulingInfo(), "queueInfoqueueInfo");
assertThat(firstSubQueue1.getSchedulingInfo())
.isEqualTo("queueInfo");
assertThat(secondSubQueue1.getSchedulingInfo())
.isEqualTo("queueInfoqueueInfo");
// test JobQueueInfo
assertEquals(firstSubQueue.getJobQueueInfo().getQueueName(), "first");
assertEquals(firstSubQueue.getJobQueueInfo().getQueueState(), "running");
assertEquals(firstSubQueue.getJobQueueInfo().getSchedulingInfo(),
"queueInfo");
assertEquals(secondSubQueue.getJobQueueInfo().getChildren().size(), 0);
assertThat(firstSubQueue.getJobQueueInfo().getQueueName())
.isEqualTo("first");
assertThat(firstSubQueue.getJobQueueInfo().getState().toString())
.isEqualTo("running");
assertThat(firstSubQueue.getJobQueueInfo().getSchedulingInfo())
.isEqualTo("queueInfo");
assertThat(secondSubQueue.getJobQueueInfo().getChildren().size())
.isEqualTo(0);
// test
assertEquals(manager.getSchedulerInfo("first"), "queueInfo");
assertThat(manager.getSchedulerInfo("first")).isEqualTo("queueInfo");
Set<String> queueJobQueueInfos = new HashSet<String>();
for(JobQueueInfo jobInfo : manager.getJobQueueInfos()){
queueJobQueueInfos.add(jobInfo.getQueueName());
@ -138,8 +150,8 @@ public class TestQueue {
}
assertEquals(queueJobQueueInfos, rootJobQueueInfos);
// test getJobQueueInfoMapping
assertEquals(
manager.getJobQueueInfoMapping().get("first").getQueueName(), "first");
assertThat(manager.getJobQueueInfoMapping().get("first").getQueueName())
.isEqualTo("first");
// test dumpConfiguration
Writer writer = new StringWriter();
@ -185,7 +197,7 @@ public class TestQueue {
@Test (timeout=5000)
public void testDefaultConfig() {
QueueManager manager = new QueueManager(true);
assertEquals(manager.getRoot().getChildren().size(), 2);
assertThat(manager.getRoot().getChildren().size()).isEqualTo(2);
}
/**
@ -209,27 +221,27 @@ public class TestQueue {
Iterator<Queue> iterator = root.getChildren().iterator();
Queue firstSubQueue = iterator.next();
assertEquals("first", firstSubQueue.getName());
assertEquals(
assertThat(
firstSubQueue.getAcls().get("mapred.queue.first.acl-submit-job")
.toString(),
"Users [user1, user2] and members of the groups [group1, group2] are allowed");
.toString()).isEqualTo(
"Users [user1, user2] and members of " +
"the groups [group1, group2] are allowed");
Queue secondSubQueue = iterator.next();
assertEquals("second", secondSubQueue.getName());
assertEquals(firstSubQueue.getState().getStateName(), "running");
assertEquals(secondSubQueue.getState().getStateName(), "stopped");
assertThat(firstSubQueue.getState().getStateName()).isEqualTo("running");
assertThat(secondSubQueue.getState().getStateName()).isEqualTo("stopped");
assertTrue(manager.isRunning("first"));
assertFalse(manager.isRunning("second"));
assertEquals(firstSubQueue.getSchedulingInfo(), "queueInfo");
assertEquals(secondSubQueue.getSchedulingInfo(), "queueInfoqueueInfo");
// test leaf queue
assertThat(firstSubQueue.getSchedulingInfo()).isEqualTo("queueInfo");
assertThat(secondSubQueue.getSchedulingInfo())
.isEqualTo("queueInfoqueueInfo");
// test leaf queue
Set<String> template = new HashSet<String>();
template.add("first");
template.add("second");
assertEquals(manager.getLeafQueueNames(), template);
}
/**
* write configuration

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.mapred;
import static org.junit.Assert.assertEquals;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@ -58,7 +58,7 @@ public class TestTaskLog {
// test TaskLog
System.setProperty(
YarnConfiguration.YARN_APP_CONTAINER_LOG_DIR, "testString");
assertEquals(TaskLog.getMRv2LogDir(), "testString");
assertThat(TaskLog.getMRv2LogDir()).isEqualTo("testString");
TaskAttemptID taid = mock(TaskAttemptID.class);
JobID jid = new JobID("job", 1);
@ -137,7 +137,7 @@ public class TestTaskLog {
// test TaskLog
assertEquals(TaskLog.getMRv2LogDir(), null);
assertThat(TaskLog.getMRv2LogDir()).isNull();
TaskAttemptID taid = mock(TaskAttemptID.class);
JobID jid = new JobID("job", 1);

View File

@ -29,7 +29,10 @@ import org.apache.log4j.PatternLayout;
import org.apache.log4j.Priority;
import org.apache.log4j.spi.LoggingEvent;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class TestTaskLogAppender {
/**
@ -43,9 +46,9 @@ public class TestTaskLogAppender {
System.setProperty(TaskLogAppender.TASKID_PROPERTY,"attempt_01_02_m03_04_001");
System.setProperty(TaskLogAppender.LOGSIZE_PROPERTY, "1003");
appender.activateOptions();
assertEquals(appender.getTaskId(), "attempt_01_02_m03_04_001");
assertEquals(appender.getTotalLogFileSize(),1000);
assertEquals(appender.getIsCleanup(),false);
assertThat(appender.getTaskId()).isEqualTo("attempt_01_02_m03_04_001");
assertThat(appender.getTotalLogFileSize()).isEqualTo(1000);
assertFalse(appender.getIsCleanup());
// test writer
Writer writer= new StringWriter();
@ -63,7 +66,7 @@ public class TestTaskLogAppender {
appender= new TaskLogAppender();
appender.setIsCleanup(true);
appender.activateOptions();
assertEquals(appender.getIsCleanup(),true);
assertTrue(appender.getIsCleanup());
}

View File
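
Beyond reordering, hunks like the TaskLogAppender one above replace equality checks against boolean literals — assertEquals(appender.getIsCleanup(), false) — with dedicated boolean assertions. A small sketch of the idiom, with a hypothetical flag standing in for the appender call:

import static org.assertj.core.api.Assertions.assertThat;

public class BooleanAssertExample {
  public void demo() {
    boolean isCleanup = false;  // stands in for appender.getIsCleanup()
    // Instead of assertEquals(isCleanup, false):
    assertThat(isCleanup).isFalse();
    isCleanup = true;
    // Instead of assertEquals(isCleanup, true):
    assertThat(isCleanup).isTrue();
  }
}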

@ -37,6 +37,8 @@ import org.apache.hadoop.util.ExitUtil;
import org.junit.Assert;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class TestTaskProgressReporter {
private static int statusUpdateTimes = 0;
@ -262,7 +264,7 @@ public class TestTaskProgressReporter {
task.setTaskDone();
reporter.resetDoneFlag();
t.join();
Assert.assertEquals(statusUpdateTimes, 2);
assertThat(statusUpdateTimes).isEqualTo(2);
}
@Test(timeout=10000)

View File

@ -58,6 +58,7 @@ import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
@ -233,7 +234,7 @@ public class TestFileOutputCommitter {
expectedOutput.append(key1).append("\n");
expectedOutput.append(key2).append('\t').append(val2).append("\n");
String output = slurp(expectedFile);
assertEquals(output, expectedOutput.toString());
assertThat(output).isEqualTo(expectedOutput.toString());
}
private void validateMapFileOutputContent(

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.mapreduce.security;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.*;
@ -115,7 +116,7 @@ public class TestTokenCache {
// get token for fs3, should only add token for fs3
TokenCache.obtainTokensForNamenodesInternal(fs3, creds, conf, renewer);
Token<?> token3 = creds.getToken(new Text(fs3.getCanonicalServiceName()));
assertTrue(token3 != null);
assertThat(token3).isNotNull();
checkToken(creds, newerToken1, token2, token3);
// be paranoid, check one last time that nothing changes
@ -129,7 +130,7 @@ public class TestTokenCache {
assertEquals(tokens.length, creds.getAllTokens().size());
for (Token<?> token : tokens) {
Token<?> credsToken = creds.getToken(token.getService());
assertTrue(credsToken != null);
assertThat(credsToken).isNotNull();
assertEquals(token, credsToken);
}
}

View File

@ -27,11 +27,11 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.rules.TestName;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
@ -182,12 +182,12 @@ public class TestFetcher {
except, key, connection);
fetcher.copyFromHost(host);
Assert.assertEquals("No host failure is expected.",
ss.hostFailureCount(host.getHostName()), 0);
Assert.assertEquals("No fetch failure is expected.",
ss.fetchFailureCount(map1ID), 0);
Assert.assertEquals("No fetch failure is expected.",
ss.fetchFailureCount(map2ID), 0);
assertThat(ss.hostFailureCount(host.getHostName()))
.withFailMessage("No host failure is expected.").isEqualTo(0);
assertThat(ss.fetchFailureCount(map1ID))
.withFailMessage("No fetch failure is expected.").isEqualTo(0);
assertThat(ss.fetchFailureCount(map2ID))
.withFailMessage("No fetch failure is expected.").isEqualTo(0);
verify(ss).penalize(eq(host), anyLong());
verify(ss).putBackKnownMapOutput(any(MapHost.class), eq(map1ID));

View File
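
Where the old assertions carried a descriptive message, the patch moves it into withFailMessage(...), as in the TestFetcher hunk above. One detail worth knowing: withFailMessage replaces AssertJ's generated failure message entirely, whereas as(...) prefixes a description while keeping the computed expected/actual details. A sketch with a hypothetical counter in place of ss.hostFailureCount(...):

import static org.assertj.core.api.Assertions.assertThat;

public class FailMessageExample {
  public void demo() {
    int hostFailures = 0;  // stands in for ss.hostFailureCount(host)
    // Overrides the whole failure message on mismatch:
    assertThat(hostFailures)
        .withFailMessage("No host failure is expected.")
        .isEqualTo(0);
    // Alternative: as() keeps the expected/actual details and adds
    // the description in front of them.
    assertThat(hostFailures)
        .as("host failure count")
        .isEqualTo(0);
  }
}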

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.mapreduce.task.reduce;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
@ -78,7 +79,7 @@ public class TestMergeManager {
// next reservation should be a WAIT
MapOutput<Text, Text> out3 = mgr.reserve(null, OUTPUT_SIZE, 0);
Assert.assertEquals("Should be told to wait", null, out3);
assertThat(out3).withFailMessage("Should be told to wait").isNull();
// trigger the first merge and wait for merge thread to start merging
// and free enough output to reserve more
@ -102,7 +103,7 @@ public class TestMergeManager {
// next reservation should be null
out3 = mgr.reserve(null, OUTPUT_SIZE, 0);
Assert.assertEquals("Should be told to wait", null, out3);
assertThat(out3).withFailMessage("Should be told to wait").isNull();
// commit output *before* merge thread completes
mout1.commit();

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.mapreduce.task.reduce;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
@ -181,8 +182,10 @@ public class TestMerger {
readOnDiskMapOutput(conf, fs, next, keys, values);
paths.add(next);
}
Assert.assertEquals(keys, Arrays.asList("apple", "banana", "carrot", "apple", "banana", "carrot"));
Assert.assertEquals(values, Arrays.asList("awesome", "bla", "amazing", "disgusting", "pretty good", "delicious"));
assertThat(keys).isEqualTo(Arrays.asList("apple", "banana", "carrot",
"apple", "banana", "carrot"));
assertThat(values).isEqualTo(Arrays.asList("awesome", "bla", "amazing",
"disgusting", "pretty good", "delicious"));
mergeManager.close();
mergeManager = new MergeManagerImpl<Text, Text>(
@ -197,8 +200,10 @@ public class TestMerger {
keys = new ArrayList<String>();
values = new ArrayList<String>();
readOnDiskMapOutput(conf, fs, mergeManager.onDiskMapOutputs.iterator().next(), keys, values);
Assert.assertEquals(keys, Arrays.asList("apple", "apple", "banana", "banana", "carrot", "carrot"));
Assert.assertEquals(values, Arrays.asList("awesome", "disgusting", "pretty good", "bla", "amazing", "delicious"));
assertThat(keys).isEqualTo(Arrays.asList("apple", "apple", "banana",
"banana", "carrot", "carrot"));
assertThat(values).isEqualTo(Arrays.asList("awesome", "disgusting",
"pretty good", "bla", "amazing", "delicious"));
mergeManager.close();
Assert.assertEquals(0, mergeManager.inMemoryMapOutputs.size());

View File

@ -70,6 +70,11 @@
<groupId>org.fusesource.leveldbjni</groupId>
<artifactId>leveldbjni-all</artifactId>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.mapreduce.v2.hs;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@ -181,10 +182,10 @@ public class TestJHSDelegationTokenSecretManager {
JHSDelegationTokenSecretManagerForTest mgr) {
mgr.stopThreads();
mgr.reset();
Assert.assertEquals("Secret manager should not contain keys",
mgr.getAllKeys().length, 0);
Assert.assertEquals("Secret manager should not contain tokens",
mgr.getAllTokens().size(), 0);
assertThat(mgr.getAllKeys().length)
.withFailMessage("Secret manager should not contain keys").isZero();
assertThat(mgr.getAllTokens().size())
.withFailMessage("Secret manager should not contain tokens").isZero();
}
private static class JHSDelegationTokenSecretManagerForTest

View File
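
The secret-manager hunk above also switches to AssertJ's literal-specific matchers — isZero() here, isOne() in TestJobHistory below — which are equivalent to isEqualTo(0) and isEqualTo(1) but read closer to the intent. A short sketch with hypothetical values:

import static org.assertj.core.api.Assertions.assertThat;

import java.util.Collections;
import java.util.List;

public class ConvenienceMatcherExample {
  public void demo() {
    int keyCount = 0;                        // stands in for mgr.getAllKeys().length
    List<String> tokens = Collections.emptyList();
    assertThat(keyCount).isZero();           // same as isEqualTo(0)
    assertThat(keyCount + 1).isOne();        // same as isEqualTo(1)
    assertThat(tokens).isEmpty();            // emptiness for collections/arrays
  }
}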

@ -42,6 +42,7 @@ import org.junit.Test;
import org.mockito.Mockito;
import static junit.framework.TestCase.assertEquals;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
@ -139,7 +140,7 @@ public class TestJobHistory {
.getHistoryStorage());
assertTrue(storage.getUseLoadedTasksCache());
assertEquals(storage.getLoadedTasksCacheSize(), 50);
assertThat(storage.getLoadedTasksCacheSize()).isEqualTo(50);
// Create a bunch of smaller jobs (<< 50 tasks)
Job[] jobs = new Job[10];
@ -202,7 +203,7 @@ public class TestJobHistory {
.getHistoryStorage());
assertTrue(storage.getUseLoadedTasksCache());
assertEquals(storage.getLoadedTasksCacheSize(), 500);
assertThat(storage.getLoadedTasksCacheSize()).isEqualTo(500);
// Create a bunch of large jobs (>> 50 tasks)
Job[] lgJobs = new Job[10];
@ -263,7 +264,7 @@ public class TestJobHistory {
.getHistoryStorage());
assertTrue(storage.getUseLoadedTasksCache());
assertEquals(storage.getLoadedTasksCacheSize(), 1);
assertThat(storage.getLoadedTasksCacheSize()).isOne();
}
@Test
@ -281,7 +282,7 @@ public class TestJobHistory {
.getHistoryStorage());
assertTrue(storage.getUseLoadedTasksCache());
assertEquals(storage.getLoadedTasksCacheSize(), 1);
assertThat(storage.getLoadedTasksCacheSize()).isOne();
}
@Test
@ -300,7 +301,7 @@ public class TestJobHistory {
.getHistoryStorage());
assertTrue(storage.getUseLoadedTasksCache());
assertEquals(storage.getLoadedTasksCacheSize(), 50);
assertThat(storage.getLoadedTasksCacheSize()).isEqualTo(50);
// Create jobs for bad fileInfo results
Job[] jobs = new Job[4];

View File

@ -17,8 +17,6 @@
*/
package org.apache.hadoop.mapreduce.v2.hs;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
@ -51,7 +49,10 @@ import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.apache.hadoop.mapred.TaskCompletionEvent;
import static org.junit.Assert.*;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.*;
@RunWith(value = Parameterized.class)
@ -106,7 +107,7 @@ public class TestJobHistoryEntities {
assertEquals(1, completedJob.getCompletedReduces());
assertEquals(12, completedJob.getTasks().size());
//Verify tasks loaded at this point.
assertEquals(true, completedJob.tasksLoaded.get());
assertThat(completedJob.tasksLoaded.get()).isTrue();
assertEquals(10, completedJob.getTasks(TaskType.MAP).size());
assertEquals(2, completedJob.getTasks(TaskType.REDUCE).size());
assertEquals("user", completedJob.getUserName());

View File

@ -45,6 +45,8 @@ import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.assertj.core.api.Assertions.assertThat;
public class TestJobHistoryEvents {
private static final Logger LOG =
LoggerFactory.getLogger(TestJobHistoryEvents.class);
@ -179,14 +181,16 @@ public class TestJobHistoryEvents {
((JobHistory)context).init(conf);
((JobHistory)context).start();
Assert.assertTrue( context.getStartTime()>0);
Assert.assertEquals(((JobHistory)context).getServiceState(),Service.STATE.STARTED);
assertThat(((JobHistory)context).getServiceState())
.isEqualTo(Service.STATE.STARTED);
// get job before stopping JobHistory
Job parsedJob = context.getJob(jobId);
// stop JobHistory
((JobHistory)context).stop();
Assert.assertEquals(((JobHistory)context).getServiceState(),Service.STATE.STOPPED);
assertThat(((JobHistory)context).getServiceState())
.isEqualTo(Service.STATE.STOPPED);
Assert.assertEquals("QueueName not correct", "assignedQueue",
parsedJob.getQueueName());

View File

@ -20,6 +20,7 @@ package org.apache.hadoop.mapreduce.v2.hs;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic
.NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
@ -449,8 +450,8 @@ public class TestJobHistoryParsing {
TaskAttemptInfo taskAttemptInfo = taskInfo.getAllTaskAttempts().get(
TypeConverter.fromYarn((taskAttempt.getID())));
// Verify rack-name for all task attempts
Assert.assertEquals("rack-name is incorrect",
taskAttemptInfo.getRackname(), RACK_NAME);
assertThat(taskAttemptInfo.getRackname())
.withFailMessage("rack-name is incorrect").isEqualTo(RACK_NAME);
if (taskAttemptInfo.getTaskStatus().equals("FAILED")) {
noOffailedAttempts++;
}

View File

@ -116,6 +116,11 @@
<artifactId>bcpkix-jdk15on</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<profiles>

View File

@ -50,6 +50,8 @@ import org.apache.hadoop.util.StringUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotSame;
@ -103,7 +105,8 @@ public class TestFileSystem {
// This should go to TestFsShell.java when it is added.
CommandFormat cf;
cf= new CommandFormat("copyToLocal", 2,2,"crc","ignoreCrc");
assertEquals(cf.parse(new String[] {"-get","file", "-"}, 1).get(1), "-");
assertThat(cf.parse(new String[] {"-get", "file", "-"}, 1).get(1))
.isEqualTo("-");
try {
cf.parse(new String[] {"-get","file","-ignoreCrc","/foo"}, 1);
fail("Expected parsing to fail as it should stop at first non-option");
@ -112,12 +115,16 @@ public class TestFileSystem {
// Expected
}
cf = new CommandFormat("tail", 1, 1, "f");
assertEquals(cf.parse(new String[] {"-tail","fileName"}, 1).get(0),"fileName");
assertEquals(cf.parse(new String[] {"-tail","-f","fileName"}, 1).get(0),"fileName");
assertThat(cf.parse(new String[] {"-tail", "fileName"}, 1).get(0))
.isEqualTo("fileName");
assertThat(cf.parse(new String[] {"-tail", "-f", "fileName"}, 1).get(0))
.isEqualTo("fileName");
cf = new CommandFormat("setrep", 2, 2, "R", "w");
assertEquals(cf.parse(new String[] {"-setrep","-R","2","/foo/bar"}, 1).get(1), "/foo/bar");
assertThat(cf.parse(new String[] {"-setrep", "-R", "2", "/foo/bar"}, 1)
.get(1)).isEqualTo("/foo/bar");
cf = new CommandFormat("put", 2, 10000);
assertEquals(cf.parse(new String[] {"-put", "-", "dest"}, 1).get(1), "dest");
assertThat(cf.parse(new String[] {"-put", "-", "dest"}, 1).get(1))
.isEqualTo("dest");
}
public static void createControlFile(FileSystem fs,

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.fs.slive;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@ -179,7 +180,7 @@ public class TestSlive {
op.run(fs);
types.add(op.getType());
}
assertEquals(types.size(), expected);
assertThat(types.size()).isEqualTo(expected);
}
// gets the config merged with the arguments
@ -231,24 +232,31 @@ public class TestSlive {
ConfigExtractor extractor = getTestConfig(true);
assertEquals(extractor.getOpCount().intValue(), Constants.OperationType
.values().length);
assertEquals(extractor.getMapAmount().intValue(), 2);
assertEquals(extractor.getReducerAmount().intValue(), 2);
assertThat(extractor.getMapAmount().intValue()).isEqualTo(2);
assertThat(extractor.getReducerAmount().intValue()).isEqualTo(2);
Range<Long> apRange = extractor.getAppendSize();
assertEquals(apRange.getLower().intValue(), Constants.MEGABYTES * 1);
assertEquals(apRange.getUpper().intValue(), Constants.MEGABYTES * 2);
assertThat(apRange.getLower().intValue()).isEqualTo(
Constants.MEGABYTES * 1);
assertThat(apRange.getUpper().intValue()).isEqualTo(
Constants.MEGABYTES * 2);
Range<Long> wRange = extractor.getWriteSize();
assertEquals(wRange.getLower().intValue(), Constants.MEGABYTES * 1);
assertEquals(wRange.getUpper().intValue(), Constants.MEGABYTES * 2);
assertThat(wRange.getLower().intValue()).isEqualTo(
Constants.MEGABYTES * 1);
assertThat(wRange.getUpper().intValue()).isEqualTo(
Constants.MEGABYTES * 2);
Range<Long> trRange = extractor.getTruncateSize();
assertEquals(trRange.getLower().intValue(), 0);
assertEquals(trRange.getUpper().intValue(), Constants.MEGABYTES * 1);
assertThat(trRange.getLower().intValue()).isZero();
assertThat(trRange.getUpper().intValue()).isEqualTo(
Constants.MEGABYTES * 1);
Range<Long> bRange = extractor.getBlockSize();
assertEquals(bRange.getLower().intValue(), Constants.MEGABYTES * 1);
assertEquals(bRange.getUpper().intValue(), Constants.MEGABYTES * 2);
assertThat(bRange.getLower().intValue()).isEqualTo(
Constants.MEGABYTES * 1);
assertThat(bRange.getUpper().intValue()).isEqualTo(
Constants.MEGABYTES * 2);
String resfile = extractor.getResultFile();
assertEquals(resfile, getResultFile().toString());
int durationMs = extractor.getDurationMilliseconds();
assertEquals(durationMs, 10 * 1000);
assertThat(durationMs).isEqualTo(10 * 1000);
}
@Test
@ -273,8 +281,8 @@ public class TestSlive {
@Test
public void testRange() {
Range<Long> r = new Range<Long>(10L, 20L);
assertEquals(r.getLower().longValue(), 10L);
assertEquals(r.getUpper().longValue(), 20L);
assertThat(r.getLower().longValue()).isEqualTo(10L);
assertThat(r.getUpper().longValue()).isEqualTo(20L);
}
@Test

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.mapred;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertNotNull;
@ -234,12 +235,12 @@ public class JobClientUnitTest {
//no retry
assertNotNull(client.getJob(id));
assertEquals(client.getLastGetJobRetriesCounter(), 0);
assertThat(client.getLastGetJobRetriesCounter()).isEqualTo(0);
//2 retries
client.setGetJobRetries(2);
assertNotNull(client.getJob(id));
assertEquals(client.getLastGetJobRetriesCounter(), 2);
assertThat(client.getLastGetJobRetriesCounter()).isEqualTo(2);
//beyond yarn.app.mapreduce.client.job.max-retries, will get null
client.setGetJobRetries(3);
@ -260,8 +261,8 @@ public class JobClientUnitTest {
//3 retries (default)
client.setGetJobRetries(MRJobConfig.DEFAULT_MR_CLIENT_JOB_MAX_RETRIES);
assertNotNull(client.getJob(id));
assertEquals(client.getLastGetJobRetriesCounter(),
MRJobConfig.DEFAULT_MR_CLIENT_JOB_MAX_RETRIES);
assertThat(client.getLastGetJobRetriesCounter())
.isEqualTo(MRJobConfig.DEFAULT_MR_CLIENT_JOB_MAX_RETRIES);
//beyond yarn.app.mapreduce.client.job.max-retries, will get null
client.setGetJobRetries(MRJobConfig.DEFAULT_MR_CLIENT_JOB_MAX_RETRIES + 1);

View File

@ -33,6 +33,7 @@ import java.io.DataOutputStream;
import java.io.IOException;
import java.util.concurrent.TimeoutException;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
@ -196,7 +197,7 @@ public class TestMRCJCFileInputFormat {
// Enable multi-level/recursive inputs
job.setBoolean(FileInputFormat.INPUT_DIR_RECURSIVE, true);
InputSplit[] splits = inFormat.getSplits(job, 1);
assertEquals(splits.length, 2);
assertThat(splits.length).isEqualTo(2);
}
@SuppressWarnings("rawtypes")

View File

@ -33,6 +33,7 @@ import java.io.File;
import java.io.IOException;
import java.net.URI;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
@ -112,7 +113,7 @@ public class TestMRCJCFileOutputCommitter {
expectedOutput.append(key1).append("\n");
expectedOutput.append(key2).append('\t').append(val2).append("\n");
String output = UtilsForTests.slurp(expectedFile);
assertEquals(output, expectedOutput.toString());
assertThat(output).isEqualTo(expectedOutput.toString());
}
@Test

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.mapred;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
@ -171,9 +172,10 @@ public class TestMiniMRChildTask {
String javaOpts = job.get(JobConf.MAPRED_TASK_JAVA_OPTS);
assertNotNull(JobConf.MAPRED_TASK_JAVA_OPTS + " is null!",
javaOpts);
assertEquals(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: " +
javaOpts,
javaOpts, TASK_OPTS_VAL);
assertThat(javaOpts)
.withFailMessage(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: "
+ javaOpts)
.isEqualTo(TASK_OPTS_VAL);
} else {
String mapJavaOpts = job.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS);
assertNotNull(JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " is null!",
@ -214,16 +216,18 @@ public class TestMiniMRChildTask {
String javaOpts = job.get(JobConf.MAPRED_TASK_JAVA_OPTS);
assertNotNull(JobConf.MAPRED_TASK_JAVA_OPTS + " is null!",
javaOpts);
assertEquals(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: " +
javaOpts,
javaOpts, TASK_OPTS_VAL);
assertThat(javaOpts)
.withFailMessage(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: "
+ javaOpts)
.isEqualTo(TASK_OPTS_VAL);
} else {
String reduceJavaOpts = job.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS);
assertNotNull(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " is null!",
reduceJavaOpts);
assertEquals(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " has value of: " +
reduceJavaOpts,
reduceJavaOpts, REDUCE_OPTS_VAL);
assertThat(reduceJavaOpts)
.withFailMessage(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS +
" has value of: " + reduceJavaOpts)
.isEqualTo(REDUCE_OPTS_VAL);
}
// check if X=y works for an already existing parameter

View File

@ -25,11 +25,12 @@ import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Arrays;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@ -64,8 +65,8 @@ public class TestMultiFileSplit {
assertTrue(split.getLength() != 0);
assertEquals(split.getLength(), readSplit.getLength());
assertTrue(Arrays.equals(split.getPaths(), readSplit.getPaths()));
assertTrue(Arrays.equals(split.getLengths(), readSplit.getLengths()));
assertThat(readSplit.getPaths()).containsExactly(split.getPaths());
assertThat(readSplit.getLengths()).containsExactly(split.getLengths());
System.out.println(split.toString());
}
@ -88,7 +89,7 @@ public class TestMultiFileSplit {
MultiFileSplit split = new MultiFileSplit(job,path,lengths);
String [] locations= split.getLocations();
assertTrue(locations.length==1);
assertEquals(locations[0], "localhost");
assertThat(locations.length).isOne();
assertThat(locations[0]).isEqualTo("localhost");
}
}

View File
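
In the TestMultiFileSplit hunk above, array comparisons that previously went through assertTrue(Arrays.equals(...)) become containsExactly(...), which verifies both contents and order and, on failure, reports the first differing element instead of a bare boolean mismatch. A sketch with hypothetical split lengths:

import static org.assertj.core.api.Assertions.assertThat;

public class ArrayAssertExample {
  public void demo() {
    long[] expected = {10L, 20L};  // stands in for split.getLengths()
    long[] actual = {10L, 20L};    // stands in for readSplit.getLengths()
    // Instead of assertTrue(Arrays.equals(expected, actual)):
    assertThat(actual).containsExactly(expected);
  }
}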

@ -27,7 +27,7 @@ import org.junit.Test;
import java.io.File;
import java.io.IOException;
import static org.junit.Assert.assertEquals;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
public class TestMultipleTextOutputFormat {
@ -112,7 +112,7 @@ public class TestMultipleTextOutputFormat {
}
String output = UtilsForTests.slurp(expectedFile_11);
//System.out.printf("File_2 output: %s\n", output);
assertEquals(output, expectedOutput.toString());
assertThat(output).isEqualTo(expectedOutput.toString());
String file_12 = "2-part-00000";
@ -124,7 +124,7 @@ public class TestMultipleTextOutputFormat {
}
output = UtilsForTests.slurp(expectedFile_12);
//System.out.printf("File_2 output: %s\n", output);
assertEquals(output, expectedOutput.toString());
assertThat(output).isEqualTo(expectedOutput.toString());
String file_13 = "3-part-00000";
@ -136,7 +136,7 @@ public class TestMultipleTextOutputFormat {
}
output = UtilsForTests.slurp(expectedFile_13);
//System.out.printf("File_2 output: %s\n", output);
assertEquals(output, expectedOutput.toString());
assertThat(output).isEqualTo(expectedOutput.toString());
String file_2 = "2/3";
@ -148,6 +148,6 @@ public class TestMultipleTextOutputFormat {
}
output = UtilsForTests.slurp(expectedFile_2);
//System.out.printf("File_2 output: %s\n", output);
assertEquals(output, expectedOutput.toString());
assertThat(output).isEqualTo(expectedOutput.toString());
}
}

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.mapred;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
@ -161,10 +162,10 @@ public class TestNetworkedJob {
assertTrue(runningJob.getJobFile().endsWith(
".staging/" + runningJob.getJobID() + "/job.xml"));
assertTrue(runningJob.getTrackingURL().length() > 0);
assertTrue(runningJob.mapProgress() == 0.0f);
assertTrue(runningJob.reduceProgress() == 0.0f);
assertTrue(runningJob.cleanupProgress() == 0.0f);
assertTrue(runningJob.setupProgress() == 0.0f);
assertThat(runningJob.mapProgress()).isEqualTo(0.0f);
assertThat(runningJob.reduceProgress()).isEqualTo(0.0f);
assertThat(runningJob.cleanupProgress()).isEqualTo(0.0f);
assertThat(runningJob.setupProgress()).isEqualTo(0.0f);
TaskCompletionEvent[] tce = runningJob.getTaskCompletionEvents(0);
assertEquals(tce.length, 0);

View File

@ -34,8 +34,8 @@ import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class TestSequenceFileAsBinaryOutputFormat {
@ -126,10 +126,10 @@ public class TestSequenceFileAsBinaryOutputFormat {
"Keys don't match: " + "*" + iwritable.get() + ":" +
sourceInt + "*",
sourceInt, iwritable.get());
assertTrue(
assertThat(dwritable.get()).withFailMessage(
"Vals don't match: " + "*" + dwritable.get() + ":" +
sourceDouble + "*",
Double.compare(dwritable.get(), sourceDouble) == 0 );
sourceDouble + "*")
.isEqualTo(sourceDouble);
++count;
}
} finally {

View File

@ -30,6 +30,7 @@ import org.junit.Test;
import java.io.IOException;
import java.util.Random;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
public class TestSequenceFileInputFilter {
@ -147,7 +148,7 @@ public class TestSequenceFileInputFilter {
int expectedCount = length/1000;
if (expectedCount*1000!=length)
expectedCount++;
assertEquals(count, expectedCount);
assertThat(count).isEqualTo(expectedCount);
}
// clean up

View File

@ -23,6 +23,7 @@ import org.apache.hadoop.mapred.StatisticsCollector.TimeWindow;
import org.apache.hadoop.mapred.StatisticsCollector.Stat;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
@ -88,14 +89,14 @@ public class TestStatisticsCollector {
// test Stat class
Map updaters= collector.getUpdaters();
assertEquals(updaters.size(),2);
assertThat(updaters.size()).isEqualTo(2);
Map<String, Stat> statistics = collector.getStatistics();
assertNotNull(statistics.get("m1"));
Stat newStat= collector.createStat("m2");
assertEquals(newStat.name, "m2");
assertThat(newStat.name).isEqualTo("m2");
Stat st=collector.removeStat("m1");
assertEquals(st.name, "m1");
assertThat(st.name).isEqualTo("m1");
assertEquals((10+10+10+12+13+14), stat.getValues().get(window).getValue());
assertEquals(95, stat.getValues().get(sincStart).getValue());
st=collector.removeStat("m1");

View File

@ -19,6 +19,7 @@
package org.apache.hadoop.mapred;
import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
@ -617,9 +618,9 @@ public class TestYARNRunner {
ApplicationSubmissionContext appSubCtx =
buildSubmitContext(yarnRunner, jobConf);
assertEquals(appSubCtx.getNodeLabelExpression(), "GPU");
assertEquals(appSubCtx.getAMContainerResourceRequests().get(0)
.getNodeLabelExpression(), "highMem");
assertThat(appSubCtx.getNodeLabelExpression()).isEqualTo("GPU");
assertThat(appSubCtx.getAMContainerResourceRequests().get(0)
.getNodeLabelExpression()).isEqualTo("highMem");
}
@Test

View File

@ -17,12 +17,12 @@
*/
package org.apache.hadoop.mapred.lib;
import org.junit.Assert;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reducer;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class TestChain {
@Test
public void testSetReducerWithReducerByValueAsTrue() throws Exception {
@ -33,8 +33,9 @@ public class TestChain {
Object.class, Object.class, true, reducerConf);
boolean reduceByValue = reducerConf.getBoolean("chain.reducer.byValue",
false);
Assert.assertEquals("It should set chain.reducer.byValue as true "
+ "in reducerConf when we give value as true", true, reduceByValue);
assertThat(reduceByValue).withFailMessage(
"It should set chain.reducer.byValue as true in "
+ "reducerConf when we give value as true").isTrue();
}
@Test
@ -46,8 +47,9 @@ public class TestChain {
Object.class, Object.class, false, reducerConf);
boolean reduceByValue = reducerConf.getBoolean("chain.reducer.byValue",
true);
Assert.assertEquals("It should set chain.reducer.byValue as false "
+ "in reducerConf when we give value as false", false, reduceByValue);
assertThat(reduceByValue).withFailMessage(
"It should set chain.reducer.byValue as false "
+ "in reducerConf when we give value as false").isFalse();
}
interface MyReducer extends Reducer<Object, Object, Object, Object> {

View File

@ -137,7 +137,7 @@ public class TestPipeApplication {
if (psw != null) {
// remove password files
for (File file : psw) {
file.deleteOnExit();
file.delete();
}
}
@ -231,7 +231,7 @@ public class TestPipeApplication {
if (psw != null) {
// remove password files
for (File file : psw) {
file.deleteOnExit();
file.delete();
}
}
}
@ -328,7 +328,7 @@ public class TestPipeApplication {
if (psw != null) {
// remove password files
for (File file : psw) {
file.deleteOnExit();
file.delete();
}
}
}
@ -428,7 +428,7 @@ public class TestPipeApplication {
if (psw != null) {
// remove password files
for (File file : psw) {
file.deleteOnExit();
file.delete();
}
}
}

View File
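
The TestPipeApplication change above is different in kind: the cleanup loops now call file.delete() instead of file.deleteOnExit(). deleteOnExit() only registers the file for removal at JVM termination, so in a long-lived test JVM (for example, a reused surefire fork) the password files would pile up until exit; delete() removes them as soon as the test finishes. A sketch of the cleanup idiom, with a hypothetical helper name:

import java.io.File;

public class CleanupExample {
  public void cleanUp(File[] passwordFiles) {  // hypothetical helper
    if (passwordFiles != null) {
      for (File file : passwordFiles) {
        // Delete immediately rather than deferring to JVM shutdown.
        // The patch ignores the boolean result; checking it would be
        // slightly safer but matches the original behaviour less closely.
        file.delete();
      }
    }
  }
}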

@ -62,6 +62,7 @@ import org.mockito.MockitoAnnotations;
import com.google.common.collect.HashMultiset;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@ -1661,7 +1662,7 @@ public class TestCombineFileInputFormat {
for (InputSplit split : splits) {
System.out.println("File split(Test0): " + split);
}
assertEquals(splits.size(), 1);
assertThat(splits.size()).isEqualTo(1);
CombineFileSplit fileSplit = (CombineFileSplit) splits.get(0);
assertEquals(2, fileSplit.getNumPaths());
assertEquals(1, fileSplit.getLocations().length);
@ -1788,7 +1789,7 @@ public class TestCombineFileInputFormat {
for (InputSplit s : splits) {
CombineFileSplit cfs = (CombineFileSplit)s;
for (Path p : cfs.getPaths()) {
assertEquals(p.toUri().getScheme(), "file");
assertThat(p.toUri().getScheme()).isEqualTo("file");
}
}
}

View File

@ -39,6 +39,7 @@ import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Random;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
public class TestMRSequenceFileInputFilter {
@ -166,7 +167,7 @@ public class TestMRSequenceFileInputFilter {
int expectedCount = length / 1000;
if (expectedCount * 1000 != length)
expectedCount++;
assertEquals(count, expectedCount);
assertThat(count).isEqualTo(expectedCount);
}
// clean up

View File

@ -33,6 +33,7 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@ -181,7 +182,8 @@ public class TestJoinTupleWritable {
ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
TupleWritable dTuple = new TupleWritable();
dTuple.readFields(new DataInputStream(in));
assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
assertThat(dTuple).withFailMessage("Failed to write/read tuple")
.isEqualTo(sTuple);
assertEquals("All tuple data has not been read from the stream",
-1, in.read());
}
@ -200,7 +202,8 @@ public class TestJoinTupleWritable {
ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
TupleWritable dTuple = new TupleWritable();
dTuple.readFields(new DataInputStream(in));
assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
assertThat(dTuple).withFailMessage("Failed to write/read tuple")
.isEqualTo(sTuple);
assertEquals("All tuple data has not been read from the stream",
-1, in.read());
}
@ -225,7 +228,8 @@ public class TestJoinTupleWritable {
ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
TupleWritable dTuple = new TupleWritable();
dTuple.readFields(new DataInputStream(in));
assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
assertThat(dTuple).withFailMessage("Failed to write/read tuple")
.isEqualTo(sTuple);
assertEquals("All tuple data has not been read from the stream",
-1, in.read());
}

View File

@ -24,7 +24,11 @@ import java.net.URI;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
@ -123,7 +127,7 @@ public class TestMRCJCFileOutputCommitter {
expectedOutput.append(key1).append("\n");
expectedOutput.append(key2).append('\t').append(val2).append("\n");
String output = UtilsForTests.slurp(expectedFile);
assertEquals(output, expectedOutput.toString());
assertThat(output).isEqualTo(expectedOutput.toString());
FileUtil.fullyDelete(new File(outDir.toString()));
}
@ -180,8 +184,8 @@ public class TestMRCJCFileOutputCommitter {
expectedFile = new File(new Path(outDir, FileOutputCommitter.PENDING_DIR_NAME)
.toString());
assertFalse("job temp dir still exists", expectedFile.exists());
assertEquals("Output directory not empty", 0, new File(outDir.toString())
.listFiles().length);
assertThat(new File(outDir.toString())
.listFiles()).withFailMessage("Output directory not empty").isEmpty();
FileUtil.fullyDelete(new File(outDir.toString()));
}

View File

@ -48,8 +48,8 @@ import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Random;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class TestMRSequenceFileAsBinaryOutputFormat {
@ -142,10 +142,9 @@ public class TestMRSequenceFileAsBinaryOutputFormat {
"Keys don't match: " + "*" + iwritable.get() + ":" +
sourceInt + "*",
sourceInt, iwritable.get());
assertTrue(
assertThat(dwritable.get()).withFailMessage(
"Vals don't match: " + "*" + dwritable.get() + ":" +
sourceDouble + "*",
Double.compare(dwritable.get(), sourceDouble) == 0 );
sourceDouble + "*").isEqualTo(sourceDouble);
++count;
}
} finally {

View File

@ -17,7 +17,7 @@
package org.apache.hadoop.mapreduce.security;
import static org.junit.Assert.assertEquals;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
import java.io.File;
@ -131,7 +131,7 @@ public class TestMRCredentials {
e.printStackTrace(System.out);
fail("Job failed");
}
assertEquals("dist job res is not 0", res, 0);
assertThat(res).withFailMessage("dist job res is not 0").isEqualTo(0);
}
}

View File

@ -29,6 +29,7 @@ import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
@ -330,8 +331,8 @@ public class TestMRAsyncDiskService {
File toBeDeletedDir = new File(vols[0], MRAsyncDiskService.TOBEDELETED);
String[] content = toBeDeletedDir.list();
assertNotNull("Cannot find " + toBeDeletedDir, content);
assertEquals("" + toBeDeletedDir + " should be empty now.", 0,
content.length);
assertThat(content).withFailMessage(
toBeDeletedDir.toString() + " should be empty now.").isEmpty();
}
}

View File

@ -105,6 +105,8 @@ import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.assertj.core.api.Assertions.assertThat;
public class TestMRJobs {
private static final Logger LOG = LoggerFactory.getLogger(TestMRJobs.class);
@ -433,17 +435,17 @@ public class TestMRJobs {
job.setPriority(JobPriority.HIGH);
waitForPriorityToUpdate(job, JobPriority.HIGH);
// Verify the priority from job itself
Assert.assertEquals(job.getPriority(), JobPriority.HIGH);
assertThat(job.getPriority()).isEqualTo(JobPriority.HIGH);
// Change priority to NORMAL (3) with new api
job.setPriorityAsInteger(3); // Verify the priority from job itself
waitForPriorityToUpdate(job, JobPriority.NORMAL);
Assert.assertEquals(job.getPriority(), JobPriority.NORMAL);
assertThat(job.getPriority()).isEqualTo(JobPriority.NORMAL);
// Change priority to a high integer value with new api
job.setPriorityAsInteger(89); // Verify the priority from job itself
waitForPriorityToUpdate(job, JobPriority.UNDEFINED_PRIORITY);
Assert.assertEquals(job.getPriority(), JobPriority.UNDEFINED_PRIORITY);
assertThat(job.getPriority()).isEqualTo(JobPriority.UNDEFINED_PRIORITY);
boolean succeeded = job.waitForCompletion(true);
Assert.assertTrue(succeeded);
@ -1386,12 +1388,14 @@ public class TestMRJobs {
sleepJob.setConf(conf);
Job job1 = sleepJob.createJob(1, 1, 1, 1, 1, 1);
Assert.assertEquals("Wrong default name of sleep job.",
job1.getJobName(), SleepJob.SLEEP_JOB_NAME);
assertThat(job1.getJobName())
.withFailMessage("Wrong default name of sleep job.")
.isEqualTo(SleepJob.SLEEP_JOB_NAME);
String expectedJob2Name = SleepJob.SLEEP_JOB_NAME + " - test";
Job job2 = sleepJob.createJob(1, 1, 1, 1, 1, 1, "test");
Assert.assertEquals("Wrong name of sleep job.",
job2.getJobName(), expectedJob2Name);
assertThat(job2.getJobName())
.withFailMessage("Wrong name of sleep job.")
.isEqualTo(expectedJob2Name);
}
}

View File

@ -36,6 +36,8 @@ import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.assertj.core.api.Assertions.assertThat;
public class TestUberAM extends TestMRJobs {
private static final Logger LOG = LoggerFactory.getLogger(TestUberAM.class);
@ -127,7 +129,7 @@ public class TestUberAM extends TestMRJobs {
} catch (Exception e) {
secondTaskAttemptExists = false;
}
Assert.assertEquals(false, secondTaskAttemptExists);
assertThat(secondTaskAttemptExists).isFalse();
TaskCompletionEvent[] events = job.getTaskCompletionEvents(0, 2);
Assert.assertEquals(1, events.length);

View File

@ -66,6 +66,11 @@
<artifactId>hadoop-mapreduce-client-common</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>

View File

@ -17,17 +17,19 @@
*/
package org.apache.hadoop.mapred.nativetask.buffer;
import java.io.*;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import com.google.common.base.Charsets;
import com.google.common.primitives.Shorts;
import org.apache.hadoop.mapred.nativetask.NativeDataTarget;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.offset;
public class TestByteBufferReadWrite {
@Test
public void testReadWrite() throws IOException {
@ -55,24 +57,25 @@ public class TestByteBufferReadWrite {
input.rewind(0, length);
ByteBufferDataReader reader = new ByteBufferDataReader(input);
Assert.assertEquals(1, reader.read());
assertThat(reader.read()).isOne();
byte[] two = new byte[2];
reader.read(two);
Assert.assertTrue(two[0] == two[1] && two[0] == 2);
assertThat(two[0]).isEqualTo(two[1]);
assertThat(two[0]).isEqualTo((byte) 2);
Assert.assertEquals(true, reader.readBoolean());
Assert.assertEquals(4, reader.readByte());
Assert.assertEquals(5, reader.readShort());
Assert.assertEquals(6, reader.readChar());
Assert.assertEquals(7, reader.readInt());
Assert.assertEquals(8, reader.readLong());
Assert.assertTrue(reader.readFloat() - 9 < 0.0001);
Assert.assertTrue(reader.readDouble() - 10 < 0.0001);
assertThat(reader.readBoolean()).isTrue();
assertThat(reader.readByte()).isEqualTo((byte) 4);
assertThat(reader.readShort()).isEqualTo((short) 5);
assertThat(reader.readChar()).isEqualTo((char) 6);
assertThat(reader.readInt()).isEqualTo(7);
assertThat(reader.readLong()).isEqualTo(8);
assertThat(reader.readFloat()).isEqualTo(9f, offset(0.0001f));
assertThat(reader.readDouble()).isEqualTo(10, offset(0.0001));
byte[] goodboy = new byte["goodboy".length()];
reader.read(goodboy);
Assert.assertEquals("goodboy", toString(goodboy));
assertThat(toString(goodboy)).isEqualTo("goodboy");
char[] hello = new char["hello".length()];
for (int i = 0; i < hello.length; i++) {
@ -80,11 +83,9 @@ public class TestByteBufferReadWrite {
}
String helloString = new String(hello);
Assert.assertEquals("hello", helloString);
Assert.assertEquals("native task", reader.readUTF());
Assert.assertEquals(0, input.remaining());
assertThat(helloString).isEqualTo("hello");
assertThat(reader.readUTF()).isEqualTo("native task");
assertThat(input.remaining()).isZero();
}
/**
@ -103,11 +104,11 @@ public class TestByteBufferReadWrite {
InputBuffer input = new InputBuffer(buff);
input.rewind(0, buff.length);
ByteBufferDataReader reader = new ByteBufferDataReader(input);
Assert.assertEquals(catFace, reader.readUTF());
assertThat(reader.readUTF()).isEqualTo(catFace);
// Check that the standard Java one can read it too
String fromJava = new java.io.DataInputStream(new ByteArrayInputStream(buff)).readUTF();
Assert.assertEquals(catFace, fromJava);
assertThat(fromJava).isEqualTo(catFace);
}
@Test
@ -115,13 +116,13 @@ public class TestByteBufferReadWrite {
byte[] buff = new byte[10];
MockDataTarget target = new MockDataTarget(buff);
ByteBufferDataWriter writer = new ByteBufferDataWriter(target);
Assert.assertEquals(false, writer.hasUnFlushedData());
assertThat(writer.hasUnFlushedData()).isFalse();
writer.write(1);
writer.write(new byte[] {2, 2}, 0, 2);
Assert.assertEquals(true, writer.hasUnFlushedData());
assertThat(writer.hasUnFlushedData()).isTrue();
Assert.assertEquals(true, writer.shortOfSpace(100));
assertThat(writer.shortOfSpace(100)).isTrue();
}
@ -131,12 +132,12 @@ public class TestByteBufferReadWrite {
MockDataTarget target = Mockito.spy(new MockDataTarget(buff));
ByteBufferDataWriter writer = new ByteBufferDataWriter(target);
Assert.assertEquals(false, writer.hasUnFlushedData());
assertThat(writer.hasUnFlushedData()).isFalse();
writer.write(1);
writer.write(new byte[100]);
Assert.assertEquals(true, writer.hasUnFlushedData());
assertThat(writer.hasUnFlushedData()).isTrue();
writer.close();
Mockito.verify(target, Mockito.times(11)).sendData();
Mockito.verify(target).finishSendData();

View File
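
The float and double checks in TestByteBufferReadWrite above gain real tolerance semantics: the old assertTrue(reader.readFloat() - 9 < 0.0001) is one-sided and would also pass for any value far below 9, whereas isEqualTo(expected, offset(...)) bounds the difference in both directions. A sketch with hypothetical values read back from a buffer:

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.offset;

public class FloatToleranceExample {
  public void demo() {
    float f = 9.00005f;
    double d = 10.00005;
    assertThat(f).isEqualTo(9f, offset(0.0001f));   // |f - 9| <= 0.0001
    assertThat(d).isEqualTo(10d, offset(0.0001));   // |d - 10| <= 0.0001
  }
}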

@ -18,9 +18,9 @@
package org.apache.hadoop.mapred.nativetask.buffer;
import java.io.IOException;
import org.junit.Test;
import org.junit.Assert;
import static org.assertj.core.api.Assertions.assertThat;
public class TestInputBuffer {
@ -28,27 +28,27 @@ public class TestInputBuffer {
public void testInputBuffer() throws IOException {
final int size = 100;
final InputBuffer input1 = new InputBuffer(BufferType.DIRECT_BUFFER, size);
Assert.assertEquals(input1.getType(), BufferType.DIRECT_BUFFER);
assertThat(input1.getType()).isEqualTo(BufferType.DIRECT_BUFFER);
Assert.assertTrue(input1.position() == 0);
Assert.assertTrue(input1.length() == 0);
Assert.assertTrue(input1.remaining() == 0);
Assert.assertTrue(input1.capacity() == size);
assertThat(input1.position()).isZero();
assertThat(input1.length()).isZero();
assertThat(input1.remaining()).isZero();
assertThat(input1.capacity()).isEqualTo(size);
final InputBuffer input2 = new InputBuffer(BufferType.HEAP_BUFFER, size);
Assert.assertEquals(input2.getType(), BufferType.HEAP_BUFFER);
assertThat(input2.getType()).isEqualTo(BufferType.HEAP_BUFFER);
Assert.assertTrue(input2.position() == 0);
Assert.assertTrue(input2.length() == 0);
Assert.assertTrue(input2.remaining() == 0);
Assert.assertTrue(input2.capacity() == size);
assertThat(input2.position()).isZero();
assertThat(input2.length()).isZero();
assertThat(input2.remaining()).isZero();
assertThat(input2.capacity()).isEqualTo(size);
final InputBuffer input3 = new InputBuffer(new byte[size]);
Assert.assertEquals(input3.getType(), BufferType.HEAP_BUFFER);
assertThat(input3.getType()).isEqualTo(BufferType.HEAP_BUFFER);
Assert.assertTrue(input3.position() == 0);
Assert.assertTrue(input3.length() == 0);
Assert.assertTrue(input3.remaining() == 0);
Assert.assertEquals(input3.capacity(), size);
assertThat(input3.position()).isZero();
assertThat(input3.length()).isZero();
assertThat(input3.remaining()).isZero();
assertThat(input3.capacity()).isEqualTo(size);
}
}

View File

@ -18,7 +18,8 @@
package org.apache.hadoop.mapred.nativetask.buffer;
import org.junit.Test;
import org.junit.Assert;
import static org.assertj.core.api.Assertions.assertThat;
public class TestOutputBuffer {
@ -26,21 +27,21 @@ public class TestOutputBuffer {
public void testOutputBuffer() {
final int size = 100;
final OutputBuffer output1 = new OutputBuffer(BufferType.DIRECT_BUFFER, size);
Assert.assertEquals(output1.getType(), BufferType.DIRECT_BUFFER);
assertThat(output1.getType()).isEqualTo(BufferType.DIRECT_BUFFER);
Assert.assertTrue(output1.length() == 0);
Assert.assertEquals(output1.limit(), size);
assertThat(output1.length()).isZero();
assertThat(output1.limit()).isEqualTo(size);
final OutputBuffer output2 = new OutputBuffer(BufferType.HEAP_BUFFER, size);
Assert.assertEquals(output2.getType(), BufferType.HEAP_BUFFER);
assertThat(output2.getType()).isEqualTo(BufferType.HEAP_BUFFER);
Assert.assertTrue(output2.length() == 0);
Assert.assertEquals(output2.limit(), size);
assertThat(output2.length()).isZero();
assertThat(output2.limit()).isEqualTo(size);
final OutputBuffer output3 = new OutputBuffer(new byte[size]);
Assert.assertEquals(output3.getType(), BufferType.HEAP_BUFFER);
assertThat(output3.getType()).isEqualTo(BufferType.HEAP_BUFFER);
Assert.assertTrue(output3.length() == 0);
Assert.assertEquals(output3.limit(), size);
assertThat(output3.length()).isZero();
assertThat(output3.limit()).isEqualTo(size);
}
}

View File

@ -17,15 +17,13 @@
*/
package org.apache.hadoop.mapred.nativetask.combinertest;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.assertj.core.api.Assertions.assertThat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.Task;
import org.apache.hadoop.mapred.nativetask.NativeRuntime;
import org.apache.hadoop.mapred.nativetask.combinertest.WordCount.IntSumReducer;
import org.apache.hadoop.mapred.nativetask.combinertest.WordCount.TokenizerMapper;
@ -33,7 +31,6 @@ import org.apache.hadoop.mapred.nativetask.kvtest.TestInputFile;
import org.apache.hadoop.mapred.nativetask.testutil.ResultVerifier;
import org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration;
import org.apache.hadoop.mapred.nativetask.testutil.TestConstants;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
@ -62,9 +59,10 @@ public class CombinerTest {
commonConf.addResource(TestConstants.COMBINER_CONF_PATH);
final Job normaljob = getJob("normalwordcount", commonConf, inputpath, hadoopoutputpath);
assertTrue(nativejob.waitForCompletion(true));
assertTrue(normaljob.waitForCompletion(true));
assertEquals(true, ResultVerifier.verify(nativeoutputpath, hadoopoutputpath));
assertThat(nativejob.waitForCompletion(true)).isTrue();
assertThat(normaljob.waitForCompletion(true)).isTrue();
assertThat(ResultVerifier.verify(nativeoutputpath, hadoopoutputpath))
.isTrue();
ResultVerifier.verifyCounters(normaljob, nativejob, true);
}

View File

@ -17,23 +17,19 @@
*/
package org.apache.hadoop.mapred.nativetask.combinertest;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.assertj.core.api.Assertions.assertThat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.Task;
import org.apache.hadoop.mapred.nativetask.NativeRuntime;
import org.apache.hadoop.mapred.nativetask.kvtest.TestInputFile;
import org.apache.hadoop.mapred.nativetask.testutil.ResultVerifier;
import org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration;
import org.apache.hadoop.mapred.nativetask.testutil.TestConstants;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.TaskCounter;
import org.junit.AfterClass;
import org.apache.hadoop.util.NativeCodeLoader;
import org.junit.Assume;
@ -90,9 +86,9 @@ public class LargeKVCombinerTest {
final Job nativejob = CombinerTest.getJob("nativewordcount", nativeConf,
inputPath, nativeOutputPath);
assertTrue(nativejob.waitForCompletion(true));
assertThat(nativejob.waitForCompletion(true)).isTrue();
assertTrue(normaljob.waitForCompletion(true));
assertThat(normaljob.waitForCompletion(true)).isTrue();
final boolean compareRet = ResultVerifier.verify(nativeOutputPath, hadoopOutputPath);
@ -100,7 +96,7 @@ public class LargeKVCombinerTest {
", max size: " + max + ", normal out: " + hadoopOutputPath +
", native Out: " + nativeOutputPath;
assertEquals(reason, true, compareRet);
assertThat(compareRet).withFailMessage(reason).isTrue();
ResultVerifier.verifyCounters(normaljob, nativejob, true);
}
fs.close();

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.mapred.nativetask.combinertest;
import static org.junit.Assert.assertEquals;
import static org.assertj.core.api.Assertions.assertThat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@ -30,7 +30,6 @@ import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.Task;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapred.nativetask.NativeRuntime;
import org.apache.hadoop.mapred.nativetask.kvtest.TestInputFile;
@ -74,10 +73,14 @@ public class OldAPICombinerTest {
TaskCounter.REDUCE_INPUT_RECORDS);
final boolean compareRet = ResultVerifier.verify(nativeoutput, normaloutput);
assertEquals("file compare result: if they are the same ,then return true", true, compareRet);
assertThat(compareRet)
.withFailMessage(
"file compare result: if they are the same ,then return true")
.isTrue();
assertEquals("The input reduce record count must be same",
nativeReduceGroups.getValue(), normalReduceGroups.getValue());
assertThat(nativeReduceGroups.getValue())
.withFailMessage("The input reduce record count must be same")
.isEqualTo(normalReduceGroups.getValue());
}
@Before
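As an aside, a minimal self-contained sketch of the ordering convention the two styles use (class name and values are illustrative, not taken from this patch): JUnit's assertEquals takes (message, expected, actual), while AssertJ always wraps the actual value in assertThat(), so expected and actual cannot be transposed.

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;

import org.junit.Test;

public class AssertionOrderSketch {
  @Test
  public void ordering() {
    int actual = 2 + 2; // illustrative value under test
    // JUnit: the expected value comes first, after the optional message.
    assertEquals("sum should be 4", 4, actual);
    // AssertJ: the actual value is the assertThat() argument, so the
    // order cannot be swapped; withFailMessage supplies the custom
    // failure text.
    assertThat(actual)
        .withFailMessage("sum should be 4")
        .isEqualTo(4);
  }
}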

View File

@ -17,8 +17,7 @@
*/
package org.apache.hadoop.mapred.nativetask.compresstest;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.assertj.core.api.Assertions.assertThat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@ -58,17 +57,20 @@ public class CompressTest {
TestConstants.NATIVETASK_COMPRESS_TEST_NATIVE_OUTPUTDIR + "/snappy";
final Job job = CompressMapper.getCompressJob("nativesnappy", nativeConf,
TestConstants.NATIVETASK_COMPRESS_TEST_INPUTDIR, nativeOutputPath);
assertTrue(job.waitForCompletion(true));
assertThat(job.waitForCompletion(true)).isTrue();
hadoopConf.set(MRJobConfig.MAP_OUTPUT_COMPRESS_CODEC, snappyCodec);
final String hadoopOutputPath =
TestConstants.NATIVETASK_COMPRESS_TEST_NORMAL_OUTPUTDIR + "/snappy";
final Job hadoopjob = CompressMapper.getCompressJob("hadoopsnappy", hadoopConf,
TestConstants.NATIVETASK_COMPRESS_TEST_INPUTDIR, hadoopOutputPath);
assertTrue(hadoopjob.waitForCompletion(true));
assertThat(hadoopjob.waitForCompletion(true)).isTrue();
final boolean compareRet = ResultVerifier.verify(nativeOutputPath, hadoopOutputPath);
assertEquals("file compare result: if they are the same ,then return true", true, compareRet);
assertThat(compareRet)
.withFailMessage(
"file compare result: if they are the same ,then return true")
.isTrue();
ResultVerifier.verifyCounters(hadoopjob, job);
}
@ -81,17 +83,20 @@ public class CompressTest {
TestConstants.NATIVETASK_COMPRESS_TEST_NATIVE_OUTPUTDIR + "/gzip";
final Job job = CompressMapper.getCompressJob("nativegzip", nativeConf,
TestConstants.NATIVETASK_COMPRESS_TEST_INPUTDIR, nativeOutputPath);
assertTrue(job.waitForCompletion(true));
assertThat(job.waitForCompletion(true)).isTrue();
hadoopConf.set(MRJobConfig.MAP_OUTPUT_COMPRESS_CODEC, gzipCodec);
final String hadoopOutputPath =
TestConstants.NATIVETASK_COMPRESS_TEST_NORMAL_OUTPUTDIR + "/gzip";
final Job hadoopjob = CompressMapper.getCompressJob("hadoopgzip", hadoopConf,
TestConstants.NATIVETASK_COMPRESS_TEST_INPUTDIR, hadoopOutputPath);
assertTrue(hadoopjob.waitForCompletion(true));
assertThat(hadoopjob.waitForCompletion(true)).isTrue();
final boolean compareRet = ResultVerifier.verify(nativeOutputPath, hadoopOutputPath);
assertEquals("file compare result: if they are the same ,then return true", true, compareRet);
assertThat(compareRet)
.withFailMessage(
"file compare result: if they are the same ,then return true")
.isTrue();
ResultVerifier.verifyCounters(hadoopjob, job);
}
@ -104,16 +109,19 @@ public class CompressTest {
TestConstants.NATIVETASK_COMPRESS_TEST_NATIVE_OUTPUTDIR + "/lz4";
final Job nativeJob = CompressMapper.getCompressJob("nativelz4", nativeConf,
TestConstants.NATIVETASK_COMPRESS_TEST_INPUTDIR, nativeOutputPath);
assertTrue(nativeJob.waitForCompletion(true));
assertThat(nativeJob.waitForCompletion(true)).isTrue();
hadoopConf.set(MRJobConfig.MAP_OUTPUT_COMPRESS_CODEC, lz4Codec);
final String hadoopOutputPath =
TestConstants.NATIVETASK_COMPRESS_TEST_NORMAL_OUTPUTDIR + "/lz4";
final Job hadoopJob = CompressMapper.getCompressJob("hadooplz4", hadoopConf,
TestConstants.NATIVETASK_COMPRESS_TEST_INPUTDIR, hadoopOutputPath);
assertTrue(hadoopJob.waitForCompletion(true));
assertThat(hadoopJob.waitForCompletion(true)).isTrue();
final boolean compareRet = ResultVerifier.verify(nativeOutputPath, hadoopOutputPath);
assertEquals("file compare result: if they are the same ,then return true", true, compareRet);
assertThat(compareRet)
.withFailMessage(
"file compare result: if they are the same ,then return true")
.isTrue();
ResultVerifier.verifyCounters(hadoopJob, nativeJob);
}

View File

@ -24,11 +24,11 @@ import org.apache.hadoop.mapred.nativetask.Command;
import org.apache.hadoop.mapred.nativetask.INativeHandler;
import org.apache.hadoop.mapred.nativetask.buffer.BufferType;
import org.apache.hadoop.mapred.nativetask.buffer.InputBuffer;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.eq;
@SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
@ -63,7 +63,7 @@ public class TestCombineHandler {
@Test
public void testCombine() throws IOException, InterruptedException, ClassNotFoundException {
this.handler = new CombinerHandler(nativeHandler, combinerRunner, puller, pusher);
Assert.assertEquals(null, handler.onCall(CombinerHandler.COMBINE, null));
assertThat(handler.onCall(CombinerHandler.COMBINE, null)).isNull();
handler.close();
handler.close();
@ -78,6 +78,6 @@ public class TestCombineHandler {
@Test
public void testOnCall() throws IOException {
this.handler = new CombinerHandler(nativeHandler, combinerRunner, puller, pusher);
Assert.assertEquals(null, handler.onCall(new Command(-1), null));
assertThat(handler.onCall(new Command(-1), null)).isNull();
}
}

View File

@ -17,8 +17,7 @@
*/
package org.apache.hadoop.mapred.nativetask.kvtest;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.IOException;
import java.util.List;
@ -127,7 +126,9 @@ public class KVTest {
nativekvtestconf.set(TestConstants.NATIVETASK_KVTEST_CREATEFILE, "true");
final KVJob nativeJob = new KVJob(jobName, nativekvtestconf, keyclass,
valueclass, inputPath, nativeOutputPath);
assertTrue("job should complete successfully", nativeJob.runJob());
assertThat(nativeJob.runJob())
.withFailMessage("job should complete successfully")
.isTrue();
final String normalOutputPath = TestConstants.NATIVETASK_KVTEST_NORMAL_OUTPUTDIR
+ "/" + keyclass.getName() + "/" + valueclass.getName();
@ -136,11 +137,13 @@ public class KVTest {
hadoopkvtestconf.set(TestConstants.NATIVETASK_KVTEST_CREATEFILE, "false");
final KVJob normalJob = new KVJob(jobName, hadoopkvtestconf, keyclass,
valueclass, inputPath, normalOutputPath);
assertTrue("job should complete successfully", normalJob.runJob());
assertThat(normalJob.runJob())
.withFailMessage("job should complete successfully")
.isTrue();
final boolean compareRet = ResultVerifier.verify(normalOutputPath,
nativeOutputPath);
assertEquals("job output not the same", true, compareRet);
assertThat(compareRet).withFailMessage("job output not the same").isTrue();
ResultVerifier.verifyCounters(normalJob.job, nativeJob.job);
fs.close();
}

View File

@ -17,8 +17,7 @@
*/
package org.apache.hadoop.mapred.nativetask.nonsorttest;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.IOException;
@ -54,19 +53,22 @@ public class NonSortTest {
final Job nativeNonSort = getJob(nativeConf, "NativeNonSort",
TestConstants.NATIVETASK_NONSORT_TEST_INPUTDIR,
TestConstants.NATIVETASK_NONSORT_TEST_NATIVE_OUTPUT);
assertTrue(nativeNonSort.waitForCompletion(true));
assertThat(nativeNonSort.waitForCompletion(true)).isTrue();
Configuration normalConf = ScenarioConfiguration.getNormalConfiguration();
normalConf.addResource(TestConstants.NONSORT_TEST_CONF);
final Job hadoopWithSort = getJob(normalConf, "NormalJob",
TestConstants.NATIVETASK_NONSORT_TEST_INPUTDIR,
TestConstants.NATIVETASK_NONSORT_TEST_NORMAL_OUTPUT);
assertTrue(hadoopWithSort.waitForCompletion(true));
assertThat(hadoopWithSort.waitForCompletion(true)).isTrue();
final boolean compareRet = ResultVerifier.verify(
TestConstants.NATIVETASK_NONSORT_TEST_NATIVE_OUTPUT,
TestConstants.NATIVETASK_NONSORT_TEST_NORMAL_OUTPUT);
assertEquals("file compare result: if they are the same ,then return true", true, compareRet);
assertThat(compareRet)
.withFailMessage(
"file compare result: if they are the same ,then return true")
.isTrue();
ResultVerifier.verifyCounters(hadoopWithSort, nativeNonSort);
}

View File

@ -18,10 +18,11 @@
package org.apache.hadoop.mapred.nativetask.utils;
import org.junit.Test;
import org.junit.Assert;
import org.apache.hadoop.mapred.nativetask.util.ReadWriteBuffer;
import static org.assertj.core.api.Assertions.assertThat;
public class TestReadWriteBuffer {
private static byte[] bytes = new byte[] { '0', 'a', 'b', 'c', 'd', '9' };
@ -31,10 +32,10 @@ public class TestReadWriteBuffer {
final ReadWriteBuffer buffer = new ReadWriteBuffer();
Assert.assertFalse(buffer.getBuff() == null);
assertThat(buffer.getBuff()).isNotNull();
Assert.assertEquals(buffer.getWritePoint(), 0);
Assert.assertEquals(buffer.getReadPoint(), 0);
assertThat(buffer.getWritePoint()).isZero();
assertThat(buffer.getReadPoint()).isZero();
buffer.writeInt(3);
@ -44,20 +45,20 @@ public class TestReadWriteBuffer {
buffer.writeBytes(bytes, 0, bytes.length);
buffer.writeLong(100L);
Assert.assertEquals(buffer.getWritePoint(), 41);
Assert.assertEquals(buffer.getReadPoint(), 0);
Assert.assertTrue(buffer.getBuff().length >= 41);
assertThat(buffer.getWritePoint()).isEqualTo(41);
assertThat(buffer.getReadPoint()).isZero();
assertThat(buffer.getBuff().length).isGreaterThanOrEqualTo(41);
Assert.assertEquals(buffer.readInt(), 3);
Assert.assertEquals(buffer.readString(), "goodboy");
Assert.assertEquals(buffer.readLong(), 10L);
assertThat(buffer.readInt()).isEqualTo(3);
assertThat(buffer.readString()).isEqualTo("goodboy");
assertThat(buffer.readLong()).isEqualTo(10L);
final byte[] read = buffer.readBytes();
for (int i = 0; i < bytes.length; i++) {
Assert.assertEquals(bytes[i], read[i]);
assertThat(read[i]).isEqualTo(bytes[i]);
}
Assert.assertEquals(100L, buffer.readLong());
Assert.assertEquals(41, buffer.getReadPoint());
assertThat(buffer.readLong()).isEqualTo(100L);
assertThat(buffer.getReadPoint()).isEqualTo(41);
}
}
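The element-by-element loop above could be tightened further with AssertJ's array assertions, which report the first mismatching index on failure; a hedged sketch with illustrative values (not something this patch changes):

import static org.assertj.core.api.Assertions.assertThat;

import org.junit.Test;

public class ArrayAssertSketch {
  @Test
  public void arrayContents() {
    byte[] expected = new byte[] { '0', 'a', 'b', 'c', 'd', '9' };
    // Stands in for a value produced by the code under test,
    // e.g. buffer.readBytes().
    byte[] read = new byte[] { '0', 'a', 'b', 'c', 'd', '9' };
    // One assertion replaces the manual loop over indices.
    assertThat(read).containsExactly(expected);
  }
}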

View File

@ -48,6 +48,11 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs-client</artifactId>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<properties>
<!-- Needed for generating FindBugs warnings using parent pom -->

View File

@ -53,6 +53,7 @@ import java.util.Random;
import java.util.Set;
import java.util.zip.GZIPInputStream;
import static org.assertj.core.api.Assertions.assertThat;
import static org.apache.hadoop.fs.FileSystem.FS_DEFAULT_NAME_KEY;
/**
@ -82,12 +83,15 @@ public class TestFrameworkUploader {
FrameworkUploader uploader = new FrameworkUploader();
boolean success = uploader.parseArguments(args);
Assert.assertFalse("Expected to print help", success);
Assert.assertEquals("Expected ignore run", null,
uploader.input);
Assert.assertEquals("Expected ignore run", null,
uploader.whitelist);
Assert.assertEquals("Expected ignore run", null,
uploader.target);
assertThat(uploader.input)
.withFailMessage("Expected ignore run")
.isNull();
assertThat(uploader.whitelist)
.withFailMessage("Expected ignore run")
.isNull();
assertThat(uploader.target)
.withFailMessage("Expected ignore run")
.isNull();
}
/**

View File

@ -116,6 +116,11 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>

View File

@ -30,6 +30,7 @@ import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
@ -115,7 +116,7 @@ public class TestTeraSort extends HadoopTestCase {
@Test
public void testTeraSortWithLessThanTwoArgs() throws Exception {
String[] args = new String[1];
assertEquals(new TeraSort().run(args), 2);
assertThat(new TeraSort().run(args)).isEqualTo(2);
}
}
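One caveat about the pattern used throughout this change: withFailMessage() replaces AssertJ's default expected/actual report entirely, while as() prefixes a description and keeps that report. A small sketch with illustrative values:

import static org.assertj.core.api.Assertions.assertThat;

import org.junit.Test;

public class FailMessageSketch {
  @Test
  public void descriptions() {
    int rc = 2; // illustrative return code
    // as(): keeps the default "expected ... but was ..." detail,
    // prefixed with this description.
    assertThat(rc).as("TeraSort exit code with too few args").isEqualTo(2);
    // withFailMessage(): the default detail is replaced by this text.
    assertThat(rc).withFailMessage("unexpected exit code").isEqualTo(2);
  }
}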