HDFS-14708. TestLargeBlockReport#testBlockReportSucceedsWithLargerLengthLimit fails in trunk. Contributed by Lisheng Sun.
commit 454420e4f2 (parent c92b49876a)
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs.server.datanode;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeys.IPC_MAXIMUM_DATA_LENGTH;
+import static org.apache.hadoop.fs.CommonConfigurationKeys.IPC_MAXIMUM_DATA_LENGTH_DEFAULT;
 import static org.junit.Assert.*;
 
 import java.util.ArrayList;
@@ -91,7 +92,7 @@ public class TestLargeBlockReport {
 
   @Test
   public void testBlockReportSucceedsWithLargerLengthLimit() throws Exception {
-    conf.setInt(IPC_MAXIMUM_DATA_LENGTH, 128 * 1024 * 1024); // 128 MB
+    conf.setInt(IPC_MAXIMUM_DATA_LENGTH, IPC_MAXIMUM_DATA_LENGTH_DEFAULT * 2);
     initCluster();
     StorageBlockReport[] reports = createReports(6000000);
     nnProxy.blockReport(bpRegistration, bpId, reports,