HDFS-15469. Dynamically configure the size of PacketReceiver#MAX_PACKET_SIZE. (#2138)

jianghuazhu 2020-11-11 08:34:17 +08:00 committed by GitHub
parent 375900049c
commit e6d2dccbef
4 changed files with 33 additions and 2 deletions

HdfsClientConfigKeys.java

@@ -221,6 +221,12 @@ public interface HdfsClientConfigKeys {
       "dfs.encrypt.data.transfer.cipher.key.bitlength";
   int DFS_ENCRYPT_DATA_TRANSFER_CIPHER_KEY_BITLENGTH_DEFAULT = 128;
+  public static final String
+      DFS_DATA_TRANSFER_MAX_PACKET_SIZE =
+      "dfs.data.transfer.max.packet.size";
+  public static final int DFS_DATA_TRANSFER_MAX_PACKET_SIZE_DEFAULT =
+      16 * 1024 * 1024;
   String DFS_TRUSTEDCHANNEL_RESOLVER_CLASS =
       "dfs.trustedchannel.resolver.class";

PacketReceiver.java

@@ -25,6 +25,9 @@
 import java.nio.channels.ReadableByteChannel;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import org.apache.hadoop.util.DirectBufferPool;
 import org.apache.hadoop.io.IOUtils;
@@ -45,7 +48,7 @@ public class PacketReceiver implements Closeable {
   * The max size of any single packet. This prevents OOMEs when
   * invalid data is sent.
   */
-  public static final int MAX_PACKET_SIZE = 16 * 1024 * 1024;
+  public static final int MAX_PACKET_SIZE;

   static final Logger LOG = LoggerFactory.getLogger(PacketReceiver.class);
@@ -74,6 +77,13 @@ public class PacketReceiver implements Closeable {
   */
   private PacketHeader curHeader;

+  static {
+    Configuration conf = new HdfsConfiguration();
+    MAX_PACKET_SIZE = conf.getInt(HdfsClientConfigKeys.
+        DFS_DATA_TRANSFER_MAX_PACKET_SIZE,
+        HdfsClientConfigKeys.DFS_DATA_TRANSFER_MAX_PACKET_SIZE_DEFAULT);
+  }
+
   public PacketReceiver(boolean useDirectBuffers) {
     this.useDirectBuffers = useDirectBuffers;
     reallocPacketBuf(PacketHeader.PKT_LENGTHS_LEN);
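Because MAX_PACKET_SIZE is now assigned in a static initializer, the limit is resolved once, from the default configuration resources on the classpath (core-site.xml, hdfs-site.xml), when PacketReceiver is first loaded; setting the key on a Configuration object afterwards does not change it. Below is a minimal sketch of the same lookup, assuming the HDFS client libraries and configuration files are on the classpath; the ShowMaxPacketSize class is illustrative only and not part of this patch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;

// Illustrative helper: resolves the packet-size limit the same way the
// static initializer above does, and prints the effective value.
public class ShowMaxPacketSize {
  public static void main(String[] args) {
    Configuration conf = new HdfsConfiguration();
    int maxPacketSize = conf.getInt(
        HdfsClientConfigKeys.DFS_DATA_TRANSFER_MAX_PACKET_SIZE,
        HdfsClientConfigKeys.DFS_DATA_TRANSFER_MAX_PACKET_SIZE_DEFAULT);
    System.out.println("Effective dfs.data.transfer.max.packet.size: "
        + maxPacketSize);
  }
}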

hdfs-default.xml

@@ -4458,6 +4458,14 @@
   </description>
 </property>

+<property>
+  <name>dfs.data.transfer.max.packet.size</name>
+  <value>16777216</value>
+  <description>
+    The max size of any single packet.
+  </description>
+</property>
+
 <property>
   <name>dfs.datanode.balance.max.concurrent.moves</name>
   <value>100</value>
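Since the value is fixed at class-load time, changing the limit means overriding the property in hdfs-site.xml on every JVM that loads PacketReceiver, and the setting should be kept consistent across clients and DataNodes. A sketch of such an override follows; the 32 MiB value is only an example, not a recommendation from this patch.

<!-- Example override in hdfs-site.xml; 33554432 bytes = 32 MiB (illustrative value). -->
<property>
  <name>dfs.data.transfer.max.packet.size</name>
  <value>33554432</value>
  <description>
    Raised packet-size limit for data transfer; keep consistent on clients and DataNodes.
  </description>
</property>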

TestPacketReceiver.java

@@ -24,6 +24,7 @@
 import java.nio.ByteBuffer;
 import org.apache.hadoop.hdfs.AppendTestUtil;
+import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import org.junit.Test;
 import org.mockito.Mockito;
@@ -57,6 +58,12 @@ private static byte[] remainingAsArray(ByteBuffer buf) {
     return b;
   }

+  @Test
+  public void testPacketSize() {
+    assertEquals(PacketReceiver.MAX_PACKET_SIZE,
+        HdfsClientConfigKeys.DFS_DATA_TRANSFER_MAX_PACKET_SIZE_DEFAULT);
+  }
+
   @Test
   public void testReceiveAndMirror() throws IOException {
     PacketReceiver pr = new PacketReceiver(false);