HDFS-16430. Add validation to maximum blocks in EC group when adding an EC policy (#3899). Contributed by daimin.

Reviewed-by: tomscut <litao@bigo.sg>
Signed-off-by: Ayush Saxena <ayushsaxena@apache.org>
(cherry picked from commit 5ef335da1e)

 Conflicts:
	hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ErasureCodingPolicyManager.java
Author: daimin (2022-01-24 14:34:26 +08:00)
Committer: Wei-Chiu Chuang
Parent: 4fd0389153
Commit: 728ed10a7c
2 changed files with 16 additions and 0 deletions

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ErasureCodingPolicyManager.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.server.namenode;
 import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;

@@ -304,6 +305,12 @@ public final class ErasureCodingPolicyManager {
           + policy.getCodecName() + " is not supported");
     }
+    int blocksInGroup = policy.getNumDataUnits() + policy.getNumParityUnits();
+    if (blocksInGroup > HdfsServerConstants.MAX_BLOCKS_IN_GROUP) {
+      throw new HadoopIllegalArgumentException("Number of data and parity blocks in an EC group " +
+          blocksInGroup + " should not exceed maximum " + HdfsServerConstants.MAX_BLOCKS_IN_GROUP);
+    }
     if (policy.getCellSize() > maxCellSize) {
       throw new HadoopIllegalArgumentException("Cell size " +
           policy.getCellSize() + " should not exceed maximum " +
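
Note: HdfsServerConstants.MAX_BLOCKS_IN_GROUP is 16 in the HDFS codebase, since the striped block ID layout reserves 4 bits for a block's index within its group. A minimal standalone sketch of the same check follows; it is not the Hadoop source, and validateBlocksInGroup is a hypothetical helper mirroring the logic patched in above:

// Minimal sketch, not the Hadoop source: validateBlocksInGroup is a
// hypothetical helper mirroring the check added to ErasureCodingPolicyManager.
public class EcBlockGroupCheck {
  // 16 in HdfsServerConstants: the striped block ID layout reserves
  // 4 bits for a block's index within its group.
  static final int MAX_BLOCKS_IN_GROUP = 16;

  static void validateBlocksInGroup(int numDataUnits, int numParityUnits) {
    int blocksInGroup = numDataUnits + numParityUnits;
    if (blocksInGroup > MAX_BLOCKS_IN_GROUP) {
      throw new IllegalArgumentException("Number of data and parity blocks in an EC group "
          + blocksInGroup + " should not exceed maximum " + MAX_BLOCKS_IN_GROUP);
    }
  }

  public static void main(String[] args) {
    validateBlocksInGroup(6, 3);   // RS-6-3: 9 blocks in the group, accepted
    validateBlocksInGroup(14, 4);  // 18 blocks, throws -- the case the new test exercises
  }
}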

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestErasureCodingPolicies.java

@@ -747,6 +747,15 @@ public class TestErasureCodingPolicies {
     assertEquals(1, responses.length);
     assertFalse(responses[0].isSucceed());
+    // Test numDataUnits + numParityUnits > 16
+    toAddSchema = new ECSchema("rs", 14, 4);
+    newPolicy =
+        new ErasureCodingPolicy(toAddSchema, 128 * 1024 * 1024);
+    policyArray = new ErasureCodingPolicy[]{newPolicy};
+    responses = fs.addErasureCodingPolicies(policyArray);
+    assertEquals(1, responses.length);
+    assertFalse(responses[0].isSucceed());
+
     // Test too big cell size
     toAddSchema = new ECSchema("rs", 3, 2);
     newPolicy =