diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFile.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFile.java
index 7201e114b38..1e93a2df704 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFile.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFile.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.io.IOException;
@@ -67,14 +68,13 @@ import org.junit.Test;
 
 public class TestReconstructStripedFile {
   public static final Log LOG = LogFactory.getLog(TestReconstructStripedFile.class);
-  private final ErasureCodingPolicy ecPolicy =
-      StripedFileTestUtil.getDefaultECPolicy();
-  private final int dataBlkNum = ecPolicy.getNumDataUnits();
-  private final int parityBlkNum = ecPolicy.getNumParityUnits();
-  private final int cellSize = ecPolicy.getCellSize();
-  private final int blockSize = cellSize * 3;
-  private final int groupSize = dataBlkNum + parityBlkNum;
-  private final int dnNum = groupSize + parityBlkNum;
+  private ErasureCodingPolicy ecPolicy;
+  private int dataBlkNum;
+  private int parityBlkNum;
+  private int cellSize;
+  private int blockSize;
+  private int groupSize;
+  private int dnNum;
 
   static {
     GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
@@ -95,8 +95,20 @@ public class TestReconstructStripedFile {
   private Map dnMap = new HashMap<>();
   private final Random random = new Random();
 
+  public ErasureCodingPolicy getEcPolicy() {
+    return StripedFileTestUtil.getDefaultECPolicy();
+  }
+
   @Before
   public void setup() throws IOException {
+    ecPolicy = getEcPolicy();
+    dataBlkNum = ecPolicy.getNumDataUnits();
+    parityBlkNum = ecPolicy.getNumParityUnits();
+    cellSize = ecPolicy.getCellSize();
+    blockSize = cellSize * 3;
+    groupSize = dataBlkNum + parityBlkNum;
+    dnNum = groupSize + parityBlkNum;
+
     conf = new Configuration();
     conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
     conf.setInt(
@@ -114,10 +126,8 @@ public class TestReconstructStripedFile {
     cluster.waitActive();
 
     fs = cluster.getFileSystem();
-    fs.enableErasureCodingPolicy(
-        StripedFileTestUtil.getDefaultECPolicy().getName());
-    fs.getClient().setErasureCodingPolicy("/",
-        StripedFileTestUtil.getDefaultECPolicy().getName());
+    fs.enableErasureCodingPolicy(ecPolicy.getName());
+    fs.getClient().setErasureCodingPolicy("/", ecPolicy.getName());
 
     List<DataNode> datanodes = cluster.getDataNodes();
     for (int i = 0; i < dnNum; i++) {
@@ -432,7 +442,7 @@ public class TestReconstructStripedFile {
 
     BlockECReconstructionInfo invalidECInfo = new BlockECReconstructionInfo(
         new ExtendedBlock("bp-id", 123456), dataDNs, dnStorageInfo, liveIndices,
-        StripedFileTestUtil.getDefaultECPolicy());
+        ecPolicy);
     List<BlockECReconstructionInfo> ecTasks = new ArrayList<>();
     ecTasks.add(invalidECInfo);
     dataNode.getErasureCodingWorker().processErasureCodingTasks(ecTasks);
@@ -461,7 +471,8 @@ public class TestReconstructStripedFile {
         .numDataNodes(numDataNodes).build();
     cluster.waitActive();
     fs = cluster.getFileSystem();
-    ErasureCodingPolicy policy = StripedFileTestUtil.getDefaultECPolicy();
+    ErasureCodingPolicy policy = ecPolicy;
+    fs.enableErasureCodingPolicy(policy.getName());
     fs.getClient().setErasureCodingPolicy("/", policy.getName());
 
     final int fileLen = cellSize * ecPolicy.getNumDataUnits();
@@ -470,7 +481,8 @@ public class TestReconstructStripedFile {
     }
 
     // Inject data-loss by tear down desired number of DataNodes.
-    assertTrue(policy.getNumParityUnits() >= deadDN);
+    assumeTrue("Ignore case where num dead DNs > num parity units",
+        policy.getNumParityUnits() >= deadDN);
     List<DataNode> dataNodes = new ArrayList<>(cluster.getDataNodes());
     Collections.shuffle(dataNodes);
     for (DataNode dn : dataNodes.subList(0, deadDN)) {
@@ -516,10 +528,8 @@ public class TestReconstructStripedFile {
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(dnNum).build();
     cluster.waitActive();
     fs = cluster.getFileSystem();
-    fs.enableErasureCodingPolicy(
-        StripedFileTestUtil.getDefaultECPolicy().getName());
-    fs.getClient().setErasureCodingPolicy("/",
-        StripedFileTestUtil.getDefaultECPolicy().getName());
+    fs.enableErasureCodingPolicy(ecPolicy.getName());
+    fs.getClient().setErasureCodingPolicy("/", ecPolicy.getName());
 
     final int fileLen = cellSize * ecPolicy.getNumDataUnits() * 2;
     writeFile(fs, "/ec-xmits-weight", fileLen);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFileWithRandomECPolicy.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFileWithRandomECPolicy.java
new file mode 100644
index 00000000000..45cde4b6664
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFileWithRandomECPolicy.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs;
+
+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This test extends TestReconstructStripedFile to use a random
+ * (non-default) EC policy.
+ */
+public class TestReconstructStripedFileWithRandomECPolicy extends
+    TestReconstructStripedFile {
+  private static final Logger LOG = LoggerFactory.getLogger(
+      TestReconstructStripedFileWithRandomECPolicy.class);
+
+  private ErasureCodingPolicy ecPolicy;
+
+  public TestReconstructStripedFileWithRandomECPolicy() {
+    // If you want to debug this test with a specific ec policy, please use
+    // SystemErasureCodingPolicies class.
+    // e.g. ecPolicy = SystemErasureCodingPolicies.getByID(RS_3_2_POLICY_ID);
+    ecPolicy = StripedFileTestUtil.getRandomNonDefaultECPolicy();
+    LOG.info("run {} with {}.",
+        TestReconstructStripedFileWithRandomECPolicy.class
+            .getSuperclass().getSimpleName(), ecPolicy.getName());
+  }
+
+  @Override
+  public ErasureCodingPolicy getEcPolicy() {
+    return ecPolicy;
+  }
+}