HDFS-13806. EC: No error message for unsetting EC policy of the directory inherits the erasure coding policy from an ancestor directory. Contributed by Ayush Saxena.

(cherry picked from commit 30eceec3420fc6be00d3878ba787bd9518d3ca0e)
Author:    Vinayakumar B (2018-09-10 09:10:51 +05:30)
Committer: Wei-Chiu Chuang
Parent:    166d38ceaa
Commit:    7ab02a67bc
6 changed files with 92 additions and 6 deletions


@@ -133,6 +133,7 @@
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.protocol.NSQuotaExceededException;
+import org.apache.hadoop.hdfs.protocol.NoECPolicySetException;
 import org.apache.hadoop.hdfs.protocol.OpenFileEntry;
 import org.apache.hadoop.hdfs.protocol.OpenFilesIterator;
 import org.apache.hadoop.hdfs.protocol.OpenFilesIterator.OpenFilesType;

@@ -2757,7 +2758,7 @@ public void unsetErasureCodingPolicy(String src) throws IOException {
       throw re.unwrapRemoteException(AccessControlException.class,
           SafeModeException.class,
           UnresolvedPathException.class,
-          FileNotFoundException.class);
+          FileNotFoundException.class, NoECPolicySetException.class);
     }
   }
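
With NoECPolicySetException added to the unwrap list above, callers of DistributedFileSystem#unsetErasureCodingPolicy see the specific exception type instead of a generic RemoteException. A minimal caller-side sketch (the file system handle and path are illustrative, not part of this change):

  // Sketch only: assumes an existing DistributedFileSystem instance `dfs`
  // and a directory that merely inherits its EC policy from an ancestor.
  try {
    dfs.unsetErasureCodingPolicy(new Path("/data/child"));
  } catch (NoECPolicySetException e) {
    // Thrown when no EC policy was explicitly set on the directory itself.
    System.err.println(e.getMessage());
  }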


@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.protocol;
+
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Thrown when no EC policy is set explicitly on the directory.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public class NoECPolicySetException extends IOException {
+  private static final long serialVersionUID = 1L;
+
+  public NoECPolicySetException(String msg) {
+    super(msg);
+  }
+}


@@ -28,6 +28,7 @@
 import org.apache.hadoop.hdfs.XAttrHelper;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
+import org.apache.hadoop.hdfs.protocol.NoECPolicySetException;
 import org.apache.hadoop.hdfs.server.namenode.FSDirectory.DirOp;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.WritableUtils;

@@ -206,6 +207,9 @@ static FileStatus unsetErasureCodingPolicy(final FSNamesystem fsn,
     }
     if (xAttrs != null) {
       fsn.getEditLog().logRemoveXAttrs(src, xAttrs, logRetryCache);
+    } else {
+      throw new NoECPolicySetException(
+          "No erasure coding policy explicitly set on " + src);
     }
     return fsd.getAuditFileInfo(iip);
   }


@ -26,6 +26,7 @@
import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse; import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.hdfs.protocol.NoECPolicySetException;
import org.apache.hadoop.hdfs.util.ECPolicyLoader; import org.apache.hadoop.hdfs.util.ECPolicyLoader;
import org.apache.hadoop.io.erasurecode.ErasureCodeConstants; import org.apache.hadoop.io.erasurecode.ErasureCodeConstants;
import org.apache.hadoop.tools.TableListing; import org.apache.hadoop.tools.TableListing;
@ -424,6 +425,12 @@ public int run(Configuration conf, List<String> args) throws IOException {
"non-empty directory will not automatically convert existing" + "non-empty directory will not automatically convert existing" +
" files to replicated data."); " files to replicated data.");
} }
} catch (NoECPolicySetException e) {
System.err.println(AdminHelper.prettifyException(e));
System.err.println("Use '-setPolicy -path <PATH> -replicate' to enforce"
+ " default replication policy irrespective of EC policy"
+ " defined on parent.");
return 2;
} catch (Exception e) { } catch (Exception e) {
System.err.println(AdminHelper.prettifyException(e)); System.err.println(AdminHelper.prettifyException(e));
return 2; return 2;
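
The new catch branch determines what the ec admin CLI reports for this case and, like the other error paths, it returns 2. The XML test case later in this commit asserts the same strings; an illustrative session, with example paths:

  $ hdfs ec -unsetPolicy -path /ecdir/child
  NoECPolicySetException: No erasure coding policy explicitly set on /ecdir/child
  Use '-setPolicy -path <PATH> -replicate' to enforce default replication policy irrespective of EC policy defined on parent.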


@@ -23,6 +23,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
+import org.apache.hadoop.hdfs.protocol.NoECPolicySetException;
 import org.apache.hadoop.io.erasurecode.CodecUtil;
 import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
 import org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawErasureCoderFactory;

@@ -98,7 +99,11 @@ public void testUnsetEcPolicy() throws Exception {
     fs.mkdirs(dirPath);

     // Test unset a directory which has no EC policy
-    fs.unsetErasureCodingPolicy(dirPath);
+    try {
+      fs.unsetErasureCodingPolicy(dirPath);
+      fail();
+    } catch (NoECPolicySetException e) {
+    }

     // Set EC policy on directory
     fs.setErasureCodingPolicy(dirPath, ecPolicy.getName());

@@ -126,8 +131,8 @@
   }

  /*
   * Test nested directory with different EC policy.
   */
  @Test
  public void testNestedEcPolicy() throws Exception {
    final int numBlocks = 1;

@@ -199,7 +204,11 @@ public void testUnsetRootDirEcPolicy() throws Exception {
     final Path replicateFilePath = new Path(rootPath, "rep_file");

     // Test unset root path which has no EC policy
-    fs.unsetErasureCodingPolicy(rootPath);
+    try {
+      fs.unsetErasureCodingPolicy(rootPath);
+      fail();
+    } catch (NoECPolicySetException e) {
+    }
     // Set EC policy on root path
     fs.setErasureCodingPolicy(rootPath, ecPolicy.getName());
     DFSTestUtil.createFile(fs, ecFilePath, fileLen, (short) 1, 0L);

@@ -238,7 +247,11 @@ public void testChangeRootDirEcPolicy() throws Exception {
     final ErasureCodingPolicy ec32Policy = SystemErasureCodingPolicies
         .getByID(SystemErasureCodingPolicies.RS_3_2_POLICY_ID);

-    fs.unsetErasureCodingPolicy(rootPath);
+    try {
+      fs.unsetErasureCodingPolicy(rootPath);
+      fail();
+    } catch (NoECPolicySetException e) {
+    }
     fs.setErasureCodingPolicy(rootPath, ecPolicy.getName());
     // Create RS(6,3) EC policy file
     DFSTestUtil.createFile(fs, ec63FilePath, fileLen, (short) 1, 0L);


@@ -359,6 +359,30 @@
       </comparators>
     </test>

+    <test>
+      <description>unsetPolicy : unset on non EC directory</description>
+      <test-commands>
+        <command>-fs NAMENODE -mkdir /ecdir</command>
+        <command>-fs NAMENODE -mkdir /ecdir/child</command>
+        <ec-admin-command>-fs NAMENODE -unsetPolicy -path /ecdir/child</ec-admin-command>
+      </test-commands>
+      <cleanup-commands>
+        <command>-fs NAMENODE -rm /ecdir/child/ecfile</command>
+        <command>-fs NAMENODE -rmdir /ecdir/child</command>
+        <command>-fs NAMENODE -rmdir /ecdir</command>
+      </cleanup-commands>
+      <comparators>
+        <comparator>
+          <type>SubstringComparator</type>
+          <expected-output>NoECPolicySetException: No erasure coding policy explicitly set on /ecdir/child</expected-output>
+        </comparator>
+        <comparator>
+          <type>SubstringComparator</type>
+          <expected-output>Use '-setPolicy -path &lt;PATH&gt; -replicate' to enforce default replication policy irrespective of EC policy defined on parent.</expected-output>
+        </comparator>
+      </comparators>
+    </test>
+
     <test>
       <description>unsetPolicy : unset policy on non-empty directory</description>
       <test-commands>