HDFS-11794. Add ec sub command -listCodec to show currently supported ec codecs. Contributed by SammiChen.

Rakesh Radhakrishnan 2017-05-23 17:03:28 +05:30 committed by Xiaoyu Yao
parent 641d6c0822
commit 0e93e5c482
16 changed files with 264 additions and 4 deletions

View File

@@ -55,9 +55,14 @@ public final class CodecRegistry {
private Map<String, String[]> coderNameMap;
// Protobuf 2.5.0 doesn't support the map<String, String[]> type well, so
// use the compact value instead
private HashMap<String, String> coderNameCompactMap;
private CodecRegistry() {
coderMap = new HashMap<>();
coderNameMap = new HashMap<>();
coderNameCompactMap = new HashMap<>();
final ServiceLoader<RawErasureCoderFactory> coderFactories =
ServiceLoader.load(RawErasureCoderFactory.class);
updateCoders(coderFactories);
@@ -113,6 +118,9 @@ public final class CodecRegistry {
coderNameMap.put(codecName, coders.stream().
map(RawErasureCoderFactory::getCoderName).
collect(Collectors.toList()).toArray(new String[0]));
coderNameCompactMap.put(codecName, coders.stream().
map(RawErasureCoderFactory::getCoderName)
.collect(Collectors.joining(", ")));
}
}
@@ -173,4 +181,13 @@ public final class CodecRegistry {
throw new IllegalArgumentException("No implementation for coder "
+ coderName + " of codec " + codecName);
}
/**
* Get all codec names and their corresponding coder lists.
* @return a map from each codec name to its list of coder names,
* separated by ','.
*/
public HashMap<String, String> getCodec2CoderCompactMap() {
return coderNameCompactMap;
}
}
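
For context, a minimal sketch of how a caller might consume this compact map (hypothetical snippet; the codec and coder names mentioned in the comment depend on which RawErasureCoderFactory implementations the ServiceLoader finds on the classpath):

import java.util.Map;
import org.apache.hadoop.io.erasurecode.CodecRegistry;

public class ListCodecsExample { // hypothetical helper class
  public static void main(String[] args) {
    Map<String, String> codecs =
        CodecRegistry.getInstance().getCodec2CoderCompactMap();
    // Prints e.g. "rs -> rs_native, rs_java" when native coders are loaded;
    // the exact entries depend on the registered coder factories.
    codecs.forEach((codec, coders) ->
        System.out.println(codec + " -> " + coders));
  }
}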

View File

@@ -2763,6 +2763,13 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
}
}
public HashMap<String, String> getErasureCodingCodecs() throws IOException {
checkOpen();
try (TraceScope ignored = tracer.newScope("getErasureCodingCodecs")) {
return namenode.getErasureCodingCodecs();
}
}
public AddingECPolicyResponse[] addErasureCodingPolicies(
ErasureCodingPolicy[] policies) throws IOException {
checkOpen();

View File

@@ -26,6 +26,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -2535,6 +2536,18 @@ public class DistributedFileSystem extends FileSystem {
return Arrays.asList(dfs.getErasureCodingPolicies());
}
/**
* Retrieve all the erasure coding codecs and coders supported by this file
* system.
*
* @return all erasure coding codecs and coders supported by this file system.
* @throws IOException
*/
public HashMap<String, String> getAllErasureCodingCodecs()
throws IOException {
return dfs.getErasureCodingCodecs();
}
/**
* Add Erasure coding policies to HDFS.
*

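As a usage sketch for the new getAllErasureCodingCodecs API above (hypothetical client code; assumes the Configuration's default file system is a DistributedFileSystem backed by a running cluster):

import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;

public class CodecClientExample { // hypothetical client class
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    DistributedFileSystem dfs =
        (DistributedFileSystem) FileSystem.get(conf);
    // A single RPC to the NameNode; keys are codec names, values are
    // comma-separated coder names in fallback order.
    Map<String, String> codecs = dfs.getAllErasureCodingCodecs();
    codecs.forEach((codec, coders) ->
        System.out.println(codec + ": " + coders));
  }
}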
View File

@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.protocol;
import java.io.IOException;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -1536,7 +1537,7 @@ public interface ClientProtocol {
ErasureCodingPolicy[] policies) throws IOException;
/**
* Get the erasure coding policies loaded in Namenode
* Get the erasure coding policies loaded in Namenode.
*
* @throws IOException
*/
@@ -1544,7 +1545,15 @@
ErasureCodingPolicy[] getErasureCodingPolicies() throws IOException;
/**
* Get the information about the EC policy for the path
* Get the erasure coding codecs loaded in Namenode.
*
* @throws IOException
*/
@Idempotent
HashMap<String, String> getErasureCodingCodecs() throws IOException;
/**
* Get the information about the EC policy for the path.
*
* @param src path to get the info for
* @throws IOException

View File

@@ -21,6 +21,7 @@ import java.io.Closeable;
import java.io.IOException;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import com.google.common.collect.Lists;
@@ -176,8 +177,11 @@ import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodin
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPoliciesResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingPolicyResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto;
import org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.GetXAttrsRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.XAttrProtos.ListXAttrsRequestProto;
@@ -246,6 +250,10 @@ public class ClientNamenodeProtocolTranslatorPB implements
VOID_GET_EC_POLICIES_REQUEST = GetErasureCodingPoliciesRequestProto
.newBuilder().build();
private final static GetErasureCodingCodecsRequestProto
VOID_GET_EC_CODEC_REQUEST = GetErasureCodingCodecsRequestProto
.newBuilder().build();
public ClientNamenodeProtocolTranslatorPB(ClientNamenodeProtocolPB proxy) {
rpcProxy = proxy;
}
@@ -1668,6 +1676,21 @@
}
}
@Override
public HashMap<String, String> getErasureCodingCodecs() throws IOException {
try {
GetErasureCodingCodecsResponseProto response = rpcProxy
.getErasureCodingCodecs(null, VOID_GET_EC_CODEC_REQUEST);
HashMap<String, String> ecCodecs = new HashMap<>();
for (CodecProto codec : response.getCodecList()) {
ecCodecs.put(codec.getCodec(), codec.getCoders());
}
return ecCodecs;
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
}
@Override
public ErasureCodingPolicy getErasureCodingPolicy(String src)
throws IOException {

View File

@@ -172,6 +172,7 @@ import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage.State;
import org.apache.hadoop.hdfs.server.protocol.DatanodeStorageReport;
import org.apache.hadoop.hdfs.server.protocol.StorageReport;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.CodecProto;
import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.ShmId;
import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.SlotId;
import org.apache.hadoop.io.EnumSetWritable;
@@ -2716,6 +2717,13 @@ public class PBHelperClient {
return builder.build();
}
public static CodecProto convertErasureCodingCodec(String codec,
String coders) {
CodecProto.Builder builder = CodecProto.newBuilder()
.setCodec(codec).setCoders(coders);
return builder.build();
}
public static AddingECPolicyResponseProto convertAddingECPolicyResponse(
AddingECPolicyResponse response) {
AddingECPolicyResponseProto.Builder builder =

View File

@@ -912,6 +912,8 @@ service ClientNamenodeProtocol {
returns(AddErasureCodingPoliciesResponseProto);
rpc getErasureCodingPolicy(GetErasureCodingPolicyRequestProto)
returns(GetErasureCodingPolicyResponseProto);
rpc getErasureCodingCodecs(GetErasureCodingCodecsRequestProto)
returns(GetErasureCodingCodecsResponseProto);
rpc getQuotaUsage(GetQuotaUsageRequestProto)
returns(GetQuotaUsageResponseProto);
}

View File

@@ -38,6 +38,13 @@ message GetErasureCodingPoliciesResponseProto {
repeated ErasureCodingPolicyProto ecPolicies = 1;
}
message GetErasureCodingCodecsRequestProto { // void request
}
message GetErasureCodingCodecsResponseProto {
repeated CodecProto codec = 1;
}
message GetErasureCodingPolicyRequestProto {
required string src = 1; // path to get the policy info
}
@@ -73,3 +80,11 @@ message BlockECReconstructionInfoProto {
required bytes liveBlockIndices = 6;
required ErasureCodingPolicyProto ecPolicy = 7;
}
/**
* Codec and its corresponding coders
*/
message CodecProto {
required string codec = 1;
required string coders = 2;
}
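
For illustration, a populated GetErasureCodingCodecsResponseProto might render in protobuf text format roughly as follows (a sketch only; the actual codec and coder names are whatever the NameNode's CodecRegistry has loaded, and native coders appear only when the native library is available):

codec {
  codec: "rs"
  coders: "rs_native, rs_java"
}
codec {
  codec: "xor"
  coders: "xor_native, xor_java"
}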

View File

@@ -21,7 +21,9 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -223,6 +225,8 @@ import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodin
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.SetErasureCodingPolicyResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.UnsetErasureCodingPolicyResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ErasureCodingProtos.GetErasureCodingCodecsResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockStoragePolicyProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto;
@@ -1618,6 +1622,25 @@ public class ClientNamenodeProtocolServerSideTranslatorPB implements
}
}
@Override
public GetErasureCodingCodecsResponseProto getErasureCodingCodecs(
RpcController controller, GetErasureCodingCodecsRequestProto request)
throws ServiceException {
try {
HashMap<String, String> codecs = server.getErasureCodingCodecs();
GetErasureCodingCodecsResponseProto.Builder resBuilder =
GetErasureCodingCodecsResponseProto.newBuilder();
for (Map.Entry<String, String> codec : codecs.entrySet()) {
resBuilder.addCodec(
PBHelperClient.convertErasureCodingCodec(
codec.getKey(), codec.getValue()));
}
return resBuilder.build();
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public AddErasureCodingPoliciesResponseProto addErasureCodingPolicies(
RpcController controller, AddErasureCodingPoliciesRequestProto request)

View File

@@ -25,6 +25,7 @@ import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.stream.Collectors;
@@ -43,6 +44,7 @@ import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.server.namenode.FSDirectory.DirOp;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.io.erasurecode.CodecRegistry;
import org.apache.hadoop.security.AccessControlException;
import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.XATTR_ERASURECODING_POLICY;
@@ -311,6 +313,18 @@ final class FSDirErasureCodingOp {
return fsn.getErasureCodingPolicyManager().getEnabledPolicies();
}
/**
* Get available erasure coding codecs and coders.
*
* @param fsn namespace
* @return a {@link java.util.HashMap} from codec name to its coder list
*/
static HashMap<String, String> getErasureCodingCodecs(final FSNamesystem fsn)
throws IOException {
assert fsn.hasReadLock();
return CodecRegistry.getInstance().getCodec2CoderCompactMap();
}
private static ErasureCodingPolicy getErasureCodingPolicyForPath(
FSDirectory fsd, INodesInPath iip) throws IOException {
Preconditions.checkNotNull(iip, "INodes cannot be null");

View File

@@ -6930,6 +6930,20 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean,
}
}
/**
* Get available erasure coding codecs and corresponding coders.
*/
HashMap<String, String> getErasureCodingCodecs() throws IOException {
checkOperation(OperationCategory.READ);
readLock();
try {
checkOperation(OperationCategory.READ);
return FSDirErasureCodingOp.getErasureCodingCodecs(this);
} finally {
readUnlock("getErasureCodingCodecs");
}
}
void setXAttr(String src, XAttr xAttr, EnumSet<XAttrSetFlag> flag,
boolean logRetryCache)
throws IOException {

View File

@@ -37,6 +37,7 @@ import java.net.InetSocketAddress;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@@ -2240,6 +2241,12 @@ public class NameNodeRpcServer implements NamenodeProtocols {
return namesystem.getErasureCodingPolicies();
}
@Override // ClientProtocol
public HashMap<String, String> getErasureCodingCodecs() throws IOException {
checkNNStartup();
return namesystem.getErasureCodingCodecs();
}
@Override // ClientProtocol
public ErasureCodingPolicy getErasureCodingPolicy(String src) throws IOException {
checkNNStartup();

View File

@@ -33,8 +33,10 @@ import org.apache.hadoop.util.ToolRunner;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
* CLI for the erasure code encoding operations.
@@ -361,11 +363,66 @@ public class ECAdmin extends Configured implements Tool {
}
}
/** Command to list the set of supported erasure coding codecs and coders. */
private static class ListECCodecsCommand
implements AdminHelper.Command {
@Override
public String getName() {
return "-listCodecs";
}
@Override
public String getShortUsage() {
return "[" + getName() + "]\n";
}
@Override
public String getLongUsage() {
return getShortUsage() + "\n" +
"Get the list of supported erasure coding codecs and coders.\n" +
"A coder is an implementation of a codec. A codec can have " +
"different implementations, thus different coders.\n" +
"The coders for a codec are listed in a fall back order.\n";
}
@Override
public int run(Configuration conf, List<String> args) throws IOException {
if (args.size() > 0) {
System.err.println(getName() + ": Too many arguments");
return 1;
}
final DistributedFileSystem dfs = AdminHelper.getDFS(conf);
try {
HashMap<String, String> codecs =
dfs.getAllErasureCodingCodecs();
if (codecs.isEmpty()) {
System.out.println("No erasure coding codecs are supported on the " +
"cluster.");
} else {
System.out.println("Erasure Coding Codecs: Codec [Coder List]");
for (Map.Entry<String, String> codec : codecs.entrySet()) {
if (codec != null) {
System.out.println("\t" + codec.getKey().toUpperCase() + " ["
+ codec.getValue().toUpperCase() + "]");
}
}
}
} catch (IOException e) {
System.err.println(AdminHelper.prettifyException(e));
return 2;
}
return 0;
}
}
private static final AdminHelper.Command[] COMMANDS = {
new ListECPoliciesCommand(),
new AddECPoliciesCommand(),
new GetECPolicyCommand(),
new SetECPolicyCommand(),
new UnsetECPolicyCommand()
new UnsetECPolicyCommand(),
new ListECCodecsCommand()
};
}

View File

@@ -154,6 +154,7 @@ Deployment
[-getPolicy -path <path>]
[-unsetPolicy -path <path>]
[-listPolicies]
[-listCodecs]
[-usage [cmd ...]]
[-help [cmd ...]]
@@ -181,4 +182,8 @@ Below are the details about each command.
* `[-addPolicies -policyFile <file>]`
Add a list of erasure coding policies. Please refer to etc/hadoop/user_ec_policies.xml.template for an example policy file.
* `[-listCodecs]`
Get the list of supported erasure coding codecs and coders in the system. A coder is an implementation of a codec, and a codec can have multiple implementations and therefore multiple coders. The coders for a codec are listed in fallback order.
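
For example (illustrative output only; the codecs and coders reported depend on the Hadoop build and on whether native coder implementations are available):

    $ hdfs ec -listCodecs
    Erasure Coding Codecs: Codec [Coder List]
        RS [RS_NATIVE, RS_JAVA]
        RS-LEGACY [RS-LEGACY_JAVA]
        XOR [XOR_NATIVE, XOR_JAVA]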

View File

@@ -48,6 +48,7 @@ import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import static org.apache.hadoop.test.GenericTestUtils.assertExceptionContains;
@@ -622,4 +623,20 @@ public class TestErasureCodingPolicies {
assertNull(fs.getErasureCodingPolicy(filePath));
fs.delete(dirPath, true);
}
@Test
public void testGetAllErasureCodingCodecs() throws Exception {
HashMap<String, String> allECCodecs = fs
.getAllErasureCodingCodecs();
assertTrue("At least 3 system codecs should be enabled",
allECCodecs.size() >= 3);
System.out.println("Erasure Coding Codecs: Codec [Coder List]");
for (String codec : allECCodecs.keySet()) {
String coders = allECCodecs.get(codec);
if (codec != null && coders != null) {
System.out.println("\t" + codec.toUpperCase() + "["
+ coders.toUpperCase() + "]");
}
}
}
}

View File

@@ -541,5 +541,34 @@
</comparators>
</test>
<test>
<description>listCodecs : illegal parameters - too many parameters</description>
<test-commands>
<ec-admin-command>-fs NAMENODE -listCodecs /ecdir</ec-admin-command>
</test-commands>
<cleanup-commands>
</cleanup-commands>
<comparators>
<comparator>
<type>SubstringComparator</type>
<expected-output>-listCodecs: Too many arguments</expected-output>
</comparator>
</comparators>
</test>
<test>
<description>listCodecs : successful list codecs</description>
<test-commands>
<ec-admin-command>-fs NAMENODE -listCodecs</ec-admin-command>
</test-commands>
<cleanup-commands>
</cleanup-commands>
<comparators>
<comparator>
<type>SubstringComparator</type>
<expected-output>Erasure Coding Codecs: Codec [Coder List]</expected-output>
</comparator>
</comparators>
</test>
</tests>
</configuration>