HDFS-12269. Better to return a Map rather than HashMap in getErasureCodingCodecs. Contributed by Huafeng Wang.
commit 08aaa4b36f
parent ab051bd42e
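A note on the intent (not part of the patch): the change narrows the declared return types of getErasureCodingCodecs() and getCodec2CoderCompactMap() from the concrete HashMap to the Map interface across the client, protocol, and NameNode layers, so callers depend only on the map contract and the implementation can change later without breaking them. Below is a minimal, hypothetical caller sketch illustrating the idea; the class name ECCodecPrinter and the local variable names are illustrative, and only DistributedFileSystem#getAllErasureCodingCodecs comes from the patch itself.

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;

public class ECCodecPrinter {
  public static void main(String[] args) throws IOException {
    // Assumes fs.defaultFS in the Configuration points at an HDFS cluster.
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    if (fs instanceof DistributedFileSystem) {
      DistributedFileSystem dfs = (DistributedFileSystem) fs;
      // After this patch the declared return type is Map<String, String>;
      // code written against the interface compiles before and after.
      Map<String, String> codecs = dfs.getAllErasureCodingCodecs();
      for (Map.Entry<String, String> e : codecs.entrySet()) {
        System.out.println(e.getKey() + " -> " + e.getValue());
      }
    }
  }
}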
@@ -176,7 +176,7 @@ public final class CodecRegistry {
    * @return a map of all codec names, and their corresponding code list
    * separated by ','.
    */
-  public HashMap<String, String> getCodec2CoderCompactMap() {
+  public Map<String, String> getCodec2CoderCompactMap() {
     return coderNameCompactMap;
   }
 }
@@ -2764,7 +2764,7 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
     }
   }

-  public HashMap<String, String> getErasureCodingCodecs() throws IOException {
+  public Map<String, String> getErasureCodingCodecs() throws IOException {
     checkOpen();
     try (TraceScope ignored = tracer.newScope("getErasureCodingCodecs")) {
       return namenode.getErasureCodingCodecs();
@@ -26,7 +26,6 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.EnumSet;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

@@ -2585,7 +2584,7 @@ public class DistributedFileSystem extends FileSystem {
    * @return all erasure coding codecs and coders supported by this file system.
    * @throws IOException
    */
-  public HashMap<String, String> getAllErasureCodingCodecs()
+  public Map<String, String> getAllErasureCodingCodecs()
       throws IOException {
     return dfs.getErasureCodingCodecs();
   }
@@ -19,8 +19,8 @@ package org.apache.hadoop.hdfs.protocol;

 import java.io.IOException;
 import java.util.EnumSet;
-import java.util.HashMap;
 import java.util.List;
+import java.util.Map;

 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -1601,7 +1601,7 @@ public interface ClientProtocol {
    * @throws IOException
    */
   @Idempotent
-  HashMap<String, String> getErasureCodingCodecs() throws IOException;
+  Map<String, String> getErasureCodingCodecs() throws IOException;

   /**
    * Get the information about the EC policy for the path.
@@ -26,6 +26,7 @@ import java.util.List;

 import com.google.common.collect.Lists;

+import java.util.Map;
 import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;

@@ -1760,11 +1761,11 @@ public class ClientNamenodeProtocolTranslatorPB implements
   }

   @Override
-  public HashMap<String, String> getErasureCodingCodecs() throws IOException {
+  public Map<String, String> getErasureCodingCodecs() throws IOException {
     try {
       GetErasureCodingCodecsResponseProto response = rpcProxy
           .getErasureCodingCodecs(null, VOID_GET_EC_CODEC_REQUEST);
-      HashMap<String, String> ecCodecs = new HashMap<String, String>();
+      Map<String, String> ecCodecs = new HashMap<>();
       for (CodecProto codec : response.getCodecList()) {
         ecCodecs.put(codec.getCodec(), codec.getCoders());
       }
@@ -21,7 +21,6 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.EnumSet;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;
@@ -1664,7 +1663,7 @@ public class ClientNamenodeProtocolServerSideTranslatorPB implements
       RpcController controller, GetErasureCodingCodecsRequestProto request)
       throws ServiceException {
     try {
-      HashMap<String, String> codecs = server.getErasureCodingCodecs();
+      Map<String, String> codecs = server.getErasureCodingCodecs();
       GetErasureCodingCodecsResponseProto.Builder resBuilder =
           GetErasureCodingCodecsResponseProto.newBuilder();
       for (Map.Entry<String, String> codec : codecs.entrySet()) {
@@ -25,8 +25,8 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.EnumSet;
-import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.stream.Collectors;

 import com.google.common.base.Preconditions;
@@ -344,7 +344,7 @@ final class FSDirErasureCodingOp {
    * @param fsn namespace
    * @return {@link java.util.HashMap} array
    */
-  static HashMap<String, String> getErasureCodingCodecs(final FSNamesystem fsn)
+  static Map<String, String> getErasureCodingCodecs(final FSNamesystem fsn)
       throws IOException {
     assert fsn.hasReadLock();
     return CodecRegistry.getInstance().getCodec2CoderCompactMap();
@@ -7255,14 +7255,14 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean,
   /**
    * Get available erasure coding codecs and corresponding coders.
    */
-  HashMap<String, String> getErasureCodingCodecs() throws IOException {
+  Map<String, String> getErasureCodingCodecs() throws IOException {
     final String operationName = "getErasureCodingCodecs";
     boolean success = false;
     checkOperation(OperationCategory.READ);
     readLock();
     try {
       checkOperation(OperationCategory.READ);
-      final HashMap<String, String> ret =
+      final Map<String, String> ret =
           FSDirErasureCodingOp.getErasureCodingCodecs(this);
       success = true;
       return ret;
@@ -37,9 +37,9 @@ import java.net.InetSocketAddress;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.EnumSet;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.Callable;

@@ -2278,7 +2278,7 @@ public class NameNodeRpcServer implements NamenodeProtocols {
   }

   @Override // ClientProtocol
-  public HashMap<String, String> getErasureCodingCodecs() throws IOException {
+  public Map<String, String> getErasureCodingCodecs() throws IOException {
     checkNNStartup();
     return namesystem.getErasureCodingCodecs();
   }
@@ -33,7 +33,6 @@ import org.apache.hadoop.util.ToolRunner;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
@@ -441,7 +440,7 @@ public class ECAdmin extends Configured implements Tool {

       final DistributedFileSystem dfs = AdminHelper.getDFS(conf);
       try {
-        HashMap<String, String> codecs =
+        Map<String, String> codecs =
             dfs.getAllErasureCodingCodecs();
         if (codecs.isEmpty()) {
           System.out.println("No erasure coding codecs are supported on the " +
@@ -50,8 +50,8 @@ import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
 import java.util.Collection;
 import java.util.EnumSet;
-import java.util.HashMap;
 import java.util.List;
+import java.util.Map;

 import static org.apache.hadoop.test.GenericTestUtils.assertExceptionContains;
 import static org.junit.Assert.*;
@@ -647,7 +647,7 @@ public class TestErasureCodingPolicies {

   @Test
   public void testGetAllErasureCodingCodecs() throws Exception {
-    HashMap<String, String> allECCodecs = fs
+    Map<String, String> allECCodecs = fs
         .getAllErasureCodingCodecs();
     assertTrue("At least 3 system codecs should be enabled",
         allECCodecs.size() >= 3);
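A general note on the idiom used in the translator hunk above, shown as a standalone sketch rather than code from the patch: the local variable and the method signature are declared as the Map interface while the instantiation stays a concrete HashMap. The helper name buildCodecMap and the codec/coder strings below are illustrative assumptions, not values taken from Hadoop.

import java.util.HashMap;
import java.util.Map;

class CoderMapSketch {
  // Hypothetical helper mirroring the "declare the interface, instantiate the
  // concrete class" pattern: callers only ever see Map<String, String>.
  static Map<String, String> buildCodecMap() {
    Map<String, String> ecCodecs = new HashMap<>();
    ecCodecs.put("rs", "coderA,coderB");   // illustrative codec -> coder list
    ecCodecs.put("xor", "coderC");         // illustrative codec -> coder list
    return ecCodecs;
  }
}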