HDFS-12682. ECAdmin -listPolicies will always show SystemErasureCodingPolicies state as DISABLED.

This commit is contained in:
Xiao Chen 2017-11-02 21:26:45 -07:00
parent 1700adc6f7
commit e565b5277d
23 changed files with 421 additions and 162 deletions

View File

@ -119,6 +119,7 @@ import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.EncryptionZone; import org.apache.hadoop.hdfs.protocol.EncryptionZone;
import org.apache.hadoop.hdfs.protocol.EncryptionZoneIterator; import org.apache.hadoop.hdfs.protocol.EncryptionZoneIterator;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType; import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
@ -2793,7 +2794,8 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
} }
} }
public ErasureCodingPolicy[] getErasureCodingPolicies() throws IOException { public ErasureCodingPolicyInfo[] getErasureCodingPolicies()
throws IOException {
checkOpen(); checkOpen();
try (TraceScope ignored = tracer.newScope("getErasureCodingPolicies")) { try (TraceScope ignored = tracer.newScope("getErasureCodingPolicies")) {
return namenode.getErasureCodingPolicies(); return namenode.getErasureCodingPolicies();

View File

@ -77,6 +77,7 @@ import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.DirectoryListing; import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.EncryptionZone; import org.apache.hadoop.hdfs.protocol.EncryptionZone;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType; import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.ReencryptAction; import org.apache.hadoop.hdfs.protocol.HdfsConstants.ReencryptAction;
@ -2676,7 +2677,7 @@ public class DistributedFileSystem extends FileSystem {
* @return all erasure coding policies supported by this file system. * @return all erasure coding policies supported by this file system.
* @throws IOException * @throws IOException
*/ */
public Collection<ErasureCodingPolicy> getAllErasureCodingPolicies() public Collection<ErasureCodingPolicyInfo> getAllErasureCodingPolicies()
throws IOException { throws IOException {
return Arrays.asList(dfs.getErasureCodingPolicies()); return Arrays.asList(dfs.getErasureCodingPolicies());
} }

View File

@ -42,6 +42,7 @@ import org.apache.hadoop.hdfs.protocol.CachePoolEntry;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo; import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
import org.apache.hadoop.hdfs.protocol.EncryptionZone; import org.apache.hadoop.hdfs.protocol.EncryptionZone;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.ReencryptAction; import org.apache.hadoop.hdfs.protocol.HdfsConstants.ReencryptAction;
import org.apache.hadoop.hdfs.protocol.OpenFileEntry; import org.apache.hadoop.hdfs.protocol.OpenFileEntry;
@ -537,7 +538,8 @@ public class HdfsAdmin {
* *
* @throws IOException * @throws IOException
*/ */
public ErasureCodingPolicy[] getErasureCodingPolicies() throws IOException { public ErasureCodingPolicyInfo[] getErasureCodingPolicies()
throws IOException {
return dfs.getClient().getErasureCodingPolicies(); return dfs.getClient().getErasureCodingPolicies();
} }

View File

@ -1619,7 +1619,7 @@ public interface ClientProtocol {
* @throws IOException * @throws IOException
*/ */
@Idempotent @Idempotent
ErasureCodingPolicy[] getErasureCodingPolicies() throws IOException; ErasureCodingPolicyInfo[] getErasureCodingPolicies() throws IOException;
/** /**
* Get the erasure coding codecs loaded in Namenode. * Get the erasure coding codecs loaded in Namenode.

View File

@ -21,7 +21,6 @@ import com.google.common.base.Preconditions;
import org.apache.commons.lang.builder.EqualsBuilder; import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder; import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.erasurecode.ECSchema; import org.apache.hadoop.io.erasurecode.ECSchema;
import org.apache.hadoop.io.erasurecode.ErasureCodeConstants; import org.apache.hadoop.io.erasurecode.ErasureCodeConstants;
@ -29,22 +28,23 @@ import java.io.Serializable;
/** /**
* A policy about how to write/read/code an erasure coding file. * A policy about how to write/read/code an erasure coding file.
* <p>
* Note this class should be lightweight and immutable, because it's cached
* by {@link SystemErasureCodingPolicies}, to be returned as a part of
* {@link HdfsFileStatus}.
*/ */
@InterfaceAudience.Public @InterfaceAudience.Private
@InterfaceStability.Evolving
public final class ErasureCodingPolicy implements Serializable { public final class ErasureCodingPolicy implements Serializable {
private static final long serialVersionUID = 0x0079fe4e; private static final long serialVersionUID = 0x0079fe4e;
private String name; private final String name;
private final ECSchema schema; private final ECSchema schema;
private final int cellSize; private final int cellSize;
private byte id; private final byte id;
private ErasureCodingPolicyState state;
public ErasureCodingPolicy(String name, ECSchema schema, public ErasureCodingPolicy(String name, ECSchema schema,
int cellSize, byte id, ErasureCodingPolicyState state) { int cellSize, byte id) {
Preconditions.checkNotNull(name); Preconditions.checkNotNull(name);
Preconditions.checkNotNull(schema); Preconditions.checkNotNull(schema);
Preconditions.checkArgument(cellSize > 0, "cellSize must be positive"); Preconditions.checkArgument(cellSize > 0, "cellSize must be positive");
@ -54,22 +54,14 @@ public final class ErasureCodingPolicy implements Serializable {
this.schema = schema; this.schema = schema;
this.cellSize = cellSize; this.cellSize = cellSize;
this.id = id; this.id = id;
this.state = state;
}
public ErasureCodingPolicy(String name, ECSchema schema, int cellSize,
byte id) {
this(name, schema, cellSize, id, ErasureCodingPolicyState.DISABLED);
} }
public ErasureCodingPolicy(ECSchema schema, int cellSize, byte id) { public ErasureCodingPolicy(ECSchema schema, int cellSize, byte id) {
this(composePolicyName(schema, cellSize), schema, cellSize, id, this(composePolicyName(schema, cellSize), schema, cellSize, id);
ErasureCodingPolicyState.DISABLED);
} }
public ErasureCodingPolicy(ECSchema schema, int cellSize) { public ErasureCodingPolicy(ECSchema schema, int cellSize) {
this(composePolicyName(schema, cellSize), schema, cellSize, (byte) -1, this(composePolicyName(schema, cellSize), schema, cellSize, (byte) -1);
ErasureCodingPolicyState.DISABLED);
} }
public static String composePolicyName(ECSchema schema, int cellSize) { public static String composePolicyName(ECSchema schema, int cellSize) {
@ -86,10 +78,6 @@ public final class ErasureCodingPolicy implements Serializable {
return name; return name;
} }
public void setName(String name) {
this.name = name;
}
public ECSchema getSchema() { public ECSchema getSchema() {
return schema; return schema;
} }
@ -114,39 +102,14 @@ public final class ErasureCodingPolicy implements Serializable {
return id; return id;
} }
public void setId(byte id) {
this.id = id;
}
public boolean isReplicationPolicy() { public boolean isReplicationPolicy() {
return (id == ErasureCodeConstants.REPLICATION_POLICY_ID); return (id == ErasureCodeConstants.REPLICATION_POLICY_ID);
} }
public ErasureCodingPolicyState getState() {
return state;
}
public void setState(ErasureCodingPolicyState state) {
this.state = state;
}
public boolean isSystemPolicy() { public boolean isSystemPolicy() {
return (this.id < ErasureCodeConstants.USER_DEFINED_POLICY_START_ID); return (this.id < ErasureCodeConstants.USER_DEFINED_POLICY_START_ID);
} }
public boolean isEnabled() {
return (this.state == ErasureCodingPolicyState.ENABLED);
}
public boolean isDisabled() {
return (this.state == ErasureCodingPolicyState.DISABLED);
}
public boolean isRemoved() {
return (this.state == ErasureCodingPolicyState.REMOVED);
}
@Override @Override
public boolean equals(Object o) { public boolean equals(Object o) {
if (o == null) { if (o == null) {
@ -164,7 +127,6 @@ public final class ErasureCodingPolicy implements Serializable {
.append(schema, rhs.schema) .append(schema, rhs.schema)
.append(cellSize, rhs.cellSize) .append(cellSize, rhs.cellSize)
.append(id, rhs.id) .append(id, rhs.id)
.append(state, rhs.state)
.isEquals(); .isEquals();
} }
@ -175,7 +137,6 @@ public final class ErasureCodingPolicy implements Serializable {
.append(schema) .append(schema)
.append(cellSize) .append(cellSize)
.append(id) .append(id)
.append(state)
.toHashCode(); .toHashCode();
} }
@ -184,8 +145,7 @@ public final class ErasureCodingPolicy implements Serializable {
return "ErasureCodingPolicy=[" + "Name=" + name + ", " return "ErasureCodingPolicy=[" + "Name=" + name + ", "
+ "Schema=[" + schema.toString() + "], " + "Schema=[" + schema.toString() + "], "
+ "CellSize=" + cellSize + ", " + "CellSize=" + cellSize + ", "
+ "Id=" + id + ", " + "Id=" + id
+ "State=" + state.toString()
+ "]"; + "]";
} }
} }

View File

@ -0,0 +1,106 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.protocol;
import com.google.common.base.Preconditions;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.hadoop.classification.InterfaceAudience;
import java.io.Serializable;
/**
* HDFS internal presentation of a {@link ErasureCodingPolicy}. Also contains
* additional information such as {@link ErasureCodingPolicyState}.
*/
@InterfaceAudience.Private
public class ErasureCodingPolicyInfo implements Serializable {
  private static final long serialVersionUID = 0x31;

  /** The immutable policy being described; never null. */
  private final ErasureCodingPolicy policy;
  /** Mutable lifecycle state of {@link #policy}; never null. */
  private ErasureCodingPolicyState state;

  /**
   * Create an info object wrapping the given policy with an explicit state.
   *
   * @param thePolicy the erasure coding policy; must not be null
   * @param theState the policy's current state; must not be null
   * @throws NullPointerException if either argument is null
   */
  public ErasureCodingPolicyInfo(final ErasureCodingPolicy thePolicy,
      final ErasureCodingPolicyState theState) {
    Preconditions.checkNotNull(thePolicy);
    Preconditions.checkNotNull(theState);
    this.policy = thePolicy;
    this.state = theState;
  }

  /**
   * Create an info object for the given policy, defaulting the state
   * to {@link ErasureCodingPolicyState#DISABLED}.
   *
   * @param thePolicy the erasure coding policy; must not be null
   * @throws NullPointerException if the policy is null
   */
  public ErasureCodingPolicyInfo(final ErasureCodingPolicy thePolicy) {
    this(thePolicy, ErasureCodingPolicyState.DISABLED);
  }

  /** @return the wrapped erasure coding policy. */
  public ErasureCodingPolicy getPolicy() {
    return policy;
  }

  /** @return the current state of the policy. */
  public ErasureCodingPolicyState getState() {
    return state;
  }

  /**
   * Update the policy's state.
   *
   * @param newState the state to set; must not be null
   * @throws NullPointerException if the new state is null
   */
  public void setState(final ErasureCodingPolicyState newState) {
    Preconditions.checkNotNull(newState, "New state should not be null.");
    state = newState;
  }

  /** @return true iff the policy is currently enabled. */
  public boolean isEnabled() {
    return state == ErasureCodingPolicyState.ENABLED;
  }

  /** @return true iff the policy is currently disabled. */
  public boolean isDisabled() {
    return state == ErasureCodingPolicyState.DISABLED;
  }

  /** @return true iff the policy has been removed. */
  public boolean isRemoved() {
    return state == ErasureCodingPolicyState.REMOVED;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || o.getClass() != getClass()) {
      return false;
    }
    final ErasureCodingPolicyInfo that = (ErasureCodingPolicyInfo) o;
    return new EqualsBuilder()
        .append(policy, that.policy)
        .append(state, that.state)
        .isEquals();
  }

  @Override
  public int hashCode() {
    // Seeds must stay fixed: equal objects must keep producing equal hashes
    // across versions (this class is Serializable).
    return new HashCodeBuilder(303855623, 582626729)
        .append(policy)
        .append(state)
        .toHashCode();
  }

  @Override
  public String toString() {
    // String concatenation invokes toString() on both parts, so this is
    // byte-identical to policy.toString() + ", State=" + state.toString().
    return policy + ", State=" + state;
  }
}

View File

@ -18,7 +18,6 @@
package org.apache.hadoop.hdfs.protocol; package org.apache.hadoop.hdfs.protocol;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import java.io.DataInput; import java.io.DataInput;
import java.io.DataOutput; import java.io.DataOutput;
@ -27,8 +26,7 @@ import java.io.IOException;
/** /**
* Value denotes the possible states of an ErasureCodingPolicy. * Value denotes the possible states of an ErasureCodingPolicy.
*/ */
@InterfaceAudience.Public @InterfaceAudience.Private
@InterfaceStability.Evolving
public enum ErasureCodingPolicyState { public enum ErasureCodingPolicyState {
/** Policy is disabled. It's policy default state. */ /** Policy is disabled. It's policy default state. */

View File

@ -64,6 +64,7 @@ import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.ECBlockGroupStats; import org.apache.hadoop.hdfs.protocol.ECBlockGroupStats;
import org.apache.hadoop.hdfs.protocol.EncryptionZone; import org.apache.hadoop.hdfs.protocol.EncryptionZone;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType; import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.ReencryptAction; import org.apache.hadoop.hdfs.protocol.HdfsConstants.ReencryptAction;
@ -1782,17 +1783,17 @@ public class ClientNamenodeProtocolTranslatorPB implements
} }
@Override @Override
public ErasureCodingPolicy[] getErasureCodingPolicies() throws IOException { public ErasureCodingPolicyInfo[] getErasureCodingPolicies()
throws IOException {
try { try {
GetErasureCodingPoliciesResponseProto response = rpcProxy GetErasureCodingPoliciesResponseProto response = rpcProxy
.getErasureCodingPolicies(null, VOID_GET_EC_POLICIES_REQUEST); .getErasureCodingPolicies(null, VOID_GET_EC_POLICIES_REQUEST);
ErasureCodingPolicy[] ecPolicies = ErasureCodingPolicyInfo[] ecPolicies =
new ErasureCodingPolicy[response.getEcPoliciesCount()]; new ErasureCodingPolicyInfo[response.getEcPoliciesCount()];
int i = 0; int i = 0;
for (ErasureCodingPolicyProto ecPolicyProto : for (ErasureCodingPolicyProto proto : response.getEcPoliciesList()) {
response.getEcPoliciesList()) {
ecPolicies[i++] = ecPolicies[i++] =
PBHelperClient.convertErasureCodingPolicy(ecPolicyProto); PBHelperClient.convertErasureCodingPolicyInfo(proto);
} }
return ecPolicies; return ecPolicies;
} catch (ServiceException e) { } catch (ServiceException e) {

View File

@ -55,6 +55,7 @@ import org.apache.hadoop.fs.permission.AclStatus;
import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSUtilClient; import org.apache.hadoop.hdfs.DFSUtilClient;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.inotify.Event; import org.apache.hadoop.hdfs.inotify.Event;
import org.apache.hadoop.hdfs.inotify.EventBatch; import org.apache.hadoop.hdfs.inotify.EventBatch;
import org.apache.hadoop.hdfs.inotify.EventBatchList; import org.apache.hadoop.hdfs.inotify.EventBatchList;
@ -79,6 +80,7 @@ import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.ECBlockGroupStats; import org.apache.hadoop.hdfs.protocol.ECBlockGroupStats;
import org.apache.hadoop.hdfs.protocol.EncryptionZone; import org.apache.hadoop.hdfs.protocol.EncryptionZone;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.FsPermissionExtension; import org.apache.hadoop.hdfs.protocol.FsPermissionExtension;
@ -2956,6 +2958,9 @@ public class PBHelperClient {
return HdfsProtos.ErasureCodingPolicyState.valueOf(state.getValue()); return HdfsProtos.ErasureCodingPolicyState.valueOf(state.getValue());
} }
/**
* Convert the protobuf to a {@link ErasureCodingPolicy}.
*/
public static ErasureCodingPolicy convertErasureCodingPolicy( public static ErasureCodingPolicy convertErasureCodingPolicy(
ErasureCodingPolicyProto proto) { ErasureCodingPolicyProto proto) {
final byte id = (byte) (proto.getId() & 0xFF); final byte id = (byte) (proto.getId() & 0xFF);
@ -2969,28 +2974,62 @@ public class PBHelperClient {
"Missing schema field in ErasureCodingPolicy proto"); "Missing schema field in ErasureCodingPolicy proto");
Preconditions.checkArgument(proto.hasCellSize(), Preconditions.checkArgument(proto.hasCellSize(),
"Missing cellsize field in ErasureCodingPolicy proto"); "Missing cellsize field in ErasureCodingPolicy proto");
Preconditions.checkArgument(proto.hasState(),
"Missing state field in ErasureCodingPolicy proto");
return new ErasureCodingPolicy(proto.getName(), return new ErasureCodingPolicy(proto.getName(),
convertECSchema(proto.getSchema()), convertECSchema(proto.getSchema()),
proto.getCellSize(), id, convertECState(proto.getState())); proto.getCellSize(), id);
} }
return policy; return policy;
} }
public static ErasureCodingPolicyProto convertErasureCodingPolicy( /**
* Convert the protobuf to a {@link ErasureCodingPolicyInfo}. This should only
* be needed when the caller is interested in the state of the policy.
*/
public static ErasureCodingPolicyInfo convertErasureCodingPolicyInfo(
ErasureCodingPolicyProto proto) {
ErasureCodingPolicy policy = convertErasureCodingPolicy(proto);
ErasureCodingPolicyInfo info = new ErasureCodingPolicyInfo(policy);
Preconditions.checkArgument(proto.hasState(),
"Missing state field in ErasureCodingPolicy proto");
info.setState(convertECState(proto.getState()));
return info;
}
private static ErasureCodingPolicyProto.Builder createECPolicyProtoBuilder(
ErasureCodingPolicy policy) { ErasureCodingPolicy policy) {
ErasureCodingPolicyProto.Builder builder = ErasureCodingPolicyProto final ErasureCodingPolicyProto.Builder builder =
.newBuilder() ErasureCodingPolicyProto.newBuilder().setId(policy.getId());
.setId(policy.getId());
// If it's not a built-in policy, need to set the optional fields. // If it's not a built-in policy, need to set the optional fields.
if (SystemErasureCodingPolicies.getByID(policy.getId()) == null) { if (SystemErasureCodingPolicies.getByID(policy.getId()) == null) {
builder.setName(policy.getName()) builder.setName(policy.getName())
.setSchema(convertECSchema(policy.getSchema())) .setSchema(convertECSchema(policy.getSchema()))
.setCellSize(policy.getCellSize()) .setCellSize(policy.getCellSize());
.setState(convertECState(policy.getState()));
} }
return builder;
}
/**
* Convert a {@link ErasureCodingPolicy} to protobuf.
* This means no state of the policy will be set on the protobuf.
*/
public static ErasureCodingPolicyProto convertErasureCodingPolicy(
ErasureCodingPolicy policy) {
return createECPolicyProtoBuilder(policy).build();
}
/**
* Convert a {@link ErasureCodingPolicyInfo} to protobuf.
* The protobuf will have the policy, and state. State is relevant when:
* 1. Persisting a policy to fsimage
* 2. Returning the policy to the RPC call
* {@link DistributedFileSystem#getAllErasureCodingPolicies()}
*/
public static ErasureCodingPolicyProto convertErasureCodingPolicy(
ErasureCodingPolicyInfo info) {
final ErasureCodingPolicyProto.Builder builder =
createECPolicyProtoBuilder(info.getPolicy());
builder.setState(convertECState(info.getState()));
return builder.build(); return builder.build();
} }

View File

@ -0,0 +1,72 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.protocol;
import org.junit.Test;
import static org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies.RS_6_3_POLICY_ID;
import static org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState.DISABLED;
import static org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState.ENABLED;
import static org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState.REMOVED;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* Test {@link ErasureCodingPolicyInfo}.
*/
public class TestErasureCodingPolicyInfo {

  /**
   * Both constructors must reject null arguments: a null policy in the
   * single-arg form, and a null state in the two-arg form.
   */
  @Test
  public void testPolicyAndStateCantBeNull() {
    try {
      new ErasureCodingPolicyInfo(null);
      fail("Null policy should fail");
    } catch (NullPointerException expected) {
    }

    try {
      new ErasureCodingPolicyInfo(SystemErasureCodingPolicies
          .getByID(RS_6_3_POLICY_ID), null);
      // Fixed message: this case passes a null state, not a null policy.
      fail("Null state should fail");
    } catch (NullPointerException expected) {
    }
  }

  /**
   * Cycling through ENABLED -> REMOVED -> DISABLED, exactly one of the
   * is{Enabled,Disabled,Removed} predicates must be true at each step.
   */
  @Test
  public void testStates() {
    ErasureCodingPolicyInfo info =
        new ErasureCodingPolicyInfo(SystemErasureCodingPolicies
            .getByID(RS_6_3_POLICY_ID));

    info.setState(ENABLED);
    assertFalse(info.isDisabled());
    assertTrue(info.isEnabled());
    assertFalse(info.isRemoved());

    info.setState(REMOVED);
    assertFalse(info.isDisabled());
    assertFalse(info.isEnabled());
    assertTrue(info.isRemoved());

    info.setState(DISABLED);
    assertTrue(info.isDisabled());
    assertFalse(info.isEnabled());
    assertFalse(info.isRemoved());
  }
}

View File

@ -46,6 +46,7 @@ import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
import org.apache.hadoop.hdfs.protocol.DirectoryListing; import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.EncryptionZone; import org.apache.hadoop.hdfs.protocol.EncryptionZone;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.LastBlockWithStatus; import org.apache.hadoop.hdfs.protocol.LastBlockWithStatus;
@ -1683,11 +1684,12 @@ public class ClientNamenodeProtocolServerSideTranslatorPB implements
public GetErasureCodingPoliciesResponseProto getErasureCodingPolicies(RpcController controller, public GetErasureCodingPoliciesResponseProto getErasureCodingPolicies(RpcController controller,
GetErasureCodingPoliciesRequestProto request) throws ServiceException { GetErasureCodingPoliciesRequestProto request) throws ServiceException {
try { try {
ErasureCodingPolicy[] ecPolicies = server.getErasureCodingPolicies(); ErasureCodingPolicyInfo[] ecpInfos = server.getErasureCodingPolicies();
GetErasureCodingPoliciesResponseProto.Builder resBuilder = GetErasureCodingPoliciesResponseProto GetErasureCodingPoliciesResponseProto.Builder resBuilder = GetErasureCodingPoliciesResponseProto
.newBuilder(); .newBuilder();
for (ErasureCodingPolicy ecPolicy : ecPolicies) { for (ErasureCodingPolicyInfo info : ecpInfos) {
resBuilder.addEcPolicies(PBHelperClient.convertErasureCodingPolicy(ecPolicy)); resBuilder.addEcPolicies(
PBHelperClient.convertErasureCodingPolicy(info));
} }
return resBuilder.build(); return resBuilder.build();
} catch (IOException e) { } catch (IOException e) {

View File

@ -78,6 +78,7 @@ import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.ECBlockGroupStats; import org.apache.hadoop.hdfs.protocol.ECBlockGroupStats;
import org.apache.hadoop.hdfs.protocol.EncryptionZone; import org.apache.hadoop.hdfs.protocol.EncryptionZone;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType; import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.ReencryptAction; import org.apache.hadoop.hdfs.protocol.HdfsConstants.ReencryptAction;
@ -1785,7 +1786,8 @@ public class RouterRpcServer extends AbstractService implements ClientProtocol {
} }
@Override @Override
public ErasureCodingPolicy[] getErasureCodingPolicies() throws IOException { public ErasureCodingPolicyInfo[] getErasureCodingPolicies()
throws IOException {
checkOperation(OperationCategory.READ, false); checkOperation(OperationCategory.READ, false);
return null; return null;
} }

View File

@ -23,6 +23,7 @@ import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState;
import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies; import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
@ -65,18 +66,18 @@ public final class ErasureCodingPolicyManager {
* All policies sorted by name for fast querying, include built-in policy, * All policies sorted by name for fast querying, include built-in policy,
* user defined policy, removed policy. * user defined policy, removed policy.
*/ */
private Map<String, ErasureCodingPolicy> policiesByName; private Map<String, ErasureCodingPolicyInfo> policiesByName;
/** /**
* All policies sorted by ID for fast querying, including built-in policy, * All policies sorted by ID for fast querying, including built-in policy,
* user defined policy, removed policy. * user defined policy, removed policy.
*/ */
private Map<Byte, ErasureCodingPolicy> policiesByID; private Map<Byte, ErasureCodingPolicyInfo> policiesByID;
/** /**
* For better performance when query all Policies. * For better performance when query all Policies.
*/ */
private ErasureCodingPolicy[] allPolicies; private ErasureCodingPolicyInfo[] allPolicies;
/** /**
* All enabled policies sorted by name for fast querying, including built-in * All enabled policies sorted by name for fast querying, including built-in
@ -120,15 +121,17 @@ public final class ErasureCodingPolicyManager {
*/ */
for (ErasureCodingPolicy policy : for (ErasureCodingPolicy policy :
SystemErasureCodingPolicies.getPolicies()) { SystemErasureCodingPolicies.getPolicies()) {
policiesByName.put(policy.getName(), policy); final ErasureCodingPolicyInfo info = new ErasureCodingPolicyInfo(policy);
policiesByID.put(policy.getId(), policy); policiesByName.put(policy.getName(), info);
policiesByID.put(policy.getId(), info);
} }
if (!defaultPolicyName.trim().isEmpty()) { if (!defaultPolicyName.isEmpty()) {
ErasureCodingPolicy ecPolicy = policiesByName.get(defaultPolicyName); final ErasureCodingPolicyInfo info =
if (ecPolicy == null) { policiesByName.get(defaultPolicyName);
if (info == null) {
String names = policiesByName.values() String names = policiesByName.values()
.stream().map(ErasureCodingPolicy::getName) .stream().map((pi) -> pi.getPolicy().getName())
.collect(Collectors.joining(", ")); .collect(Collectors.joining(", "));
String msg = String.format("EC policy '%s' specified at %s is not a " String msg = String.format("EC policy '%s' specified at %s is not a "
+ "valid policy. Please choose from list of available " + "valid policy. Please choose from list of available "
@ -138,11 +141,13 @@ public final class ErasureCodingPolicyManager {
names); names);
throw new HadoopIllegalArgumentException(msg); throw new HadoopIllegalArgumentException(msg);
} }
enabledPoliciesByName.put(ecPolicy.getName(), ecPolicy); info.setState(ErasureCodingPolicyState.ENABLED);
enabledPoliciesByName.put(info.getPolicy().getName(), info.getPolicy());
} }
enabledPolicies = enabledPolicies =
enabledPoliciesByName.values().toArray(new ErasureCodingPolicy[0]); enabledPoliciesByName.values().toArray(new ErasureCodingPolicy[0]);
allPolicies = policiesByName.values().toArray(new ErasureCodingPolicy[0]); allPolicies =
policiesByName.values().toArray(new ErasureCodingPolicyInfo[0]);
maxCellSize = conf.getInt( maxCellSize = conf.getInt(
DFSConfigKeys.DFS_NAMENODE_EC_POLICIES_MAX_CELLSIZE_KEY, DFSConfigKeys.DFS_NAMENODE_EC_POLICIES_MAX_CELLSIZE_KEY,
@ -190,23 +195,50 @@ public final class ErasureCodingPolicyManager {
* Get all system defined policies and user defined policies. * Get all system defined policies and user defined policies.
* @return all policies * @return all policies
*/ */
public ErasureCodingPolicy[] getPolicies() { public ErasureCodingPolicyInfo[] getPolicies() {
return allPolicies; return allPolicies;
} }
/** /**
* Get a policy by policy ID, including system policy and user defined policy. * Get a {@link ErasureCodingPolicy} by policy ID, including system policy
* and user defined policy.
* @return ecPolicy, or null if not found * @return ecPolicy, or null if not found
*/ */
public ErasureCodingPolicy getByID(byte id) { public ErasureCodingPolicy getByID(byte id) {
final ErasureCodingPolicyInfo ecpi = getPolicyInfoByID(id);
if (ecpi == null) {
return null;
}
return ecpi.getPolicy();
}
/**
* Get a {@link ErasureCodingPolicyInfo} by policy ID, including system policy
* and user defined policy.
*/
private ErasureCodingPolicyInfo getPolicyInfoByID(final byte id) {
return this.policiesByID.get(id); return this.policiesByID.get(id);
} }
/** /**
* Get a policy by policy ID, including system policy and user defined policy. * Get a {@link ErasureCodingPolicy} by policy name, including system
* policy and user defined policy.
* @return ecPolicy, or null if not found * @return ecPolicy, or null if not found
*/ */
public ErasureCodingPolicy getByName(String name) { public ErasureCodingPolicy getByName(String name) {
final ErasureCodingPolicyInfo ecpi = getPolicyInfoByName(name);
if (ecpi == null) {
return null;
}
return ecpi.getPolicy();
}
/**
* Get a {@link ErasureCodingPolicyInfo} by policy name, including system
* policy and user defined policy.
* @return ecPolicy, or null if not found
*/
private ErasureCodingPolicyInfo getPolicyInfoByName(final String name) {
return this.policiesByName.get(name); return this.policiesByName.get(name);
} }
@ -224,9 +256,6 @@ public final class ErasureCodingPolicyManager {
*/ */
public synchronized ErasureCodingPolicy addPolicy( public synchronized ErasureCodingPolicy addPolicy(
ErasureCodingPolicy policy) { ErasureCodingPolicy policy) {
// Set policy state into DISABLED when adding into Hadoop.
policy.setState(ErasureCodingPolicyState.DISABLED);
if (!CodecUtil.hasCodec(policy.getCodecName())) { if (!CodecUtil.hasCodec(policy.getCodecName())) {
throw new HadoopIllegalArgumentException("Codec name " throw new HadoopIllegalArgumentException("Codec name "
+ policy.getCodecName() + " is not supported"); + policy.getCodecName() + " is not supported");
@ -240,7 +269,8 @@ public final class ErasureCodingPolicyManager {
String assignedNewName = ErasureCodingPolicy.composePolicyName( String assignedNewName = ErasureCodingPolicy.composePolicyName(
policy.getSchema(), policy.getCellSize()); policy.getSchema(), policy.getCellSize());
for (ErasureCodingPolicy p : getPolicies()) { for (ErasureCodingPolicyInfo info : getPolicies()) {
final ErasureCodingPolicy p = info.getPolicy();
if (p.getName().equals(assignedNewName)) { if (p.getName().equals(assignedNewName)) {
LOG.info("The policy name " + assignedNewName + " already exists"); LOG.info("The policy name " + assignedNewName + " already exists");
return p; return p;
@ -261,11 +291,13 @@ public final class ErasureCodingPolicyManager {
ErasureCodeConstants.MAX_POLICY_ID); ErasureCodeConstants.MAX_POLICY_ID);
} }
policy.setName(assignedNewName); policy = new ErasureCodingPolicy(assignedNewName, policy.getSchema(),
policy.setId(getNextAvailablePolicyID()); policy.getCellSize(), getNextAvailablePolicyID());
this.policiesByName.put(policy.getName(), policy); final ErasureCodingPolicyInfo pi = new ErasureCodingPolicyInfo(policy);
this.policiesByID.put(policy.getId(), policy); this.policiesByName.put(policy.getName(), pi);
allPolicies = policiesByName.values().toArray(new ErasureCodingPolicy[0]); this.policiesByID.put(policy.getId(), pi);
allPolicies =
policiesByName.values().toArray(new ErasureCodingPolicyInfo[0]);
return policy; return policy;
} }
@ -283,12 +315,13 @@ public final class ErasureCodingPolicyManager {
* Remove an User erasure coding policy by policyName. * Remove an User erasure coding policy by policyName.
*/ */
public synchronized void removePolicy(String name) { public synchronized void removePolicy(String name) {
ErasureCodingPolicy ecPolicy = policiesByName.get(name); final ErasureCodingPolicyInfo info = policiesByName.get(name);
if (ecPolicy == null) { if (info == null) {
throw new HadoopIllegalArgumentException("The policy name " + throw new HadoopIllegalArgumentException("The policy name " +
name + " does not exist"); name + " does not exist");
} }
final ErasureCodingPolicy ecPolicy = info.getPolicy();
if (ecPolicy.isSystemPolicy()) { if (ecPolicy.isSystemPolicy()) {
throw new HadoopIllegalArgumentException("System erasure coding policy " + throw new HadoopIllegalArgumentException("System erasure coding policy " +
name + " cannot be removed"); name + " cannot be removed");
@ -299,7 +332,7 @@ public final class ErasureCodingPolicyManager {
enabledPolicies = enabledPolicies =
enabledPoliciesByName.values().toArray(new ErasureCodingPolicy[0]); enabledPoliciesByName.values().toArray(new ErasureCodingPolicy[0]);
} }
ecPolicy.setState(ErasureCodingPolicyState.REMOVED); info.setState(ErasureCodingPolicyState.REMOVED);
LOG.info("Remove erasure coding policy " + name); LOG.info("Remove erasure coding policy " + name);
/* /*
@ -310,10 +343,10 @@ public final class ErasureCodingPolicyManager {
@VisibleForTesting @VisibleForTesting
public List<ErasureCodingPolicy> getRemovedPolicies() { public List<ErasureCodingPolicy> getRemovedPolicies() {
ArrayList<ErasureCodingPolicy> removedPolicies = ArrayList<ErasureCodingPolicy> removedPolicies = new ArrayList<>();
new ArrayList<ErasureCodingPolicy>(); for (ErasureCodingPolicyInfo info : policiesByName.values()) {
for (ErasureCodingPolicy ecPolicy : policiesByName.values()) { final ErasureCodingPolicy ecPolicy = info.getPolicy();
if (ecPolicy.isRemoved()) { if (info.isRemoved()) {
removedPolicies.add(ecPolicy); removedPolicies.add(ecPolicy);
} }
} }
@ -324,8 +357,8 @@ public final class ErasureCodingPolicyManager {
* Disable an erasure coding policy by policyName. * Disable an erasure coding policy by policyName.
*/ */
public synchronized void disablePolicy(String name) { public synchronized void disablePolicy(String name) {
ErasureCodingPolicy ecPolicy = policiesByName.get(name); ErasureCodingPolicyInfo info = policiesByName.get(name);
if (ecPolicy == null) { if (info == null) {
throw new HadoopIllegalArgumentException("The policy name " + throw new HadoopIllegalArgumentException("The policy name " +
name + " does not exist"); name + " does not exist");
} }
@ -335,7 +368,7 @@ public final class ErasureCodingPolicyManager {
enabledPolicies = enabledPolicies =
enabledPoliciesByName.values().toArray(new ErasureCodingPolicy[0]); enabledPoliciesByName.values().toArray(new ErasureCodingPolicy[0]);
} }
ecPolicy.setState(ErasureCodingPolicyState.DISABLED); info.setState(ErasureCodingPolicyState.DISABLED);
LOG.info("Disable the erasure coding policy " + name); LOG.info("Disable the erasure coding policy " + name);
} }
@ -343,14 +376,15 @@ public final class ErasureCodingPolicyManager {
* Enable an erasure coding policy by policyName. * Enable an erasure coding policy by policyName.
*/ */
public synchronized void enablePolicy(String name) { public synchronized void enablePolicy(String name) {
ErasureCodingPolicy ecPolicy = policiesByName.get(name); final ErasureCodingPolicyInfo info = policiesByName.get(name);
if (ecPolicy == null) { if (info == null) {
throw new HadoopIllegalArgumentException("The policy name " + throw new HadoopIllegalArgumentException("The policy name " +
name + " does not exist"); name + " does not exist");
} }
final ErasureCodingPolicy ecPolicy = info.getPolicy();
enabledPoliciesByName.put(name, ecPolicy); enabledPoliciesByName.put(name, ecPolicy);
ecPolicy.setState(ErasureCodingPolicyState.ENABLED); info.setState(ErasureCodingPolicyState.ENABLED);
enabledPolicies = enabledPolicies =
enabledPoliciesByName.values().toArray(new ErasureCodingPolicy[0]); enabledPoliciesByName.values().toArray(new ErasureCodingPolicy[0]);
LOG.info("Enable the erasure coding policy " + name); LOG.info("Enable the erasure coding policy " + name);
@ -359,17 +393,19 @@ public final class ErasureCodingPolicyManager {
/** /**
* Load an erasure coding policy into erasure coding manager. * Load an erasure coding policy into erasure coding manager.
*/ */
private void loadPolicy(ErasureCodingPolicy policy) { private void loadPolicy(ErasureCodingPolicyInfo info) {
Preconditions.checkNotNull(info);
final ErasureCodingPolicy policy = info.getPolicy();
if (!CodecUtil.hasCodec(policy.getCodecName()) || if (!CodecUtil.hasCodec(policy.getCodecName()) ||
policy.getCellSize() > maxCellSize) { policy.getCellSize() > maxCellSize) {
// If policy is not supported in current system, set the policy state to // If policy is not supported in current system, set the policy state to
// DISABLED; // DISABLED;
policy.setState(ErasureCodingPolicyState.DISABLED); info.setState(ErasureCodingPolicyState.DISABLED);
} }
this.policiesByName.put(policy.getName(), policy); this.policiesByName.put(policy.getName(), info);
this.policiesByID.put(policy.getId(), policy); this.policiesByID.put(policy.getId(), info);
if (policy.isEnabled()) { if (info.isEnabled()) {
enablePolicy(policy.getName()); enablePolicy(policy.getName());
} }
} }
@ -380,11 +416,13 @@ public final class ErasureCodingPolicyManager {
* @param ecPolicies contains ErasureCodingPolicy list * @param ecPolicies contains ErasureCodingPolicy list
* *
*/ */
public synchronized void loadPolicies(List<ErasureCodingPolicy> ecPolicies) { public synchronized void loadPolicies(
List<ErasureCodingPolicyInfo> ecPolicies) {
Preconditions.checkNotNull(ecPolicies); Preconditions.checkNotNull(ecPolicies);
for (ErasureCodingPolicy p : ecPolicies) { for (ErasureCodingPolicyInfo p : ecPolicies) {
loadPolicy(p); loadPolicy(p);
} }
allPolicies = policiesByName.values().toArray(new ErasureCodingPolicy[0]); allPolicies =
policiesByName.values().toArray(new ErasureCodingPolicyInfo[0]);
} }
} }

View File

@ -26,6 +26,7 @@ import org.apache.hadoop.fs.XAttrSetFlag;
import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.hdfs.XAttrHelper; import org.apache.hadoop.hdfs.XAttrHelper;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.hdfs.server.namenode.FSDirectory.DirOp; import org.apache.hadoop.hdfs.server.namenode.FSDirectory.DirOp;
import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.WritableUtils; import org.apache.hadoop.io.WritableUtils;
@ -366,10 +367,10 @@ final class FSDirErasureCodingOp {
* Get available erasure coding polices. * Get available erasure coding polices.
* *
* @param fsn namespace * @param fsn namespace
* @return {@link ErasureCodingPolicy} array * @return {@link ErasureCodingPolicyInfo} array
*/ */
static ErasureCodingPolicy[] getErasureCodingPolicies(final FSNamesystem fsn) static ErasureCodingPolicyInfo[] getErasureCodingPolicies(
throws IOException { final FSNamesystem fsn) throws IOException {
assert fsn.hasReadLock(); assert fsn.hasReadLock();
return fsn.getErasureCodingPolicyManager().getPolicies(); return fsn.getErasureCodingPolicyManager().getPolicies();
} }

View File

@ -41,7 +41,7 @@ import java.util.Map;
import java.util.Map.Entry; import java.util.Map.Entry;
import java.util.Set; import java.util.Set;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.hdfs.protocolPB.PBHelperClient; import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
import org.apache.hadoop.io.compress.CompressionOutputStream; import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.slf4j.Logger; import org.slf4j.Logger;
@ -380,10 +380,10 @@ public final class FSImageFormatProtobuf {
private void loadErasureCodingSection(InputStream in) private void loadErasureCodingSection(InputStream in)
throws IOException { throws IOException {
ErasureCodingSection s = ErasureCodingSection.parseDelimitedFrom(in); ErasureCodingSection s = ErasureCodingSection.parseDelimitedFrom(in);
List<ErasureCodingPolicy> ecPolicies = Lists List<ErasureCodingPolicyInfo> ecPolicies = Lists
.newArrayListWithCapacity(s.getPoliciesCount()); .newArrayListWithCapacity(s.getPoliciesCount());
for (int i = 0; i < s.getPoliciesCount(); ++i) { for (int i = 0; i < s.getPoliciesCount(); ++i) {
ecPolicies.add(PBHelperClient.convertErasureCodingPolicy( ecPolicies.add(PBHelperClient.convertErasureCodingPolicyInfo(
s.getPolicies(i))); s.getPolicies(i)));
} }
fsn.getErasureCodingPolicyManager().loadPolicies(ecPolicies); fsn.getErasureCodingPolicyManager().loadPolicies(ecPolicies);
@ -586,11 +586,11 @@ public final class FSImageFormatProtobuf {
private void saveErasureCodingSection( private void saveErasureCodingSection(
FileSummary.Builder summary) throws IOException { FileSummary.Builder summary) throws IOException {
final FSNamesystem fsn = context.getSourceNamesystem(); final FSNamesystem fsn = context.getSourceNamesystem();
ErasureCodingPolicy[] ecPolicies = ErasureCodingPolicyInfo[] ecPolicies =
fsn.getErasureCodingPolicyManager().getPolicies(); fsn.getErasureCodingPolicyManager().getPolicies();
ArrayList<ErasureCodingPolicyProto> ecPolicyProtoes = ArrayList<ErasureCodingPolicyProto> ecPolicyProtoes =
new ArrayList<ErasureCodingPolicyProto>(); new ArrayList<ErasureCodingPolicyProto>();
for (ErasureCodingPolicy p : ecPolicies) { for (ErasureCodingPolicyInfo p : ecPolicies) {
ecPolicyProtoes.add(PBHelperClient.convertErasureCodingPolicy(p)); ecPolicyProtoes.add(PBHelperClient.convertErasureCodingPolicy(p));
} }

View File

@ -90,6 +90,7 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_REPLICATION_KEY;
import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import static org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.*; import static org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.*;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.hdfs.protocol.ReplicatedBlockStats; import org.apache.hadoop.hdfs.protocol.ReplicatedBlockStats;
import org.apache.hadoop.hdfs.protocol.ECBlockGroupStats; import org.apache.hadoop.hdfs.protocol.ECBlockGroupStats;
import org.apache.hadoop.hdfs.protocol.OpenFileEntry; import org.apache.hadoop.hdfs.protocol.OpenFileEntry;
@ -7418,16 +7419,16 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean,
} }
/** /**
* Get available erasure coding polices * Get all erasure coding polices.
*/ */
ErasureCodingPolicy[] getErasureCodingPolicies() throws IOException { ErasureCodingPolicyInfo[] getErasureCodingPolicies() throws IOException {
final String operationName = "getErasureCodingPolicies"; final String operationName = "getErasureCodingPolicies";
boolean success = false; boolean success = false;
checkOperation(OperationCategory.READ); checkOperation(OperationCategory.READ);
readLock(); readLock();
try { try {
checkOperation(OperationCategory.READ); checkOperation(OperationCategory.READ);
final ErasureCodingPolicy[] ret = final ErasureCodingPolicyInfo[] ret =
FSDirErasureCodingOp.getErasureCodingPolicies(this); FSDirErasureCodingOp.getErasureCodingPolicies(this);
success = true; success = true;
return ret; return ret;

View File

@ -101,6 +101,7 @@ import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.ECBlockGroupStats; import org.apache.hadoop.hdfs.protocol.ECBlockGroupStats;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.EncryptionZone; import org.apache.hadoop.hdfs.protocol.EncryptionZone;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.FSLimitException; import org.apache.hadoop.hdfs.protocol.FSLimitException;
import org.apache.hadoop.hdfs.protocol.LastBlockWithStatus; import org.apache.hadoop.hdfs.protocol.LastBlockWithStatus;
@ -2303,7 +2304,8 @@ public class NameNodeRpcServer implements NamenodeProtocols {
} }
@Override // ClientProtocol @Override // ClientProtocol
public ErasureCodingPolicy[] getErasureCodingPolicies() throws IOException { public ErasureCodingPolicyInfo[] getErasureCodingPolicies()
throws IOException {
checkNNStartup(); checkNNStartup();
return namesystem.getErasureCodingPolicies(); return namesystem.getErasureCodingPolicies();
} }

View File

@ -23,6 +23,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse; import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.hdfs.util.ECPolicyLoader; import org.apache.hadoop.hdfs.util.ECPolicyLoader;
import org.apache.hadoop.io.erasurecode.ErasureCodeConstants; import org.apache.hadoop.io.erasurecode.ErasureCodeConstants;
import org.apache.hadoop.tools.TableListing; import org.apache.hadoop.tools.TableListing;
@ -111,16 +112,16 @@ public class ECAdmin extends Configured implements Tool {
final DistributedFileSystem dfs = AdminHelper.getDFS(conf); final DistributedFileSystem dfs = AdminHelper.getDFS(conf);
try { try {
Collection<ErasureCodingPolicy> policies = final Collection<ErasureCodingPolicyInfo> policies =
dfs.getAllErasureCodingPolicies(); dfs.getAllErasureCodingPolicies();
if (policies.isEmpty()) { if (policies.isEmpty()) {
System.out.println("There is no erasure coding policies in the " + System.out.println("There is no erasure coding policies in the " +
"cluster."); "cluster.");
} else { } else {
System.out.println("Erasure Coding Policies:"); System.out.println("Erasure Coding Policies:");
for (ErasureCodingPolicy policy : policies) { for (ErasureCodingPolicyInfo policy : policies) {
if (policy != null) { if (policy != null) {
System.out.println(policy.toString()); System.out.println(policy);
} }
} }
} }

View File

@ -118,6 +118,8 @@ import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo.AdminStates; import org.apache.hadoop.hdfs.protocol.DatanodeInfo.AdminStates;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo.DatanodeInfoBuilder; import org.apache.hadoop.hdfs.protocol.DatanodeInfo.DatanodeInfoBuilder;
import org.apache.hadoop.hdfs.protocol.ECBlockGroupStats; import org.apache.hadoop.hdfs.protocol.ECBlockGroupStats;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState;
import org.apache.hadoop.hdfs.protocol.ReplicatedBlockStats; import org.apache.hadoop.hdfs.protocol.ReplicatedBlockStats;
import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies; import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
@ -147,6 +149,7 @@ import org.apache.hadoop.hdfs.server.datanode.DataNodeLayoutVersion;
import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset; import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset;
import org.apache.hadoop.hdfs.server.datanode.TestTransferRbw; import org.apache.hadoop.hdfs.server.datanode.TestTransferRbw;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi; import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
import org.apache.hadoop.hdfs.server.namenode.ErasureCodingPolicyManager;
import org.apache.hadoop.hdfs.server.namenode.FSDirectory; import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
import org.apache.hadoop.hdfs.server.namenode.FSEditLog; import org.apache.hadoop.hdfs.server.namenode.FSEditLog;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem; import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
@ -300,6 +303,19 @@ public class DFSTestUtil {
} }
} }
public static ErasureCodingPolicyState getECPolicyState(
final ErasureCodingPolicy policy) {
final ErasureCodingPolicyInfo[] policyInfos =
ErasureCodingPolicyManager.getInstance().getPolicies();
for (ErasureCodingPolicyInfo pi : policyInfos) {
if (pi.getPolicy().equals(policy)) {
return pi.getState();
}
}
throw new IllegalArgumentException("ErasureCodingPolicy <" + policy
+ "> doesn't exist in the policies:" + Arrays.toString(policyInfos));
}
/** class MyFile contains enough information to recreate the contents of /** class MyFile contains enough information to recreate the contents of
* a single file. * a single file.
*/ */

View File

@ -25,6 +25,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse; import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
import org.apache.hadoop.hdfs.protocol.DirectoryListing; import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies; import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;
import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
@ -49,6 +50,7 @@ import org.junit.rules.Timeout;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import java.io.IOException; import java.io.IOException;
import java.security.PrivilegedExceptionAction; import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.EnumSet; import java.util.EnumSet;
import java.util.List; import java.util.List;
@ -375,10 +377,16 @@ public class TestErasureCodingPolicies {
@Test @Test
public void testGetAllErasureCodingPolicies() throws Exception { public void testGetAllErasureCodingPolicies() throws Exception {
Collection<ErasureCodingPolicy> allECPolicies = fs Collection<ErasureCodingPolicyInfo> allECPolicies = fs
.getAllErasureCodingPolicies(); .getAllErasureCodingPolicies();
assertTrue("All system policies should be enabled", final List<ErasureCodingPolicy> sysPolicies =
allECPolicies.containsAll(SystemErasureCodingPolicies.getPolicies())); new ArrayList<>(SystemErasureCodingPolicies.getPolicies());
for (ErasureCodingPolicyInfo ecpi : allECPolicies) {
if (ecpi.isEnabled()) {
sysPolicies.remove(ecpi.getPolicy());
}
}
assertTrue("All system policies should be enabled", sysPolicies.isEmpty());
// Query after add a new policy // Query after add a new policy
ECSchema toAddSchema = new ECSchema("rs", 5, 2); ECSchema toAddSchema = new ECSchema("rs", 5, 2);
@ -609,11 +617,11 @@ public class TestErasureCodingPolicies {
fs.setErasureCodingPolicy(dirPath, ecPolicy.getName()); fs.setErasureCodingPolicy(dirPath, ecPolicy.getName());
String ecPolicyName = null; String ecPolicyName = null;
Collection<ErasureCodingPolicy> allPolicies = final Collection<ErasureCodingPolicyInfo> allPoliciesInfo =
fs.getAllErasureCodingPolicies(); fs.getAllErasureCodingPolicies();
for (ErasureCodingPolicy policy : allPolicies) { for (ErasureCodingPolicyInfo info : allPoliciesInfo) {
if (!ecPolicy.equals(policy)) { if (!ecPolicy.equals(info.getPolicy())) {
ecPolicyName = policy.getName(); ecPolicyName = info.getPolicy().getName();
break; break;
} }
} }

View File

@ -748,7 +748,8 @@ public class TestFSEditLogLoader {
// check if new policy is reapplied through edit log // check if new policy is reapplied through edit log
ErasureCodingPolicy ecPolicy = ErasureCodingPolicy ecPolicy =
ErasureCodingPolicyManager.getInstance().getByID(newPolicy.getId()); ErasureCodingPolicyManager.getInstance().getByID(newPolicy.getId());
assertEquals(ErasureCodingPolicyState.DISABLED, ecPolicy.getState()); assertEquals(ErasureCodingPolicyState.DISABLED,
DFSTestUtil.getECPolicyState(ecPolicy));
// 2. enable policy // 2. enable policy
fs.enableErasureCodingPolicy(newPolicy.getName()); fs.enableErasureCodingPolicy(newPolicy.getName());
@ -756,7 +757,8 @@ public class TestFSEditLogLoader {
cluster.waitActive(); cluster.waitActive();
ecPolicy = ecPolicy =
ErasureCodingPolicyManager.getInstance().getByID(newPolicy.getId()); ErasureCodingPolicyManager.getInstance().getByID(newPolicy.getId());
assertEquals(ErasureCodingPolicyState.ENABLED, ecPolicy.getState()); assertEquals(ErasureCodingPolicyState.ENABLED,
DFSTestUtil.getECPolicyState(ecPolicy));
// create a new file, use the policy // create a new file, use the policy
final Path dirPath = new Path("/striped"); final Path dirPath = new Path("/striped");
@ -773,7 +775,8 @@ public class TestFSEditLogLoader {
cluster.waitActive(); cluster.waitActive();
ecPolicy = ecPolicy =
ErasureCodingPolicyManager.getInstance().getByID(newPolicy.getId()); ErasureCodingPolicyManager.getInstance().getByID(newPolicy.getId());
assertEquals(ErasureCodingPolicyState.DISABLED, ecPolicy.getState()); assertEquals(ErasureCodingPolicyState.DISABLED,
DFSTestUtil.getECPolicyState(ecPolicy));
// read file // read file
DFSTestUtil.readFileAsBytes(fs, filePath); DFSTestUtil.readFileAsBytes(fs, filePath);
@ -783,7 +786,8 @@ public class TestFSEditLogLoader {
cluster.waitActive(); cluster.waitActive();
ecPolicy = ecPolicy =
ErasureCodingPolicyManager.getInstance().getByID(newPolicy.getId()); ErasureCodingPolicyManager.getInstance().getByID(newPolicy.getId());
assertEquals(ErasureCodingPolicyState.REMOVED, ecPolicy.getState()); assertEquals(ErasureCodingPolicyState.REMOVED,
DFSTestUtil.getECPolicyState(ecPolicy));
// read file // read file
DFSTestUtil.readFileAsBytes(fs, filePath); DFSTestUtil.readFileAsBytes(fs, filePath);

View File

@ -870,7 +870,8 @@ public class TestFSImage {
newPolicy, ecPolicy); newPolicy, ecPolicy);
assertEquals( assertEquals(
"Newly added erasure coding policy should be of disabled state", "Newly added erasure coding policy should be of disabled state",
ErasureCodingPolicyState.DISABLED, ecPolicy.getState()); ErasureCodingPolicyState.DISABLED,
DFSTestUtil.getECPolicyState(ecPolicy));
// Test enable/disable/remove user customized erasure coding policy // Test enable/disable/remove user customized erasure coding policy
testChangeErasureCodingPolicyState(cluster, blockSize, newPolicy); testChangeErasureCodingPolicyState(cluster, blockSize, newPolicy);
@ -880,14 +881,12 @@ public class TestFSImage {
} }
} }
private void testChangeErasureCodingPolicyState(MiniDFSCluster cluster, private void testChangeErasureCodingPolicyState(MiniDFSCluster cluster,
int blockSize, ErasureCodingPolicy targetPolicy) throws IOException { int blockSize, ErasureCodingPolicy targetPolicy) throws IOException {
DistributedFileSystem fs = cluster.getFileSystem(); DistributedFileSystem fs = cluster.getFileSystem();
// 1. Enable an erasure coding policy // 1. Enable an erasure coding policy
fs.enableErasureCodingPolicy(targetPolicy.getName()); fs.enableErasureCodingPolicy(targetPolicy.getName());
targetPolicy.setState(ErasureCodingPolicyState.ENABLED);
// Create file, using the new policy // Create file, using the new policy
final Path dirPath = new Path("/striped"); final Path dirPath = new Path("/striped");
final Path filePath = new Path(dirPath, "file"); final Path filePath = new Path(dirPath, "file");
@ -910,13 +909,13 @@ public class TestFSImage {
assertEquals("The erasure coding policy is not found", assertEquals("The erasure coding policy is not found",
targetPolicy, ecPolicy); targetPolicy, ecPolicy);
assertEquals("The erasure coding policy should be of enabled state", assertEquals("The erasure coding policy should be of enabled state",
ErasureCodingPolicyState.ENABLED, ecPolicy.getState()); ErasureCodingPolicyState.ENABLED,
DFSTestUtil.getECPolicyState(ecPolicy));
// Read file regardless of the erasure coding policy state // Read file regardless of the erasure coding policy state
DFSTestUtil.readFileAsBytes(fs, filePath); DFSTestUtil.readFileAsBytes(fs, filePath);
// 2. Disable an erasure coding policy // 2. Disable an erasure coding policy
fs.disableErasureCodingPolicy(ecPolicy.getName()); fs.disableErasureCodingPolicy(ecPolicy.getName());
targetPolicy.setState(ErasureCodingPolicyState.DISABLED);
// Save namespace and restart NameNode // Save namespace and restart NameNode
fs.setSafeMode(SafeModeAction.SAFEMODE_ENTER); fs.setSafeMode(SafeModeAction.SAFEMODE_ENTER);
fs.saveNamespace(); fs.saveNamespace();
@ -929,7 +928,8 @@ public class TestFSImage {
assertEquals("The erasure coding policy is not found", assertEquals("The erasure coding policy is not found",
targetPolicy, ecPolicy); targetPolicy, ecPolicy);
assertEquals("The erasure coding policy should be of disabled state", assertEquals("The erasure coding policy should be of disabled state",
ErasureCodingPolicyState.DISABLED, ecPolicy.getState()); ErasureCodingPolicyState.DISABLED,
DFSTestUtil.getECPolicyState(ecPolicy));
// Read file regardless of the erasure coding policy state // Read file regardless of the erasure coding policy state
DFSTestUtil.readFileAsBytes(fs, filePath); DFSTestUtil.readFileAsBytes(fs, filePath);
@ -944,7 +944,7 @@ public class TestFSImage {
return; return;
} }
targetPolicy.setState(ErasureCodingPolicyState.REMOVED); fs.removeErasureCodingPolicy(ecPolicy.getName());
// Save namespace and restart NameNode // Save namespace and restart NameNode
fs.setSafeMode(SafeModeAction.SAFEMODE_ENTER); fs.setSafeMode(SafeModeAction.SAFEMODE_ENTER);
fs.saveNamespace(); fs.saveNamespace();
@ -957,7 +957,8 @@ public class TestFSImage {
assertEquals("The erasure coding policy saved into and loaded from " + assertEquals("The erasure coding policy saved into and loaded from " +
"fsImage is bad", targetPolicy, ecPolicy); "fsImage is bad", targetPolicy, ecPolicy);
assertEquals("The erasure coding policy should be of removed state", assertEquals("The erasure coding policy should be of removed state",
ErasureCodingPolicyState.REMOVED, ecPolicy.getState()); ErasureCodingPolicyState.REMOVED,
DFSTestUtil.getECPolicyState(ecPolicy));
// Read file regardless of the erasure coding policy state // Read file regardless of the erasure coding policy state
DFSTestUtil.readFileAsBytes(fs, filePath); DFSTestUtil.readFileAsBytes(fs, filePath);
fs.delete(dirPath, true); fs.delete(dirPath, true);

View File

@ -41,6 +41,7 @@ import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy; import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.io.SequenceFile.CompressionType;
@ -925,11 +926,11 @@ public class TestDFSIO implements Tool {
private boolean checkErasureCodePolicy(String erasureCodePolicyName, private boolean checkErasureCodePolicy(String erasureCodePolicyName,
FileSystem fs, TestType testType) throws IOException { FileSystem fs, TestType testType) throws IOException {
Collection<ErasureCodingPolicy> list = Collection<ErasureCodingPolicyInfo> list =
((DistributedFileSystem) fs).getAllErasureCodingPolicies(); ((DistributedFileSystem) fs).getAllErasureCodingPolicies();
boolean isValid = false; boolean isValid = false;
for (ErasureCodingPolicy ec : list) { for (ErasureCodingPolicyInfo ec : list) {
if (erasureCodePolicyName.equals(ec.getName())) { if (erasureCodePolicyName.equals(ec.getPolicy().getName())) {
isValid = true; isValid = true;
break; break;
} }
@ -939,8 +940,8 @@ public class TestDFSIO implements Tool {
System.out.println("Invalid erasure code policy: " + System.out.println("Invalid erasure code policy: " +
erasureCodePolicyName); erasureCodePolicyName);
System.out.println("Current supported erasure code policy list: "); System.out.println("Current supported erasure code policy list: ");
for (ErasureCodingPolicy ec : list) { for (ErasureCodingPolicyInfo ec : list) {
System.out.println(ec.getName()); System.out.println(ec.getPolicy().getName());
} }
return false; return false;
} }
@ -999,9 +1000,10 @@ public class TestDFSIO implements Tool {
getConf().get(ERASURE_CODE_POLICY_NAME_KEY, null); getConf().get(ERASURE_CODE_POLICY_NAME_KEY, null);
fs.mkdirs(path); fs.mkdirs(path);
Collection<ErasureCodingPolicy> list = Collection<ErasureCodingPolicyInfo> list =
((DistributedFileSystem) fs).getAllErasureCodingPolicies(); ((DistributedFileSystem) fs).getAllErasureCodingPolicies();
for (ErasureCodingPolicy ec : list) { for (ErasureCodingPolicyInfo info : list) {
final ErasureCodingPolicy ec = info.getPolicy();
if (erasureCodePolicyName.equals(ec.getName())) { if (erasureCodePolicyName.equals(ec.getName())) {
((DistributedFileSystem) fs).setErasureCodingPolicy(path, ec.getName()); ((DistributedFileSystem) fs).setErasureCodingPolicy(path, ec.getName());
LOG.info("enable erasureCodePolicy = " + erasureCodePolicyName + LOG.info("enable erasureCodePolicy = " + erasureCodePolicyName +