HDFS-8632. Add InterfaceAudience annotation to the erasure coding classes. Contributed by Rakesh R.

This commit is contained in:
Andrew Wang 2015-10-07 18:12:26 -07:00
parent fde729feeb
commit 66e2cfa1a0
46 changed files with 108 additions and 1 deletion

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.io.erasurecode.rawcoder.*;
@ -24,6 +25,7 @@ import org.apache.hadoop.io.erasurecode.rawcoder.*;
/**
* A codec & coder utility to help create raw coders conveniently.
*/
@InterfaceAudience.Private
public final class CodecUtil {
private CodecUtil() { }

View File

@ -17,12 +17,15 @@
*/
package org.apache.hadoop.io.erasurecode;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* A wrapper of block level data source/output that {@link ECChunk}s can be
* extracted from. For HDFS, it can be an HDFS block (250MB). Note it only cares
* about erasure coding specific logic thus avoids coupling with any HDFS block
* details. We can have something like HdfsBlock extend it.
*/
@InterfaceAudience.Private
public class ECBlock {
private boolean isParity;

View File

@ -17,9 +17,12 @@
*/
package org.apache.hadoop.io.erasurecode;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* A group of blocks or {@link ECBlock} incurred in an erasure coding task.
*/
@InterfaceAudience.Private
public class ECBlockGroup {
private ECBlock[] dataBlocks;

View File

@ -19,9 +19,12 @@ package org.apache.hadoop.io.erasurecode;
import java.nio.ByteBuffer;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* A wrapper for ByteBuffer or bytes array for an erasure code chunk.
*/
@InterfaceAudience.Private
public class ECChunk {
private ByteBuffer chunkBuffer;

View File

@ -21,9 +21,14 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/**
* Erasure coding schema that keeps track of the relevant information.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public final class ECSchema {
public static final String NUM_DATA_UNITS_KEY = "numDataUnits";
public static final String NUM_PARITY_UNITS_KEY = "numParityUnits";

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode.codec;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.io.erasurecode.ECSchema;
import org.apache.hadoop.io.erasurecode.grouper.BlockGrouper;
@ -24,6 +25,7 @@ import org.apache.hadoop.io.erasurecode.grouper.BlockGrouper;
/**
* Abstract Erasure Codec that implements {@link ErasureCodec}.
*/
@InterfaceAudience.Private
public abstract class AbstractErasureCodec extends Configured
implements ErasureCodec {

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode.codec;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.io.erasurecode.coder.ErasureCoder;
import org.apache.hadoop.io.erasurecode.grouper.BlockGrouper;
@ -26,6 +27,7 @@ import org.apache.hadoop.io.erasurecode.grouper.BlockGrouper;
* Currently it cares only block grouper and erasure coder. In future we may
* add more aspects here to make the behaviors customizable.
*/
@InterfaceAudience.Private
public interface ErasureCodec extends Configurable {
/**

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode.codec;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.ECSchema;
import org.apache.hadoop.io.erasurecode.coder.ErasureCoder;
import org.apache.hadoop.io.erasurecode.coder.RSErasureDecoder;
@ -25,6 +26,7 @@ import org.apache.hadoop.io.erasurecode.coder.RSErasureEncoder;
/**
* A Reed-Solomon erasure codec.
*/
@InterfaceAudience.Private
public class RSErasureCodec extends AbstractErasureCodec {
public RSErasureCodec(ECSchema schema) {

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode.codec;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.ECSchema;
import org.apache.hadoop.io.erasurecode.coder.ErasureCoder;
import org.apache.hadoop.io.erasurecode.coder.XORErasureDecoder;
@ -25,6 +26,7 @@ import org.apache.hadoop.io.erasurecode.coder.XORErasureEncoder;
/**
* A XOR erasure codec.
*/
@InterfaceAudience.Private
public class XORErasureCodec extends AbstractErasureCodec {
public XORErasureCodec(ECSchema schema) {

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode.coder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.io.erasurecode.ECSchema;
@ -25,6 +26,7 @@ import org.apache.hadoop.io.erasurecode.ECSchema;
*
* It implements the {@link ErasureCoder} interface.
*/
@InterfaceAudience.Private
public abstract class AbstractErasureCoder
extends Configured implements ErasureCoder {

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode.coder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.ECBlock;
/**
@ -25,6 +26,7 @@ import org.apache.hadoop.io.erasurecode.ECBlock;
*
* It implements {@link ErasureEncodingStep}.
*/
@InterfaceAudience.Private
public abstract class AbstractErasureCodingStep implements ErasureCodingStep {
private ECBlock[] inputBlocks;

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode.coder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.ECBlock;
import org.apache.hadoop.io.erasurecode.ECBlockGroup;
import org.apache.hadoop.io.erasurecode.ECSchema;
@ -26,6 +27,7 @@ import org.apache.hadoop.io.erasurecode.ECSchema;
*
* It implements the {@link ErasureCoder} interface.
*/
@InterfaceAudience.Private
public abstract class AbstractErasureDecoder extends AbstractErasureCoder {
public AbstractErasureDecoder(int numDataUnits, int numParityUnits) {

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode.coder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.ECBlock;
import org.apache.hadoop.io.erasurecode.ECBlockGroup;
import org.apache.hadoop.io.erasurecode.ECSchema;
@ -26,6 +27,7 @@ import org.apache.hadoop.io.erasurecode.ECSchema;
*
* It implements the {@link ErasureCoder} interface.
*/
@InterfaceAudience.Private
public abstract class AbstractErasureEncoder extends AbstractErasureCoder {
public AbstractErasureEncoder(int numDataUnits, int numParityUnits) {

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode.coder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.io.erasurecode.ECBlockGroup;
@ -34,6 +35,7 @@ import org.apache.hadoop.io.erasurecode.ECBlockGroup;
* of multiple coding steps.
*
*/
@InterfaceAudience.Private
public interface ErasureCoder extends Configurable {
/**

View File

@ -17,12 +17,14 @@
*/
package org.apache.hadoop.io.erasurecode.coder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.ECBlock;
import org.apache.hadoop.io.erasurecode.ECChunk;
/**
* Erasure coding step that's involved in encoding/decoding of a block group.
*/
@InterfaceAudience.Private
public interface ErasureCodingStep {
/**

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode.coder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.ECBlock;
import org.apache.hadoop.io.erasurecode.ECChunk;
import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
@ -25,6 +26,7 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
* Erasure decoding step, a wrapper of all the necessary information to perform
* a decoding step involved in the whole process of decoding a block group.
*/
@InterfaceAudience.Private
public class ErasureDecodingStep extends AbstractErasureCodingStep {
private int[] erasedIndexes;
private RawErasureDecoder rawDecoder;

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode.coder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.ECBlock;
import org.apache.hadoop.io.erasurecode.ECChunk;
import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
@ -25,6 +26,7 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
* Erasure encoding step, a wrapper of all the necessary information to perform
* an encoding step involved in the whole process of encoding a block group.
*/
@InterfaceAudience.Private
public class ErasureEncodingStep extends AbstractErasureCodingStep {
private RawErasureEncoder rawEncoder;

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode.coder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.CodecUtil;
import org.apache.hadoop.io.erasurecode.ECBlock;
import org.apache.hadoop.io.erasurecode.ECBlockGroup;
@ -28,6 +29,7 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
*
* It implements {@link ErasureCoder}.
*/
@InterfaceAudience.Private
public class RSErasureDecoder extends AbstractErasureDecoder {
private RawErasureDecoder rsRawDecoder;

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode.coder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.CodecUtil;
import org.apache.hadoop.io.erasurecode.ECBlock;
import org.apache.hadoop.io.erasurecode.ECBlockGroup;
@ -28,6 +29,7 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
*
* It implements {@link ErasureCoder}.
*/
@InterfaceAudience.Private
public class RSErasureEncoder extends AbstractErasureEncoder {
private RawErasureEncoder rawEncoder;

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode.coder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.CodecUtil;
import org.apache.hadoop.io.erasurecode.ECBlock;
import org.apache.hadoop.io.erasurecode.ECBlockGroup;
@ -28,6 +29,7 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
*
* It implements {@link ErasureCoder}.
*/
@InterfaceAudience.Private
public class XORErasureDecoder extends AbstractErasureDecoder {
public XORErasureDecoder(int numDataUnits, int numParityUnits) {

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode.coder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.CodecUtil;
import org.apache.hadoop.io.erasurecode.ECBlock;
import org.apache.hadoop.io.erasurecode.ECBlockGroup;
@ -28,6 +29,7 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
*
* It implements {@link ErasureCoder}.
*/
@InterfaceAudience.Private
public class XORErasureEncoder extends AbstractErasureEncoder {
public XORErasureEncoder(int numDataUnits, int numParityUnits) {

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode.grouper;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.ECBlock;
import org.apache.hadoop.io.erasurecode.ECBlockGroup;
import org.apache.hadoop.io.erasurecode.ECSchema;
@ -25,6 +26,7 @@ import org.apache.hadoop.io.erasurecode.ECSchema;
* As part of a codec, to handle how to form a block group for encoding
* and provide instructions on how to recover erased blocks from a block group
*/
@InterfaceAudience.Private
public class BlockGrouper {
private ECSchema schema;

View File

@ -18,16 +18,17 @@
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configured;
import java.nio.ByteBuffer;
import java.util.Arrays;
/**
* A common class of basic facilities to be shared by encoder and decoder
*
* It implements the {@link RawErasureCoder} interface.
*/
@InterfaceAudience.Private
public abstract class AbstractRawErasureCoder
extends Configured implements RawErasureCoder {

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.ECChunk;
import java.nio.ByteBuffer;
@ -28,6 +29,7 @@ import java.util.Arrays;
*
* It implements the {@link RawErasureDecoder} interface.
*/
@InterfaceAudience.Private
public abstract class AbstractRawErasureDecoder extends AbstractRawErasureCoder
implements RawErasureDecoder {

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.ECChunk;
import java.nio.ByteBuffer;
@ -27,6 +28,7 @@ import java.nio.ByteBuffer;
*
* It implements the {@link RawErasureEncoder} interface.
*/
@InterfaceAudience.Private
public abstract class AbstractRawErasureEncoder extends AbstractRawErasureCoder
implements RawErasureEncoder {

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil;
import java.nio.ByteBuffer;
@ -31,6 +32,7 @@ import java.nio.ByteBuffer;
* unnecessarily due to the underlying implementation limit in GF. This will be
* addressed in HADOOP-11871.
*/
@InterfaceAudience.Private
public class RSRawDecoder extends AbstractRawErasureDecoder {
// To describe and calculate the needed Vandermonde matrix
private int[] errSignature;

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil;
import java.nio.ByteBuffer;
@ -26,6 +27,7 @@ import java.nio.ByteBuffer;
isn't available in some environments. Please always use native implementations
* when possible.
*/
@InterfaceAudience.Private
public class RSRawEncoder extends AbstractRawErasureEncoder {
private int[] generatingPolynomial;

View File

@ -17,9 +17,12 @@
*/
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* A raw coder factory for raw Reed-Solomon coder in Java.
*/
@InterfaceAudience.Private
public class RSRawErasureCoderFactory implements RawErasureCoderFactory {
@Override

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configurable;
/**
@ -33,6 +34,7 @@ import org.apache.hadoop.conf.Configurable;
* low level constructs, since it only takes care of the math calculation with
* a group of byte buffers.
*/
@InterfaceAudience.Private
public interface RawErasureCoder extends Configurable {
/**

View File

@ -17,11 +17,14 @@
*/
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* Raw erasure coder factory that can be used to create raw encoder and decoder.
* It helps in configuration since only one factory class is needed to be
* configured.
*/
@InterfaceAudience.Private
public interface RawErasureCoderFactory {
/**

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.ECChunk;
import java.nio.ByteBuffer;
@ -28,6 +29,7 @@ import java.nio.ByteBuffer;
*
* It extends the {@link RawErasureCoder} interface.
*/
@InterfaceAudience.Private
public interface RawErasureDecoder extends RawErasureCoder {
/**

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.ECChunk;
import java.nio.ByteBuffer;
@ -28,6 +29,7 @@ import java.nio.ByteBuffer;
*
* It extends the {@link RawErasureCoder} interface.
*/
@InterfaceAudience.Private
public interface RawErasureEncoder extends RawErasureCoder {
/**

View File

@ -19,6 +19,8 @@ package org.apache.hadoop.io.erasurecode.rawcoder;
import java.nio.ByteBuffer;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* A raw decoder in XOR code scheme in pure Java, adapted from HDFS-RAID.
*
@ -26,6 +28,7 @@ import java.nio.ByteBuffer;
* used in advanced codes, like HitchHiker and LRC, though itself is rarely
* deployed independently.
*/
@InterfaceAudience.Private
public class XORRawDecoder extends AbstractRawErasureDecoder {
public XORRawDecoder(int numDataUnits, int numParityUnits) {

View File

@ -19,6 +19,8 @@ package org.apache.hadoop.io.erasurecode.rawcoder;
import java.nio.ByteBuffer;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* A raw encoder in XOR code scheme in pure Java, adapted from HDFS-RAID.
*
@ -26,6 +28,7 @@ import java.nio.ByteBuffer;
* used in advanced codes, like HitchHiker and LRC, though itself is rarely
* deployed independently.
*/
@InterfaceAudience.Private
public class XORRawEncoder extends AbstractRawErasureEncoder {
public XORRawEncoder(int numDataUnits, int numParityUnits) {

View File

@ -17,9 +17,12 @@
*/
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* A raw coder factory for raw XOR coder.
*/
@InterfaceAudience.Private
public class XORRawErasureCoderFactory implements RawErasureCoderFactory {
@Override

View File

@ -17,12 +17,14 @@
*/
package org.apache.hadoop.io.erasurecode.rawcoder.util;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.ECChunk;
/**
* A dump utility class for debugging data erasure coding/decoding issues. It is
* not recommended for use in production code.
*/
@InterfaceAudience.Private
public final class DumpUtil {
private static final String HEX_CHARS_STR = "0123456789ABCDEF";
private static final char[] HEX_CHARS = HEX_CHARS_STR.toCharArray();

View File

@ -21,10 +21,13 @@ import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* Implementation of Galois field arithmetic with 2^p elements. The input must
* be unsigned integers. It's ported from HDFS-RAID, slightly adapted.
*/
@InterfaceAudience.Private
public class GaloisField {
// Field size 256 is good for byte based system

View File

@ -17,9 +17,12 @@
*/
package org.apache.hadoop.io.erasurecode.rawcoder.util;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* Some utilities for Reed-Solomon coding.
*/
@InterfaceAudience.Private
public class RSUtil {
// We always use the byte system (with symbol size 8, field size 256,

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.hdfs;
import com.google.common.base.Preconditions;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.ChecksumException;
import org.apache.hadoop.fs.ReadOption;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@ -61,6 +62,7 @@ import java.util.concurrent.Future;
/**
* DFSStripedInputStream reads from striped block groups
*/
@InterfaceAudience.Private
public class DFSStripedInputStream extends DFSInputStream {
private static class ReaderRetryPolicy {

View File

@ -21,6 +21,7 @@ package org.apache.hadoop.hdfs;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.DFSStripedOutputStream.Coordinator;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@ -41,6 +42,7 @@ import com.google.common.annotations.VisibleForTesting;
* sends an rpc call to the namenode and then populates the result for the
* other streamers.
*/
@InterfaceAudience.Private
public class StripedDataStreamer extends DataStreamer {
private final Coordinator coordinator;
private final int index;

View File

@ -17,11 +17,15 @@
*/
package org.apache.hadoop.hdfs.protocol;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.erasurecode.ECSchema;
/**
* A policy about how to write/read/code an erasure coding file.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public final class ErasureCodingPolicy {
private final String name;

View File

@ -173,6 +173,9 @@ Trunk (Unreleased)
HDFS-9182. Cleanup the findbugs and other issues after HDFS EC merged to trunk.
(umamahesh)
HDFS-8632. Add InterfaceAudience annotation to the erasure coding classes.
(Rakesh R via wang)
OPTIMIZATIONS
BUG FIXES

View File

@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hdfs.server.blockmanagement;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.BlockUCState;
import org.apache.hadoop.hdfs.util.StripedBlockUtil;
@ -35,6 +36,7 @@ import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
* array's size can be larger than (m+k). Thus currently we use an extra byte
* array to record the block index for each triplet.
*/
@InterfaceAudience.Private
public class BlockInfoStriped extends BlockInfo {
private final ErasureCodingPolicy ecPolicy;
/**

View File

@ -17,11 +17,13 @@
*/
package org.apache.hadoop.hdfs.server.blockmanagement;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.net.NetworkTopology;
import org.apache.hadoop.util.ReflectionUtils;
@InterfaceAudience.Private
public class BlockPlacementPolicies{
private final BlockPlacementPolicy replicationPolicy;

View File

@ -45,6 +45,7 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.logging.Log;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.BlockReader;
@ -86,6 +87,7 @@ import static org.apache.hadoop.hdfs.util.StripedBlockUtil.convertIndex4Decode;
* response. BPOfferService delegates the work to this class for handling EC
* commands.
*/
@InterfaceAudience.Private
public final class ErasureCodingWorker {
private static final Log LOG = DataNode.LOG;

View File

@ -16,6 +16,7 @@
*/
package org.apache.hadoop.hdfs.tools.erasurecode;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.fs.shell.CommandFactory;
@ -25,6 +26,7 @@ import org.apache.hadoop.util.ToolRunner;
/**
* CLI for the erasure code encoding operations.
*/
@InterfaceAudience.Private
public class ECCli extends FsShell {
private final static String usagePrefix =