HDFS-8632. Add InterfaceAudience annotation to the erasure coding classes. Contributed by Rakesh R.

Andrew Wang 2015-10-07 18:12:26 -07:00
parent fde729feeb
commit 66e2cfa1a0
46 changed files with 108 additions and 1 deletion
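For readers unfamiliar with the annotations being added: the change follows one pattern throughout. Below is a minimal sketch of that pattern (the class names are hypothetical, chosen only for illustration); internal coder machinery is marked Private, while the user-facing ECSchema and ErasureCodingPolicy are marked Public and Evolving.

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

// Internal implementation detail: excluded from the public Hadoop API surface.
@InterfaceAudience.Private
final class ExampleRawCoderUtil {
  private ExampleRawCoderUtil() { }
}

// User-facing type: public API whose shape may still change between releases.
@InterfaceAudience.Public
@InterfaceStability.Evolving
class ExampleCodingPolicy {
  // fields and accessors omitted for brevity
}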

org/apache/hadoop/io/erasurecode/CodecUtil.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.erasurecode.rawcoder.*;
@@ -24,6 +25,7 @@
 /**
  * A codec & coder utility to help create raw coders conveniently.
  */
+@InterfaceAudience.Private
 public final class CodecUtil {
   private CodecUtil() { }

org/apache/hadoop/io/erasurecode/ECBlock.java

@@ -17,12 +17,15 @@
  */
 package org.apache.hadoop.io.erasurecode;

+import org.apache.hadoop.classification.InterfaceAudience;

 /**
  * A wrapper of block level data source/output that {@link ECChunk}s can be
  * extracted from. For HDFS, it can be an HDFS block (250MB). Note it only cares
  * about erasure coding specific logic thus avoids coupling with any HDFS block
  * details. We can have something like HdfsBlock extend it.
  */
+@InterfaceAudience.Private
 public class ECBlock {
   private boolean isParity;

org/apache/hadoop/io/erasurecode/ECBlockGroup.java

@@ -17,9 +17,12 @@
  */
 package org.apache.hadoop.io.erasurecode;

+import org.apache.hadoop.classification.InterfaceAudience;

 /**
  * A group of blocks or {@link ECBlock} incurred in an erasure coding task.
  */
+@InterfaceAudience.Private
 public class ECBlockGroup {
   private ECBlock[] dataBlocks;

org/apache/hadoop/io/erasurecode/ECChunk.java

@@ -19,9 +19,12 @@
 import java.nio.ByteBuffer;

+import org.apache.hadoop.classification.InterfaceAudience;

 /**
  * A wrapper for ByteBuffer or bytes array for an erasure code chunk.
  */
+@InterfaceAudience.Private
 public class ECChunk {
   private ByteBuffer chunkBuffer;

org/apache/hadoop/io/erasurecode/ECSchema.java

@@ -21,9 +21,14 @@
 import java.util.HashMap;
 import java.util.Map;

+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;

 /**
  * Erasure coding schema to housekeeper relevant information.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public final class ECSchema {
   public static final String NUM_DATA_UNITS_KEY = "numDataUnits";
   public static final String NUM_PARITY_UNITS_KEY = "numParityUnits";

org/apache/hadoop/io/erasurecode/codec/AbstractErasureCodec.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.codec;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.erasurecode.ECSchema;
 import org.apache.hadoop.io.erasurecode.grouper.BlockGrouper;
@@ -24,6 +25,7 @@
 /**
  * Abstract Erasure Codec that implements {@link ErasureCodec}.
  */
+@InterfaceAudience.Private
 public abstract class AbstractErasureCodec extends Configured
     implements ErasureCodec {

org/apache/hadoop/io/erasurecode/codec/ErasureCodec.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.codec;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.io.erasurecode.coder.ErasureCoder;
 import org.apache.hadoop.io.erasurecode.grouper.BlockGrouper;
@@ -26,6 +27,7 @@
  * Currently it cares only block grouper and erasure coder. In future we may
  * add more aspects here to make the behaviors customizable.
  */
+@InterfaceAudience.Private
 public interface ErasureCodec extends Configurable {

   /**

org/apache/hadoop/io/erasurecode/codec/RSErasureCodec.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.codec;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.ECSchema;
 import org.apache.hadoop.io.erasurecode.coder.ErasureCoder;
 import org.apache.hadoop.io.erasurecode.coder.RSErasureDecoder;
@@ -25,6 +26,7 @@
 /**
  * A Reed-Solomon erasure codec.
  */
+@InterfaceAudience.Private
 public class RSErasureCodec extends AbstractErasureCodec {
   public RSErasureCodec(ECSchema schema) {

org/apache/hadoop/io/erasurecode/codec/XORErasureCodec.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.codec;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.ECSchema;
 import org.apache.hadoop.io.erasurecode.coder.ErasureCoder;
 import org.apache.hadoop.io.erasurecode.coder.XORErasureDecoder;
@@ -25,6 +26,7 @@
 /**
  * A XOR erasure codec.
  */
+@InterfaceAudience.Private
 public class XORErasureCodec extends AbstractErasureCodec {
   public XORErasureCodec(ECSchema schema) {

org/apache/hadoop/io/erasurecode/coder/AbstractErasureCoder.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.coder;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.erasurecode.ECSchema;
@@ -25,6 +26,7 @@
  *
  * It implements the {@link ErasureCoder} interface.
  */
+@InterfaceAudience.Private
 public abstract class AbstractErasureCoder
     extends Configured implements ErasureCoder {

org/apache/hadoop/io/erasurecode/coder/AbstractErasureCodingStep.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.coder;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.ECBlock;

 /**
@@ -25,6 +26,7 @@
  *
  * It implements {@link ErasureEncodingStep}.
  */
+@InterfaceAudience.Private
 public abstract class AbstractErasureCodingStep implements ErasureCodingStep {
   private ECBlock[] inputBlocks;

org/apache/hadoop/io/erasurecode/coder/AbstractErasureDecoder.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.coder;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
 import org.apache.hadoop.io.erasurecode.ECSchema;
@@ -26,6 +27,7 @@
  *
  * It implements the {@link ErasureCoder} interface.
  */
+@InterfaceAudience.Private
 public abstract class AbstractErasureDecoder extends AbstractErasureCoder {
   public AbstractErasureDecoder(int numDataUnits, int numParityUnits) {

org/apache/hadoop/io/erasurecode/coder/AbstractErasureEncoder.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.coder;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
 import org.apache.hadoop.io.erasurecode.ECSchema;
@@ -26,6 +27,7 @@
  *
  * It implements the {@link ErasureCoder} interface.
  */
+@InterfaceAudience.Private
 public abstract class AbstractErasureEncoder extends AbstractErasureCoder {
   public AbstractErasureEncoder(int numDataUnits, int numParityUnits) {

org/apache/hadoop/io/erasurecode/coder/ErasureCoder.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.coder;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
@@ -34,6 +35,7 @@
  * of multiple coding steps.
  *
  */
+@InterfaceAudience.Private
 public interface ErasureCoder extends Configurable {

   /**

org/apache/hadoop/io/erasurecode/coder/ErasureCodingStep.java

@@ -17,12 +17,14 @@
  */
 package org.apache.hadoop.io.erasurecode.coder;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECChunk;

 /**
  * Erasure coding step that's involved in encoding/decoding of a block group.
  */
+@InterfaceAudience.Private
 public interface ErasureCodingStep {

   /**

org/apache/hadoop/io/erasurecode/coder/ErasureDecodingStep.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.coder;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECChunk;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
@@ -25,6 +26,7 @@
  * Erasure decoding step, a wrapper of all the necessary information to perform
  * a decoding step involved in the whole process of decoding a block group.
  */
+@InterfaceAudience.Private
 public class ErasureDecodingStep extends AbstractErasureCodingStep {
   private int[] erasedIndexes;
   private RawErasureDecoder rawDecoder;

org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.coder;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECChunk;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
@@ -25,6 +26,7 @@
  * Erasure encoding step, a wrapper of all the necessary information to perform
  * an encoding step involved in the whole process of encoding a block group.
  */
+@InterfaceAudience.Private
 public class ErasureEncodingStep extends AbstractErasureCodingStep {
   private RawErasureEncoder rawEncoder;

org/apache/hadoop/io/erasurecode/coder/RSErasureDecoder.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.coder;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.CodecUtil;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
@@ -28,6 +29,7 @@
  *
  * It implements {@link ErasureCoder}.
  */
+@InterfaceAudience.Private
 public class RSErasureDecoder extends AbstractErasureDecoder {
   private RawErasureDecoder rsRawDecoder;

org/apache/hadoop/io/erasurecode/coder/RSErasureEncoder.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.coder;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.CodecUtil;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
@@ -28,6 +29,7 @@
  *
  * It implements {@link ErasureCoder}.
  */
+@InterfaceAudience.Private
 public class RSErasureEncoder extends AbstractErasureEncoder {
   private RawErasureEncoder rawEncoder;

org/apache/hadoop/io/erasurecode/coder/XORErasureDecoder.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.coder;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.CodecUtil;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
@@ -28,6 +29,7 @@
  *
  * It implements {@link ErasureCoder}.
  */
+@InterfaceAudience.Private
 public class XORErasureDecoder extends AbstractErasureDecoder {
   public XORErasureDecoder(int numDataUnits, int numParityUnits) {

org/apache/hadoop/io/erasurecode/coder/XORErasureEncoder.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.coder;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.CodecUtil;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
@@ -28,6 +29,7 @@
  *
  * It implements {@link ErasureCoder}.
  */
+@InterfaceAudience.Private
 public class XORErasureEncoder extends AbstractErasureEncoder {
   public XORErasureEncoder(int numDataUnits, int numParityUnits) {

org/apache/hadoop/io/erasurecode/grouper/BlockGrouper.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.grouper;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
 import org.apache.hadoop.io.erasurecode.ECSchema;
@@ -25,6 +26,7 @@
  * As part of a codec, to handle how to form a block group for encoding
  * and provide instructions on how to recover erased blocks from a block group
  */
+@InterfaceAudience.Private
 public class BlockGrouper {
   private ECSchema schema;

org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureCoder.java

@@ -18,16 +18,17 @@
 package org.apache.hadoop.io.erasurecode.rawcoder;

 import org.apache.hadoop.HadoopIllegalArgumentException;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configured;

 import java.nio.ByteBuffer;
-import java.util.Arrays;

 /**
  * A common class of basic facilities to be shared by encoder and decoder
  *
  * It implements the {@link RawErasureCoder} interface.
  */
+@InterfaceAudience.Private
 public abstract class AbstractRawErasureCoder
     extends Configured implements RawErasureCoder {

org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureDecoder.java

@@ -18,6 +18,7 @@
 package org.apache.hadoop.io.erasurecode.rawcoder;

 import org.apache.hadoop.HadoopIllegalArgumentException;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.ECChunk;

 import java.nio.ByteBuffer;
@@ -28,6 +29,7 @@
  *
  * It implements the {@link RawErasureDecoder} interface.
  */
+@InterfaceAudience.Private
 public abstract class AbstractRawErasureDecoder extends AbstractRawErasureCoder
     implements RawErasureDecoder {

org/apache/hadoop/io/erasurecode/rawcoder/AbstractRawErasureEncoder.java

@@ -18,6 +18,7 @@
 package org.apache.hadoop.io.erasurecode.rawcoder;

 import org.apache.hadoop.HadoopIllegalArgumentException;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.ECChunk;

 import java.nio.ByteBuffer;
@@ -27,6 +28,7 @@
  *
  * It implements the {@link RawErasureEncoder} interface.
  */
+@InterfaceAudience.Private
 public abstract class AbstractRawErasureEncoder extends AbstractRawErasureCoder
     implements RawErasureEncoder {

org/apache/hadoop/io/erasurecode/rawcoder/RSRawDecoder.java

@@ -18,6 +18,7 @@
 package org.apache.hadoop.io.erasurecode.rawcoder;

 import org.apache.hadoop.HadoopIllegalArgumentException;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil;

 import java.nio.ByteBuffer;
@@ -31,6 +32,7 @@
  * unnecessarily due to the underlying implementation limit in GF. This will be
  * addressed in HADOOP-11871.
  */
+@InterfaceAudience.Private
 public class RSRawDecoder extends AbstractRawErasureDecoder {
   // To describe and calculate the needed Vandermonde matrix
   private int[] errSignature;

org/apache/hadoop/io/erasurecode/rawcoder/RSRawEncoder.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil;

 import java.nio.ByteBuffer;
@@ -26,6 +27,7 @@
  * isn't available in some environment. Please always use native implementations
  * when possible.
  */
+@InterfaceAudience.Private
 public class RSRawEncoder extends AbstractRawErasureEncoder {
   private int[] generatingPolynomial;

org/apache/hadoop/io/erasurecode/rawcoder/RSRawErasureCoderFactory.java

@@ -17,9 +17,12 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;

+import org.apache.hadoop.classification.InterfaceAudience;

 /**
  * A raw coder factory for raw Reed-Solomon coder in Java.
  */
+@InterfaceAudience.Private
 public class RSRawErasureCoderFactory implements RawErasureCoderFactory {
   @Override

org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoder.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configurable;

 /**
@@ -33,6 +34,7 @@
  * low level constructs, since it only takes care of the math calculation with
  * a group of byte buffers.
  */
+@InterfaceAudience.Private
 public interface RawErasureCoder extends Configurable {

   /**

org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderFactory.java

@@ -17,11 +17,14 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;

+import org.apache.hadoop.classification.InterfaceAudience;

 /**
  * Raw erasure coder factory that can be used to create raw encoder and decoder.
  * It helps in configuration since only one factory class is needed to be
  * configured.
  */
+@InterfaceAudience.Private
 public interface RawErasureCoderFactory {

   /**

org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.ECChunk;

 import java.nio.ByteBuffer;
@@ -28,6 +29,7 @@
  *
  * It extends the {@link RawErasureCoder} interface.
  */
+@InterfaceAudience.Private
 public interface RawErasureDecoder extends RawErasureCoder {

   /**

org/apache/hadoop/io/erasurecode/rawcoder/RawErasureEncoder.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.ECChunk;

 import java.nio.ByteBuffer;
@@ -28,6 +29,7 @@
  *
  * It extends the {@link RawErasureCoder} interface.
  */
+@InterfaceAudience.Private
 public interface RawErasureEncoder extends RawErasureCoder {

   /**

org/apache/hadoop/io/erasurecode/rawcoder/XORRawDecoder.java

@@ -19,6 +19,8 @@
 import java.nio.ByteBuffer;

+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * A raw decoder in XOR code scheme in pure Java, adapted from HDFS-RAID.
  *
@@ -26,6 +28,7 @@
  * used in advanced codes, like HitchHiker and LRC, though itself is rarely
  * deployed independently.
  */
+@InterfaceAudience.Private
 public class XORRawDecoder extends AbstractRawErasureDecoder {
   public XORRawDecoder(int numDataUnits, int numParityUnits) {

org/apache/hadoop/io/erasurecode/rawcoder/XORRawEncoder.java

@@ -19,6 +19,8 @@
 import java.nio.ByteBuffer;

+import org.apache.hadoop.classification.InterfaceAudience;
+
 /**
  * A raw encoder in XOR code scheme in pure Java, adapted from HDFS-RAID.
  *
@@ -26,6 +28,7 @@
  * used in advanced codes, like HitchHiker and LRC, though itself is rarely
  * deployed independently.
  */
+@InterfaceAudience.Private
 public class XORRawEncoder extends AbstractRawErasureEncoder {
   public XORRawEncoder(int numDataUnits, int numParityUnits) {

org/apache/hadoop/io/erasurecode/rawcoder/XORRawErasureCoderFactory.java

@@ -17,9 +17,12 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder;

+import org.apache.hadoop.classification.InterfaceAudience;

 /**
  * A raw coder factory for raw XOR coder.
  */
+@InterfaceAudience.Private
 public class XORRawErasureCoderFactory implements RawErasureCoderFactory {
   @Override

org/apache/hadoop/io/erasurecode/rawcoder/util/DumpUtil.java

@@ -17,12 +17,14 @@
  */
 package org.apache.hadoop.io.erasurecode.rawcoder.util;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.ECChunk;

 /**
  * A dump utility class for debugging data erasure coding/decoding issues. Don't
  * suggest they are used in runtime production codes.
  */
+@InterfaceAudience.Private
 public final class DumpUtil {
   private static final String HEX_CHARS_STR = "0123456789ABCDEF";
   private static final char[] HEX_CHARS = HEX_CHARS_STR.toCharArray();

org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java

@@ -21,10 +21,13 @@
 import java.util.HashMap;
 import java.util.Map;

+import org.apache.hadoop.classification.InterfaceAudience;

 /**
  * Implementation of Galois field arithmetic with 2^p elements. The input must
  * be unsigned integers. It's ported from HDFS-RAID, slightly adapted.
  */
+@InterfaceAudience.Private
 public class GaloisField {
   // Field size 256 is good for byte based system
View File

@ -17,9 +17,12 @@
*/ */
package org.apache.hadoop.io.erasurecode.rawcoder.util; package org.apache.hadoop.io.erasurecode.rawcoder.util;
import org.apache.hadoop.classification.InterfaceAudience;
/** /**
* Some utilities for Reed-Solomon coding. * Some utilities for Reed-Solomon coding.
*/ */
@InterfaceAudience.Private
public class RSUtil { public class RSUtil {
// We always use the byte system (with symbol size 8, field size 256, // We always use the byte system (with symbol size 8, field size 256,

org/apache/hadoop/hdfs/DFSStripedInputStream.java

@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs;

 import com.google.common.base.Preconditions;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.ChecksumException;
 import org.apache.hadoop.fs.ReadOption;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@@ -61,6 +62,7 @@
 /**
  * DFSStripedInputStream reads from striped block groups
  */
+@InterfaceAudience.Private
 public class DFSStripedInputStream extends DFSInputStream {
   private static class ReaderRetryPolicy {

org/apache/hadoop/hdfs/StripedDataStreamer.java

@@ -21,6 +21,7 @@
 import java.io.IOException;
 import java.util.concurrent.atomic.AtomicReference;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.DFSStripedOutputStream.Coordinator;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@@ -41,6 +42,7 @@
  * sends an rpc call to the namenode and then populates the result for the
  * other streamers.
  */
+@InterfaceAudience.Private
 public class StripedDataStreamer extends DataStreamer {
   private final Coordinator coordinator;
   private final int index;

org/apache/hadoop/hdfs/protocol/ErasureCodingPolicy.java

@@ -17,11 +17,15 @@
  */
 package org.apache.hadoop.hdfs.protocol;

+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.erasurecode.ECSchema;

 /**
  * A policy about how to write/read/code an erasure coding file.
  */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public final class ErasureCodingPolicy {
   private final String name;

CHANGES.txt (hadoop-hdfs)

@@ -173,6 +173,9 @@ Trunk (Unreleased)
     HDFS-9182. Cleanup the findbugs and other issues after HDFS EC merged to trunk.
     (umamahesh)

+    HDFS-8632. Add InterfaceAudience annotation to the erasure coding classes.
+    (Rakesh R via wang)
+
   OPTIMIZATIONS

   BUG FIXES

org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoStriped.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.blockmanagement;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.BlockUCState;
 import org.apache.hadoop.hdfs.util.StripedBlockUtil;
@@ -35,6 +36,7 @@
  * array's size can be larger than (m+k). Thus currently we use an extra byte
  * array to record the block index for each triplet.
  */
+@InterfaceAudience.Private
 public class BlockInfoStriped extends BlockInfo {
   private final ErasureCodingPolicy ecPolicy;
   /**

org/apache/hadoop/hdfs/server/blockmanagement/BlockPlacementPolicies.java

@@ -17,11 +17,13 @@
  */
 package org.apache.hadoop.hdfs.server.blockmanagement;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.net.NetworkTopology;
 import org.apache.hadoop.util.ReflectionUtils;

+@InterfaceAudience.Private
 public class BlockPlacementPolicies{
   private final BlockPlacementPolicy replicationPolicy;

ErasureCodingWorker.java

@@ -45,6 +45,7 @@
 import java.util.concurrent.atomic.AtomicInteger;

 import org.apache.commons.logging.Log;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.BlockReader;
@@ -86,6 +87,7 @@
  * response. BPOfferService delegates the work to this class for handling EC
  * commands.
  */
+@InterfaceAudience.Private
 public final class ErasureCodingWorker {
   private static final Log LOG = DataNode.LOG;

org/apache/hadoop/hdfs/tools/erasurecode/ECCli.java

@@ -16,6 +16,7 @@
  */
 package org.apache.hadoop.hdfs.tools.erasurecode;

+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.fs.shell.CommandFactory;
@@ -25,6 +26,7 @@
 /**
  * CLI for the erasure code encoding operations.
  */
+@InterfaceAudience.Private
 public class ECCli extends FsShell {
   private final static String usagePrefix =