HDFS-12885. Add visibility/stability annotations. Contributed by Chris Douglas

Authored by Virajith Jalaparti on 2017-12-05 09:51:09 -08:00; committed by Chris Douglas
parent b634053c4d
commit a027055dd2
30 changed files with 119 additions and 1 deletion
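The pattern is identical in every file touched by this commit: import Hadoop's org.apache.hadoop.classification.InterfaceAudience and InterfaceStability annotations, then tag the class or interface with its intended audience (Public or Private) and its stability contract (Evolving or Unstable). A minimal sketch of the annotated form — the class name below is hypothetical, for illustration only:

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

// Private: intended for use within Hadoop itself, not by downstream projects.
// Unstable: the API may change incompatibly between releases.
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class ExampleProvidedStorageClass {
  // The annotations change no behavior; they only document which
  // compatibility guarantees apply to this class.
}

Classes meant for downstream use (e.g., BlockAliasMap, FileRegion, BlockResolver) are tagged @InterfaceAudience.Public instead, signalling that external projects may depend on them even though the interface is still subject to change.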


@@ -16,6 +16,8 @@
*/
package org.apache.hadoop.hdfs.protocol;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.Path;
import javax.annotation.Nonnull;
@@ -25,6 +27,8 @@
* ProvidedStorageLocation is a location in an external storage system
* containing the data for a block (~Replica).
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class ProvidedStorageLocation {
private final Path path;
private final long offset;


@@ -17,6 +17,7 @@
package org.apache.hadoop.hdfs.protocolPB;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hdfs.protocol.proto.AliasMapProtocolProtos;
import org.apache.hadoop.ipc.ProtocolInfo;
@@ -30,6 +31,7 @@
"org.apache.hadoop.hdfs.server.aliasmap.AliasMapProtocol",
protocolVersion = 1)
@InterfaceAudience.Private
@InterfaceStability.Unstable
public interface AliasMapProtocolPB extends
AliasMapProtocolProtos.AliasMapProtocolService.BlockingInterface {
}


@@ -18,6 +18,8 @@
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.ProvidedStorageLocation;
import org.apache.hadoop.hdfs.protocol.proto.AliasMapProtocolProtos.KeyValueProto;
@@ -40,6 +42,8 @@
* AliasMapProtocolServerSideTranslatorPB is responsible for translating RPC
* calls and forwarding them to the internal InMemoryAliasMap.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class AliasMapProtocolServerSideTranslatorPB
implements AliasMapProtocolPB {


@@ -17,6 +17,8 @@
package org.apache.hadoop.hdfs.protocolPB;
import com.google.protobuf.ServiceException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.ProvidedStorageLocation;
@@ -47,6 +49,8 @@
* {@link InMemoryAliasMapProtocol} interface to the RPC server implementing
* {@link AliasMapProtocolPB}.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class InMemoryAliasMapProtocolClientSideTranslatorPB
implements InMemoryAliasMapProtocol {


@@ -19,6 +19,8 @@
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -47,6 +49,8 @@
* InMemoryAliasMap is an implementation of the InMemoryAliasMapProtocol for
* use with LevelDB.
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
public class InMemoryAliasMap implements InMemoryAliasMapProtocol,
Configurable {


@@ -16,6 +16,8 @@
*/
package org.apache.hadoop.hdfs.server.aliasmap;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.ProvidedStorageLocation;
import org.apache.hadoop.hdfs.server.common.FileRegion;
@@ -30,6 +32,8 @@
* provided blocks for an in-memory implementation of the
* {@link org.apache.hadoop.hdfs.server.common.blockaliasmap.BlockAliasMap}.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public interface InMemoryAliasMapProtocol {
/**


@@ -17,6 +17,8 @@
package org.apache.hadoop.hdfs.server.aliasmap;
import com.google.protobuf.BlockingService;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.ipc.ProtobufRpcEngine;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
@@ -42,6 +44,8 @@
* InMemoryLevelDBAliasMapServer is the entry point from the Namenode into
* the {@link InMemoryAliasMap}.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class InMemoryLevelDBAliasMapServer implements InMemoryAliasMapProtocol,
Configurable, Closeable {


@@ -30,6 +30,8 @@
import java.util.concurrent.ConcurrentSkipListMap;
import com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -57,6 +59,8 @@
* This class allows us to manage and multiplex between storages local to
* datanodes, and provided storage.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class ProvidedStorageMap {
private static final Logger LOG =


@@ -17,11 +17,15 @@
*/
package org.apache.hadoop.hdfs.server.common;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hdfs.protocol.Block;
/**
* Interface used to load provided blocks.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public interface BlockAlias {
Block getBlock();


@@ -18,6 +18,8 @@
package org.apache.hadoop.hdfs.server.common;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
@@ -27,6 +29,8 @@
* This class is used to represent provided blocks that are file regions,
* i.e., can be described using (path, offset, length).
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
public class FileRegion implements BlockAlias {
private final Pair<Block, ProvidedStorageLocation> pair;


@@ -22,12 +22,16 @@
import java.util.Iterator;
import java.util.Optional;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.server.common.BlockAlias;
/**
* An abstract class used to read and write block maps for provided blocks.
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
public abstract class BlockAliasMap<T extends BlockAlias> {
/**


@@ -16,6 +16,8 @@
*/
package org.apache.hadoop.hdfs.server.common.blockaliasmap.impl;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.protocol.Block;
@@ -37,6 +39,8 @@
* This is used by the Datanode and fs2img to store and retrieve FileRegions
* based on the given Block.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class InMemoryLevelDBAliasMapClient extends BlockAliasMap<FileRegion>
implements Configurable {


@@ -24,6 +24,8 @@
import java.util.Map;
import java.util.Optional;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.iq80.leveldb.DB;
import org.iq80.leveldb.DBIterator;
import static org.fusesource.leveldbjni.JniDBFactory.factory;
@@ -45,6 +47,8 @@
/**
* A LevelDB based implementation of {@link BlockAliasMap}.
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
public class LevelDBFileRegionAliasMap
extends BlockAliasMap<FileRegion> implements Configurable {


@@ -34,6 +34,8 @@
import java.util.NoSuchElementException;
import java.util.Optional;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.LocalFileSystem;
@@ -56,6 +58,8 @@
* This class is used for block maps stored as text files,
* with a specified delimiter.
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
public class TextFileRegionAliasMap
extends BlockAliasMap<FileRegion> implements Configurable {


@@ -19,6 +19,8 @@
import java.net.URI;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -30,6 +32,8 @@
/**
* This class is used for provided replicas that are finalized.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class FinalizedProvidedReplica extends ProvidedReplica {
public FinalizedProvidedReplica(long blockId, URI fileURI,


@@ -25,6 +25,8 @@
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.io.input.BoundedInputStream;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
@@ -42,6 +44,8 @@
/**
* This abstract class is used as a base class for provided replicas.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public abstract class ProvidedReplica extends ReplicaInfo {
public static final Logger LOG =


@@ -28,6 +28,7 @@
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -65,7 +66,8 @@
/**
* This class is used to create provided volumes.
*/
public class ProvidedVolumeImpl extends FsVolumeImpl {
@InterfaceAudience.Private
class ProvidedVolumeImpl extends FsVolumeImpl {
/**
* Get a suffix of the full path, excluding the given prefix.


@@ -20,6 +20,8 @@
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto;
@@ -27,6 +29,8 @@
* Given an external reference, create a sequence of blocks and associated
* metadata.
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
public abstract class BlockResolver {
protected BlockProto buildBlock(long blockId, long bytes) {


@@ -23,6 +23,8 @@
import java.util.Collections;
import java.util.ConcurrentModificationException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -31,6 +33,8 @@
/**
* Traversal of an external FileSystem.
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
public class FSTreeWalk extends TreeWalk {
private final Path root;


@@ -26,6 +26,8 @@
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -37,6 +39,8 @@
/**
* Create FSImage from an external namespace.
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
public class FileSystemImage implements Tool {
private Configuration conf;


@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hdfs.server.namenode;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
@@ -24,6 +26,8 @@
* Resolver mapping all files to a configurable, uniform blocksize
* and replication.
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
public class FixedBlockMultiReplicaResolver extends FixedBlockResolver {
public static final String REPLICATION =


@@ -21,6 +21,8 @@
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
@@ -28,6 +30,8 @@
/**
* Resolver mapping all files to a configurable, uniform blocksize.
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
public class FixedBlockResolver extends BlockResolver implements Configurable {
public static final String BLOCKSIZE =


@@ -17,6 +17,9 @@
*/
package org.apache.hadoop.hdfs.server.namenode;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import java.util.HashSet;
import java.util.Set;
@@ -24,6 +27,8 @@
* Dynamically assign ids to users/groups as they appear in the external
* filesystem.
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
public class FsUGIResolver extends UGIResolver {
private int id;


@@ -38,6 +38,8 @@
import com.google.common.base.Charsets;
import com.google.protobuf.CodedOutputStream;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -72,6 +74,8 @@
* a valid FSImage/NN storage.
*/
// TODO: generalize to types beyond FileRegion
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class ImageWriter implements Closeable {
private static final int ONDISK_VERSION = 1;


@@ -22,6 +22,8 @@
import java.util.NoSuchElementException;
import java.util.Optional;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.server.common.FileRegion;
import org.apache.hadoop.hdfs.server.common.blockaliasmap.BlockAliasMap;
@@ -29,6 +31,8 @@
/**
* Null sink for region information emitted from FSImage.
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
public class NullBlockAliasMap extends BlockAliasMap<FileRegion> {
@Override


@@ -19,6 +19,8 @@
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
@@ -27,6 +29,8 @@
/**
* Map all owners/groups in external system to a single user in FSImage.
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
public class SingleUGIResolver extends UGIResolver implements Configurable {
public static final String UID = "hdfs.image.writer.ugi.single.uid";


@@ -21,6 +21,8 @@
import com.google.protobuf.ByteString;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto;
@@ -37,6 +39,8 @@
* Traversal cursor in external filesystem.
* TODO: generalize, move FS/FileRegion to FSTreePath
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
public class TreePath {
private long id = -1;
private final long parentId;


@@ -17,6 +17,9 @@
*/
package org.apache.hadoop.hdfs.server.namenode;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Iterator;
@@ -24,6 +27,8 @@
/**
* Traversal yielding a hierarchical sequence of paths.
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
public abstract class TreeWalk implements Iterable<TreePath> {
/**


@@ -21,6 +21,8 @@
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.permission.FsPermission;
@@ -28,6 +30,8 @@
* Pluggable class for mapping ownership and permissions from an external
* store to an FSImage.
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
public abstract class UGIResolver {
static final int USER_STRID_OFFSET = 40;


@@ -24,6 +24,8 @@
import java.util.Map;
import java.util.Random;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.LocatedFileStatus;
@@ -32,6 +34,8 @@
/**
* Random, repeatable hierarchy generator.
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class RandomTreeWalk extends TreeWalk {
private final Path root;