HADOOP-6677. InterfaceAudience.LimitedPrivate should take a string not an enum.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@938563 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Thomas White 2010-04-27 16:53:40 +00:00
parent 2bec54de58
commit 9ad633f011
11 changed files with 17 additions and 33 deletions

View File

@@ -229,6 +229,9 @@ Trunk (unreleased changes)
HADOOP-6667. RPC.waitForProxy should retry through NoRouteToHostException.
(Todd Lipcon via tomwhite)
HADOOP-6677. InterfaceAudience.LimitedPrivate should take a string not an
enum. (tomwhite)
OPTIMIZATIONS
HADOOP-6467. Improve the performance on HarFileSystem.listStatus(..).

View File

@@ -29,13 +29,11 @@ public class InterfaceAudience {
@Documented public @interface Public {};
/**
* Intended only for the project(s) specified in the annotation
* Intended only for the project(s) specified in the annotation.
* For example, "Common", "HDFS", "MapReduce", "ZooKeeper", "HBase".
*/
@Documented public @interface LimitedPrivate {
public enum Project {COMMON, AVRO, CHUKWA, HBASE, HDFS,
HIVE, MAPREDUCE, PIG, ZOOKEEPER};
Project[] value();
String[] value();
};
/**

View File

@@ -15,10 +15,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@InterfaceAudience.LimitedPrivate({COMMON, AVRO, CHUKWA, HBASE, HDFS, HIVE,
MAPREDUCE, PIG, ZOOKEEPER})
@InterfaceAudience.LimitedPrivate({"Common", "Avro", "Chukwa", "HBase", "HDFS",
"Hive", "MapReduce", "Pig", "ZooKeeper"})
package org.apache.hadoop.classification.tools;
import static org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate.Project.*;
import org.apache.hadoop.classification.InterfaceAudience;

View File

@@ -39,7 +39,6 @@
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Options.CreateOpts;
import org.apache.hadoop.fs.Options.Rename;
@@ -1135,7 +1134,7 @@ public Path next(final AbstractFileSystem fs, final Path p)
* RuntimeExceptions:
* @throws InvalidPathException If path <code>f</code> is invalid
*/
@InterfaceAudience.LimitedPrivate({Project.HDFS, Project.MAPREDUCE})
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Evolving
public BlockLocation[] getFileBlockLocations(final Path f, final long start,
final long len) throws AccessControlException, FileNotFoundException,

View File

@@ -21,12 +21,11 @@
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate.*;
/**
* Thrown when a symbolic link is encountered in a path.
*/
@InterfaceAudience.LimitedPrivate({Project.HDFS})
@InterfaceAudience.LimitedPrivate({"HDFS"})
public class UnresolvedLinkException extends IOException {
private static final long serialVersionUID = 1L;

View File

@@ -18,8 +18,6 @@
package org.apache.hadoop.security;
import static org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate.Project.MAPREDUCE;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
@@ -42,7 +40,7 @@
* A class that provides the facilities of reading and writing
* secret keys and Tokens.
*/
@InterfaceAudience.LimitedPrivate({MAPREDUCE})
@InterfaceAudience.LimitedPrivate({"MapReduce"})
public class TokenStorage implements Writable {
private Map<Text, byte[]> secretKeysMap = new HashMap<Text, byte[]>();

View File

@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.security;
import static org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate.Project.HDFS;
import static org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate.Project.MAPREDUCE;
import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION;
import java.io.IOException;
@@ -542,7 +540,7 @@ private void setUserGroups(String user, String[] groups) {
* @param userGroups the names of the groups that the user belongs to
* @return a fake user for running unit tests
*/
@InterfaceAudience.LimitedPrivate({HDFS, MAPREDUCE})
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
public static UserGroupInformation createUserForTesting(String user,
String[] userGroups) {
ensureInitialized();
@@ -568,7 +566,7 @@ public static UserGroupInformation createUserForTesting(String user,
* the names of the groups that the user belongs to
* @return a fake user for running unit tests
*/
@InterfaceAudience.LimitedPrivate( { HDFS, MAPREDUCE })
@InterfaceAudience.LimitedPrivate( { "HDFS", "MapReduce" })
public static UserGroupInformation createProxyUserForTesting(String user,
UserGroupInformation realUser, String[] userGroups) {
ensureInitialized();

View File

@@ -19,8 +19,6 @@
package org.apache.hadoop.security.token.delegation;
import org.apache.hadoop.classification.InterfaceAudience;
import static org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate.Project.HDFS;
import static org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate.Project.MAPREDUCE;
import java.io.DataInput;
import java.io.DataOutput;
@@ -31,7 +29,7 @@
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.TokenIdentifier;
@InterfaceAudience.LimitedPrivate({HDFS, MAPREDUCE})
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
public abstract class AbstractDelegationTokenIdentifier
extends TokenIdentifier {
private static final byte VERSION = 0;

View File

@@ -21,9 +21,6 @@
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.Text;
import static org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate.Project.HDFS;
import static org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate.Project.MAPREDUCE;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
@@ -42,7 +39,7 @@
import org.apache.hadoop.util.Daemon;
import org.apache.hadoop.util.StringUtils;
@InterfaceAudience.LimitedPrivate({HDFS, MAPREDUCE})
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
public abstract
class AbstractDelegationTokenSecretManager<TokenIdent
extends AbstractDelegationTokenIdentifier>

View File

@@ -25,14 +25,12 @@
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.security.token.TokenSelector;
import static org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate.Project.HDFS;
import static org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate.Project.MAPREDUCE;
/**
* Look through tokens to find the first delegation token that matches the
* service and return it.
*/
@InterfaceAudience.LimitedPrivate({HDFS, MAPREDUCE})
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
public
class AbstractDelegationTokenSelector<TokenIdent
extends AbstractDelegationTokenIdentifier>

View File

@@ -27,13 +27,11 @@
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
import static org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate.Project.HDFS;
import static org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate.Project.MAPREDUCE;
/**
* Key used for generating and verifying delegation tokens
*/
@InterfaceAudience.LimitedPrivate({HDFS, MAPREDUCE})
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
public class DelegationKey implements Writable {
private int keyId;
private long expiryDate;