MAPREDUCE-4148. MapReduce should not have a compile-time dependency on HDFS.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1337199 13f79535-47bb-0310-9956-ffa450edef68
Parent: 5dbd09ee76
Commit: aea890f7d2
@@ -18,10 +18,15 @@
 
 package org.apache.hadoop.security.token;
 
+import com.google.common.collect.Maps;
+
+import java.io.ByteArrayInputStream;
 import java.io.DataInput;
+import java.io.DataInputStream;
 import java.io.DataOutput;
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.Map;
 import java.util.ServiceLoader;
 
 import org.apache.commons.codec.binary.Base64;
@@ -37,6 +42,7 @@
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.util.ReflectionUtils;
 
 /**
  * The client-side form of the token.
@@ -45,6 +51,9 @@
 @InterfaceStability.Evolving
 public class Token<T extends TokenIdentifier> implements Writable {
   public static final Log LOG = LogFactory.getLog(Token.class);
+
+  private static Map<Text, Class<? extends TokenIdentifier>> tokenKindMap;
+
   private byte[] identifier;
   private byte[] password;
   private Text kind;
@@ -100,13 +109,49 @@ public Token(Token<T> other) {
   }
 
   /**
-   * Get the token identifier
-   * @return the token identifier
+   * Get the token identifier's byte representation
+   * @return the token identifier's byte representation
    */
   public byte[] getIdentifier() {
     return identifier;
   }
+
+  private static synchronized Class<? extends TokenIdentifier>
+      getClassForIdentifier(Text kind) {
+    if (tokenKindMap == null) {
+      tokenKindMap = Maps.newHashMap();
+      for (TokenIdentifier id : ServiceLoader.load(TokenIdentifier.class)) {
+        tokenKindMap.put(id.getKind(), id.getClass());
+      }
+    }
+    Class<? extends TokenIdentifier> cls = tokenKindMap.get(kind);
+    if (cls == null) {
+      LOG.warn("Cannot find class for token kind " + kind);
+      return null;
+    }
+    return cls;
+  }
+
+  /**
+   * Get the token identifier object, or null if it could not be constructed
+   * (because the class could not be loaded, for example).
+   * @return the token identifier, or null
+   * @throws IOException
+   */
+  @SuppressWarnings("unchecked")
+  public T decodeIdentifier() throws IOException {
+    Class<? extends TokenIdentifier> cls = getClassForIdentifier(getKind());
+    if (cls == null) {
+      return null;
+    }
+    TokenIdentifier tokenIdentifier = ReflectionUtils.newInstance(cls, null);
+    ByteArrayInputStream buf = new ByteArrayInputStream(identifier);
+    DataInputStream in = new DataInputStream(buf);
+    tokenIdentifier.readFields(in);
+    in.close();
+    return (T) tokenIdentifier;
+  }
 
   /**
    * Get the token password/secret
    * @return the token password/secret
@@ -260,16 +305,31 @@ private static void addBinaryBuffer(StringBuilder buffer, byte[] bytes) {
       buffer.append(num);
     }
   }
 
+  private void identifierToString(StringBuilder buffer) {
+    T id = null;
+    try {
+      id = decodeIdentifier();
+    } catch (IOException e) {
+      // handle in the finally block
+    } finally {
+      if (id != null) {
+        buffer.append("(").append(id).append(")");
+      } else {
+        addBinaryBuffer(buffer, identifier);
+      }
+    }
+  }
+
   @Override
   public String toString() {
     StringBuilder buffer = new StringBuilder();
-    buffer.append("Ident: ");
-    addBinaryBuffer(buffer, identifier);
-    buffer.append(", Kind: ");
+    buffer.append("Kind: ");
     buffer.append(kind.toString());
     buffer.append(", Service: ");
     buffer.append(service.toString());
+    buffer.append(", Ident: ");
+    identifierToString(buffer);
     return buffer.toString();
   }
 
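The heart of the change is the decodeIdentifier() path above: Token now resolves the concrete TokenIdentifier class for a given kind through java.util.ServiceLoader instead of compiling against HDFS classes. Below is a minimal standalone sketch of that lookup; the class name TokenKindLookup and its main method are illustrative only and are not part of the patch.

import java.util.HashMap;
import java.util.Map;
import java.util.ServiceLoader;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.token.TokenIdentifier;

// Illustrative sketch of the ServiceLoader lookup used by Token#getClassForIdentifier.
public class TokenKindLookup {
  public static void main(String[] args) {
    Map<Text, Class<? extends TokenIdentifier>> kinds =
        new HashMap<Text, Class<? extends TokenIdentifier>>();
    // ServiceLoader reads every META-INF/services/org.apache.hadoop.security.token.TokenIdentifier
    // file on the classpath (such as the ones added by this change), instantiates each listed
    // implementation, and lets us map its kind to its class.
    for (TokenIdentifier id : ServiceLoader.load(TokenIdentifier.class)) {
      kinds.put(id.getKind(), id.getClass());
    }
    System.out.println("Registered token kinds: " + kinds.keySet());
  }
}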
@@ -18,11 +18,15 @@
 
 package org.apache.hadoop.security.token;
 
+import static junit.framework.Assert.assertEquals;
+
 import java.io.*;
 import java.util.Arrays;
 
 import org.apache.hadoop.io.*;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
+import org.apache.hadoop.security.token.delegation.TestDelegationToken.TestDelegationTokenIdentifier;
+import org.apache.hadoop.security.token.delegation.TestDelegationToken.TestDelegationTokenSecretManager;
 
 import junit.framework.TestCase;
 
@@ -94,5 +98,20 @@ public static void testEncodeWritable() throws Exception {
       checkUrlSafe(encode);
     }
   }
+
+  public void testDecodeIdentifier() throws IOException {
+    TestDelegationTokenSecretManager secretManager =
+      new TestDelegationTokenSecretManager(0, 0, 0, 0);
+    secretManager.startThreads();
+    TestDelegationTokenIdentifier id = new TestDelegationTokenIdentifier(
+        new Text("owner"), new Text("renewer"), new Text("realUser"));
+
+    Token<TestDelegationTokenIdentifier> token =
+      new Token<TestDelegationTokenIdentifier>(id, secretManager);
+    TokenIdentifier idCopy = token.decodeIdentifier();
+
+    assertNotSame(id, idCopy);
+    assertEquals(id, idCopy);
+  }
 
 }
@@ -0,0 +1,2 @@
+org.apache.hadoop.ipc.TestSaslRPC$TestTokenIdentifier
+org.apache.hadoop.security.token.delegation.TestDelegationToken$TestDelegationTokenIdentifier
@@ -0,0 +1,2 @@
+org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier
+org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier
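The new META-INF/services files above (and the MapReduce and YARN ones further down) are how each module registers its token identifiers for the ServiceLoader lookup. As a hedged illustration, a downstream project could make its own token kind decodable the same way by shipping such a file listing an implementation like the following; the package com.example and class ExampleTokenIdentifier are hypothetical and not part of this commit.

package com.example;  // hypothetical package, not part of this commit

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.TokenIdentifier;

// Once listed in META-INF/services/org.apache.hadoop.security.token.TokenIdentifier,
// this class is discoverable by ServiceLoader and usable by Token#decodeIdentifier.
public class ExampleTokenIdentifier extends TokenIdentifier {
  public static final Text KIND = new Text("EXAMPLE_TOKEN");

  private Text owner = new Text();

  // ServiceLoader requires a public no-argument constructor.
  public ExampleTokenIdentifier() {
  }

  @Override
  public Text getKind() {
    return KIND;
  }

  @Override
  public UserGroupInformation getUser() {
    if (owner.toString().isEmpty()) {
      return null;
    }
    return UserGroupInformation.createRemoteUser(owner.toString());
  }

  @Override
  public void write(DataOutput out) throws IOException {
    owner.write(out);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    owner.readFields(in);
  }
}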
@@ -287,6 +287,9 @@ Release 2.0.0 - UNRELEASED
     MAPREDUCE-4231. Update RAID to use the new BlockCollection interface.
     (szetszwo)
 
+    MAPREDUCE-4148. MapReduce should not have a compile-time dependency on
+    HDFS. (tomwhite)
+
 Release 0.23.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -0,0 +1 @@
+org.apache.hadoop.mapreduce.v2.api.MRDelegationTokenIdentifier
@@ -37,6 +37,7 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
+      <scope>test</scope>
     </dependency>
   </dependencies>
 
@@ -38,7 +38,6 @@
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.QueueACL;
@@ -433,8 +432,7 @@ private void printTokens(JobID jobId,
     LOG.debug("Printing tokens for job: " + jobId);
     for(Token<?> token: credentials.getAllTokens()) {
       if (token.getKind().toString().equals("HDFS_DELEGATION_TOKEN")) {
-        LOG.debug("Submitting with " +
-        org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier.stringifyToken(token));
+        LOG.debug("Submitting with " + token);
       }
     }
   }
@@ -30,7 +30,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Master;
@@ -179,16 +178,14 @@ private static void mergeBinaryTokens(Credentials creds, Configuration conf) {
    * @param namenode
    * @return delegation token
    */
-  @SuppressWarnings("unchecked")
   @InterfaceAudience.Private
-  public static Token<DelegationTokenIdentifier> getDelegationToken(
+  public static Token<?> getDelegationToken(
       Credentials credentials, String namenode) {
     //No fs specific tokens issues by this fs. It may however issue tokens
     // for other filesystems - which would be keyed by that filesystems name.
     if (namenode == null)
       return null;
-    return (Token<DelegationTokenIdentifier>) credentials.getToken(new Text(
-        namenode));
+    return (Token<?>) credentials.getToken(new Text(namenode));
   }
 
   /**
@@ -39,7 +39,6 @@
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.util.StringUtils;
 
 
@@ -0,0 +1,2 @@
+org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier
+org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier
@@ -114,8 +114,8 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <!-- needed for security and runtime -->
       <artifactId>hadoop-hdfs</artifactId>
+      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>com.google.inject.extensions</groupId>
@@ -57,7 +57,7 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
-      <scope>provided</scope>
+      <scope>runtime</scope>
     </dependency>
     <dependency>
      <groupId>org.apache.hadoop</groupId>
@@ -0,0 +1,4 @@
+org.apache.hadoop.yarn.security.ContainerTokenIdentifier
+org.apache.hadoop.yarn.security.ApplicationTokenIdentifier
+org.apache.hadoop.yarn.security.client.ClientTokenIdentifier
+org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier
@@ -0,0 +1 @@
+org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.security.LocalizerTokenIdentifier
@@ -128,8 +128,8 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <!-- needed for security and runtime -->
       <artifactId>hadoop-hdfs</artifactId>
+      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>com.google.inject</groupId>