HADOOP-6873. using delegation token over hftp for long running clients

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@980648 13f79535-47bb-0310-9956-ffa450edef68
Boris Shkolnik 2010-07-30 04:55:23 +00:00
parent 7e7aff5c90
commit c15ae29a90
7 changed files with 67 additions and 19 deletions

File: CHANGES.txt

@@ -166,6 +166,9 @@ Trunk (unreleased changes)
     HADOOP-6536. Fixes FileUtil.fullyDelete() not to delete the contents of
     the sym-linked directory. (Ravi Gummadi via amareshwari)

+    HADOOP-6873. using delegation token over hftp for long
+    running clients (boryas)
+
 Release 0.21.0 - Unreleased

   INCOMPATIBLE CHANGES

File: org/apache/hadoop/fs/FSDataInputStream.java

@@ -27,7 +27,7 @@
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class FSDataInputStream extends DataInputStream
-    implements Seekable, PositionedReadable {
+    implements Seekable, PositionedReadable, Closeable {

   public FSDataInputStream(InputStream in)
       throws IOException {

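Adding Closeable to the implements list makes closeability explicit in the type, so the stream can be handed to generic cleanup helpers without a cast. A minimal sketch, assuming the default local FileSystem; the input path and the closeQuietly helper are hypothetical:

    import java.io.Closeable;
    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class CloseableDemo {
      // Best-effort close of any Closeable, the usual cleanup pattern
      // for streams opened from a FileSystem.
      static void closeQuietly(Closeable c) {
        if (c != null) {
          try {
            c.close();
          } catch (IOException ignored) {
          }
        }
      }

      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        FSDataInputStream in = null;
        try {
          in = fs.open(new Path("/tmp/example.txt")); // hypothetical path
          System.out.println(in.read());
        } finally {
          closeQuietly(in); // accepted because FSDataInputStream is Closeable
        }
      }
    }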
File: org/apache/hadoop/fs/FileSystem.java

@ -47,9 +47,10 @@
import org.apache.hadoop.conf.Configured; import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Options.Rename; import org.apache.hadoop.fs.Options.Rename;
import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.io.MultipleIOException; import org.apache.hadoop.io.MultipleIOException;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Progressable; import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.ReflectionUtils;
@@ -169,6 +170,22 @@ public void initialize(URI name, Configuration conf) throws IOException {
   /** Returns a URI whose scheme and authority identify this FileSystem.*/
   public abstract URI getUri();

+  /**
+   * Get the default port for this file system.
+   * @return the default port or 0 if there isn't one
+   */
+  protected int getDefaultPort() {
+    return 0;
+  }
+
+  /**
+   * Get a canonical name for this file system.
+   * @return a URI string that uniquely identifies this file system
+   */
+  public String getCanonicalServiceName() {
+    return SecurityUtil.buildDTServiceName(getUri(), getDefaultPort());
+  }
+
   /** @deprecated call #getUri() instead.*/
   @Deprecated
   public String getName() { return getUri().toString(); }
@@ -328,6 +345,16 @@ public Path makeQualified(Path path) {
     return path.makeQualified(this.getUri(), this.getWorkingDirectory());
   }

+  /**
+   * Get a new delegation token for this file system.
+   * @param renewer the account name that is allowed to renew the token.
+   * @return a new delegation token
+   * @throws IOException
+   */
+  public Token<?> getDelegationToken(String renewer) throws IOException {
+    return null;
+  }
+
   /** create a file with the provided permission
    * The permission of the file is set to be the provided permission as in
    * setPermission, not permission&~umask

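Together, getCanonicalServiceName() and getDelegationToken(String) let a long-running client fetch a token once, while Kerberos credentials are still available, and carry it in its UGI for later access. A minimal sketch, assuming a security-enabled cluster; the HFTP URI and the renewer account "jtUser" are hypothetical, and the base FileSystem returns null unless an implementation overrides getDelegationToken():

    import java.io.IOException;
    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.security.token.Token;

    public class FetchDelegationToken {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Hypothetical HFTP URI; any FileSystem that overrides
        // getDelegationToken() is used the same way.
        FileSystem fs = FileSystem.get(
            URI.create("hftp://nn.example.com:50070"), conf);

        // "jtUser" is a hypothetical renewer account.
        Token<?> token = fs.getDelegationToken("jtUser");
        if (token != null) {
          // The canonical service name keys the token in the UGI's
          // credential map, so it can be found again at connect time.
          System.out.println("token service: " + fs.getCanonicalServiceName());
          UserGroupInformation.getCurrentUser().addToken(token);
        }
      }
    }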
File: org/apache/hadoop/security/Credentials.java

@ -28,20 +28,20 @@
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils; import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/** /**
* A class that provides the facilities of reading and writing * A class that provides the facilities of reading and writing
@@ -124,15 +124,18 @@ public void addSecretKey(Text alias, byte[] key) {
    * @param conf
    * @throws IOException
    */
-  public void readTokenStorageFile(Path filename,
-                                   Configuration conf) throws IOException {
-    FSDataInputStream in = filename.getFileSystem(conf).open(filename);
+  public static Credentials readTokenStorageFile(Path filename, Configuration conf)
+  throws IOException {
+    FSDataInputStream in = null;
+    Credentials credentials = new Credentials();
     try {
-      readTokenStorageStream(in);
-    } catch(IOException ioe) {
-      throw new IOException("Exception reading " + filename, ioe);
-    } finally {
+      in = filename.getFileSystem(conf).open(filename);
+      credentials.readTokenStorageStream(in);
       in.close();
+      return credentials;
+    } catch(IOException ioe) {
+      IOUtils.cleanup(LOG, in);
+      throw new IOException("Exception reading " + filename, ioe);
     }
   }

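With readTokenStorageFile now a static factory, callers no longer pre-construct an empty Credentials object, and the stream is cleaned up via IOUtils.cleanup if the read fails. A short usage sketch; the token file path is hypothetical (e.g. a file produced by the credentials-writing side):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.security.Credentials;
    import org.apache.hadoop.security.token.Token;

    public class ReadTokens {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Hypothetical token storage file on the local file system.
        Credentials creds = Credentials.readTokenStorageFile(
            new Path("file:///tmp/tokens.bin"), conf);
        for (Token<?> t : creds.getAllTokens()) {
          System.out.println("loaded token of kind " + t.getKind());
        }
      }
    }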
File: org/apache/hadoop/security/SecurityUtil.java

@@ -234,7 +234,13 @@ public static String buildDTServiceName(URI uri, int defPort) {
     // for whatever reason using NetUtils.createSocketAddr(target).toString()
     // returns "localhost/ip:port"
     StringBuffer sb = new StringBuffer();
-    sb.append(NetUtils.normalizeHostName(uri.getHost())).append(":").append(port);
+    String host = uri.getHost();
+    if (host != null) {
+      host = NetUtils.normalizeHostName(host);
+    } else {
+      host = "";
+    }
+    sb.append(host).append(":").append(port);
     return sb.toString();
   }
 }

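The null guard matters for URIs with no host component (e.g. file:///), where URI.getHost() returns null and the old code would throw a NullPointerException inside NetUtils.normalizeHostName. A quick illustration of the fixed behavior; the URIs and default-port values are arbitrary:

    import java.net.URI;

    import org.apache.hadoop.security.SecurityUtil;

    public class ServiceNameDemo {
      public static void main(String[] args) {
        // Host present: normalized (resolved) and printed as "host:port".
        System.out.println(SecurityUtil.buildDTServiceName(
            URI.create("hftp://nn.example.com:50070"), 50070));
        // No host: previously an NPE, now ":port" with an empty host.
        System.out.println(SecurityUtil.buildDTServiceName(
            URI.create("file:///tmp"), 123));
      }
    }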
File: org/apache/hadoop/security/UserGroupInformation.java

@@ -415,8 +415,8 @@ static UserGroupInformation getLoginUser() throws IOException {
       if (fileLocation != null && isSecurityEnabled()) {
         // load the token storage file and put all of the tokens into the
         // user.
-        Credentials cred = new Credentials();
-        cred.readTokenStorageFile(new Path("file:///" + fileLocation), conf);
+        Credentials cred = Credentials.readTokenStorageFile(
+            new Path("file:///" + fileLocation), conf);
         for (Token<?> token: cred.getAllTokens()) {
           loginUser.addToken(token);
         }

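getLoginUser() picks up the token file named by the HADOOP_TOKEN_FILE_LOCATION environment variable, so a child process launched with that variable set inherits tokens without performing its own Kerberos login. A sketch of the consuming side, assuming security is enabled and the variable points at a valid token file:

    import java.io.IOException;

    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.security.token.Token;

    public class ShowLoginTokens {
      public static void main(String[] args) throws IOException {
        // If HADOOP_TOKEN_FILE_LOCATION was set, the tokens from that
        // file are already attached to the login user.
        UserGroupInformation ugi = UserGroupInformation.getLoginUser();
        for (Token<?> t : ugi.getTokens()) {
          System.out.println("kind=" + t.getKind()
              + " service=" + t.getService());
        }
      }
    }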
File: org/apache/hadoop/fs/TestFilterFileSystem.java

@@ -18,6 +18,7 @@
 package org.apache.hadoop.fs;

+import java.io.IOException;
 import java.lang.reflect.Method;
 import java.lang.reflect.Modifier;
 import java.util.EnumSet;
@@ -28,6 +29,8 @@
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.Options.CreateOpts;
 import org.apache.hadoop.fs.Options.Rename;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.Progressable;

 public class TestFilterFileSystem extends TestCase {
@@ -123,6 +126,12 @@ public FSDataOutputStream primitiveCreate(final Path f,
         CreateOpts... opts) { return null; }
     public void primitiveMkdir(Path f, FsPermission absolutePermission,
         boolean createParent) { }
+    public int getDefaultPort() { return 0; }
+    public String getCanonicalServiceName() { return null; }
+    public Token<?> getDelegationToken(String renewer) throws IOException {
+      return null;
+    }
   }

   public void testFilterFileSystem() throws Exception {