HADOOP-6873. using delegation token over hftp for long running clients
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@980648 13f79535-47bb-0310-9956-ffa450edef68
commit c15ae29a90
parent 7e7aff5c90
CHANGES.txt
@@ -166,6 +166,9 @@ Trunk (unreleased changes)
     HADOOP-6536. Fixes FileUtil.fullyDelete() not to delete the contents of
     the sym-linked directory. (Ravi Gummadi via amareshwari)
 
+    HADOOP-6873. using delegation token over hftp for long
+    running clients (boryas)
+
 Release 0.21.0 - Unreleased
 
   INCOMPATIBLE CHANGES
FSDataInputStream.java
@@ -27,7 +27,7 @@
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class FSDataInputStream extends DataInputStream
-    implements Seekable, PositionedReadable {
+    implements Seekable, PositionedReadable, Closeable {
 
   public FSDataInputStream(InputStream in)
     throws IOException {
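FSDataInputStream was already closeable through InputStream; the hunk above only makes Closeable explicit in the class declaration. For orientation, the usual open/read/close pattern against the FileSystem API looks like this (a minimal sketch; the path is illustrative):

    import java.io.Closeable;
    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class ReadSketch {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        Path p = new Path("file:///tmp/example.txt");   // illustrative path
        FileSystem fs = p.getFileSystem(conf);
        Closeable in = null;          // the stream can be handled as a plain Closeable
        try {
          FSDataInputStream stream = fs.open(p);
          in = stream;
          System.out.println("first byte: " + stream.read());
        } finally {
          if (in != null) {
            in.close();
          }
        }
      }
    }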
FileSystem.java
@@ -47,9 +47,10 @@
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.MultipleIOException;
+import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ReflectionUtils;
 
@@ -169,6 +170,22 @@ public void initialize(URI name, Configuration conf) throws IOException {
   /** Returns a URI whose scheme and authority identify this FileSystem.*/
   public abstract URI getUri();
 
+  /**
+   * Get the default port for this file system.
+   * @return the default port or 0 if there isn't one
+   */
+  protected int getDefaultPort() {
+    return 0;
+  }
+
+  /**
+   * Get a canonical name for this file system.
+   * @return a URI string that uniquely identifies this file system
+   */
+  public String getCanonicalServiceName() {
+    return SecurityUtil.buildDTServiceName(getUri(), getDefaultPort());
+  }
+
   /** @deprecated call #getUri() instead.*/
   @Deprecated
   public String getName() { return getUri().toString(); }
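getCanonicalServiceName() gives every FileSystem a stable identifier built from its URI host and port (falling back to getDefaultPort() when the URI carries no port); delegation tokens are stored and looked up under this name. A minimal client-side sketch, assuming a FileSystem implementation for the URI scheme is on the classpath (the host and port are made up):

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public class CanonicalNameSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Illustrative URI only; substitute a real cluster address.
        FileSystem fs = FileSystem.get(
            URI.create("hftp://namenode.example.com:50470/"), conf);

        // Built via SecurityUtil.buildDTServiceName(getUri(), getDefaultPort()),
        // typically rendering as "host-or-ip:port".
        System.out.println("canonical service name: " + fs.getCanonicalServiceName());
      }
    }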
FileSystem.java (continued)
@@ -328,6 +345,16 @@ public Path makeQualified(Path path) {
     return path.makeQualified(this.getUri(), this.getWorkingDirectory());
   }
 
+  /**
+   * Get a new delegation token for this file system.
+   * @param renewer the account name that is allowed to renew the token.
+   * @return a new delegation token
+   * @throws IOException
+   */
+  public Token<?> getDelegationToken(String renewer) throws IOException {
+    return null;
+  }
+
   /** create a file with the provided permission
    * The permission of the file is set to be the provided permission as in
    * setPermission, not permission&~umask
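getDelegationToken() is the client-facing hook this patch is about: a long-running process fetches a token once, while it still has fresh credentials, and presents that token later instead of re-authenticating. The base implementation returns null; secure file systems override it. A minimal sketch, where the hftp URI and the renewer name "mapred" are illustrative values only:

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.security.token.Token;

    public class FetchTokenSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(
            URI.create("hftp://namenode.example.com:50470/"), conf);

        // Returns null unless the concrete FileSystem supports delegation tokens.
        Token<?> token = fs.getDelegationToken("mapred");
        if (token == null) {
          System.out.println("no delegation token available");
        } else {
          System.out.println("kind=" + token.getKind() + " service=" + token.getService());
        }
      }
    }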
Credentials.java
@@ -28,20 +28,20 @@
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 
 /**
  * A class that provides the facilities of reading and writing
@@ -124,15 +124,18 @@ public void addSecretKey(Text alias, byte[] key) {
    * @param conf
    * @throws IOException
    */
-  public void readTokenStorageFile(Path filename,
-                                   Configuration conf) throws IOException {
-    FSDataInputStream in = filename.getFileSystem(conf).open(filename);
+  public static Credentials readTokenStorageFile(Path filename, Configuration conf)
+  throws IOException {
+    FSDataInputStream in = null;
+    Credentials credentials = new Credentials();
     try {
-      readTokenStorageStream(in);
-    } catch(IOException ioe) {
-      throw new IOException("Exception reading " + filename, ioe);
-    } finally {
+      in = filename.getFileSystem(conf).open(filename);
+      credentials.readTokenStorageStream(in);
       in.close();
+      return credentials;
+    } catch(IOException ioe) {
+      IOUtils.cleanup(LOG, in);
+      throw new IOException("Exception reading " + filename, ioe);
     }
   }
 
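readTokenStorageFile() becomes a static factory: it opens the stream itself, hands back a fully populated Credentials object, and on failure closes the stream through IOUtils.cleanup() before rethrowing. A minimal caller sketch, assuming a token storage file already exists at the illustrative path:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.security.Credentials;
    import org.apache.hadoop.security.token.Token;

    public class ReadTokenFileSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path tokenFile = new Path("file:///tmp/tokens.bin");   // illustrative location

        // The factory replaces the old pattern of "new Credentials()" followed by
        // an instance-level readTokenStorageFile() call.
        Credentials creds = Credentials.readTokenStorageFile(tokenFile, conf);
        for (Token<?> t : creds.getAllTokens()) {
          System.out.println(t.getService() + " -> " + t.getKind());
        }
      }
    }

The static form also means a failed read can no longer leave a caller holding a half-filled Credentials instance.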
SecurityUtil.java
@@ -234,7 +234,13 @@ public static String buildDTServiceName(URI uri, int defPort) {
     // for whatever reason using NetUtils.createSocketAddr(target).toString()
     // returns "localhost/ip:port"
     StringBuffer sb = new StringBuffer();
-    sb.append(NetUtils.normalizeHostName(uri.getHost())).append(":").append(port);
+    String host = uri.getHost();
+    if (host != null) {
+      host = NetUtils.normalizeHostName(host);
+    } else {
+      host = "";
+    }
+    sb.append(host).append(":").append(port);
     return sb.toString();
   }
 }
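The new branch covers URIs without an authority (for example file:///), whose getHost() returns null; the service name then gets an empty host instead of null being handed to NetUtils.normalizeHostName(). A small sketch of both cases (the host name is made up, and the first line's exact output depends on name resolution):

    import java.net.URI;

    import org.apache.hadoop.security.SecurityUtil;

    public class ServiceNameSketch {
      public static void main(String[] args) {
        // URI with an authority: renders as "normalized-host:port".
        System.out.println(SecurityUtil.buildDTServiceName(
            URI.create("hftp://namenode.example.com:50470/"), 50470));

        // URI with no authority: host is null, so an empty host is used and the
        // result is ":<defPort>".
        System.out.println(SecurityUtil.buildDTServiceName(URI.create("file:///"), 0));
      }
    }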
UserGroupInformation.java
@@ -415,8 +415,8 @@ static UserGroupInformation getLoginUser() throws IOException {
       if (fileLocation != null && isSecurityEnabled()) {
         // load the token storage file and put all of the tokens into the
         // user.
-        Credentials cred = new Credentials();
-        cred.readTokenStorageFile(new Path("file:///" + fileLocation), conf);
+        Credentials cred = Credentials.readTokenStorageFile(
+            new Path("file:///" + fileLocation), conf);
         for (Token<?> token: cred.getAllTokens()) {
           loginUser.addToken(token);
         }
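With this change the login path loads tokens through the static factory: when security is enabled and a token storage file location has been provided to the process, every token in that file is attached to the login user. A minimal sketch of how application code can inspect the result afterwards (it assumes the current user is that login user):

    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.security.token.Token;
    import org.apache.hadoop.security.token.TokenIdentifier;

    public class ShowLoginTokensSketch {
      public static void main(String[] args) throws Exception {
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        // Tokens read from the storage file during login show up here.
        for (Token<? extends TokenIdentifier> t : ugi.getTokens()) {
          System.out.println("kind=" + t.getKind() + " service=" + t.getService());
        }
      }
    }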
TestFilterFileSystem.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.fs;
 
+import java.io.IOException;
 import java.lang.reflect.Method;
 import java.lang.reflect.Modifier;
 import java.util.EnumSet;
@@ -28,6 +29,8 @@
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.Options.CreateOpts;
 import org.apache.hadoop.fs.Options.Rename;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.Progressable;
 
 public class TestFilterFileSystem extends TestCase {
@@ -123,7 +126,13 @@ public FSDataOutputStream primitiveCreate(final Path f,
         CreateOpts... opts) { return null; }
     public void primitiveMkdir(Path f, FsPermission absolutePermission,
         boolean createParent) { }
-  }
+    public int getDefaultPort() { return 0; }
+    public String getCanonicalServiceName() { return null; }
+    public Token<?> getDelegationToken(String renewer) throws IOException {
+      return null;
+    }
+
+  }
 
   public void testFilterFileSystem() throws Exception {
     for (Method m : FileSystem.class.getDeclaredMethods()) {
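TestFilterFileSystem walks FileSystem's declared methods by reflection and fails when FilterFileSystem does not override one of them, unless the method is also declared on the test's internal DontCheck stub; the three new methods are added to that stub because FilterFileSystem intentionally does not forward them. A rough sketch of the reflective idea (not the actual test body):

    import java.lang.reflect.Method;
    import java.lang.reflect.Modifier;

    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FilterFileSystem;

    public class OverrideCheckSketch {
      static boolean declares(Class<?> c, Method m) {
        try {
          c.getDeclaredMethod(m.getName(), m.getParameterTypes());
          return true;
        } catch (NoSuchMethodException e) {
          return false;
        }
      }

      public static void main(String[] args) {
        for (Method m : FileSystem.class.getDeclaredMethods()) {
          int mods = m.getModifiers();
          if (Modifier.isStatic(mods) || Modifier.isPrivate(mods) || Modifier.isFinal(mods)) {
            continue;   // only overridable instance methods matter
          }
          if (!declares(FilterFileSystem.class, m)) {
            System.out.println("not overridden by FilterFileSystem: " + m.getName());
          }
        }
      }
    }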