HADOOP-6520. Adds APIs to read/write Tokens and secret keys. Also adds the automatic loading of tokens into UserGroupInformation upon login; the tokens are read from a file whose location is specified in an environment variable. Contributed by Devaraj Das.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@904862 13f79535-47bb-0310-9956-ffa450edef68
Author: Devaraj Das
Date:   2010-01-30 19:58:09 +00:00
Parent: ba8647f511
Commit: 96cd62f8d5
2 changed files with 311 additions and 0 deletions

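The TokenStorage class added below is the container the commit message describes. As a forward pointer, here is a minimal sketch of the write side, assuming a hypothetical caller that already holds a delegation token and a secret key; the class name, aliases, and file handling are illustrative and not part of this commit.

import java.io.DataOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.TokenStorage;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

// Hypothetical helper, not part of this commit.
public class TokenFileWriterSketch {
  // Serializes a previously obtained token and a shared secret into a local
  // file that can later be handed to TokenStorage.readTokensAndLoadInUGI.
  public static void writeTokenFile(Token<? extends TokenIdentifier> jobToken,
                                    byte[] secret,
                                    String fileName) throws IOException {
    TokenStorage ts = new TokenStorage();
    ts.addToken(new Text("jobToken"), jobToken);     // alias is illustrative
    ts.addSecretKey(new Text("jobSecret"), secret);  // alias is illustrative
    DataOutputStream out = new DataOutputStream(new FileOutputStream(fileName));
    try {
      ts.write(out);  // Writable serialization: tokens first, then secret keys
    } finally {
      out.close();
    }
  }
}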
TokenStorage.java

@@ -0,0 +1,178 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.security;
import static org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate.Project.MAPREDUCE;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
/**
 * A class that provides the facilities of reading and writing
 * secret keys and Tokens.
 */
@InterfaceAudience.LimitedPrivate({MAPREDUCE})
public class TokenStorage implements Writable {

  private Map<Text, byte[]> secretKeysMap = new HashMap<Text, byte[]>();
  private Map<Text, Token<? extends TokenIdentifier>> tokenMap =
      new HashMap<Text, Token<? extends TokenIdentifier>>();

  /**
   * Returns the key bytes for the alias
   * @param alias the alias for the key
   * @return key for this alias
   */
  public byte[] getSecretKey(Text alias) {
    return secretKeysMap.get(alias);
  }

  /**
   * Returns the Token object for the alias
   * @param alias the alias for the Token
   * @return token for this alias
   */
  public Token<? extends TokenIdentifier> getToken(Text alias) {
    return tokenMap.get(alias);
  }
  /**
   * Adds a token to the in-memory storage
   * @param alias the alias for the token
   * @param t the token object
   */
  public void addToken(Text alias, Token<? extends TokenIdentifier> t) {
    tokenMap.put(alias, t);
  }

  /**
   * @return all the tokens in the in-memory map
   */
  public Collection<Token<? extends TokenIdentifier>> getAllTokens() {
    return tokenMap.values();
  }

  /**
   * @return number of Tokens in the in-memory map
   */
  public int numberOfTokens() {
    return tokenMap.size();
  }

  /**
   * @return number of secret keys in the in-memory map
   */
  public int numberOfSecretKeys() {
    return secretKeysMap.size();
  }

  /**
   * Sets the key for an alias
   * @param alias the alias for the key
   * @param key the key bytes
   */
  public void addSecretKey(Text alias, byte[] key) {
    secretKeysMap.put(alias, key);
  }
  /**
   * Convenience method that reads a file and loads the tokens
   * it contains into the passed UGI
   * @param filename the path to the local token file
   * @param conf the configuration used to obtain the local FileSystem
   * @param ugi the UserGroupInformation to load the tokens into
   * @throws IOException
   */
  public static void readTokensAndLoadInUGI(String filename, Configuration conf,
      UserGroupInformation ugi) throws IOException {
    Path localTokensFile = new Path(filename);
    FileSystem localFS = FileSystem.getLocal(conf);
    FSDataInputStream in = localFS.open(localTokensFile);
    TokenStorage ts = new TokenStorage();
    ts.readFields(in);
    for (Token<? extends TokenIdentifier> token : ts.getAllTokens()) {
      ugi.addToken(token);
    }
  }
  /**
   * Stores all the tokens and secret keys to the DataOutput
   * @param out the output to write to
   * @throws IOException
   */
  @Override
  public void write(DataOutput out) throws IOException {
    // write out tokens first
    WritableUtils.writeVInt(out, tokenMap.size());
    for (Map.Entry<Text,
        Token<? extends TokenIdentifier>> e : tokenMap.entrySet()) {
      e.getKey().write(out);
      e.getValue().write(out);
    }

    // now write out secret keys
    WritableUtils.writeVInt(out, secretKeysMap.size());
    for (Map.Entry<Text, byte[]> e : secretKeysMap.entrySet()) {
      e.getKey().write(out);
      WritableUtils.writeCompressedByteArray(out, e.getValue());
    }
  }

  /**
   * Loads all the tokens and secret keys from the DataInput
   * @param in the input to read from
   * @throws IOException
   */
  @Override
  public void readFields(DataInput in) throws IOException {
    secretKeysMap.clear();
    tokenMap.clear();

    // read the tokens
    int size = WritableUtils.readVInt(in);
    for (int i = 0; i < size; i++) {
      Text alias = new Text();
      alias.readFields(in);
      Token<? extends TokenIdentifier> t = new Token<TokenIdentifier>();
      t.readFields(in);
      tokenMap.put(alias, t);
    }

    // now read the secret keys
    size = WritableUtils.readVInt(in);
    for (int i = 0; i < size; i++) {
      Text alias = new Text();
      alias.readFields(in);
      byte[] key = WritableUtils.readCompressedByteArray(in);
      secretKeysMap.put(alias, key);
    }
  }
}
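For the read side, a minimal sketch of how the convenience method above might be invoked, assuming the refactored UserGroupInformation with getCurrentUser() (not part of this diff) and an illustrative local path; in the login flow described in the commit message, the path would instead come from the environment variable that UserGroupInformation consults.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.TokenStorage;
import org.apache.hadoop.security.UserGroupInformation;

// Hypothetical driver, not part of this commit.
public class TokenFileLoaderSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // getCurrentUser() is assumed from the refactored UserGroupInformation.
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();

    // Reads the TokenStorage file from the local file system (path is
    // illustrative) and adds every token it contains to the given UGI.
    TokenStorage.readTokensAndLoadInUGI("/tmp/tokens.bin", conf, ugi);
  }
}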

TestTokenStorage.java

@@ -0,0 +1,133 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.security;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.security.Key;
import java.security.NoSuchAlgorithmException;
import java.util.HashMap;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.Collection;
import static org.mockito.Mockito.mock;
import javax.crypto.KeyGenerator;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.security.TokenStorage;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
public class TestTokenStorage {
  private static final String DEFAULT_HMAC_ALGORITHM = "HmacSHA1";
  private static final File tmpDir =
      new File(System.getProperty("test.build.data", "/tmp"), "mapred");

  @Before
  public void setUp() {
    tmpDir.mkdir();
  }

  @SuppressWarnings("unchecked")
  @Test
  public <T extends TokenIdentifier> void testReadWriteStorage()
      throws IOException, NoSuchAlgorithmException {
    // create tokenStorage Object
    TokenStorage ts = new TokenStorage();
    Token<T> token1 = new Token();
    Token<T> token2 = new Token();
    Text service1 = new Text("service1");
    Text service2 = new Text("service2");
    Collection<Text> services = new ArrayList<Text>();
    services.add(service1);
    services.add(service2);
    token1.setService(service1);
    token2.setService(service2);
    ts.addToken(new Text("sometoken1"), token1);
    ts.addToken(new Text("sometoken2"), token2);

    // create keys and put it in
    final KeyGenerator kg = KeyGenerator.getInstance(DEFAULT_HMAC_ALGORITHM);
    String alias = "alias";
    Map<Text, byte[]> m = new HashMap<Text, byte[]>(10);
    for (int i = 0; i < 10; i++) {
      Key key = kg.generateKey();
      m.put(new Text(alias + i), key.getEncoded());
      ts.addSecretKey(new Text(alias + i), key.getEncoded());
    }

    // create file to store
    File tmpFileName = new File(tmpDir, "tokenStorageTest");
    DataOutputStream dos =
        new DataOutputStream(new FileOutputStream(tmpFileName));
    ts.write(dos);
    dos.close();

    // open and read it back
    DataInputStream dis =
        new DataInputStream(new FileInputStream(tmpFileName));
    ts = new TokenStorage();
    ts.readFields(dis);
    dis.close();
    // get the tokens and compare the services
    Collection<Token<? extends TokenIdentifier>> list = ts.getAllTokens();
    assertEquals("getAllTokens should return collection of size 2",
        2, list.size());
    boolean foundFirst = false;
    boolean foundSecond = false;
    for (Token<? extends TokenIdentifier> token : list) {
      if (token.getService().equals(service1)) {
        foundFirst = true;
      }
      if (token.getService().equals(service2)) {
        foundSecond = true;
      }
    }
    assertTrue("Tokens for services service1 and service2 must be present",
        foundFirst && foundSecond);

    // compare secret keys
    int mapLen = m.size();
    assertEquals("wrong number of keys in the Storage",
        mapLen, ts.numberOfSecretKeys());
    for (Text a : m.keySet()) {
      byte[] kTS = ts.getSecretKey(a);
      byte[] kLocal = m.get(a);
      assertTrue("keys don't match for " + a,
          WritableComparator.compareBytes(kTS, 0, kTS.length,
              kLocal, 0, kLocal.length) == 0);
    }
  }
}