HADOOP-6944. [Herriot] Implement a functionality for getting proxy users

definitions like groups and hosts. Contributed by Vinay Thota.


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1006356 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Konstantin Boudnik 2010-10-10 21:51:03 +00:00
parent 5fe802ea0a
commit e050832c93
3 changed files with 131 additions and 20 deletions

View File

@ -282,6 +282,9 @@ Release 0.21.1 - Unreleased
HADOOP-6993. Broken link on cluster setup page of docs. (eli)
HADOOP-6944. [Herriot] Implement a functionality for getting proxy users
definitions like groups and hosts. (Vinay Thota via cos)
Release 0.21.0 - 2010-08-13
INCOMPATIBLE CHANGES

View File

@ -27,8 +27,11 @@
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Iterator;
import java.util.Enumeration;
import java.util.Arrays;
import java.util.Hashtable;
import java.net.URI;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@ -50,8 +53,6 @@ public abstract class AbstractDaemonCluster {
private String newConfDir = null;
private static final String CONF_HADOOP_LOCAL_DIR =
"test.system.hdrc.hadoop.local.confdir";
private static final String CONF_HADOOP_MULTI_USER_LIST =
"test.system.hdrc.multi-user.list.path";
private final static Object waitLock = new Object();
/**
@ -302,27 +303,44 @@ protected void assertNoExceptionMessages() throws IOException {
}
/**
* Get the multi users list.
* @return ArrayList - users list as a array list.
* @throws IOException - if an I/O error occurs.
* Get the proxy user definitions from cluster from configuration.
* @return ProxyUserDefinitions - proxy users data like groups and hosts.
* @throws Exception - if no proxy users found in config.
*/
public ArrayList<String> getHadoopMultiUsersList() throws
IOException {
String hadoopUserListPath = conf.get(CONF_HADOOP_MULTI_USER_LIST);
if (hadoopUserListPath == null || hadoopUserListPath.isEmpty()) {
LOG.error("Proxy user list path has not been passed for "
+ CONF_HADOOP_MULTI_USER_LIST);
throw new IllegalArgumentException(
"Proxy user list hasn't been provided.");
public ProxyUserDefinitions getHadoopProxyUsers() throws
Exception {
Iterator itr = conf.iterator();
ArrayList<String> proxyUsers = new ArrayList<String>();
while (itr.hasNext()) {
if (itr.next().toString().indexOf("hadoop.proxyuser") >= 0 &&
itr.next().toString().indexOf("groups=") >= 0) {
proxyUsers.add(itr.next().toString().split("\\.")[2]);
}
}
File fileObj = new File(hadoopUserListPath);
DataInputStream disObj = new DataInputStream(new FileInputStream(fileObj));
ArrayList<String> usersList = new ArrayList<String>();
String strLine = null;
while((strLine = disObj.readLine()) != null){
usersList.add(strLine.substring(0,strLine.indexOf(',')));
if (proxyUsers.size() == 0) {
LOG.error("No proxy users found in the configuration.");
throw new Exception("No proxy users found in the configuration.");
}
return usersList;
ProxyUserDefinitions pud = new ProxyUserDefinitions() {
@Override
public boolean writeToFile(URI filePath) throws IOException {
throw new UnsupportedOperationException("No such method exists.");
};
};
for (String userName : proxyUsers) {
List<String> groups = Arrays.asList(conf.get("hadoop.proxyuser." +
userName + ".groups").split("//,"));
List<String> hosts = Arrays.asList(conf.get("hadoop.proxyuser." +
userName + ".hosts").split("//,"));
ProxyUserDefinitions.GroupsAndHost definitions =
pud.new GroupsAndHost();
definitions.setGroups(groups);
definitions.setHosts(hosts);
pud.addProxyUser(userName, definitions);
}
return pud;
}
/**

View File

@ -0,0 +1,90 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.test.system;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.io.IOException;
import java.net.URI;
/**
 * Data container that maps each proxy user to the host names and
 * groups configured for that user.
 */
public abstract class ProxyUserDefinitions {

  /**
   * Holds the groups and host names belonging to a single proxy user.
   */
  public class GroupsAndHost {
    private List<String> groups;
    private List<String> hosts;

    /**
     * Get the groups of this proxy user.
     * @return the list of group names.
     */
    public List<String> getGroups() {
      return groups;
    }

    /**
     * Set the groups of this proxy user.
     * @param groups - the list of group names.
     */
    public void setGroups(List<String> groups) {
      this.groups = groups;
    }

    /**
     * Get the host names of this proxy user.
     * @return the list of host names.
     */
    public List<String> getHosts() {
      return hosts;
    }

    /**
     * Set the host names of this proxy user.
     * @param hosts - the list of host names.
     */
    public void setHosts(List<String> hosts) {
      this.hosts = hosts;
    }
  }

  // Backing store: proxy user name -> its groups/hosts definition.
  protected Map<String, GroupsAndHost> proxyUsers;

  protected ProxyUserDefinitions () {
    proxyUsers = new HashMap<String, GroupsAndHost>();
  }

  /**
   * Register the data for one proxy user in the container.
   * @param userName - proxy user name.
   * @param definitions - groups and host names.
   */
  public void addProxyUser (String userName, GroupsAndHost definitions) {
    proxyUsers.put(userName, definitions);
  }

  /**
   * Look up the host names and groups of the given proxy user.
   * @param userName - proxy user name.
   * @return the GroupsAndHost object, or null if the user is unknown.
   */
  public GroupsAndHost getProxyUser (String userName) {
    return proxyUsers.get(userName);
  }

  /**
   * Get all proxy users data, containing the host names and groups
   * registered for each user.
   * @return the proxy users data as a map keyed by user name.
   */
  public Map<String, GroupsAndHost> getProxyUsers () {
    return proxyUsers;
  }

  /**
   * Persist the definitions to the given location. The implementation
   * has to be provided by a child of this class.
   * @param filePath - destination to write to.
   * @return true if the write succeeded.
   * @throws IOException - if an I/O error occurs.
   */
  public abstract boolean writeToFile(URI filePath) throws IOException;
}