HADOOP-10566. Refactor proxyservers out of ProxyUsers. Contributed by Benoy Antony.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1594280 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Suresh Srinivas 2014-05-13 16:53:38 +00:00
parent 95ba9f47fc
commit 0077999972
6 changed files with 11 additions and 37 deletions

View File

@ -380,6 +380,9 @@ Release 2.5.0 - UNRELEASED
HADOOP-10158. SPNEGO should work with multiple interfaces/SPNs.
(daryn via kihwal)
HADOOP-10566. Refactor proxyservers out of ProxyUsers.
(Benoy Antony via suresh)
OPTIMIZATIONS
BUG FIXES

View File

@ -19,12 +19,10 @@
package org.apache.hadoop.security.authorize; package org.apache.hadoop.security.authorize;
import java.net.InetAddress; import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException; import java.net.UnknownHostException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet;
import java.util.Map; import java.util.Map;
import java.util.Map.Entry; import java.util.Map.Entry;
@ -44,7 +42,6 @@ public class ProxyUsers {
private static final String CONF_GROUPS = ".groups"; private static final String CONF_GROUPS = ".groups";
private static final String CONF_HADOOP_PROXYUSER = "hadoop.proxyuser."; private static final String CONF_HADOOP_PROXYUSER = "hadoop.proxyuser.";
private static final String CONF_HADOOP_PROXYUSER_RE = "hadoop\\.proxyuser\\."; private static final String CONF_HADOOP_PROXYUSER_RE = "hadoop\\.proxyuser\\.";
public static final String CONF_HADOOP_PROXYSERVERS = "hadoop.proxyservers";
private static boolean init = false; private static boolean init = false;
//list of users, groups and hosts per proxyuser //list of users, groups and hosts per proxyuser
@ -54,8 +51,6 @@ public class ProxyUsers {
new HashMap<String, Collection<String>>(); new HashMap<String, Collection<String>>();
private static Map<String, Collection<String>> proxyHosts = private static Map<String, Collection<String>> proxyHosts =
new HashMap<String, Collection<String>>(); new HashMap<String, Collection<String>>();
private static Collection<String> proxyServers =
new HashSet<String>();
/** /**
* reread the conf and get new values for "hadoop.proxyuser.*.groups/users/hosts" * reread the conf and get new values for "hadoop.proxyuser.*.groups/users/hosts"
@ -75,7 +70,6 @@ public static synchronized void refreshSuperUserGroupsConfiguration(Configuratio
proxyGroups.clear(); proxyGroups.clear();
proxyHosts.clear(); proxyHosts.clear();
proxyUsers.clear(); proxyUsers.clear();
proxyServers.clear();
// get all the new keys for users // get all the new keys for users
String regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_USERS; String regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_USERS;
@ -103,22 +97,8 @@ public static synchronized void refreshSuperUserGroupsConfiguration(Configuratio
proxyHosts.put(entry.getKey(), proxyHosts.put(entry.getKey(),
StringUtils.getTrimmedStringCollection(entry.getValue())); StringUtils.getTrimmedStringCollection(entry.getValue()));
} }
// trusted proxy servers such as http proxies
for (String host : conf.getTrimmedStrings(CONF_HADOOP_PROXYSERVERS)) {
InetSocketAddress addr = new InetSocketAddress(host, 0);
if (!addr.isUnresolved()) {
proxyServers.add(addr.getAddress().getHostAddress());
}
}
init = true; init = true;
} ProxyServers.refresh(conf);
public static synchronized boolean isProxyServer(String remoteAddr) {
if(!init) {
refreshSuperUserGroupsConfiguration();
}
return proxyServers.contains(remoteAddr);
} }
/** /**

View File

@ -327,17 +327,6 @@ public void testWithDuplicateProxyHosts() throws Exception {
assertEquals (1,hosts.size()); assertEquals (1,hosts.size());
} }
@Test
public void testProxyServer() {
Configuration conf = new Configuration();
assertFalse(ProxyUsers.isProxyServer("1.1.1.1"));
conf.set(ProxyUsers.CONF_HADOOP_PROXYSERVERS, "2.2.2.2, 3.3.3.3");
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
assertFalse(ProxyUsers.isProxyServer("1.1.1.1"));
assertTrue(ProxyUsers.isProxyServer("2.2.2.2"));
assertTrue(ProxyUsers.isProxyServer("3.3.3.3"));
}
private void assertNotAuthorized(UserGroupInformation proxyUgi, String host) { private void assertNotAuthorized(UserGroupInformation proxyUgi, String host) {
try { try {
ProxyUsers.authorize(proxyUgi, host); ProxyUsers.authorize(proxyUgi, host);

View File

@ -34,6 +34,7 @@
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.hadoop.security.authentication.util.KerberosName; import org.apache.hadoop.security.authentication.util.KerberosName;
import org.apache.hadoop.security.authorize.ProxyServers;
import org.apache.hadoop.security.authorize.ProxyUsers; import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.Token;
@ -193,7 +194,7 @@ private static UserGroupInformation getTokenUGI(ServletContext context,
public static String getRemoteAddr(HttpServletRequest request) { public static String getRemoteAddr(HttpServletRequest request) {
String remoteAddr = request.getRemoteAddr(); String remoteAddr = request.getRemoteAddr();
String proxyHeader = request.getHeader("X-Forwarded-For"); String proxyHeader = request.getHeader("X-Forwarded-For");
if (proxyHeader != null && ProxyUsers.isProxyServer(remoteAddr)) { if (proxyHeader != null && ProxyServers.isProxyServer(remoteAddr)) {
final String clientAddr = proxyHeader.split(",")[0].trim(); final String clientAddr = proxyHeader.split(",")[0].trim();
if (!clientAddr.isEmpty()) { if (!clientAddr.isEmpty()) {
remoteAddr = clientAddr; remoteAddr = clientAddr;

View File

@ -30,6 +30,7 @@
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.hadoop.security.authorize.AuthorizationException; import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.security.authorize.ProxyServers;
import org.apache.hadoop.security.authorize.ProxyUsers; import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.security.token.TokenIdentifier;
@ -51,7 +52,6 @@
public class TestJspHelper { public class TestJspHelper {
private final Configuration conf = new HdfsConfiguration(); private final Configuration conf = new HdfsConfiguration();
private String jspWriterOutput = "";
// allow user with TGT to run tests // allow user with TGT to run tests
@BeforeClass @BeforeClass
@ -450,7 +450,7 @@ private String getRemoteAddr(String clientAddr, String proxyAddr, boolean truste
when(req.getRemoteAddr()).thenReturn(proxyAddr); when(req.getRemoteAddr()).thenReturn(proxyAddr);
when(req.getHeader("X-Forwarded-For")).thenReturn(clientAddr); when(req.getHeader("X-Forwarded-For")).thenReturn(clientAddr);
if (trusted) { if (trusted) {
conf.set(ProxyUsers.CONF_HADOOP_PROXYSERVERS, proxyAddr); conf.set(ProxyServers.CONF_HADOOP_PROXYSERVERS, proxyAddr);
} }
} }
ProxyUsers.refreshSuperUserGroupsConfiguration(conf); ProxyUsers.refreshSuperUserGroupsConfiguration(conf);

View File

@ -32,14 +32,15 @@
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.web.resources.GetOpParam; import org.apache.hadoop.hdfs.web.resources.GetOpParam;
import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.authorize.ProxyUsers; import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.security.authorize.ProxyServers;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
@ -120,7 +121,7 @@ public void testWebHdfsAuditLogger() throws IOException, URISyntaxException {
assertEquals("127.0.0.1", DummyAuditLogger.remoteAddr); assertEquals("127.0.0.1", DummyAuditLogger.remoteAddr);
// trusted proxied request // trusted proxied request
conf.set(ProxyUsers.CONF_HADOOP_PROXYSERVERS, "127.0.0.1"); conf.set(ProxyServers.CONF_HADOOP_PROXYSERVERS, "127.0.0.1");
ProxyUsers.refreshSuperUserGroupsConfiguration(conf); ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
conn = (HttpURLConnection) uri.toURL().openConnection(); conn = (HttpURLConnection) uri.toURL().openConnection();
conn.setRequestMethod(op.getType().toString()); conn.setRequestMethod(op.getType().toString());