Remove parent's env vars from child processes
parent af9b000535
commit 9d4d30243b
@@ -34,6 +34,7 @@
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.security.alias.AbstractJavaKeyStoreProvider;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -362,6 +363,9 @@ public static String[] getRunScriptCommand(File script) {
   /** If or not script timed out*/
   private final AtomicBoolean timedOut = new AtomicBoolean(false);
 
+  /** Indicates if the parent env vars should be inherited or not*/
+  protected boolean inheritParentEnv = true;
+
   /**
    * Centralized logic to discover and validate the sanity of the Hadoop
    * home directory.
@@ -854,9 +858,16 @@ private void runCommand() throws IOException {
     timedOut.set(false);
     completed.set(false);
+
+    // Remove all env vars from the Builder to prevent leaking of env vars from
+    // the parent process.
+    if (!inheritParentEnv) {
+      builder.environment().clear();
+    }
+
     if (environment != null) {
       builder.environment().putAll(this.environment);
     }
 
     if (dir != null) {
       builder.directory(this.dir);
     }
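The clearing step relies on java.lang.ProcessBuilder: environment() returns a mutable map that starts out as a copy of the parent's variables, and whatever is left in it at start() time is exactly what the child process receives. A minimal JDK-only sketch of that behaviour on a Unix-like system (illustration only, not taken from this patch; names are made up):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.util.Map;

    public class EnvClearDemo {
      public static void main(String[] args) throws Exception {
        ProcessBuilder builder = new ProcessBuilder("env");
        Map<String, String> env = builder.environment(); // starts as a copy of the parent's env
        env.clear();                                     // drop everything inherited from the parent
        env.put("ONLY_VAR", "visible");                  // whitelist exactly what the child may see
        Process p = builder.start();
        try (BufferedReader r = new BufferedReader(
            new InputStreamReader(p.getInputStream()))) {
          r.lines().forEach(System.out::println);        // expected to print only ONLY_VAR=visible
        }
        p.waitFor();
      }
    }
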
@@ -1084,6 +1095,11 @@ public ShellCommandExecutor(String[] execString, File dir,
     this(execString, dir, env , 0L);
   }
 
+  public ShellCommandExecutor(String[] execString, File dir,
+      Map<String, String> env, long timeout) {
+    this(execString, dir, env , timeout, true);
+  }
+
   /**
    * Create a new instance of the ShellCommandExecutor to execute a command.
    *
@@ -1096,10 +1112,12 @@ public ShellCommandExecutor(String[] execString, File dir,
    * environment is not modified.
    * @param timeout Specifies the time in milliseconds, after which the
    *                command will be killed and the status marked as timed-out.
-   *                If 0, the command will not be timed out.
+   *                If 0, the command will not be timed out.
+   * @param inheritParentEnv Indicates if the process should inherit the env
+   *                vars from the parent process or not.
    */
   public ShellCommandExecutor(String[] execString, File dir,
-      Map<String, String> env, long timeout) {
+      Map<String, String> env, long timeout, boolean inheritParentEnv) {
     command = execString.clone();
     if (dir != null) {
       setWorkingDirectory(dir);
@@ -1108,6 +1126,7 @@ public ShellCommandExecutor(String[] execString, File dir,
       setEnvironment(env);
     }
     timeOutInterval = timeout;
+    this.inheritParentEnv = inheritParentEnv;
   }
 
   /**
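With the new five-argument constructor in place, a caller that must not leak its own environment passes false for inheritParentEnv, as the YARN call sites later in this diff now do. A short usage sketch (hypothetical values, assuming a hadoop-common build that contains this change is on the classpath):

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.util.Shell.ShellCommandExecutor;

    public class CleanEnvShellDemo {
      public static void main(String[] args) throws Exception {
        Map<String, String> env = new HashMap<>();
        env.put("SAFE_VAR", "value");           // the only variable the child should see

        // timeout 0L = never time out; false = do not inherit the parent's env vars
        ShellCommandExecutor exec = new ShellCommandExecutor(
            new String[] {"env"}, null, env, 0L, false);
        exec.execute();
        System.out.println(exec.getOutput());   // expected to list SAFE_VAR only
      }
    }
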
@@ -18,6 +18,7 @@
 package org.apache.hadoop.util;
 
 import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.security.alias.AbstractJavaKeyStoreProvider;
 import org.junit.Assert;
 
 import java.io.BufferedReader;
@@ -29,6 +30,8 @@
 import java.lang.management.ManagementFactory;
 import java.lang.management.ThreadInfo;
 import java.lang.management.ThreadMXBean;
+import java.util.HashMap;
+import java.util.Map;
 
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.test.GenericTestUtils;
@@ -145,6 +148,40 @@ public void testShellCommandTimeout() throws Throwable {
     shellFile.delete();
     assertTrue("Script did not timeout" , shexc.isTimedOut());
   }
 
+  @Test
+  public void testEnvVarsWithInheritance() throws Exception {
+    Assume.assumeFalse(WINDOWS);
+    testEnvHelper(true);
+  }
+
+  @Test
+  public void testEnvVarsWithoutInheritance() throws Exception {
+    Assume.assumeFalse(WINDOWS);
+    testEnvHelper(false);
+  }
+
+  private void testEnvHelper(boolean inheritParentEnv) throws Exception {
+    Map<String, String> customEnv = new HashMap<>();
+    customEnv.put("AAA" + System.currentTimeMillis(), "AAA");
+    customEnv.put("BBB" + System.currentTimeMillis(), "BBB");
+    customEnv.put("CCC" + System.currentTimeMillis(), "CCC");
+    Shell.ShellCommandExecutor command = new ShellCommandExecutor(
+        new String[]{"env"}, null, customEnv, 0L, inheritParentEnv);
+    command.execute();
+    String[] varsArr = command.getOutput().split("\n");
+    Map<String, String> vars = new HashMap<>();
+    for (String var : varsArr) {
+      int eqIndex = var.indexOf('=');
+      vars.put(var.substring(0, eqIndex), var.substring(eqIndex + 1));
+    }
+    Map<String, String> expectedEnv = new HashMap<>();
+    expectedEnv.putAll(customEnv);
+    if (inheritParentEnv) {
+      expectedEnv.putAll(System.getenv());
+    }
+    assertEquals(expectedEnv, vars);
+  }
+
   private static int countTimerThreads() {
     ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();
@@ -284,7 +284,9 @@ protected CommandExecutor buildCommandExecutor(String wrapperScriptPath,
     return new ShellCommandExecutor(
         command,
         wordDir,
-        environment);
+        environment,
+        0L,
+        false);
   }
 
   protected LocalWrapperScriptBuilder getLocalWrapperScriptBuilder(
@@ -284,7 +284,9 @@ public int launchContainer(ContainerStartContext ctx) throws IOException {
       shExec = new ShellCommandExecutor(
           command,
           new File(containerWorkDir.toUri().getPath()),
-          container.getLaunchContext().getEnvironment()); // sanitized env
+          container.getLaunchContext().getEnvironment(), // sanitized env
+          0L,
+          false);
       if (isContainerActive(containerId)) {
         shExec.execute();
       } else {
@@ -282,7 +282,7 @@ public void startLocalizer(LocalizerStartContext ctx)
           PrivilegedOperationExecutor.getInstance(conf);
 
       privilegedOperationExecutor.executePrivilegedOperation(prefixCommands,
-          initializeContainerOp, null, null, false);
+          initializeContainerOp, null, null, false, true);
 
     } catch (PrivilegedOperationException e) {
       int exitCode = e.getExitCode();
@@ -133,18 +133,19 @@ public String[] getPrivilegedOperationExecutionCommand(List<String>
    * @param workingDir (optional) working directory for execution
    * @param env (optional) env of the command will include specified vars
    * @param grabOutput return (possibly large) shell command output
+   * @param inheritParentEnv inherit the env vars from the parent process
    * @return stdout contents from shell executor - useful for some privileged
    *         operations - e.g --tc_read
    * @throws org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.privileged.PrivilegedOperationException
    */
   public String executePrivilegedOperation(List<String> prefixCommands,
       PrivilegedOperation operation, File workingDir,
-      Map<String, String> env, boolean grabOutput)
+      Map<String, String> env, boolean grabOutput, boolean inheritParentEnv)
       throws PrivilegedOperationException {
     String[] fullCommandArray = getPrivilegedOperationExecutionCommand
         (prefixCommands, operation);
     ShellCommandExecutor exec = new ShellCommandExecutor(fullCommandArray,
-        workingDir, env);
+        workingDir, env, 0L, inheritParentEnv);
 
     try {
       exec.execute();
@@ -199,7 +200,8 @@ public String executePrivilegedOperation(List<String> prefixCommands,
    */
   public String executePrivilegedOperation(PrivilegedOperation operation,
       boolean grabOutput) throws PrivilegedOperationException {
-    return executePrivilegedOperation(null, operation, null, null, grabOutput);
+    return executePrivilegedOperation(null, operation, null, null, grabOutput,
+        true);
   }
 
   //Utility functions for squashing together operations in supported ways
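The pre-existing two-argument overload keeps its old behaviour by delegating with true, so untouched callers still inherit the parent environment; only the container launch paths updated below pass false. A generic sketch of that compatibility pattern (illustrative class and method names, not YARN code):

    public class EnvPolicyCompat {
      // New, fully specified entry point added alongside the old one.
      static String run(String command, boolean grabOutput, boolean inheritParentEnv) {
        return command + " grabOutput=" + grabOutput + " inheritParentEnv=" + inheritParentEnv;
      }

      // Pre-existing signature is kept; existing callers keep the old default.
      static String run(String command, boolean grabOutput) {
        return run(command, grabOutput, true);
      }

      public static void main(String[] args) {
        System.out.println(run("ls", false));        // old call sites: env still inherited
        System.out.println(run("ls", false, false)); // updated call sites opt out explicitly
      }
    }
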
@@ -102,7 +102,7 @@ public void launchContainer(ContainerRuntimeContext ctx)
     try {
       privilegedOperationExecutor.executePrivilegedOperation(prefixCommands,
           launchOp, null, container.getLaunchContext().getEnvironment(),
-          false);
+          false, false);
     } catch (PrivilegedOperationException e) {
       LOG.warn("Launch container failed. Exception: ", e);
 
@@ -134,7 +134,7 @@ public void signalContainer(ContainerRuntimeContext ctx)
 
       executor.executePrivilegedOperation(null,
           signalOp, null, container.getLaunchContext().getEnvironment(),
-          false);
+          false, true);
     } catch (PrivilegedOperationException e) {
       //Don't log the failure here. Some kinds of signaling failures are
       // acceptable. Let the calling executor decide what to do.
@@ -331,7 +331,7 @@ public void launchContainer(ContainerRuntimeContext ctx)
     try {
       privilegedOperationExecutor.executePrivilegedOperation(null,
           launchOp, null, container.getLaunchContext().getEnvironment(),
-          false);
+          false, false);
     } catch (PrivilegedOperationException e) {
       LOG.warn("Launch container failed. Exception: ", e);
 
@@ -360,7 +360,7 @@ public void signalContainer(ContainerRuntimeContext ctx)
 
       executor.executePrivilegedOperation(null,
           signalOp, null, container.getLaunchContext().getEnvironment(),
-          false);
+          false, true);
     } catch (PrivilegedOperationException e) {
       LOG.warn("Signal container failed. Exception: ", e);
 
@@ -179,7 +179,7 @@ private PrivilegedOperation capturePrivilegedOperationAndVerifyArgs()
     // warning annotation on the entire method
     verify(mockExecutor, times(1))
         .executePrivilegedOperation(anyList(), opCaptor.capture(), any(
-            File.class), any(Map.class), eq(false));
+            File.class), any(Map.class), eq(false), eq(false));
 
     PrivilegedOperation op = opCaptor.getValue();
 