HADOOP-11056. OsSecureRandom.setConf() might leak file descriptors. Contributed by Yongjun Zhang.

Author: Colin Patrick McCabe
Date:   2014-09-03 19:35:39 -07:00
Commit: 8f1a668575
Parent: f4caedfcbf
3 changed files with 36 additions and 3 deletions
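
For context, the pre-patch setConf() opened a new FileInputStream on the configured random device every time it was called, without closing the stream left over from a previous call, so each reconfiguration leaked one file descriptor. A minimal sketch of the offending calling pattern (illustrative only, not part of this commit):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.crypto.random.OsSecureRandom;

    // Before this patch, each setConf() call opened the configured random
    // device (default /dev/urandom) without closing the previously opened
    // stream, so this loop leaked one file descriptor per iteration.
    OsSecureRandom random = new OsSecureRandom();
    for (int i = 0; i < 1000; i++) {
      random.setConf(new Configuration());
    }
    random.close();  // pre-patch: releases only the stream from the last call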

hadoop-common-project/hadoop-common/CHANGES.txt

@@ -684,6 +684,9 @@ Release 2.6.0 - UNRELEASED
     HADOOP-8815. RandomDatum needs to override hashCode().
     (Brandon Li via suresh)
 
+    HADOOP-11056. OsSecureRandom.setConf() might leak file descriptors (yzhang
+    via cmccabe)
+
     BREAKDOWN OF HDFS-6134 AND HADOOP-10150 SUBTASKS AND RELATED JIRAS
 
       HADOOP-10734. Implement high-performance secure random number sources.

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java

@@ -23,6 +23,8 @@
 import java.io.IOException;
 import java.util.Random;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
@@ -37,6 +39,8 @@
  */
 @InterfaceAudience.Private
 public class OsSecureRandom extends Random implements Closeable, Configurable {
+  public static final Log LOG = LogFactory.getLog(OsSecureRandom.class);
+
   private static final long serialVersionUID = 6391500337172057900L;
 
   private transient Configuration conf;
@@ -72,12 +76,20 @@ synchronized public void setConf(Configuration conf) {
         HADOOP_SECURITY_SECURE_RANDOM_DEVICE_FILE_PATH_KEY,
         HADOOP_SECURITY_SECURE_RANDOM_DEVICE_FILE_PATH_DEFAULT);
     File randomDevFile = new File(randomDevPath);
+
     try {
+      close();
       this.stream = new FileInputStream(randomDevFile);
-      fillReservoir(0);
     } catch (IOException e) {
       throw new RuntimeException(e);
     }
+
+    try {
+      fillReservoir(0);
+    } catch (RuntimeException e) {
+      close();
+      throw e;
+    }
   }
 
   @Override
@@ -109,7 +121,10 @@ synchronized protected int next(int nbits) {
   }
 
   @Override
-  synchronized public void close() throws IOException {
-    stream.close();
+  synchronized public void close() {
+    if (stream != null) {
+      IOUtils.cleanup(LOG, stream);
+      stream = null;
+    }
   }
 }
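
With this change close() is idempotent: it null-checks the stream, releases it through IOUtils.cleanup(), and clears the field, so setConf() can call it unconditionally before reopening the device, and a failure in fillReservoir() no longer leaves a dangling descriptor behind. A usage sketch (illustrative only, not part of this commit; the property name below is assumed to be what HADOOP_SECURITY_SECURE_RANDOM_DEVICE_FILE_PATH_KEY resolves to):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.crypto.random.OsSecureRandom;

    Configuration conf = new Configuration();
    // Assumed property name for the device-path key; default is /dev/urandom.
    conf.set("hadoop.security.random.device.file.path", "/dev/urandom");

    OsSecureRandom random = new OsSecureRandom();
    random.setConf(conf);      // closes any previously opened stream first
    byte[] bytes = new byte[32];
    random.nextBytes(bytes);   // drawn from the reservoir filled from the device
    random.setConf(conf);      // reconfiguring no longer leaks a descriptor
    random.close();            // safe to call more than once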

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/random/TestOsSecureRandom.java

@@ -22,6 +22,7 @@
 import org.apache.commons.lang.SystemUtils;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 import org.junit.Assume;
 import org.junit.Test;
@@ -136,4 +137,18 @@ public void testRefillReservoir() throws Exception {
     }
     random.close();
   }
+
+  @Test(timeout=120000)
+  public void testOsSecureRandomSetConf() throws IOException {
+    Assume.assumeTrue(SystemUtils.IS_OS_LINUX);
+    OsSecureRandom random = new OsSecureRandom();
+    for(int n = 0; n < 10; ++n) {
+      random.setConf(new Configuration());
+      String[] scmd = new String[] {"/bin/sh", "-c", "lsof | wc -l"};
+      ShellCommandExecutor sce = new ShellCommandExecutor(scmd);
+      sce.execute();
+      System.out.println("==lsof result " + n + ":");
+      System.out.println(sce.getOutput());
+    }
+  }
 }