MAPREDUCE-6881. Fix warnings from Spotbugs in hadoop-mapreduce. Contributed by Weiwei Yang.
commit 3ed3062fe3
parent 28eb2aabeb
@@ -27,6 +27,8 @@
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
+import java.util.Set;
+import java.util.Collections;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ExecutorService;
@@ -81,7 +83,7 @@ public class LocalContainerLauncher extends AbstractService implements
   private static final Log LOG = LogFactory.getLog(LocalContainerLauncher.class);

   private FileContext curFC = null;
-  private final HashSet<File> localizedFiles;
+  private Set<File> localizedFiles = new HashSet<File>();
   private final AppContext context;
   private final TaskUmbilicalProtocol umbilical;
   private final ClassLoader jobClassLoader;
@@ -121,9 +123,12 @@ public LocalContainerLauncher(AppContext context,
     // users who do that get what they deserve (and will have to disable
     // uberization in order to run correctly).
     File[] curLocalFiles = curDir.listFiles();
-    localizedFiles = new HashSet<File>(curLocalFiles.length);
-    for (int j = 0; j < curLocalFiles.length; ++j) {
-      localizedFiles.add(curLocalFiles[j]);
+    if (curLocalFiles != null) {
+      HashSet<File> lf = new HashSet<File>(curLocalFiles.length);
+      for (int j = 0; j < curLocalFiles.length; ++j) {
+        lf.add(curLocalFiles[j]);
+      }
+      localizedFiles = Collections.unmodifiableSet(lf);
     }

     // Relocalization note/future FIXME (per chrisdo, 20110315): At moment,
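Note: this hunk, the relocalize() hunk below, and the example parser hunk at the end of the diff all address the same Spotbugs finding: File.listFiles() returns null (rather than an empty array) when the path is not a directory or an I/O error occurs, so the result must be null-checked before use. The constructor additionally publishes the collected set through Collections.unmodifiableSet(). A minimal standalone sketch of the same pattern follows; the class and method names are illustrative and not taken from the patch:

    import java.io.File;
    import java.util.Collections;
    import java.util.HashSet;
    import java.util.Set;

    class LocalizedFileSnapshot {
      // Starts empty so the field is never null, even if listing fails.
      private Set<File> localizedFiles = new HashSet<File>();

      LocalizedFileSnapshot(File curDir) {
        // listFiles() is null when curDir is not a directory or on I/O error.
        File[] curLocalFiles = curDir.listFiles();
        if (curLocalFiles != null) {
          Set<File> lf = new HashSet<File>(curLocalFiles.length);
          for (File f : curLocalFiles) {
            lf.add(f);
          }
          // Publish an unmodifiable view so later code cannot mutate the snapshot.
          localizedFiles = Collections.unmodifiableSet(lf);
        }
      }

      Set<File> getLocalizedFiles() {
        return localizedFiles;
      }
    }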
@@ -521,26 +526,29 @@ private void runSubtask(org.apache.hadoop.mapred.Task task,
      */
     private void relocalize() {
       File[] curLocalFiles = curDir.listFiles();
-      for (int j = 0; j < curLocalFiles.length; ++j) {
-        if (!localizedFiles.contains(curLocalFiles[j])) {
-          // found one that wasn't there before: delete it
-          boolean deleted = false;
-          try {
-            if (curFC != null) {
-              // this is recursive, unlike File delete():
-              deleted = curFC.delete(new Path(curLocalFiles[j].getName()),true);
+      if (curLocalFiles != null) {
+        for (int j = 0; j < curLocalFiles.length; ++j) {
+          if (!localizedFiles.contains(curLocalFiles[j])) {
+            // found one that wasn't there before: delete it
+            boolean deleted = false;
+            try {
+              if (curFC != null) {
+                // this is recursive, unlike File delete():
+                deleted =
+                    curFC.delete(new Path(curLocalFiles[j].getName()), true);
+              }
+            } catch (IOException e) {
+              deleted = false;
+            }
+            if (!deleted) {
+              LOG.warn("Unable to delete unexpected local file/dir "
+                  + curLocalFiles[j].getName()
+                  + ": insufficient permissions?");
             }
-          } catch (IOException e) {
-            deleted = false;
-          }
-          if (!deleted) {
-            LOG.warn("Unable to delete unexpected local file/dir "
-                + curLocalFiles[j].getName() + ": insufficient permissions?");
           }
         }
       }
     }

   } // end EventHandler

   /**
@@ -572,13 +572,15 @@ protected AMPreemptionPolicy createPreemptionPolicy(Configuration conf) {
   private boolean isJobNamePatternMatch(JobConf conf, String jobTempDir) {
     // Matched staging files should be preserved after job is finished.
     if (conf.getKeepTaskFilesPattern() != null && jobTempDir != null) {
-      String jobFileName = Paths.get(jobTempDir).getFileName().toString();
-      Pattern pattern = Pattern.compile(conf.getKeepTaskFilesPattern());
-      Matcher matcher = pattern.matcher(jobFileName);
-      return matcher.find();
-    } else {
-      return false;
+      java.nio.file.Path pathName = Paths.get(jobTempDir).getFileName();
+      if (pathName != null) {
+        String jobFileName = pathName.toString();
+        Pattern pattern = Pattern.compile(conf.getKeepTaskFilesPattern());
+        Matcher matcher = pattern.matcher(jobFileName);
+        return matcher.find();
+      }
     }
+    return false;
   }

   private boolean isKeepFailedTaskFiles(JobConf conf) {
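Note: java.nio.file.Path#getFileName() returns null for a path with no elements (for example a filesystem root such as "/"), so calling toString() on it directly can throw a NullPointerException, which is what Spotbugs flags here; the rewritten method dereferences the result only after a null check. A minimal sketch of the same guard, with an illustrative class and method name not taken from the patch:

    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    final class KeepPatternCheck {
      // True only when the last element of 'dir' matches 'keepPattern'.
      static boolean matchesKeepPattern(String dir, String keepPattern) {
        if (keepPattern != null && dir != null) {
          Path fileName = Paths.get(dir).getFileName();
          if (fileName != null) {  // null for root paths such as "/"
            Matcher matcher =
                Pattern.compile(keepPattern).matcher(fileName.toString());
            return matcher.find();
          }
        }
        return false;
      }
    }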
@@ -98,7 +98,7 @@ public int compareTo(JVMId that) {
     int jobComp = this.jobId.compareTo(that.jobId);
     if(jobComp == 0) {
       if(this.isMap == that.isMap) {
-        return Long.valueOf(this.jvmId).compareTo(that.jvmId);
+        return Long.compare(this.jvmId, that.jvmId);
       } else {
         return this.isMap ? -1 : 1;
       }
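Note: Long.compare(long, long), available since Java 7, compares the two primitives directly, while the old Long.valueOf(this.jvmId).compareTo(that.jvmId) allocates a boxed Long only to compare it and discard it. Both forms return values with the same sign for every input, so the change is behaviour-preserving. A small self-contained check of the equivalence:

    public class LongCompareDemo {
      public static void main(String[] args) {
        long a = 42L, b = 7L;
        // Same sign from both forms; Long.compare skips the boxing.
        System.out.println(Long.valueOf(a).compareTo(b)); // > 0
        System.out.println(Long.compare(a, b));           // > 0
        System.out.println(Long.compare(b, a));           // < 0
        System.out.println(Long.compare(a, a));           // 0
      }
    }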
@@ -35,11 +35,19 @@ public enum Operation {
   SET_JOB_PRIORITY(QueueACL.ADMINISTER_JOBS, JobACL.MODIFY_JOB),
   SUBMIT_JOB(QueueACL.SUBMIT_JOB, null);

-  public QueueACL qACLNeeded;
-  public JobACL jobACLNeeded;
+  private final QueueACL qACLNeeded;
+  private final JobACL jobACLNeeded;

   Operation(QueueACL qACL, JobACL jobACL) {
     this.qACLNeeded = qACL;
     this.jobACLNeeded = jobACL;
   }
+
+  public QueueACL getqACLNeeded() {
+    return qACLNeeded;
+  }
+
+  public JobACL getJobACLNeeded() {
+    return jobACLNeeded;
+  }
 }
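Note: enum constants are shared singletons, so public non-final fields on an enum amount to mutable global state, which Spotbugs warns about; the patch makes the two ACL fields private final and adds read-only accessors, so call sites move from op.qACLNeeded to op.getqACLNeeded(). A self-contained sketch of the same shape, with the ACL types replaced by String and illustrative constant names:

    enum OperationSketch {
      VIEW_JOB("acl-administer-queue", "acl-view-job"),
      MODIFY_JOB("acl-administer-queue", "acl-modify-job");

      private final String qAclNeeded;   // immutable per enum constant
      private final String jobAclNeeded;

      OperationSketch(String qAcl, String jobAcl) {
        this.qAclNeeded = qAcl;
        this.jobAclNeeded = jobAcl;
      }

      public String getQAclNeeded() {
        return qAclNeeded;
      }

      public String getJobAclNeeded() {
        return jobAclNeeded;
      }
    }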
@@ -407,17 +407,12 @@ synchronized void moveToDone() throws IOException {
         }
         JobId jobId = jobIndexInfo.getJobId();

-        List<Path> paths = new ArrayList<Path>(2);
         if (historyFile == null) {
           LOG.info("No file for job-history with " + jobId + " found in cache!");
-        } else {
-          paths.add(historyFile);
         }

         if (confFile == null) {
           LOG.info("No file for jobConf with " + jobId + " found in cache!");
-        } else {
-          paths.add(confFile);
         }

         if (summaryFile == null || !intermediateDoneDirFc.util().exists(
@@ -67,8 +67,12 @@ private static void parseLine(final String line, Map<Parameter, List<TaskResult>
   private void parse(File f, Map<Parameter, List<TaskResult>> sums) throws IOException {
     if (f.isDirectory()) {
       println("Process directory " + f);
-      for(File child : f.listFiles())
-        parse(child, sums);
+      File[] files = f.listFiles();
+      if (files != null) {
+        for(File child : files) {
+          parse(child, sums);
+        }
+      }
     } else if (f.getName().endsWith(".txt")) {
       println("Parse file " + f);
       final Map<Parameter, List<TaskResult>> m = new TreeMap<Parameter, List<TaskResult>>();