HADOOP-8341. Fix or filter findbugs issues in hadoop-tools (bobby)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1335505 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Robert Joseph Evans 2012-05-08 13:20:56 +00:00
parent 1d7b980592
commit a9808de0d9
15 changed files with 114 additions and 27 deletions

View File

@@ -545,6 +545,8 @@ Release 0.23.3 - UNRELEASED
HADOOP-8327. distcpv2 and distcpv1 jars should not coexist (Dave Thompson HADOOP-8327. distcpv2 and distcpv1 jars should not coexist (Dave Thompson
via bobby) via bobby)
HADOOP-8341. Fix or filter findbugs issues in hadoop-tools (bobby)
Release 0.23.2 - UNRELEASED Release 0.23.2 - UNRELEASED
INCOMPATIBLE CHANGES INCOMPATIBLE CHANGES

View File

@@ -117,7 +117,7 @@ public void setConf(Configuration conf) {
// will when running the mapreduce job. // will when running the mapreduce job.
String testJar = System.getProperty(TEST_HADOOP_ARCHIVES_JAR_PATH, null); String testJar = System.getProperty(TEST_HADOOP_ARCHIVES_JAR_PATH, null);
if (testJar != null) { if (testJar != null) {
((JobConf)conf).setJar(testJar); this.conf.setJar(testJar);
} }
} }

View File

@@ -136,10 +136,13 @@ public Job execute() throws Exception {
Job job = null; Job job = null;
try { try {
metaFolder = createMetaFolderPath(); synchronized(this) {
jobFS = metaFolder.getFileSystem(getConf()); //Don't cleanup while we are setting up.
metaFolder = createMetaFolderPath();
jobFS = metaFolder.getFileSystem(getConf());
job = createJob(); job = createJob();
}
createInputFileListing(job); createInputFileListing(job);
job.submit(); job.submit();

View File

@@ -65,9 +65,9 @@
public class Logalyzer { public class Logalyzer {
// Constants // Constants
private static Configuration fsConfig = new Configuration(); private static Configuration fsConfig = new Configuration();
public static String SORT_COLUMNS = public static final String SORT_COLUMNS =
"logalizer.logcomparator.sort.columns"; "logalizer.logcomparator.sort.columns";
public static String COLUMN_SEPARATOR = public static final String COLUMN_SEPARATOR =
"logalizer.logcomparator.column.separator"; "logalizer.logcomparator.column.separator";
static { static {

View File

@@ -0,0 +1,31 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<FindBugsFilter>
<And>
<Class name="org.apache.hadoop.tools.rumen.LoggedJob"/>
<Method name="getMapperTriesToSucceed"/>
<Bug pattern="EI_EXPOSE_REP"/>
<Bug code="EI"/>
</And>
<And>
<Class name="org.apache.hadoop.tools.rumen.ZombieJob"/>
<Method name="getInputSplits"/>
<Bug pattern="EI_EXPOSE_REP"/>
<Bug code="EI"/>
</And>
</FindBugsFilter>

View File

@@ -90,6 +90,16 @@
<build> <build>
<plugins> <plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>findbugs-maven-plugin</artifactId>
<configuration>
<findbugsXmlOutput>true</findbugsXmlOutput>
<xmlOutput>true</xmlOutput>
<excludeFilterFile>${basedir}/dev-support/findbugs-exclude.xml</excludeFilterFile>
<effort>Max</effort>
</configuration>
</plugin>
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId> <artifactId>maven-antrun-plugin</artifactId>

View File

@@ -20,6 +20,7 @@
import java.io.Closeable; import java.io.Closeable;
import java.io.IOException; import java.io.IOException;
import java.io.Serializable;
import java.util.Comparator; import java.util.Comparator;
import java.util.Iterator; import java.util.Iterator;
import java.util.PriorityQueue; import java.util.PriorityQueue;
@@ -59,7 +60,8 @@ public class DeskewedJobTraceReader implements Closeable {
static final private Log LOG = static final private Log LOG =
LogFactory.getLog(DeskewedJobTraceReader.class); LogFactory.getLog(DeskewedJobTraceReader.class);
static private class JobComparator implements Comparator<LoggedJob> { static private class JobComparator implements Comparator<LoggedJob>,
Serializable {
@Override @Override
public int compare(LoggedJob j1, LoggedJob j2) { public int compare(LoggedJob j1, LoggedJob j2) {
return (j1.getSubmitTime() < j2.getSubmitTime()) ? -1 : (j1 return (j1.getSubmitTime() < j2.getSubmitTime()) ? -1 : (j1

View File

@@ -17,6 +17,8 @@
*/ */
package org.apache.hadoop.tools.rumen; package org.apache.hadoop.tools.rumen;
import java.util.Arrays;
import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.MRJobConfig;
public enum JobConfPropertyNames { public enum JobConfPropertyNames {
@@ -33,6 +35,6 @@ public enum JobConfPropertyNames {
} }
public String[] getCandidates() { public String[] getCandidates() {
return candidates; return Arrays.copyOf(candidates, candidates.length);
} }
} }

View File

@@ -17,6 +17,7 @@
*/ */
package org.apache.hadoop.tools.rumen; package org.apache.hadoop.tools.rumen;
import java.io.Serializable;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
@@ -68,7 +69,8 @@ public void setUnknownAttribute(String attributeName, Object ignored) {
* order. * order.
* *
*/ */
static class TopoSort implements Comparator<LoggedNetworkTopology> { static class TopoSort implements Comparator<LoggedNetworkTopology>,
Serializable {
public int compare(LoggedNetworkTopology t1, LoggedNetworkTopology t2) { public int compare(LoggedNetworkTopology t1, LoggedNetworkTopology t2) {
return t1.name.getValue().compareTo(t2.name.getValue()); return t1.name.getValue().compareTo(t2.name.getValue());
} }

View File

@@ -20,6 +20,7 @@
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.io.Serializable;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Comparator; import java.util.Comparator;
@@ -98,7 +99,7 @@ static class MyOptions {
* history file names should result in the order of jobs' submission times. * history file names should result in the order of jobs' submission times.
*/ */
private static class HistoryLogsComparator private static class HistoryLogsComparator
implements Comparator<FileStatus> { implements Comparator<FileStatus>, Serializable {
@Override @Override
public int compare(FileStatus file1, FileStatus file2) { public int compare(FileStatus file1, FileStatus file2) {
return file1.getPath().getName().compareTo( return file1.getPath().getName().compareTo(

View File

@@ -27,7 +27,7 @@
* //TODO There is no caching for saving memory. * //TODO There is no caching for saving memory.
*/ */
public class WordListAnonymizerUtility { public class WordListAnonymizerUtility {
public static final String[] KNOWN_WORDS = static final String[] KNOWN_WORDS =
new String[] {"job", "tmp", "temp", "home", "homes", "usr", "user", "test"}; new String[] {"job", "tmp", "temp", "home", "homes", "usr", "user", "test"};
/** /**

View File

@@ -93,16 +93,8 @@ public NodeName(String nodeName) {
} }
public NodeName(String rName, String hName) { public NodeName(String rName, String hName) {
rName = (rName == null) rName = (rName == null || rName.length() == 0) ? null : rName;
? rName hName = (hName == null || hName.length() == 0) ? null : hName;
: rName.length() == 0
? null
: rName;
hName = (hName == null)
? hName
: hName.length() == 0
? null
: hName;
if (hName == null) { if (hName == null) {
nodeName = rName; nodeName = rName;
rackName = rName; rackName = rName;

View File

@@ -0,0 +1,30 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<FindBugsFilter>
<Match>
<Or>
<Class name="org.apache.hadoop.streaming.PipeMapper" />
<Class name="org.apache.hadoop.streaming.PipeReducer"/>
</Or>
<Or>
<Method name="getFieldSeparator"/>
<Method name="getInputSeparator"/>
</Or>
<Bug pattern="EI_EXPOSE_REP"/>
</Match>
</FindBugsFilter>

View File

@@ -96,6 +96,16 @@
<build> <build>
<plugins> <plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>findbugs-maven-plugin</artifactId>
<configuration>
<findbugsXmlOutput>true</findbugsXmlOutput>
<xmlOutput>true</xmlOutput>
<excludeFilterFile>${basedir}/dev-support/findbugs-exclude.xml</excludeFilterFile>
<effort>Max</effort>
</configuration>
</plugin>
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId> <artifactId>maven-antrun-plugin</artifactId>

View File

@@ -91,7 +91,7 @@ public class StreamJob implements Tool {
@Deprecated @Deprecated
public StreamJob(String[] argv, boolean mayExit) { public StreamJob(String[] argv, boolean mayExit) {
this(); this();
argv_ = argv; argv_ = Arrays.copyOf(argv, argv.length);
this.config_ = new Configuration(); this.config_ = new Configuration();
} }
@@ -113,7 +113,7 @@ public void setConf(Configuration conf) {
@Override @Override
public int run(String[] args) throws Exception { public int run(String[] args) throws Exception {
try { try {
this.argv_ = args; this.argv_ = Arrays.copyOf(args, args.length);
init(); init();
preProcessArgs(); preProcessArgs();
@@ -290,7 +290,7 @@ void parseArgv() {
LOG.warn("-file option is deprecated, please use generic option" + LOG.warn("-file option is deprecated, please use generic option" +
" -files instead."); " -files instead.");
String fileList = null; StringBuffer fileList = new StringBuffer();
for (String file : values) { for (String file : values) {
packageFiles_.add(file); packageFiles_.add(file);
try { try {
@@ -298,13 +298,15 @@ void parseArgv() {
Path path = new Path(pathURI); Path path = new Path(pathURI);
FileSystem localFs = FileSystem.getLocal(config_); FileSystem localFs = FileSystem.getLocal(config_);
String finalPath = path.makeQualified(localFs).toString(); String finalPath = path.makeQualified(localFs).toString();
fileList = fileList == null ? finalPath : fileList + "," + finalPath; if(fileList.length() > 0) {
fileList.append(',');
}
fileList.append(finalPath);
} catch (Exception e) { } catch (Exception e) {
throw new IllegalArgumentException(e); throw new IllegalArgumentException(e);
} }
} }
config_.set("tmpfiles", config_.get("tmpfiles", "") + config_.set("tmpfiles", config_.get("tmpfiles", "") + fileList);
(fileList == null ? "" : fileList));
validate(packageFiles_); validate(packageFiles_);
} }