MAPREDUCE-5780. SliveTest should use the specified path to get the particular FileSystem instead of using the default FileSystem.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1575049 13f79535-47bb-0310-9956-ffa450edef68
commit 8d49acf0e9
parent 278d40f9ac
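The core of the change is swapping FileSystem.get(conf), which always resolves
fs.defaultFS, for Path#getFileSystem(conf), which resolves the filesystem named
by the path's own scheme. Below is a minimal sketch of that difference, not part
of this commit; the URIs and class name are hypothetical, and the hdfs:// lookup
assumes the HDFS client is on the classpath.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class PathFileSystemSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Hypothetical cluster default; the test's paths could live elsewhere.
    conf.set("fs.defaultFS", "hdfs://namenode:8020");

    // An output path on a different filesystem than the default.
    Path output = new Path("file:///tmp/slive/output");

    // Always the default filesystem, no matter where 'output' points.
    FileSystem defaultFs = FileSystem.get(conf);

    // The filesystem that actually owns 'output' (the local FS here).
    FileSystem outputFs = output.getFileSystem(conf);

    System.out.println("default FS: " + defaultFs.getUri()); // hdfs://namenode:8020
    System.out.println("output FS : " + outputFs.getUri());  // file:///
  }
}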
@@ -208,6 +208,9 @@ Release 2.4.0 - UNRELEASED
     MAPREDUCE-5768. TestMRJobs.testContainerRollingLog fails on trunk (Gera
     Shegalov via jlowe)
 
+    MAPREDUCE-5780. SliveTest should use the specified path to get the
+    particular FileSystem instead of using the default FileSystem. (szetszwo)
+
 Release 2.3.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -58,7 +58,6 @@
  * The number of maps is specified by "slive.maps".
  * The number of reduces is specified by "slive.reduces".
  */
-@SuppressWarnings("deprecation")
 public class SliveTest implements Tool {
 
   private static final Log LOG = LogFactory.getLog(SliveTest.class);
@@ -221,7 +220,7 @@ private void runJob(ConfigExtractor config) throws IOException {
   private void writeReport(ConfigExtractor cfg) throws Exception {
     Path dn = cfg.getOutputPath();
     LOG.info("Writing report using contents of " + dn);
-    FileSystem fs = FileSystem.get(cfg.getConfig());
+    FileSystem fs = dn.getFileSystem(cfg.getConfig());
     FileStatus[] reduceFiles = fs.listStatus(dn);
     BufferedReader fileReader = null;
     PrintWriter reportWriter = null;
@@ -292,10 +291,10 @@ private void writeReport(ConfigExtractor cfg) throws Exception {
    * @throws IOException
    */
   private void cleanup(ConfigExtractor cfg) throws IOException {
-    FileSystem fs = FileSystem.get(cfg.getConfig());
     Path base = cfg.getBaseDirectory();
     if (base != null) {
       LOG.info("Attempting to recursively delete " + base);
+      FileSystem fs = base.getFileSystem(cfg.getConfig());
       fs.delete(base, true);
     }
   }
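With the report and cleanup paths resolved through Path#getFileSystem, SliveTest
reads the reduce output and deletes the base directory on the filesystem those
paths actually name, rather than on fs.defaultFS. A side effect of the cleanup()
change is that the FileSystem lookup now happens only inside the null check, so
nothing is resolved when no base directory is configured.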