MAPREDUCE-6905. HADOOP_JOB_HISTORY_OPTS should be HADOOP_JOB_HISTORYSERVER_OPTS in mapred-config.sh. Contributed by LiXin Ge.

This commit is contained in:
Naganarasimha 2017-07-02 15:56:27 +05:30
parent fa1aaee87b
commit bf1f59937d

View File

@@ -294,8 +294,17 @@ private void createControlFile(FileSystem fs,
int nrFiles
) throws IOException {
LOG.info("creating control file: "+nrBytes+" bytes, "+nrFiles+" files");
final int maxDirItems = config.getInt(
DFSConfigKeys.DFS_NAMENODE_MAX_DIRECTORY_ITEMS_KEY,
DFSConfigKeys.DFS_NAMENODE_MAX_DIRECTORY_ITEMS_DEFAULT);
Path controlDir = getControlDir(config);
if (nrFiles > maxDirItems) {
final String message = "The directory item limit of " + controlDir +
" is exceeded: limit=" + maxDirItems + " items=" + nrFiles;
throw new IOException(message);
}
fs.delete(controlDir, true);
for(int i=0; i < nrFiles; i++) {
@@ -310,8 +319,9 @@ private void createControlFile(FileSystem fs,
} catch(Exception e) {
throw new IOException(e.getLocalizedMessage());
} finally {
if (writer != null) {
writer.close();
}
writer = null;
}
}