YARN-720 and MAPREDUCE-5291. container-log4j.properties should not refer to mapreduce properties. Update MRApp to use YARN properties for log setup. Contributed by Zhijie Shen.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1488829 13f79535-47bb-0310-9956-ffa450edef68
commit a2205a3b72
parent f27e484636
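In short, this change renames the container log4j bootstrap keys from yarn.app.mapreduce.container.log.dir / yarn.app.mapreduce.container.log.filesize to yarn.app.container.log.dir / yarn.app.container.log.filesize, and moves the backing constants from MRJobConfig into YarnConfiguration so that container-log4j.properties no longer depends on MapReduce names. A minimal sketch of the resulting flag construction, mirroring the addLog4jSystemProperties change further down in this diff (the class and method names of the sketch itself are illustrative, not part of the patch):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

// Illustration-only helper; mirrors the patched addLog4jSystemProperties.
public class ContainerLog4jFlags {
  public static List<String> log4jVargs(String logLevel, long logSize) {
    List<String> vargs = new ArrayList<String>();
    // container-log4j.properties is loaded from the classpath by log4j.
    vargs.add("-Dlog4j.configuration=container-log4j.properties");
    // Expands to -Dyarn.app.container.log.dir=<LOG_DIR> after this patch.
    vargs.add("-D" + YarnConfiguration.YARN_APP_CONTAINER_LOG_DIR + "="
        + ApplicationConstants.LOG_DIR_EXPANSION_VAR);
    // Expands to -Dyarn.app.container.log.filesize=<logSize>.
    vargs.add("-D" + YarnConfiguration.YARN_APP_CONTAINER_LOG_SIZE + "=" + logSize);
    vargs.add("-Dhadoop.root.logger=" + logLevel + ",CLA");
    return vargs;
  }
}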
@@ -502,6 +502,9 @@ Release 2.1.0-beta - UNRELEASED
     MAPREDUCE-5177. Use common utils FileUtil#setReadable/Writable/Executable &
     FileUtil#canRead/Write/Execute. (Ivan Mitic via suresh)
 
+    MAPREDUCE-5291. Change MR App to use updated property names in
+    container-log4j.properties. (Zhijie Shen via sseth)
+
 Release 2.0.5-alpha - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -52,8 +52,8 @@ public void testCommandLine() throws Exception {
       " -Dhadoop.metrics.log.level=WARN" +
       " -Xmx200m -Djava.io.tmpdir=" + envVar("PWD") + "/tmp" +
       " -Dlog4j.configuration=container-log4j.properties" +
-      " -Dyarn.app.mapreduce.container.log.dir=<LOG_DIR>" +
-      " -Dyarn.app.mapreduce.container.log.filesize=0" +
+      " -Dyarn.app.container.log.dir=<LOG_DIR>" +
+      " -Dyarn.app.container.log.filesize=0" +
       " -Dhadoop.root.logger=INFO,CLA" +
       " org.apache.hadoop.mapred.YarnChild 127.0.0.1" +
       " 54321" +
@@ -450,9 +450,10 @@ private static long[] getFileSizes(Configuration conf, String key) {
   public static void addLog4jSystemProperties(
       String logLevel, long logSize, List<String> vargs) {
     vargs.add("-Dlog4j.configuration=container-log4j.properties");
-    vargs.add("-D" + MRJobConfig.TASK_LOG_DIR + "=" +
+    vargs.add("-D" + YarnConfiguration.YARN_APP_CONTAINER_LOG_DIR + "=" +
         ApplicationConstants.LOG_DIR_EXPANSION_VAR);
-    vargs.add("-D" + MRJobConfig.TASK_LOG_SIZE + "=" + logSize);
+    vargs.add(
+        "-D" + YarnConfiguration.YARN_APP_CONTAINER_LOG_SIZE + "=" + logSize);
     vargs.add("-Dhadoop.root.logger=" + logLevel + ",CLA");
   }
 
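For context, the MapReduce AM appends these flags while building a child JVM command. A rough usage sketch, assuming the helper is MRApps.addLog4jSystemProperties as the commit message and class structure suggest; the surrounding argument values are illustrative:

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.mapreduce.v2.util.MRApps;

// Illustration-only caller; in the real code path the log level and size come
// from the job configuration rather than hard-coded values.
public class ChildJvmCommandSketch {
  public static List<String> vmCommand() {
    List<String> vargs = new ArrayList<String>();
    vargs.add("-Xmx200m");
    // Appends -Dlog4j.configuration, -Dyarn.app.container.log.dir,
    // -Dyarn.app.container.log.filesize and -Dhadoop.root.logger.
    MRApps.addLog4jSystemProperties("INFO", 0L, vargs);
    vargs.add("org.apache.hadoop.mapred.YarnChild");
    return vargs;
  }
}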
@@ -42,9 +42,9 @@
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.SecureIOUtils;
 import org.apache.hadoop.mapreduce.JobID;
-import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.util.ProcessTree;
 import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.log4j.Appender;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
@@ -70,7 +70,7 @@ public class TaskLog {
   static LocalFileSystem localFS = null;
 
   public static String getMRv2LogDir() {
-    return System.getProperty(MRJobConfig.TASK_LOG_DIR);
+    return System.getProperty(YarnConfiguration.YARN_APP_CONTAINER_LOG_DIR);
   }
 
   public static File getTaskLogFile(TaskAttemptID taskid, boolean isCleanup,
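At runtime the renamed key is just a JVM system property set on the container command line, so getMRv2LogDir() reduces to a system-property lookup. A small standalone sketch; the directory value and file name below are illustrative:

import java.io.File;

import org.apache.hadoop.yarn.conf.YarnConfiguration;

// Illustration only: the NodeManager launches the task JVM with
// -Dyarn.app.container.log.dir=<container log dir>, so the lookup below is all
// that getMRv2LogDir() has to do.
public class ResolveContainerLogDirSketch {
  public static void main(String[] args) {
    String logDir =
        System.getProperty(YarnConfiguration.YARN_APP_CONTAINER_LOG_DIR);
    if (logDir == null) {
      System.out.println("not running inside a YARN container; property unset");
    } else {
      // "syslog" is used here purely as an example file name under the log dir.
      System.out.println("task syslog would live at " + new File(logDir, "syslog"));
    }
  }
}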
@@ -599,11 +599,6 @@ public interface MRJobConfig {
   // Containers.
   public static final String APPLICATION_TOKENS_FILE = "appTokens";
 
-  /** The log directory for the containers */
-  public static final String TASK_LOG_DIR = MR_PREFIX + "container.log.dir";
-
-  public static final String TASK_LOG_SIZE = MR_PREFIX + "container.log.filesize";
-
   public static final String MAPREDUCE_V2_CHILD_CLASS =
       "org.apache.hadoop.mapred.YarnChild";
 
@@ -17,6 +17,11 @@
  */
 package org.apache.hadoop.mapred;
 
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
@@ -24,11 +29,9 @@
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.mapred.TaskLog.LogName;
 import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.junit.Test;
 
-import static junit.framework.Assert.*;
-import static org.mockito.Mockito.*;
-
 /**
  * TestCounters checks the sanity and recoverability of Queue
  */
@@ -42,7 +45,8 @@ public class TestTaskLog {
   @Test (timeout=50000)
   public void testTaskLog() throws IOException {
     // test TaskLog
-    System.setProperty(MRJobConfig.TASK_LOG_DIR, "testString");
+    System.setProperty(
+        YarnConfiguration.YARN_APP_CONTAINER_LOG_DIR, "testString");
     assertEquals(TaskLog.getMRv2LogDir(), "testString");
     TaskAttemptID taid = mock(TaskAttemptID.class);
     JobID jid = new JobID("job", 1);
@@ -115,7 +119,7 @@ public String readTaskLog(TaskLog.LogName filter,
   @Test (timeout=50000)
   public void testTaskLogWithoutTaskLogDir() throws IOException {
     // TaskLog tasklog= new TaskLog();
-    System.clearProperty(MRJobConfig.TASK_LOG_DIR);
+    System.clearProperty(YarnConfiguration.YARN_APP_CONTAINER_LOG_DIR);
 
     // test TaskLog
 
@@ -155,8 +155,8 @@ private void runStreamJobAndValidateEnv() throws IOException {
     String env = MapReduceTestUtil.readOutput(outputPath, mr.createJobConf());
     long logSize = USERLOG_LIMIT_KB * 1024;
     assertTrue("environment set for child is wrong", env.contains("INFO,CLA")
-        && env.contains("-Dyarn.app.mapreduce.container.log.dir=")
-        && env.contains("-Dyarn.app.mapreduce.container.log.filesize=" + logSize)
+        && env.contains("-Dyarn.app.container.log.dir=")
+        && env.contains("-Dyarn.app.container.log.filesize=" + logSize)
         && env.contains("-Dlog4j.configuration="));
   }
 }
@@ -84,6 +84,9 @@ Release 2.1.0-beta - UNRELEASED
     YARN-749. Rename ResourceRequest.(get,set)HostName to
     ResourceRequest.(get,set)ResourceName. (acmurthy)
 
+    YARN-720. container-log4j.properties should not refer to mapreduce
+    property names. (Zhijie Shen via sseth)
+
   NEW FEATURES
 
     YARN-482. FS: Extend SchedulingMode to intermediate queues.
|
@ -695,6 +695,14 @@ public class YarnConfiguration extends Configuration {
|
|||||||
*/
|
*/
|
||||||
public static boolean DEFAULT_YARN_MINICLUSTER_FIXED_PORTS = false;
|
public static boolean DEFAULT_YARN_MINICLUSTER_FIXED_PORTS = false;
|
||||||
|
|
||||||
|
|
||||||
|
/** The log directory for the containers */
|
||||||
|
public static final String YARN_APP_CONTAINER_LOG_DIR =
|
||||||
|
YARN_PREFIX + "app.container.log.dir";
|
||||||
|
|
||||||
|
public static final String YARN_APP_CONTAINER_LOG_SIZE =
|
||||||
|
YARN_PREFIX + "app.container.log.filesize";
|
||||||
|
|
||||||
////////////////////////////////
|
////////////////////////////////
|
||||||
// Other Configs
|
// Other Configs
|
||||||
////////////////////////////////
|
////////////////////////////////
|
||||||
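Moving the keys into YarnConfiguration is what lets applications other than MapReduce drive container-log4j.properties with the same flags. A hypothetical non-MapReduce application master could build its container launch command along these lines; the class name, main class, and log size here are made up for illustration:

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

// Hypothetical, illustration-only command builder; not from the patch.
public class GenericAmCommandSketch {
  public static List<String> containerCommand() {
    return Arrays.asList(
        "${JAVA_HOME}/bin/java",
        "-Dlog4j.configuration=container-log4j.properties",
        "-D" + YarnConfiguration.YARN_APP_CONTAINER_LOG_DIR + "="
            + ApplicationConstants.LOG_DIR_EXPANSION_VAR,
        "-D" + YarnConfiguration.YARN_APP_CONTAINER_LOG_SIZE + "=" + (10L << 20),
        "-Dhadoop.root.logger=INFO,CLA",
        "com.example.MyContainerMain",  // hypothetical container main class
        "1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stdout",
        "2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stderr");
  }
}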
|
@ -25,12 +25,12 @@ log4j.threshold=ALL
|
|||||||
#
|
#
|
||||||
|
|
||||||
#Default values
|
#Default values
|
||||||
yarn.app.mapreduce.container.log.dir=null
|
yarn.app.container.log.dir=null
|
||||||
yarn.app.mapreduce.container.log.filesize=100
|
yarn.app.container.log.filesize=100
|
||||||
|
|
||||||
log4j.appender.CLA=org.apache.hadoop.yarn.ContainerLogAppender
|
log4j.appender.CLA=org.apache.hadoop.yarn.ContainerLogAppender
|
||||||
log4j.appender.CLA.containerLogDir=${yarn.app.mapreduce.container.log.dir}
|
log4j.appender.CLA.containerLogDir=${yarn.app.container.log.dir}
|
||||||
log4j.appender.CLA.totalLogFileSize=${yarn.app.mapreduce.container.log.filesize}
|
log4j.appender.CLA.totalLogFileSize=${yarn.app.container.log.filesize}
|
||||||
|
|
||||||
log4j.appender.CLA.layout=org.apache.log4j.PatternLayout
|
log4j.appender.CLA.layout=org.apache.log4j.PatternLayout
|
||||||
log4j.appender.CLA.layout.ConversionPattern=%d{ISO8601} %p [%t] %c: %m%n
|
log4j.appender.CLA.layout.ConversionPattern=%d{ISO8601} %p [%t] %c: %m%n
|
||||||
|
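The ${...} placeholders in this file are resolved by log4j from JVM system properties, which is why the -D flags added elsewhere in this patch are all the wiring that is needed. A standalone sketch of that resolution; the path and size values are illustrative, and it assumes container-log4j.properties is on the classpath:

import java.net.URL;

import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;

// Illustration only: shows log4j substituting the renamed keys from system
// properties into container-log4j.properties.
public class ContainerLog4jDemo {
  public static void main(String[] args) {
    // Normally these arrive as -D flags on the container command line.
    System.setProperty("yarn.app.container.log.dir", "/tmp/container_demo/logs");
    System.setProperty("yarn.app.container.log.filesize", "10485760");
    System.setProperty("hadoop.root.logger", "INFO,CLA");

    URL conf = Thread.currentThread().getContextClassLoader()
        .getResource("container-log4j.properties");
    if (conf != null) {
      PropertyConfigurator.configure(conf);
      Logger.getLogger(ContainerLog4jDemo.class)
          .info("routed through ContainerLogAppender into the configured dir");
    } else {
      System.err.println("container-log4j.properties not found on classpath");
    }
  }
}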