From 67699c2d187a8480a46acf5031652ff19196823d Mon Sep 17 00:00:00 2001
From: Jason Darrell Lowe
Date: Tue, 19 Mar 2013 19:29:15 +0000
Subject: [PATCH] YARN-200. yarn log does not output all needed information,
 and is in a binary format. Contributed by Ravi Prakash

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1458466 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-yarn-project/CHANGES.txt               |  3 ++
 .../logaggregation/AggregatedLogFormat.java   | 14 +++++----
 .../hadoop/yarn/logaggregation/LogDumper.java | 30 ++++++++++--------
 .../TestLogAggregationService.java            | 31 +++++++++++--------
 4 files changed, 46 insertions(+), 32 deletions(-)

diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 5fe8c01dbd..c5227d588d 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -414,6 +414,9 @@ Release 0.23.7 - UNRELEASED
     YARN-468. coverage fix for org.apache.hadoop.yarn.server.webproxy.amfilter
     (Aleksey Gorshkov via bobby)
 
+    YARN-200. yarn log does not output all needed information, and is in a
+    binary format (Ravi Prakash via jlowe)
+
   OPTIMIZATIONS
 
     YARN-357. App submission should not be synchronized (daryn)
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
index a310ab696b..c519f17959 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
@@ -27,6 +27,7 @@
 import java.io.FileInputStream;
 import java.io.InputStreamReader;
 import java.io.IOException;
+import java.io.PrintStream;
 import java.io.Writer;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
@@ -505,7 +506,7 @@ public static void readAcontainerLogs(DataInputStream valueStream,
    * @throws IOException
    */
   public static void readAContainerLogsForALogType(
-      DataInputStream valueStream, DataOutputStream out)
+      DataInputStream valueStream, PrintStream out)
           throws IOException {
 
     byte[] buf = new byte[65535];
@@ -513,11 +514,11 @@ public static void readAContainerLogsForALogType(
     String fileType = valueStream.readUTF();
     String fileLengthStr = valueStream.readUTF();
     long fileLength = Long.parseLong(fileLengthStr);
-    out.writeUTF("\nLogType:");
-    out.writeUTF(fileType);
-    out.writeUTF("\nLogLength:");
-    out.writeUTF(fileLengthStr);
-    out.writeUTF("\nLog Contents:\n");
+    out.print("LogType: ");
+    out.println(fileType);
+    out.print("LogLength: ");
+    out.println(fileLengthStr);
+    out.println("Log Contents:");
 
     int curRead = 0;
     long pendingRead = fileLength - curRead;
@@ -533,6 +534,7 @@
           pendingRead > buf.length ? buf.length : (int) pendingRead;
       len = valueStream.read(buf, 0, toRead);
     }
+    out.println("");
   }
 
   public void close() throws IOException {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogDumper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogDumper.java
index c81b772e78..d7da036635 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogDumper.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogDumper.java
@@ -19,10 +19,10 @@
 package org.apache.hadoop.yarn.logaggregation;
 
 import java.io.DataInputStream;
-import java.io.DataOutputStream;
 import java.io.EOFException;
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.io.PrintStream;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
@@ -30,6 +30,7 @@
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileContext;
@@ -57,10 +58,13 @@ public class LogDumper extends Configured implements Tool {
 
   public int run(String[] args) throws Exception {
     Options opts = new Options();
-    opts.addOption(APPLICATION_ID_OPTION, true, "ApplicationId");
-    opts.addOption(CONTAINER_ID_OPTION, true, "ContainerId");
-    opts.addOption(NODE_ADDRESS_OPTION, true, "NodeAddress");
-    opts.addOption(APP_OWNER_OPTION, true, "AppOwner");
+    opts.addOption(APPLICATION_ID_OPTION, true, "ApplicationId (required)");
+    opts.addOption(CONTAINER_ID_OPTION, true,
+        "ContainerId (must be specified if node address is specified)");
+    opts.addOption(NODE_ADDRESS_OPTION, true, "NodeAddress in the format "
+        + "nodename:port (must be specified if container id is specified)");
+    opts.addOption(APP_OWNER_OPTION, true,
+        "AppOwner (assumed to be current user if not specified)");
 
     if (args.length < 1) {
       HelpFormatter formatter = new HelpFormatter();
@@ -99,14 +103,12 @@ public int run(String[] args) throws Exception {
     ApplicationId appId =
         ConverterUtils.toApplicationId(recordFactory, appIdStr);
 
-    DataOutputStream out = new DataOutputStream(System.out);
-
     if (appOwner == null || appOwner.isEmpty()) {
       appOwner = UserGroupInformation.getCurrentUser().getShortUserName();
     }
     int resultCode = 0;
     if (containerIdStr == null && nodeAddress == null) {
-      resultCode = dumpAllContainersLogs(appId, appOwner, out);
+      resultCode = dumpAllContainersLogs(appId, appOwner, System.out);
     } else if ((containerIdStr == null && nodeAddress != null)
         || (containerIdStr != null && nodeAddress == null)) {
       System.out.println("ContainerId or NodeAddress cannot be null!");
@@ -125,7 +127,7 @@ public int run(String[] args) throws Exception {
             appOwner,
             ConverterUtils.toNodeId(nodeAddress),
             LogAggregationUtils.getRemoteNodeLogDirSuffix(getConf())));
-      resultCode = dumpAContainerLogs(containerIdStr, reader, out);
+      resultCode = dumpAContainerLogs(containerIdStr, reader, System.out);
     }
 
     return resultCode;
@@ -149,12 +151,11 @@ public int dumpAContainersLogs(String appId, String containerId,
           "Log aggregation has not completed or is not enabled.");
       return -1;
     }
-    DataOutputStream out = new DataOutputStream(System.out);
-    return dumpAContainerLogs(containerId, reader, out);
+    return dumpAContainerLogs(containerId, reader, System.out);
   }
 
   private int dumpAContainerLogs(String containerIdStr,
-      AggregatedLogFormat.LogReader reader, DataOutputStream out)
+      AggregatedLogFormat.LogReader reader, PrintStream out)
       throws IOException {
     DataInputStream valueStream;
     LogKey key = new LogKey();
@@ -183,7 +184,7 @@ private int dumpAContainerLogs(String containerIdStr,
   }
 
   private int dumpAllContainersLogs(ApplicationId appId, String appOwner,
-      DataOutputStream out) throws IOException {
+      PrintStream out) throws IOException {
     Path remoteRootLogDir = new Path(getConf().get(
         YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
         YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR));
@@ -216,6 +217,9 @@ private int dumpAllContainersLogs(ApplicationId appId, String appOwner,
 
         valueStream = reader.next(key);
         while (valueStream != null) {
+          String containerString = "\n\nContainer: " + key + " on " + thisNodeFile.getPath().getName();
+          out.println(containerString);
+          out.println(StringUtils.repeat("=", containerString.length()));
           while (true) {
             try {
               LogReader.readAContainerLogsForALogType(valueStream, out);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
index 168f619adf..2408681ddb 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
@@ -22,11 +22,13 @@
 import static junit.framework.Assert.assertEquals;
 import static junit.framework.Assert.assertTrue;
 
+import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.EOFException;
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
+import java.io.PrintStream;
 import java.io.PrintWriter;
 import java.io.Writer;
 import java.lang.reflect.Method;
@@ -40,6 +42,7 @@
 
 import junit.framework.Assert;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.UnsupportedFileSystemException;
@@ -531,24 +534,26 @@ private void verifyContainerLogs(
 
       while (true) {
         try {
-          DataOutputBuffer dob = new DataOutputBuffer();
-          LogReader.readAContainerLogsForALogType(valueStream, dob);
+          ByteArrayOutputStream baos = new ByteArrayOutputStream();
+          PrintStream ps = new PrintStream(baos);
+          LogReader.readAContainerLogsForALogType(valueStream, ps);
 
-          DataInputBuffer dib = new DataInputBuffer();
-          dib.reset(dob.getData(), dob.getLength());
+          String writtenLines[] = baos.toString().split(
+              System.getProperty("line.separator"));
 
-          Assert.assertEquals("\nLogType:", dib.readUTF());
-          String fileType = dib.readUTF();
+          Assert.assertEquals("LogType:", writtenLines[0].substring(0, 8));
+          String fileType = writtenLines[0].substring(9);
 
-          Assert.assertEquals("\nLogLength:", dib.readUTF());
-          String fileLengthStr = dib.readUTF();
+          Assert.assertEquals("LogLength:", writtenLines[1].substring(0, 10));
+          String fileLengthStr = writtenLines[1].substring(11);
           long fileLength = Long.parseLong(fileLengthStr);
 
-          Assert.assertEquals("\nLog Contents:\n", dib.readUTF());
-          byte[] buf = new byte[(int) fileLength]; // cast is okay in this
-                                                   // test.
-          dib.read(buf, 0, (int) fileLength);
-          perContainerMap.put(fileType, new String(buf));
+          Assert.assertEquals("Log Contents:",
+              writtenLines[2].substring(0, 13));
+
+          String logContents = StringUtils.join(
+              Arrays.copyOfRange(writtenLines, 3, writtenLines.length), "\n");
+          perContainerMap.put(fileType, logContents);
 
           LOG.info("LogType:" + fileType);
           LOG.info("LogType:" + fileLength);
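
Not part of the original patch: a minimal, self-contained sketch of driving the
patched LogReader.readAContainerLogsForALogType(), which now takes a
PrintStream instead of a DataOutputStream. The record framing below (writeUTF
of the file type and its decimal length, then the raw bytes) mirrors what the
aggregated-log writer produces; the log type and contents are hypothetical.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;

import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogReader;

public class ReaderSketch {
  public static void main(String[] args) throws Exception {
    // Frame one log-file entry the way the aggregation writer does:
    // UTF log type, UTF decimal length, then the raw log bytes.
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(bos);
    String contents = "Hello from stdout\n";          // hypothetical log body
    dos.writeUTF("stdout");
    dos.writeUTF(String.valueOf(contents.length()));  // 18 bytes (all ASCII)
    dos.writeBytes(contents);

    DataInputStream valueStream =
        new DataInputStream(new ByteArrayInputStream(bos.toByteArray()));

    // After this patch the call emits plain text rather than writeUTF frames:
    //   LogType: stdout
    //   LogLength: 18
    //   Log Contents:
    //   Hello from stdout
    LogReader.readAContainerLogsForALogType(valueStream, System.out);
  }
}

Because LogDumper now passes System.out (a PrintStream) straight through, the
CLI prints exactly this text, and the updated test captures the same output via
a PrintStream wrapped around a ByteArrayOutputStream.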