diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
index 56907431d1..47d2389254 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
@@ -77,6 +77,8 @@
import org.codehaus.jackson.node.ObjectNode;
import com.google.common.annotations.VisibleForTesting;
+import com.sun.jersey.api.client.ClientHandlerException;
+
/**
* The job history events get routed to this class. This class writes the Job
* history events to the DFS directly into a staging dir and then moved to a
@@ -1032,12 +1034,9 @@ private void processEventForTimelineServer(HistoryEvent event, JobId jobId,
+ error.getErrorCode());
}
}
- } catch (IOException ex) {
+ } catch (YarnException | IOException | ClientHandlerException ex) {
LOG.error("Error putting entity " + tEntity.getEntityId() + " to Timeline"
- + "Server", ex);
- } catch (YarnException ex) {
- LOG.error("Error putting entity " + tEntity.getEntityId() + " to Timeline"
- + "Server", ex);
+ + "Server", ex);
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/pom.xml
index c118603d5e..dba8fc0a4d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/pom.xml
@@ -131,6 +131,12 @@
test-jar
test
+
+ org.apache.hadoop
+ hadoop-yarn-common
+ test-jar
+ test
+
org.apache.hadoop
hadoop-hdfs
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java
index cbe0348055..2b85ba8dc8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java
@@ -104,6 +104,7 @@
import org.apache.log4j.LogManager;
import com.google.common.annotations.VisibleForTesting;
+import com.sun.jersey.api.client.ClientHandlerException;
/**
* An ApplicationMaster for executing shell commands on a set of launched
@@ -1149,13 +1150,14 @@ private void publishContainerStartEvent(
putContainerEntity(timelineClient,
container.getId().getApplicationAttemptId(),
entity));
- } catch (YarnException | IOException e) {
+ } catch (YarnException | IOException | ClientHandlerException e) {
LOG.error("Container start event could not be published for "
+ container.getId().toString(), e);
}
}
- private void publishContainerEndEvent(
+ @VisibleForTesting
+ void publishContainerEndEvent(
final TimelineClient timelineClient, ContainerStatus container,
String domainId, UserGroupInformation ugi) {
final TimelineEntity entity = new TimelineEntity();
@@ -1177,7 +1179,7 @@ private void publishContainerEndEvent(
putContainerEntity(timelineClient,
container.getContainerId().getApplicationAttemptId(),
entity));
- } catch (YarnException | IOException e) {
+ } catch (YarnException | IOException | ClientHandlerException e) {
LOG.error("Container end event could not be published for "
+ container.getContainerId().toString(), e);
}
@@ -1212,7 +1214,7 @@ private void publishApplicationAttemptEvent(
try {
TimelinePutResponse response = timelineClient.putEntities(entity);
processTimelineResponseErrors(response);
- } catch (YarnException | IOException e) {
+ } catch (YarnException | IOException | ClientHandlerException e) {
LOG.error("App Attempt "
+ (appEvent.equals(DSEvent.DS_APP_ATTEMPT_START) ? "start" : "end")
+ " event could not be published for "
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
index 65360508ca..2b46fca4b4 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
@@ -18,6 +18,10 @@
package org.apache.hadoop.yarn.applications.distributedshell;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
@@ -27,6 +31,7 @@
import java.io.OutputStream;
import java.io.PrintWriter;
import java.net.InetAddress;
+import java.net.URI;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
@@ -46,14 +51,24 @@
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.net.ServerSocketUtil;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.JarFinder;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.api.records.ContainerState;
+import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
+import org.apache.hadoop.yarn.applications.distributedshell.ApplicationMaster;
+import org.apache.hadoop.yarn.client.api.impl.DirectTimelineWriter;
+import org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl;
+import org.apache.hadoop.yarn.client.api.impl.TimelineWriter;
+import org.apache.hadoop.yarn.client.api.impl.TestTimelineClient;
+import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.client.api.YarnClient;
+
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.MiniYARNCluster;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
@@ -61,6 +76,7 @@
import org.apache.hadoop.yarn.server.timeline.NameValuePair;
import org.apache.hadoop.yarn.server.timeline.TimelineVersion;
import org.apache.hadoop.yarn.server.timeline.TimelineVersionWatcher;
+import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.junit.After;
import org.junit.Assert;
@@ -69,6 +85,8 @@
import org.junit.Test;
import org.junit.rules.Timeout;
+import com.sun.jersey.api.client.ClientHandlerException;
+
public class TestDistributedShell {
private static final Log LOG =
@@ -77,6 +95,7 @@ public class TestDistributedShell {
protected MiniYARNCluster yarnCluster = null;
protected MiniDFSCluster hdfsCluster = null;
private FileSystem fs = null;
+ private TimelineWriter spyTimelineWriter;
protected YarnConfiguration conf = null;
private static final int NUM_NMS = 1;
private static final float DEFAULT_TIMELINE_VERSION = 1.0f;
@@ -865,6 +884,37 @@ public void testDSShellWithInvalidArgs() throws Exception {
}
}
+ @Test
+ public void testDSTimelineClientWithConnectionRefuse() throws Exception {
+ ApplicationMaster am = new ApplicationMaster();
+
+ TimelineClientImpl client = new TimelineClientImpl() {
+ @Override
+ protected TimelineWriter createTimelineWriter(Configuration conf,
+ UserGroupInformation authUgi, com.sun.jersey.api.client.Client client,
+ URI resURI) throws IOException {
+ TimelineWriter timelineWriter =
+ new DirectTimelineWriter(authUgi, client, resURI);
+ spyTimelineWriter = spy(timelineWriter);
+ return spyTimelineWriter;
+ }
+ };
+ client.init(conf);
+ client.start();
+ TestTimelineClient.mockEntityClientResponse(spyTimelineWriter, null,
+ false, true);
+ try {
+ UserGroupInformation ugi = mock(UserGroupInformation.class);
+ when(ugi.getShortUserName()).thenReturn("user1");
+      // verify that no ClientHandlerException is thrown.
+ am.publishContainerEndEvent(client, ContainerStatus.newInstance(
+ BuilderUtils.newContainerId(1, 1, 1, 1), ContainerState.COMPLETE, "",
+ 1), "domainId", ugi);
+ } finally {
+ client.stop();
+ }
+ }
+
protected void waitForNMsToRegister() throws Exception {
int sec = 60;
while (sec >= 0) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClient.java
index 39fc8deb3a..d5e186c9b6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClient.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestTimelineClient.java
@@ -298,7 +298,7 @@ private void assertException(TimelineClientImpl client, RuntimeException ce) {
client.connectionRetry.getRetired());
}
- private static ClientResponse mockEntityClientResponse(
+ public static ClientResponse mockEntityClientResponse(
TimelineWriter spyTimelineWriter, ClientResponse.Status status,
boolean hasError, boolean hasRuntimeError) {
ClientResponse response = mock(ClientResponse.class);