diff --git a/hadoop-common-project/hadoop-common/CHANGES.HDFS-1623.txt b/hadoop-common-project/hadoop-common/CHANGES.HDFS-1623.txt
index c9dd46062f..748ff939ce 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.HDFS-1623.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.HDFS-1623.txt
@@ -49,3 +49,5 @@ core-default.xml file. (Uma Maheswara Rao G via atm)
 HADOOP-8041. Log a warning when a failover is first attempted (todd)
 
 HADOOP-8068. void methods can swallow exceptions when going through failover path (todd)
+
+HADOOP-8116. RetriableCommand is using RetryPolicy incorrectly after HADOOP-7896. (atm)
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/RetriableCommand.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/RetriableCommand.java
index 1d248f082a..563372e009 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/RetriableCommand.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/RetriableCommand.java
@@ -22,7 +22,9 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.retry.RetryPolicy;
+import org.apache.hadoop.io.retry.RetryPolicy.RetryAction;
 import org.apache.hadoop.io.retry.RetryPolicies;
+import org.apache.hadoop.util.ThreadUtil;
 
 import java.io.IOException;
 import java.util.concurrent.TimeUnit;
@@ -80,7 +82,7 @@ public RetriableCommand(String description, RetryPolicy retryPolicy) {
   public Object execute(Object... arguments) throws Exception {
     Exception latestException;
     int counter = 0;
-    do {
+    while (true) {
       try {
         return doExecute(arguments);
       } catch(Exception exception) {
@@ -88,7 +90,13 @@ public Object execute(Object... arguments) throws Exception {
         latestException = exception;
       }
       counter++;
-    } while (retryPolicy.shouldRetry(latestException, counter, 0, true).equals(RetryPolicy.RetryAction.RETRY));
+      RetryAction action = retryPolicy.shouldRetry(latestException, counter, 0, true);
+      if (action.action == RetryPolicy.RetryAction.RetryDecision.RETRY) {
+        ThreadUtil.sleepAtLeastIgnoreInterrupts(action.delayMillis);
+      } else {
+        break;
+      }
+    }
 
     throw new IOException("Couldn't run retriable-command: " + description,
                           latestException);
diff --git a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
index e5ab0595c3..5ba5eb8867 100644
--- a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
+++ b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
@@ -545,7 +545,12 @@ public Integer run() {
             Assert.fail("Didn't expect the file to be copied");
           } catch (AccessControlException ignore) {
           } catch (Exception e) {
-            if (e.getCause() == null || !(e.getCause() instanceof AccessControlException)) {
+            // We want to make sure the underlying cause of the exception is
+            // due to permissions error. The exception we're interested in is
+            // wrapped twice - once in RetriableCommand and again in CopyMapper
+            // itself.
+            if (e.getCause() == null || e.getCause().getCause() == null ||
+                !(e.getCause().getCause() instanceof AccessControlException)) {
              throw new RuntimeException(e);
            }
          }
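
Note (not part of the patch): since HADOOP-7896, RetryPolicy.shouldRetry() returns a RetryAction object carrying both the decision and the delay to honor, rather than a bare enum constant, which is why the old .equals(RetryPolicy.RetryAction.RETRY) check above never matched. A minimal standalone sketch of the corrected calling pattern follows; doFlakyWork() is a hypothetical operation standing in for RetriableCommand.doExecute(), and the policy choice is illustrative.

import java.io.IOException;
import java.util.concurrent.TimeUnit;

import org.apache.hadoop.io.retry.RetryPolicies;
import org.apache.hadoop.io.retry.RetryPolicy;
import org.apache.hadoop.io.retry.RetryPolicy.RetryAction;

public class RetryLoopSketch {

  // Hypothetical flaky operation, standing in for the real retriable call.
  private static void doFlakyWork() throws IOException {
    // ... work that may intermittently throw ...
  }

  public static void run() throws Exception {
    // Illustrative policy: up to 3 attempts with a fixed 100 ms sleep.
    RetryPolicy policy = RetryPolicies.retryUpToMaximumCountWithFixedSleep(
        3, 100, TimeUnit.MILLISECONDS);

    Exception latest;
    int failures = 0;
    while (true) {
      try {
        doFlakyWork();
        return;
      } catch (IOException e) {
        latest = e;
        failures++;
      }
      // Post-HADOOP-7896: inspect the RetryAction's decision field and
      // honor its delay, instead of comparing against an enum constant.
      RetryAction action = policy.shouldRetry(latest, failures, 0, true);
      if (action.action != RetryAction.RetryDecision.RETRY) {
        break;  // policy gave up (FAIL; FAILOVER_AND_RETRY not handled here)
      }
      Thread.sleep(action.delayMillis);
    }
    throw new IOException("Gave up after " + failures + " failures", latest);
  }
}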