diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/MultiObjectDeleteException.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/MultiObjectDeleteException.java
index 6082c2f08d..72ead1fb15 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/MultiObjectDeleteException.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/MultiObjectDeleteException.java
@@ -22,6 +22,8 @@
 import java.nio.file.AccessDeniedException;
 import java.util.List;
 
+import software.amazon.awssdk.awscore.exception.AwsErrorDetails;
+import software.amazon.awssdk.http.SdkHttpResponse;
 import software.amazon.awssdk.services.s3.model.S3Error;
 import software.amazon.awssdk.services.s3.model.S3Exception;
 import org.slf4j.Logger;
@@ -55,10 +57,39 @@ public class MultiObjectDeleteException extends S3Exception {
    */
   public static final String ACCESS_DENIED = "AccessDenied";
 
+  /**
+   * Field value for the superclass builder: {@value}.
+   */
+  private static final int STATUS_CODE = SC_200_OK;
+
+  /**
+   * Field value for the superclass builder: {@value}.
+   */
+  private static final String ERROR_CODE = "MultiObjectDeleteException";
+
+  /**
+   * Field value for the superclass builder: {@value}.
+   */
+  private static final String SERVICE_NAME = "Amazon S3";
+
+  /**
+   * Extracted error list.
+   */
   private final List<S3Error> errors;
 
   public MultiObjectDeleteException(List<S3Error> errors) {
-    super(builder().message(errors.toString()).statusCode(SC_200_OK));
+    super(builder()
+        .message(errors.toString())
+        .awsErrorDetails(
+            AwsErrorDetails.builder()
+                .errorCode(ERROR_CODE)
+                .errorMessage(ERROR_CODE)
+                .serviceName(SERVICE_NAME)
+                .sdkHttpResponse(SdkHttpResponse.builder()
+                    .statusCode(STATUS_CODE)
+                    .build())
+                .build())
+        .statusCode(STATUS_CODE));
     this.errors = errors;
   }
 
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestErrorTranslation.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestErrorTranslation.java
index 71536880dd..dd4abe64c6 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestErrorTranslation.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestErrorTranslation.java
@@ -22,15 +22,19 @@
 import java.net.ConnectException;
 import java.net.NoRouteToHostException;
 import java.net.UnknownHostException;
+import java.util.Collections;
 
+import org.assertj.core.api.Assertions;
 import org.junit.Test;
+import software.amazon.awssdk.awscore.retry.conditions.RetryOnErrorCodeCondition;
 import software.amazon.awssdk.core.exception.SdkClientException;
+import software.amazon.awssdk.core.retry.RetryPolicyContext;
 
 import org.apache.hadoop.fs.PathIOException;
 import org.apache.hadoop.test.AbstractHadoopTestBase;
 
 import static org.apache.hadoop.fs.s3a.impl.ErrorTranslation.maybeExtractNetworkException;
 import static org.apache.hadoop.test.LambdaTestUtils.intercept;
 
 /**
  * Unit tests related to the {@link ErrorTranslation} class.
@@ -112,4 +116,21 @@ public NoConstructorIOE() {
     }
   }
+
+  /**
+   * Verify that MultiObjectDeleteException fills in its error details.
+   */
+  @Test
+  public void testMultiObjectExceptionFilledIn() throws Throwable {
+
+    MultiObjectDeleteException ase =
+        new MultiObjectDeleteException(Collections.emptyList());
+    RetryPolicyContext context = RetryPolicyContext.builder()
+        .exception(ase)
+        .build();
+    RetryOnErrorCodeCondition retry = RetryOnErrorCodeCondition.create("");
+    Assertions.assertThat(retry.shouldRetry(context))
+        .describedAs("retry policy of MultiObjectException")
+        .isFalse();
+  }
 
 }