diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
index f0769c1237..92bb99ee9d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
@@ -82,6 +82,7 @@
import org.apache.hadoop.fs.Options.ChecksumOpt;
import org.apache.hadoop.fs.ParentNotDirectoryException;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
import org.apache.hadoop.fs.QuotaUsage;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.StorageType;
@@ -1620,7 +1621,8 @@ public boolean delete(String src, boolean recursive) throws IOException {
throw re.unwrapRemoteException(AccessControlException.class,
FileNotFoundException.class,
SafeModeException.class,
UnresolvedPathException.class,
- SnapshotAccessControlException.class);
+ SnapshotAccessControlException.class,
+ PathIsNotEmptyDirectoryException.class);
}
}
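
Reviewer note (not part of the patch): this change matters because RemoteException#unwrapRemoteException(Class...) only re-materializes the typed, server-side exception when its class appears in the lookup list; any class not listed reaches the caller as a generic RemoteException. A minimal sketch of the mechanism, with a hypothetical helper name:

    import java.io.IOException;
    import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
    import org.apache.hadoop.ipc.RemoteException;

    // Hypothetical helper mirroring the unwrap step in DFSClient#delete:
    // with PathIsNotEmptyDirectoryException in the lookup list, the
    // wire-level RemoteException is converted back into the typed exception.
    class UnwrapSketch {
      static IOException toClientException(RemoteException re) {
        return re.unwrapRemoteException(PathIsNotEmptyDirectoryException.class);
      }
    }
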
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
index fbef037361..0d77037d9d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
@@ -26,6 +26,7 @@
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.crypto.CryptoProtocolVersion;
import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedEntries;
+import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
import org.apache.hadoop.hdfs.AddBlockFlag;
import org.apache.hadoop.fs.CacheFlag;
import org.apache.hadoop.fs.ContentSummary;
@@ -625,6 +626,8 @@ boolean truncate(String src, long newLength, String clientName)
* @throws org.apache.hadoop.fs.UnresolvedLinkException If src
* contains a symlink
* @throws SnapshotAccessControlException if path is in RO snapshot
+ * @throws PathIsNotEmptyDirectoryException if path is a non-empty directory
+ * and recursive is set to false
* @throws IOException If an I/O error occurred
*/
@AtMostOnce
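
Context for reviewers (not part of the patch): the NameNode already raises this exception when a non-recursive delete targets a non-empty directory; the patch documents that contract here and lets the typed exception survive the RPC boundary on the client side. A standalone rendering of the server-side rule, with hypothetical class and method names:

    import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;

    // Hypothetical sketch of the rule documented above: a non-recursive
    // delete of a non-empty directory must fail with the typed exception.
    class DeletePrecondition {
      static void checkDelete(String path, boolean recursive, boolean nonEmptyDir)
          throws PathIsNotEmptyDirectoryException {
        if (!recursive && nonEmptyDir) {
          throw new PathIsNotEmptyDirectoryException(path + " is non empty");
        }
      }
    }
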
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
index 823c747a05..072ee9fcfd 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
@@ -67,6 +67,7 @@
import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
import org.apache.hadoop.fs.Options.ChecksumOpt;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.StorageStatistics.LongStatistic;
import org.apache.hadoop.fs.StorageType;
@@ -571,6 +572,22 @@ public void testDFSClient() throws Exception {
in.close();
fs.close();
}
+
+ {
+ // Test PathIsNotEmptyDirectoryException while deleting non-empty dir
+ FileSystem fs = cluster.getFileSystem();
+ fs.mkdirs(new Path("/test/nonEmptyDir"));
+ fs.create(new Path("/test/nonEmptyDir/emptyFile")).close();
+ try {
+ fs.delete(new Path("/test/nonEmptyDir"), false);
+ Assert.fail("Expecting PathIsNotEmptyDirectoryException");
+ } catch (PathIsNotEmptyDirectoryException ex) {
+ // Expected: a non-recursive delete of a non-empty directory must fail.
+ }
+ Assert.assertTrue(fs.exists(new Path("/test/nonEmptyDir")));
+ fs.delete(new Path("/test/nonEmptyDir"), true);
+ }
+
}
finally {
if (cluster != null) {cluster.shutdown();}
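
For downstream users, the visible effect of the patch is that a non-recursive FileSystem#delete on a non-empty HDFS directory now surfaces a catchable, typed exception. A minimal usage sketch; the path and the recursive fallback are illustrative assumptions, not part of the patch:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;

    public class NonRecursiveDeleteExample {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Path dir = new Path("/example/dir"); // hypothetical path
        try {
          fs.delete(dir, false); // non-recursive delete
        } catch (PathIsNotEmptyDirectoryException e) {
          // With this patch, the typed exception arrives here instead of a
          // generic RemoteException; fall back to a recursive delete if the
          // directory really should go away.
          fs.delete(dir, true);
        }
      }
    }
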