diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.HDFS-2802.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.HDFS-2802.txt
index 89149b0cd2..95933574d8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.HDFS-2802.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.HDFS-2802.txt
@@ -170,3 +170,6 @@ Branch-2802 Snapshot (Unreleased)
 
   HDFS-4524. Update SnapshotManager#snapshottables when loading fsimage.
   (Jing Zhao via szetszwo)
+
+  HDFS-4520. Support listing snapshots under a snapshottable directory using ls.
+  (Jing Zhao via szetszwo)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
index 0384e7938b..b8a458ee6f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
@@ -62,6 +62,7 @@
 import org.apache.hadoop.hdfs.server.namenode.INodeDirectory.INodesInPath;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.INodeDirectorySnapshottable;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
+import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot.Root;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotAccessControlException;
 import org.apache.hadoop.hdfs.util.ByteArray;
 import org.apache.hadoop.hdfs.util.ReadOnlyList;
@@ -1228,6 +1229,9 @@ DirectoryListing getListing(String src, byte[] startAfter,
 
     readLock();
     try {
+      if (srcs.endsWith(Path.SEPARATOR + HdfsConstants.DOT_SNAPSHOT_DIR)) {
+        return getSnapshotsListing(srcs, startAfter);
+      }
       final INodesInPath inodesInPath = rootDir.getLastINodeInPath(srcs, true);
       final Snapshot snapshot = inodesInPath.getPathSnapshot();
       final INode targetNode = inodesInPath.getINode(0);
@@ -1257,6 +1261,35 @@ DirectoryListing getListing(String src, byte[] startAfter,
       readUnlock();
     }
   }
+
+  /**
+   * Get a listing of all the snapshots of a snapshottable directory
+   */
+  private DirectoryListing getSnapshotsListing(String src, byte[] startAfter)
+      throws UnresolvedLinkException, IOException {
+    assert hasReadLock();
+    final String dotSnapshot = Path.SEPARATOR + HdfsConstants.DOT_SNAPSHOT_DIR;
+    Preconditions.checkArgument(src.endsWith(dotSnapshot),
+        src + " does not end with " + dotSnapshot);
+
+    final String dirPath = normalizePath(src.substring(0,
+        src.length() - HdfsConstants.DOT_SNAPSHOT_DIR.length()));
+
+    final INode node = this.getINode(dirPath);
+    final INodeDirectorySnapshottable dirNode = INodeDirectorySnapshottable
+        .valueOf(node, dirPath);
+    final ReadOnlyList<Snapshot> snapshots = dirNode.getSnapshotList();
+    int skipSize = ReadOnlyList.Util.binarySearch(snapshots, startAfter);
+    skipSize = skipSize < 0 ? -skipSize - 1 : skipSize + 1;
+    int numOfListing = Math.min(snapshots.size() - skipSize, this.lsLimit);
+    final HdfsFileStatus listing[] = new HdfsFileStatus[numOfListing];
+    for (int i = 0; i < numOfListing; i++) {
+      Root sRoot = snapshots.get(i + skipSize).getRoot();
+      listing[i] = createFileStatus(sRoot.getLocalNameBytes(), sRoot, null);
+    }
+    return new DirectoryListing(
+        listing, snapshots.size() - skipSize - numOfListing);
+  }
 
   /** Get the file info for a specific file.
    * @param src The string representation of the path to the file
@@ -1269,6 +1302,9 @@ HdfsFileStatus getFileInfo(String src, boolean resolveLink)
     String srcs = normalizePath(src);
     readLock();
     try {
+      if (srcs.endsWith(Path.SEPARATOR + HdfsConstants.DOT_SNAPSHOT_DIR)) {
+        return getFileInfo4DotSnapshot(srcs);
+      }
       final INodesInPath inodesInPath = rootDir.getLastINodeInPath(srcs, resolveLink);
       final INode i = inodesInPath.getINode(0);
       return i == null? null: createFileStatus(HdfsFileStatus.EMPTY_NAME, i,
@@ -1277,6 +1313,23 @@ HdfsFileStatus getFileInfo(String src, boolean resolveLink)
       readUnlock();
     }
   }
+
+  private HdfsFileStatus getFileInfo4DotSnapshot(String src)
+      throws UnresolvedLinkException {
+    final String dotSnapshot = Path.SEPARATOR + HdfsConstants.DOT_SNAPSHOT_DIR;
+    Preconditions.checkArgument(src.endsWith(dotSnapshot),
+        src + " does not end with " + dotSnapshot);
+
+    final String dirPath = normalizePath(src.substring(0,
+        src.length() - HdfsConstants.DOT_SNAPSHOT_DIR.length()));
+
+    final INode node = this.getINode(dirPath);
+    if (node instanceof INodeDirectorySnapshottable) {
+      return new HdfsFileStatus(0, true, 0, 0, 0, 0, null, null, null, null,
+          HdfsFileStatus.EMPTY_NAME, -1L);
+    }
+    return null;
+  }
 
   /**
    * Get the blocks associated with the file.
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/INodeDirectorySnapshottable.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/INodeDirectorySnapshottable.java
index 5f6c3bcc79..b07abf7fb5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/INodeDirectorySnapshottable.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/INodeDirectorySnapshottable.java
@@ -197,6 +197,11 @@ public Snapshot getSnapshot(byte[] snapshotName) {
     return i < 0? null: snapshotsByNames.get(i);
   }
 
+  /** @return {@link #snapshotsByNames} as a {@link ReadOnlyList} */
+  public ReadOnlyList<Snapshot> getSnapshotList() {
+    return ReadOnlyList.Util.asReadOnlyList(snapshotsByNames);
+  }
+
   /**
    * Rename a snapshot
    * @param path
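
Reviewer note: the only non-obvious line in getSnapshotsListing() is the conversion of the binary-search result into a skip count. Below is a minimal, self-contained sketch (not part of the patch) of that paging logic, using java.util.Collections.binarySearch as a stand-in for ReadOnlyList.Util.binarySearch and plain strings as stand-ins for snapshot names; the class and method names are made up for illustration.

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class SnapshotPagingSketch {
  /** Return at most lsLimit names that sort strictly after startAfter. */
  static List<String> page(List<String> sortedNames, String startAfter, int lsLimit) {
    int skipSize = Collections.binarySearch(sortedNames, startAfter);
    // binarySearch returns (-insertionPoint - 1) when the key is absent; in both
    // cases we want the index of the first entry strictly greater than startAfter.
    skipSize = skipSize < 0 ? -skipSize - 1 : skipSize + 1;
    int numOfListing = Math.min(sortedNames.size() - skipSize, lsLimit);
    return sortedNames.subList(skipSize, skipSize + numOfListing);
  }

  public static void main(String[] args) {
    List<String> snapshots = Arrays.asList("s1", "s2", "s3", "s4");
    System.out.println(page(snapshots, "", 2));    // [s1, s2] -- first page
    System.out.println(page(snapshots, "s2", 2));  // [s3, s4] -- resume after s2
  }
}

The leftover count that the patch passes to DirectoryListing (snapshots.size() - skipSize - numOfListing) plays the same role as in a regular directory listing: it tells the client whether another getListing() call with an updated startAfter is needed.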
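
For the client-visible effect, here is a sketch of how the change could be exercised, assuming a cluster whose NameNode carries this patch and a hypothetical snapshottable directory /data that already has snapshots named s1 and s2 (all paths and names below are illustrative, not taken from the patch).

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ListDotSnapshotExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);

    // getFileInfo() now recognizes the trailing "/.snapshot" suffix, so the
    // reserved directory can be stat'ed even though it is not a real INode...
    Path dotSnapshot = new Path("/data/.snapshot");   // hypothetical path
    System.out.println("isDirectory: " + fs.getFileStatus(dotSnapshot).isDirectory());

    // ...and getListing() returns one entry per snapshot root, so listStatus
    // (or "hdfs dfs -ls /data/.snapshot") enumerates the snapshots.
    for (FileStatus status : fs.listStatus(dotSnapshot)) {
      System.out.println(status.getPath().getName());   // e.g. s1, s2
    }
  }
}

Note the design choice visible in the diff: ".snapshot" is handled purely as a path-suffix check before normal path resolution, which is why getFileInfo4DotSnapshot() synthesizes an empty HdfsFileStatus instead of resolving an INode for it.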