diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 8980ec424a..cb9961d647 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -366,6 +366,9 @@ Release 2.5.0 - UNRELEASED
     HDFS-5683. Better audit log messages for caching operations.
     (Abhiraj Butala via wang)
 
+    HDFS-6345. DFS.listCacheDirectives() should allow filtering based on
+    cache directive ID. (wang)
+
   OPTIMIZATIONS
 
     HDFS-6214. Webhdfs has poor throughput for files >2GB (daryn)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CacheDirectiveIterator.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CacheDirectiveIterator.java
index 1ef5c538e4..676106de10 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CacheDirectiveIterator.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/CacheDirectiveIterator.java
@@ -23,6 +23,10 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.BatchedRemoteIterator;
+import org.apache.hadoop.fs.InvalidRequestException;
+import org.apache.hadoop.ipc.RemoteException;
+
+import com.google.common.base.Preconditions;
 
 /**
  * CacheDirectiveIterator is a remote iterator that iterates cache directives.
@@ -33,7 +37,7 @@
 public class CacheDirectiveIterator
     extends BatchedRemoteIterator<Long, CacheDirectiveEntry> {
 
-  private final CacheDirectiveInfo filter;
+  private CacheDirectiveInfo filter;
   private final ClientProtocol namenode;
 
   public CacheDirectiveIterator(ClientProtocol namenode,
@@ -43,10 +47,72 @@ public CacheDirectiveIterator(ClientProtocol namenode,
     this.filter = filter;
   }
 
+  private static CacheDirectiveInfo removeIdFromFilter(CacheDirectiveInfo filter) {
+    CacheDirectiveInfo.Builder builder = new CacheDirectiveInfo.Builder(filter);
+    builder.setId(null);
+    return builder.build();
+  }
+
+  /**
+   * Used for compatibility when communicating with a server version that
+   * does not support filtering directives by ID.
+   */
+  private static class SingleEntry implements
+      BatchedEntries<CacheDirectiveEntry> {
+
+    private final CacheDirectiveEntry entry;
+
+    public SingleEntry(final CacheDirectiveEntry entry) {
+      this.entry = entry;
+    }
+
+    @Override
+    public CacheDirectiveEntry get(int i) {
+      if (i > 0) {
+        return null;
+      }
+      return entry;
+    }
+
+    @Override
+    public int size() {
+      return 1;
+    }
+
+    @Override
+    public boolean hasMore() {
+      return false;
+    }
+  }
+
   @Override
   public BatchedEntries<CacheDirectiveEntry> makeRequest(Long prevKey)
       throws IOException {
-    return namenode.listCacheDirectives(prevKey, filter);
+    BatchedEntries<CacheDirectiveEntry> entries = null;
+    try {
+      entries = namenode.listCacheDirectives(prevKey, filter);
+    } catch (IOException e) {
+      if (e.getMessage().contains("Filtering by ID is unsupported")) {
+        // Retry case for old servers, do the filtering client-side
+        long id = filter.getId();
+        filter = removeIdFromFilter(filter);
+        // Using id - 1 as prevId should get us a window containing the id
+        // This is somewhat brittle, since it depends on directives being
+        // returned in order of ascending ID.
+        entries = namenode.listCacheDirectives(id - 1, filter);
+        for (int i = 0; i < entries.size(); i++) {
+          CacheDirectiveEntry entry = entries.get(i);
+          if (entry.getInfo().getId().equals((Long)id)) {
+            return new SingleEntry(entry);
+          }
+        }
+        throw new RemoteException(InvalidRequestException.class.getName(),
+            "Did not find requested id " + id);
+      }
+      throw e;
+    }
+    Preconditions.checkNotNull(entries);
+    return entries;
   }
 
   @Override
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CacheManager.java
@@ -686,14 +686,24 @@ public void removeDirective(long id, FSPermissionChecker pc)
     String filterPath = null;
     if (filter.getPath() != null) {
       filterPath = validatePath(filter);
     }
-    if (filter.getId() != null) {
-      throw new InvalidRequestException("Filtering by ID is unsupported.");
-    }
     if (filter.getReplication() != null) {
       throw new InvalidRequestException(
           "Filtering by replication is unsupported.");
     }
+
+    // Querying for a single ID
+    final Long id = filter.getId();
+    if (id != null) {
+      if (!directivesById.containsKey(id)) {
+        throw new InvalidRequestException("Did not find requested id " + id);
+      }
+      // Since we use a tailMap on directivesById, setting prev to id-1 gets
+      // us the directive with the id (if present)
+      prevId = id - 1;
+    }
+
     ArrayList<CacheDirectiveEntry> replies =
         new ArrayList<CacheDirectiveEntry>(NUM_PRE_ALLOCATED_ENTRIES);
     int numReplies = 0;
@@ -711,6 +721,14 @@ public void removeDirective(long id, FSPermissionChecker pc)
       }
       CacheDirective curDirective = cur.getValue();
       CacheDirectiveInfo info = cur.getValue().toInfo();
+
+      // If the requested ID is present, it should be the first item.
+      // Hitting this case means the ID is not present, or we're on the second
+      // item and should break out.
+      if (id != null &&
+          !(info.getId().equals(id))) {
+        break;
+      }
       if (filter.getPool() != null &&
           !info.getPool().equals(filter.getPool())) {
         continue;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java
index b7ac968ded..3325570c7d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java
@@ -503,19 +503,21 @@ public String getName() {
 
     @Override
     public String getShortUsage() {
-      return "[" + getName() + " [-stats] [-path <path>] [-pool <pool>]]\n";
+      return "[" + getName()
+          + " [-stats] [-path <path>] [-pool <pool>] [-id <id>]]\n";
     }
 
     @Override
     public String getLongUsage() {
       TableListing listing = getOptionDescriptionListing();
+      listing.addRow("-stats", "List path-based cache directive statistics.");
       listing.addRow("<path>", "List only " +
           "cache directives with this path. " +
           "Note that if there is a cache directive for <path> " +
           "in a cache pool that we don't have read access for, it " +
           "will not be listed.");
       listing.addRow("<pool>", "List only path cache directives in that pool.");
-      listing.addRow("-stats", "List path-based cache directive statistics.");
+      listing.addRow("<id>", "List the cache directive with this id.");
       return getShortUsage() + "\n" +
           "List cache directives.\n\n" +
           listing.toString();
@@ -534,6 +536,10 @@ public int run(Configuration conf, List<String> args) throws IOException {
         builder.setPool(poolFilter);
       }
       boolean printStats = StringUtils.popOption("-stats", args);
+      String idFilter = StringUtils.popOptionWithArgument("-id", args);
+      if (idFilter != null) {
+        builder.setId(Long.parseLong(idFilter));
+      }
       if (!args.isEmpty()) {
         System.err.println("Can't understand argument: " + args.get(0));
         return 1;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCacheDirectives.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCacheDirectives.java
index ecc738aadc..ed124603f3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCacheDirectives.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCacheDirectives.java
@@ -477,6 +477,12 @@ public void testAddRemoveDirectives() throws Exception {
     iter = dfs.listCacheDirectives(
         new CacheDirectiveInfo.Builder().setPool("pool2").build());
     validateListAll(iter, betaId);
+    iter = dfs.listCacheDirectives(
+        new CacheDirectiveInfo.Builder().setId(alphaId2).build());
+    validateListAll(iter, alphaId2);
+    iter = dfs.listCacheDirectives(
+        new CacheDirectiveInfo.Builder().setId(relativeId).build());
+    validateListAll(iter, relativeId);
 
     dfs.removeCacheDirective(betaId);
     iter = dfs.listCacheDirectives(
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml
index 13f1b9ae14..058eec59aa 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml
@@ -519,5 +519,29 @@
         </comparator>
       </comparators>
     </test>
+
+    <test>
+      <description>Testing listing a single cache directive</description>
+      <test-commands>
+        <cache-admin-command>-addPool pool1</cache-admin-command>
+        <cache-admin-command>-addDirective -path /foo -pool pool1 -ttl 2d</cache-admin-command>
+        <cache-admin-command>-addDirective -path /bar -pool pool1 -ttl 24h</cache-admin-command>
+        <cache-admin-command>-addDirective -path /baz -replication 2 -pool pool1 -ttl 60m</cache-admin-command>
+        <cache-admin-command>-listDirectives -stats -id 30</cache-admin-command>
+      </test-commands>
+      <cleanup-commands>
+        <cache-admin-command>-removePool pool1</cache-admin-command>
+      </cleanup-commands>
+      <comparators>
+        <comparator>
+          <type>SubstringComparator</type>
+          <expected-output>Found 1 entry</expected-output>
+        </comparator>
+        <comparator>
+          <type>SubstringComparator</type>
+          <expected-output>30 pool1      1</expected-output>
+        </comparator>
+      </comparators>
+    </test>
   </tests>
 </configuration>
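
With the patch applied, the new filter can be exercised from the command line:

  hdfs cacheadmin -listDirectives -stats -id 30

The equivalent programmatic usage is sketched below. This is a minimal example mirroring the new assertions in TestCacheDirectives, not part of the patch itself; it assumes a running cluster with caching enabled and fs.defaultFS pointing at an HDFS URI, and the pool name "pool1" and path "/foo" are placeholders:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;

public class ListDirectiveById {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Assumes fs.defaultFS is an HDFS URI, so the cast is safe.
    DistributedFileSystem dfs =
        (DistributedFileSystem) new Path("/").getFileSystem(conf);

    // Add a directive and remember its ID ("pool1" must already exist).
    long id = dfs.addCacheDirective(new CacheDirectiveInfo.Builder()
        .setPath(new Path("/foo"))
        .setPool("pool1")
        .build());

    // New in HDFS-6345: restrict the listing to a single directive by ID.
    // Against an older NameNode, CacheDirectiveIterator transparently falls
    // back to the client-side filtering implemented above.
    RemoteIterator<CacheDirectiveEntry> iter = dfs.listCacheDirectives(
        new CacheDirectiveInfo.Builder().setId(id).build());
    while (iter.hasNext()) {
      CacheDirectiveEntry entry = iter.next();
      System.out.println(entry.getInfo() + ": " + entry.getStats());
    }
  }
}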