HDFS-5513. CacheAdmin commands fail when using . as the path. Contributed by Andrew Wang.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1543670 13f79535-47bb-0310-9956-ffa450edef68
parent 587f68b160
commit 00d9b8eedb
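The hunks below cover three files: the HDFS CHANGES.txt entries, the PathBasedCacheDirective.Builder copy constructor, and the cache directive test. As an illustration only (not part of the commit), the following sketch mirrors the new test case: it builds a directive whose path is the relative ".", then adds, modifies, and removes it through the client API. The wrapper class and method name are made up, and it assumes dfs is a DistributedFileSystem for a running cluster with an existing cache pool named pool1, as in the test.

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;

public class RelativePathCachingSketch {
  /** Add, modify, and remove a cache directive whose path is the relative ".". */
  static void exerciseRelativePath(DistributedFileSystem dfs) throws IOException {
    // Build a directive against the relative path "." -- the case HDFS-5513 fixes.
    PathBasedCacheDirective directive = new PathBasedCacheDirective.Builder()
        .setPath(new Path("."))
        .setPool("pool1")
        .build();

    // Register the directive and get back its assigned id.
    long id = dfs.addPathBasedCacheDirective(directive);

    // Modify it by copying the original directive and overriding two fields.
    dfs.modifyPathBasedCacheDirective(new PathBasedCacheDirective.Builder(directive)
        .setId(id)
        .setReplication((short) 2)
        .build());

    // Clean up.
    dfs.removePathBasedCacheDirective(id);
  }
}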
@@ -379,7 +379,9 @@ Trunk (Unreleased)
     nextEntryId (cmccabe)
 
     HDFS-5512. CacheAdmin -listPools fails with NPE when user lacks permissions
-    to view all pools (awang via cmccabe)
+    to view all pools (wang via cmccabe)
+
+    HDFS-5513. CacheAdmin commands fail when using . as the path. (wang)
 
 Release 2.3.0 - UNRELEASED
 
@@ -17,8 +17,6 @@
  */
 package org.apache.hadoop.hdfs.protocol;
 
-import java.net.URI;
-
 import org.apache.commons.lang.builder.EqualsBuilder;
 import org.apache.commons.lang.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -61,9 +59,7 @@ public Builder() {
    */
   public Builder(PathBasedCacheDirective directive) {
     this.id = directive.getId();
-    // deep-copy URI
-    URI uri = directive.getPath().toUri();
-    this.path = new Path(uri.getScheme(), uri.getAuthority(), uri.getPath());
+    this.path = directive.getPath();
     this.replication = directive.getReplication();
     this.pool = directive.getPool();
   }
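The heart of the fix is the copy constructor above: instead of rebuilding the Path from its URI scheme, authority, and path components (a round trip that evidently did not survive a relative path such as "."), the Builder now reuses the caller's Path object, which is safe because org.apache.hadoop.fs.Path is immutable. Below is a minimal sketch of what the copy constructor is for, using only Builder methods visible in this diff; the wrapper class and helper name are hypothetical.

import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;

public class BuilderCopySketch {
  /**
   * Copy an existing directive and override a single field. The id, path, and
   * pool of the original are carried over by the Builder copy constructor; with
   * this patch the path is carried over untouched, even when it is relative.
   */
  static PathBasedCacheDirective withReplication(PathBasedCacheDirective original,
      short replication) {
    return new PathBasedCacheDirective.Builder(original)
        .setReplication(replication)
        .build();
  }
}

The new test case at the bottom of this diff exercises the same copy-then-override pattern through dfs.modifyPathBasedCacheDirective.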
@@ -31,9 +31,8 @@
 import static org.junit.Assert.fail;
 
 import java.io.IOException;
-import java.nio.MappedByteBuffer;
-import java.security.PrivilegedExceptionAction;
 import java.nio.ByteBuffer;
+import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.LinkedList;
@@ -59,7 +58,6 @@
 import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor.CachedBlocksList.Type;
 import org.apache.hadoop.hdfs.server.namenode.EditLogFileOutputStream;
-import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.MappableBlock;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.io.nativeio.NativeIO;
 import org.apache.hadoop.io.nativeio.NativeIO.POSIX.CacheManipulator;
@@ -482,6 +480,15 @@ public void testAddRemoveDirectives() throws Exception {
     dfs.removePathBasedCacheDirective(relativeId);
     iter = dfs.listPathBasedCacheDirectives(null);
     assertFalse(iter.hasNext());
+
+    // Verify that PBCDs with path "." work correctly
+    PathBasedCacheDirective directive =
+        new PathBasedCacheDirective.Builder().setPath(new Path("."))
+            .setPool("pool1").build();
+    long id = dfs.addPathBasedCacheDirective(directive);
+    dfs.modifyPathBasedCacheDirective(new PathBasedCacheDirective.Builder(
+        directive).setId(id).setReplication((short)2).build());
+    dfs.removePathBasedCacheDirective(id);
   }
 
   @Test(timeout=60000)