HDFS-15628. HttpFS server throws NPE if a file is a symlink. Contributed by Ahmed Hussein.
This commit is contained in:
parent
ddc0ee27fa
commit
e45407128d
@ -199,6 +199,7 @@ public static FILE_TYPE getType(FileStatus fileStatus) {
|
|||||||
public static final String XATTR_VALUE_JSON = "value";
|
public static final String XATTR_VALUE_JSON = "value";
|
||||||
public static final String XATTRNAMES_JSON = "XAttrNames";
|
public static final String XATTRNAMES_JSON = "XAttrNames";
|
||||||
public static final String ECPOLICY_JSON = "ecPolicyObj";
|
public static final String ECPOLICY_JSON = "ecPolicyObj";
|
||||||
|
public static final String SYMLINK_JSON = "symlink";
|
||||||
|
|
||||||
public static final String FILE_CHECKSUM_JSON = "FileChecksum";
|
public static final String FILE_CHECKSUM_JSON = "FileChecksum";
|
||||||
public static final String CHECKSUM_ALGORITHM_JSON = "algorithm";
|
public static final String CHECKSUM_ALGORITHM_JSON = "algorithm";
|
||||||
@ -1101,6 +1102,9 @@ private FileStatus createFileStatus(Path parent, JSONObject json) {
|
|||||||
String pathSuffix = (String) json.get(PATH_SUFFIX_JSON);
|
String pathSuffix = (String) json.get(PATH_SUFFIX_JSON);
|
||||||
Path path = (pathSuffix.equals("")) ? parent : new Path(parent, pathSuffix);
|
Path path = (pathSuffix.equals("")) ? parent : new Path(parent, pathSuffix);
|
||||||
FILE_TYPE type = FILE_TYPE.valueOf((String) json.get(TYPE_JSON));
|
FILE_TYPE type = FILE_TYPE.valueOf((String) json.get(TYPE_JSON));
|
||||||
|
String symLinkValue =
|
||||||
|
type == FILE_TYPE.SYMLINK ? (String) json.get(SYMLINK_JSON) : null;
|
||||||
|
Path symLink = symLinkValue == null ? null : new Path(symLinkValue);
|
||||||
long len = (Long) json.get(LENGTH_JSON);
|
long len = (Long) json.get(LENGTH_JSON);
|
||||||
String owner = (String) json.get(OWNER_JSON);
|
String owner = (String) json.get(OWNER_JSON);
|
||||||
String group = (String) json.get(GROUP_JSON);
|
String group = (String) json.get(GROUP_JSON);
|
||||||
@ -1125,11 +1129,12 @@ private FileStatus createFileStatus(Path parent, JSONObject json) {
|
|||||||
new FsPermissionExtension(permission, aBit, eBit, ecBit);
|
new FsPermissionExtension(permission, aBit, eBit, ecBit);
|
||||||
FileStatus fileStatus = new FileStatus(len, FILE_TYPE.DIRECTORY == type,
|
FileStatus fileStatus = new FileStatus(len, FILE_TYPE.DIRECTORY == type,
|
||||||
replication, blockSize, mTime, aTime, deprecatedPerm, owner, group,
|
replication, blockSize, mTime, aTime, deprecatedPerm, owner, group,
|
||||||
null, path, FileStatus.attributes(aBit, eBit, ecBit, seBit));
|
symLink, path, FileStatus.attributes(aBit, eBit, ecBit, seBit));
|
||||||
return fileStatus;
|
return fileStatus;
|
||||||
} else {
|
} else {
|
||||||
return new FileStatus(len, FILE_TYPE.DIRECTORY == type,
|
return new FileStatus(len, FILE_TYPE.DIRECTORY == type,
|
||||||
replication, blockSize, mTime, aTime, permission, owner, group, path);
|
replication, blockSize, mTime, aTime, permission, owner, group,
|
||||||
|
symLink, path);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -33,6 +33,7 @@
|
|||||||
import org.apache.hadoop.fs.XAttrCodec;
|
import org.apache.hadoop.fs.XAttrCodec;
|
||||||
import org.apache.hadoop.fs.XAttrSetFlag;
|
import org.apache.hadoop.fs.XAttrSetFlag;
|
||||||
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
|
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
|
||||||
|
import org.apache.hadoop.fs.http.client.HttpFSFileSystem.FILE_TYPE;
|
||||||
import org.apache.hadoop.fs.permission.AclEntry;
|
import org.apache.hadoop.fs.permission.AclEntry;
|
||||||
import org.apache.hadoop.fs.permission.AclStatus;
|
import org.apache.hadoop.fs.permission.AclStatus;
|
||||||
import org.apache.hadoop.fs.permission.FsAction;
|
import org.apache.hadoop.fs.permission.FsAction;
|
||||||
@ -111,8 +112,17 @@ private static Map<String, Object> toJsonInner(FileStatus fileStatus,
|
|||||||
Map<String, Object> json = new LinkedHashMap<String, Object>();
|
Map<String, Object> json = new LinkedHashMap<String, Object>();
|
||||||
json.put(HttpFSFileSystem.PATH_SUFFIX_JSON,
|
json.put(HttpFSFileSystem.PATH_SUFFIX_JSON,
|
||||||
(emptyPathSuffix) ? "" : fileStatus.getPath().getName());
|
(emptyPathSuffix) ? "" : fileStatus.getPath().getName());
|
||||||
json.put(HttpFSFileSystem.TYPE_JSON,
|
FILE_TYPE fileType = HttpFSFileSystem.FILE_TYPE.getType(fileStatus);
|
||||||
HttpFSFileSystem.FILE_TYPE.getType(fileStatus).toString());
|
json.put(HttpFSFileSystem.TYPE_JSON, fileType.toString());
|
||||||
|
if (fileType.equals(FILE_TYPE.SYMLINK)) {
|
||||||
|
// put the symlink into JSON
|
||||||
|
try {
|
||||||
|
json.put(HttpFSFileSystem.SYMLINK_JSON,
|
||||||
|
fileStatus.getSymlink().getName());
|
||||||
|
} catch (IOException e) {
|
||||||
|
// Can't happen.
|
||||||
|
}
|
||||||
|
}
|
||||||
json.put(HttpFSFileSystem.LENGTH_JSON, fileStatus.getLen());
|
json.put(HttpFSFileSystem.LENGTH_JSON, fileStatus.getLen());
|
||||||
json.put(HttpFSFileSystem.OWNER_JSON, fileStatus.getOwner());
|
json.put(HttpFSFileSystem.OWNER_JSON, fileStatus.getOwner());
|
||||||
json.put(HttpFSFileSystem.GROUP_JSON, fileStatus.getGroup());
|
json.put(HttpFSFileSystem.GROUP_JSON, fileStatus.getGroup());
|
||||||
|
@ -366,6 +366,42 @@ private void testDelete() throws Exception {
|
|||||||
fs.close();
|
fs.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private void testListSymLinkStatus() throws Exception {
|
||||||
|
if (isLocalFS()) {
|
||||||
|
// do not test the symlink for local FS.
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
FileSystem fs = FileSystem.get(getProxiedFSConf());
|
||||||
|
boolean isWebhdfs = fs instanceof WebHdfsFileSystem;
|
||||||
|
Path path =
|
||||||
|
new Path(getProxiedFSTestDir() + "-symlink", "targetFoo.txt");
|
||||||
|
OutputStream os = fs.create(path);
|
||||||
|
os.write(1);
|
||||||
|
os.close();
|
||||||
|
Path linkPath =
|
||||||
|
new Path(getProxiedFSTestDir()+ "-symlink", "symlinkFoo.txt");
|
||||||
|
fs.createSymlink(path, linkPath, false);
|
||||||
|
fs = getHttpFSFileSystem();
|
||||||
|
FileStatus linkStatus = fs.getFileStatus(linkPath);
|
||||||
|
FileStatus status1 = fs.getFileStatus(path);
|
||||||
|
|
||||||
|
FileStatus[] stati = fs.listStatus(path.getParent());
|
||||||
|
assertEquals(2, stati.length);
|
||||||
|
|
||||||
|
int countSymlink = 0;
|
||||||
|
for (int i = 0; i < stati.length; i++) {
|
||||||
|
FileStatus fStatus = stati[i];
|
||||||
|
countSymlink += fStatus.isSymlink() ? 1 : 0;
|
||||||
|
}
|
||||||
|
assertEquals(1, countSymlink);
|
||||||
|
|
||||||
|
assertFalse(status1.isSymlink());
|
||||||
|
if (isWebhdfs) {
|
||||||
|
assertTrue(linkStatus.isSymlink());
|
||||||
|
}
|
||||||
|
fs.close();
|
||||||
|
}
|
||||||
|
|
||||||
private void testListStatus() throws Exception {
|
private void testListStatus() throws Exception {
|
||||||
FileSystem fs = FileSystem.get(getProxiedFSConf());
|
FileSystem fs = FileSystem.get(getProxiedFSConf());
|
||||||
boolean isDFS = fs instanceof DistributedFileSystem;
|
boolean isDFS = fs instanceof DistributedFileSystem;
|
||||||
@ -1191,6 +1227,7 @@ private void operation(Operation op) throws Exception {
|
|||||||
break;
|
break;
|
||||||
case LIST_STATUS:
|
case LIST_STATUS:
|
||||||
testListStatus();
|
testListStatus();
|
||||||
|
testListSymLinkStatus();
|
||||||
break;
|
break;
|
||||||
case WORKING_DIRECTORY:
|
case WORKING_DIRECTORY:
|
||||||
testWorkingdirectory();
|
testWorkingdirectory();
|
||||||
|
Loading…
Reference in New Issue
Block a user