HDFS-9695. HTTPFS - CHECKACCESS operation missing. Contributed by hemanthboyina

Takanobu Asanuma 2019-12-03 09:38:36 +09:00
parent 6b2d6d4aaf
commit 4ede8bce28
6 changed files with 150 additions and 5 deletions
The change wires the WebHDFS CHECKACCESS operation through HttpFS end to end: client-side HttpFSFileSystem#access, an FsActionParam for parameter validation, a CHECKACCESS branch in HttpFSServer, the FSOperations.FSAccess executor, and tests.

HttpFSFileSystem.java

@@ -44,6 +44,7 @@
import org.apache.hadoop.fs.XAttrSetFlag;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclStatus;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
@@ -131,6 +132,7 @@ public class HttpFSFileSystem extends FileSystem
public static final String POLICY_NAME_PARAM = "storagepolicy";
public static final String SNAPSHOT_NAME_PARAM = "snapshotname";
public static final String OLD_SNAPSHOT_NAME_PARAM = "oldsnapshotname";
public static final String FSACTION_MODE_PARAM = "fsaction";
public static final Short DEFAULT_PERMISSION = 0755;
public static final String ACLSPEC_DEFAULT = "";
@@ -254,7 +256,8 @@ public enum Operation {
ALLOWSNAPSHOT(HTTP_PUT), DISALLOWSNAPSHOT(HTTP_PUT),
CREATESNAPSHOT(HTTP_PUT), DELETESNAPSHOT(HTTP_DELETE),
RENAMESNAPSHOT(HTTP_PUT), GETSNAPSHOTDIFF(HTTP_GET),
GETSNAPSHOTTABLEDIRECTORYLIST(HTTP_GET), GETSERVERDEFAULTS(HTTP_GET);
GETSNAPSHOTTABLEDIRECTORYLIST(HTTP_GET), GETSERVERDEFAULTS(HTTP_GET),
CHECKACCESS(HTTP_GET);
private String httpMethod;
@@ -1609,4 +1612,14 @@ public FsServerDefaults getServerDefaults() throws IOException {
public FsServerDefaults getServerDefaults(Path p) throws IOException {
return getServerDefaults();
}
@Override
public void access(final Path path, final FsAction mode) throws IOException {
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.CHECKACCESS.toString());
params.put(FSACTION_MODE_PARAM, mode.SYMBOL);
HttpURLConnection conn =
getConnection(Operation.CHECKACCESS.getMethod(), params, path, true);
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
}
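For orientation, a hedged usage sketch that is not part of the commit: invoking the new client-side access() against a running HttpFS endpoint. The host/port, path, and the exception type surfaced on denial are illustrative assumptions; validateResponse rethrows whatever exception the server serializes in the error body.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.security.AccessControlException;

// Assumed: an HttpFS server listening on localhost:14000.
HttpFSFileSystem fs = new HttpFSFileSystem();
fs.initialize(URI.create("webhdfs://localhost:14000"), new Configuration());
try {
  fs.access(new Path("/tmp"), FsAction.READ_WRITE); // sends fsaction=rw-
  // HTTP 200: the caller holds read and write on /tmp.
} catch (AccessControlException e) {
  // Denial arrives as the exception deserialized from the JSON response.
}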

FSOperations.java

@@ -35,6 +35,7 @@
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclStatus;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
@@ -1856,4 +1857,41 @@ public String execute(FileSystem fs) throws IOException {
return JsonUtil.toJsonString(sds);
}
}
}
/**
* Executor that performs an access check operation.
*/
@InterfaceAudience.Private
public static class FSAccess
implements FileSystemAccess.FileSystemExecutor<Void> {
private Path path;
private FsAction mode;
/**
* Creates an access executor.
*/
public FSAccess(String path, FsAction mode) {
this.path = new Path(path);
this.mode = mode;
}
/**
* Executes the filesystem operation.
* @param fs filesystem instance to use.
* @throws IOException thrown if an IO error occurred.
*/
@Override
public Void execute(FileSystem fs) throws IOException {
if (fs instanceof DistributedFileSystem) {
DistributedFileSystem dfs = (DistributedFileSystem) fs;
dfs.access(path, mode);
} else {
throw new UnsupportedOperationException("checkaccess is "
+ "not supported for HttpFs on " + fs.getClass()
+ ". Please check your fs.defaultFS configuration");
}
return null;
}
}
}
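Design note, with a hedged sketch: FSAccess delegates only to DistributedFileSystem#access, which lets the NameNode evaluate the caller's effective permissions; the generic FileSystem default would only approximate the check client-side from getFileStatus(). Anything else fails fast. The local filesystem below is an illustrative stand-in, not part of the commit.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.permission.FsAction;

FileSystem local = FileSystem.getLocal(new Configuration());
FSOperations.FSAccess cmd = new FSOperations.FSAccess("/tmp", FsAction.READ);
try {
  cmd.execute(local); // not a DistributedFileSystem
} catch (UnsupportedOperationException e) {
  // "checkaccess is not supported for HttpFs on ..." as thrown above
}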

HttpFSParametersProvider.java

@@ -33,7 +33,6 @@
import org.apache.hadoop.lib.wsrs.ShortParam;
import org.apache.hadoop.lib.wsrs.StringParam;
import org.apache.hadoop.util.StringUtils;
import javax.ws.rs.ext.Provider;
import java.util.HashMap;
import java.util.Map;
@@ -118,6 +117,7 @@ public class HttpFSParametersProvider extends ParametersProvider {
SnapshotNameParam.class});
PARAMS_DEF.put(Operation.GETSNAPSHOTTABLEDIRECTORYLIST, new Class[] {});
PARAMS_DEF.put(Operation.GETSERVERDEFAULTS, new Class[] {});
PARAMS_DEF.put(Operation.CHECKACCESS, new Class[] {FsActionParam.class});
}
public HttpFSParametersProvider() {
@@ -664,4 +664,34 @@ public OldSnapshotNameParam() {
}
}
/**
* Class for FsAction parameter.
*/
@InterfaceAudience.Private
public static class FsActionParam extends StringParam {
private static final String FILE_SYSTEM_ACTION = "[r-][w-][x-]";
private static final Pattern FSACTION_PATTERN =
Pattern.compile(FILE_SYSTEM_ACTION);
/**
* Parameter name.
*/
public static final String NAME = HttpFSFileSystem.FSACTION_MODE_PARAM;
/**
* Constructor.
*/
public FsActionParam() {
super(NAME, null);
}
/**
* Constructor.
* @param str a string representation of the parameter value.
*/
public FsActionParam(final String str) {
super(NAME, str, FSACTION_PATTERN);
}
}
}
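FSACTION_PATTERN enforces the three-character rwx triplet before the request reaches the filesystem layer. A quick sketch of what it accepts and rejects:

import java.util.regex.Pattern;

Pattern p = Pattern.compile("[r-][w-][x-]");
p.matcher("r--").matches(); // true:  read only
p.matcher("rwx").matches(); // true:  full access
p.matcher("---").matches(); // true:  no access requested
p.matcher("rw").matches();  // false: must be exactly three characters
p.matcher("rxw").matches(); // false: order is fixed (read, write, execute)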

HttpFSServer.java

@@ -32,6 +32,7 @@
import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.DataParam;
import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.DestinationParam;
import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.FilterParam;
import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.FsActionParam;
import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.GroupParam;
import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.LenParam;
import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.ModifiedTimeParam;
@@ -53,6 +54,7 @@
import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.XAttrNameParam;
import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.XAttrSetFlagParam;
import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.XAttrValueParam;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.hdfs.web.JsonUtil;
import org.apache.hadoop.http.JettyUtils;
import org.apache.hadoop.lib.service.FileSystemAccess;
@@ -426,6 +428,16 @@ public InputStream run() throws Exception {
response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
break;
}
case CHECKACCESS: {
String mode = params.get(FsActionParam.NAME, FsActionParam.class);
FsActionParam fsparam = new FsActionParam(mode);
FSOperations.FSAccess command = new FSOperations.FSAccess(path,
FsAction.getFsAction(fsparam.value()));
fsExecute(user, command);
AUDIT_LOG.info("[{}]", path);
response = Response.ok().build();
break;
}
default: {
throw new IOException(
MessageFormat.format("Invalid HTTP GET operation [{0}]", op.value()));

BaseTestHttpFSWith.java

@@ -1143,7 +1143,7 @@ protected enum Operation {
CREATE_SNAPSHOT, RENAME_SNAPSHOT, DELETE_SNAPSHOT,
ALLOW_SNAPSHOT, DISALLOW_SNAPSHOT, DISALLOW_SNAPSHOT_EXCEPTION,
FILE_STATUS_ATTR, GET_SNAPSHOT_DIFF, GET_SNAPSHOTTABLE_DIRECTORY_LIST,
GET_SERVERDEFAULTS
GET_SERVERDEFAULTS, CHECKACCESS
}
private void operation(Operation op) throws Exception {
@@ -1267,6 +1267,9 @@ private void operation(Operation op) throws Exception {
case GET_SERVERDEFAULTS:
testGetServerDefaults();
break;
case CHECKACCESS:
testAccess();
break;
}
}
@@ -1737,4 +1740,32 @@ private void testGetServerDefaults() throws Exception {
verifyGetServerDefaults(fs, dfs);
}
}
}
private void testAccess() throws Exception {
if (!this.isLocalFS()) {
FileSystem fs = this.getHttpFSFileSystem();
Path path1 = new Path("/");
DistributedFileSystem dfs = (DistributedFileSystem) FileSystem
.get(path1.toUri(), this.getProxiedFSConf());
verifyAccess(fs, dfs);
}
}
private void verifyAccess(FileSystem fs, DistributedFileSystem dfs)
throws Exception {
Path p1 = new Path("/p1");
dfs.mkdirs(p1);
dfs.setOwner(p1, "user1", "group1");
dfs.setPermission(p1, new FsPermission((short) 0444));
if (fs instanceof HttpFSFileSystem) {
HttpFSFileSystem httpFS = (HttpFSFileSystem) fs;
httpFS.access(p1, FsAction.READ);
} else if (fs instanceof WebHdfsFileSystem) {
WebHdfsFileSystem webHdfsFileSystem = (WebHdfsFileSystem) fs;
webHdfsFileSystem.access(p1, FsAction.READ);
} else {
Assert.fail(fs.getClass().getSimpleName() + " doesn't support access");
}
}
}

TestHttpFSServer.java

@@ -1667,4 +1667,25 @@ public void testGetServerDefaults() throws Exception {
.get(path1.toUri(), TestHdfsHelper.getHdfsConf());
verifyGetServerDefaults(dfs);
}
@Test
@TestDir
@TestJetty
@TestHdfs
public void testAccess() throws Exception {
createHttpFSServer(false, false);
final String dir = "/accessTest";
Path path1 = new Path(dir);
DistributedFileSystem dfs = (DistributedFileSystem) FileSystem
    .get(path1.toUri(), TestHdfsHelper.getHdfsConf());
dfs.mkdirs(path1);
HttpURLConnection conn =
sendRequestToHttpFSServer(dir, "CHECKACCESS", "fsaction=r--");
Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
HttpURLConnection conn1 =
sendRequestToHttpFSServer(dir, "CHECKACCESS", "fsaction=-w-");
Assert.assertEquals(HttpURLConnection.HTTP_OK, conn1.getResponseCode());
}
}
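A hedged negative-path sketch, not in the commit: a value that fails FSACTION_PATTERN should be rejected during parameter parsing, before FSOperations.FSAccess ever runs. The exact 4xx code depends on the parameters provider's error mapping.

HttpURLConnection conn2 =
    sendRequestToHttpFSServer("/accessTest", "CHECKACCESS", "fsaction=rw");
// Expect a client-error status (e.g. 400) rather than HTTP_OK.
Assert.assertNotEquals(HttpURLConnection.HTTP_OK, conn2.getResponseCode());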