HDFS-14949. Add getServerDefaults() support to HttpFS. Contributed by hemanthboyina.

commit 3037762b2c (parent fd264b8265)
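In summary, the commit wires a new GETSERVERDEFAULTS HTTP operation through every layer of HttpFS: the client-side HttpFSFileSystem gains getServerDefaults() overloads, FSOperations gains a server-side FSGetServerDefaults executor, the parameters provider registers the operation (it takes no extra parameters), the HTTP GET handler dispatches it, and tests cover both the FileSystem client path and the raw REST path.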
@@ -34,6 +34,7 @@
 import org.apache.hadoop.fs.FileChecksum;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PositionedReadable;
 import org.apache.hadoop.fs.QuotaUsage;
@@ -253,7 +254,7 @@ public enum Operation {
     ALLOWSNAPSHOT(HTTP_PUT), DISALLOWSNAPSHOT(HTTP_PUT),
     CREATESNAPSHOT(HTTP_PUT), DELETESNAPSHOT(HTTP_DELETE),
     RENAMESNAPSHOT(HTTP_PUT), GETSNAPSHOTDIFF(HTTP_GET),
-    GETSNAPSHOTTABLEDIRECTORYLIST(HTTP_GET);
+    GETSNAPSHOTTABLEDIRECTORYLIST(HTTP_GET), GETSERVERDEFAULTS(HTTP_GET);
 
     private String httpMethod;
 
@@ -1591,4 +1592,21 @@ public boolean hasPathCapability(final Path path, final String capability)
       return super.hasPathCapability(p, capability);
     }
   }
+
+  @Override
+  public FsServerDefaults getServerDefaults() throws IOException {
+    Map<String, String> params = new HashMap<String, String>();
+    params.put(OP_PARAM, Operation.GETSERVERDEFAULTS.toString());
+    HttpURLConnection conn =
+        getConnection(Operation.GETSERVERDEFAULTS.getMethod(), params,
+            new Path(getUri().toString(), "/"), true);
+    HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+    JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
+    return JsonUtilClient.toFsServerDefaults(json);
+  }
+
+  @Override
+  public FsServerDefaults getServerDefaults(Path p) throws IOException {
+    return getServerDefaults();
+  }
 }
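For context, the sketch below shows roughly how a client would exercise the new method. It is a minimal, illustrative example, not part of the commit: the localhost:14000 address is an assumption, and the fs.webhdfs.impl override mirrors how the HttpFS tests below construct the client.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsServerDefaults;
import org.apache.hadoop.fs.Path;

public class GetServerDefaultsExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Route the webhdfs:// scheme to HttpFSFileSystem (the tests below do
    // the same through getHttpFSFileSystem()).
    conf.set("fs.webhdfs.impl",
        "org.apache.hadoop.fs.http.client.HttpFSFileSystem");
    try (FileSystem fs =
        FileSystem.get(new URI("webhdfs://localhost:14000"), conf)) {
      FsServerDefaults d = fs.getServerDefaults(new Path("/"));
      System.out.println("blockSize=" + d.getBlockSize()
          + " replication=" + d.getReplication());
    }
  }
}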
@@ -24,6 +24,7 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FilterFileSystem;
+import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.GlobFilter;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
@@ -1821,4 +1822,38 @@ public String execute(FileSystem fs) throws IOException {
       return JsonUtil.toJsonString(sds);
     }
   }
+
+  /**
+   * Executor that performs a getServerDefaults operation.
+   */
+  @InterfaceAudience.Private
+  public static class FSGetServerDefaults
+      implements FileSystemAccess.FileSystemExecutor<String> {
+
+    /**
+     * Creates a getServerDefaults executor.
+     */
+    public FSGetServerDefaults() {
+    }
+
+    /**
+     * Executes the filesystem operation.
+     * @param fs filesystem instance to use.
+     * @return A JSON string.
+     * @throws IOException thrown if an IO error occurred.
+     */
+    @Override
+    public String execute(FileSystem fs) throws IOException {
+      FsServerDefaults sds = null;
+      if (fs instanceof DistributedFileSystem) {
+        DistributedFileSystem dfs = (DistributedFileSystem) fs;
+        sds = dfs.getServerDefaults();
+      } else {
+        throw new UnsupportedOperationException("getServerDefaults is "
+            + "not supported for HttpFs on " + fs.getClass()
+            + ". Please check your fs.defaultFS configuration");
+      }
+      return JsonUtil.toJsonString(sds);
+    }
+  }
 }
@@ -117,6 +117,7 @@ public class HttpFSParametersProvider extends ParametersProvider {
         new Class[] {OldSnapshotNameParam.class,
             SnapshotNameParam.class});
     PARAMS_DEF.put(Operation.GETSNAPSHOTTABLEDIRECTORYLIST, new Class[] {});
+    PARAMS_DEF.put(Operation.GETSERVERDEFAULTS, new Class[] {});
   }
 
   public HttpFSParametersProvider() {
@@ -418,6 +418,14 @@ public InputStream run() throws Exception {
       response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
       break;
     }
+    case GETSERVERDEFAULTS: {
+      FSOperations.FSGetServerDefaults command =
+          new FSOperations.FSGetServerDefaults();
+      String js = fsExecute(user, command);
+      AUDIT_LOG.info("[{}]", "/");
+      response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+      break;
+    }
     default: {
       throw new IOException(
           MessageFormat.format("Invalid HTTP GET operation [{0}]", op.value()));
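To illustrate the server-side wiring, the sketch below issues the raw REST call that this new case handles. It is illustrative, not from the commit: the host, port, and user.name=hdfs value are assumptions, and the response shape shown in the comment is approximate.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class GetServerDefaultsRestCall {
  public static void main(String[] args) throws Exception {
    // HttpFS serves the WebHDFS v1 REST API; "/" is the path being queried.
    URL url = new URL("http://localhost:14000/webhdfs/v1/"
        + "?op=GETSERVERDEFAULTS&user.name=hdfs");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("GET");
    // The GETSERVERDEFAULTS case above replies 200 OK with a one-line JSON
    // body, approximately:
    // {"FsServerDefaults":{"blockSize":...,"replication":...,...}}
    try (BufferedReader in = new BufferedReader(new InputStreamReader(
        conn.getInputStream(), StandardCharsets.UTF_8))) {
      System.out.println(in.readLine());
    }
  }
}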
@@ -27,6 +27,7 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileSystemTestHelper;
+import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.QuotaUsage;
 import org.apache.hadoop.fs.RemoteIterator;
@@ -1141,7 +1142,8 @@ protected enum Operation {
     LIST_STATUS_BATCH, GETTRASHROOT, STORAGEPOLICY, ERASURE_CODING,
     CREATE_SNAPSHOT, RENAME_SNAPSHOT, DELETE_SNAPSHOT,
     ALLOW_SNAPSHOT, DISALLOW_SNAPSHOT, DISALLOW_SNAPSHOT_EXCEPTION,
-    FILE_STATUS_ATTR, GET_SNAPSHOT_DIFF, GET_SNAPSHOTTABLE_DIRECTORY_LIST
+    FILE_STATUS_ATTR, GET_SNAPSHOT_DIFF, GET_SNAPSHOTTABLE_DIRECTORY_LIST,
+    GET_SERVERDEFAULTS
   }
 
   private void operation(Operation op) throws Exception {
@@ -1262,7 +1264,11 @@ private void operation(Operation op) throws Exception {
     case GET_SNAPSHOTTABLE_DIRECTORY_LIST:
       testGetSnapshottableDirListing();
       break;
+    case GET_SERVERDEFAULTS:
+      testGetServerDefaults();
+      break;
     }
 
   }
 
   @Parameterized.Parameters
@@ -1702,4 +1708,33 @@ private void testFileAclsCustomizedUserAndGroupNames() throws Exception {
     // Clean up
     proxyFs.delete(new Path(dir), true);
   }
+
+  private void verifyGetServerDefaults(FileSystem fs, DistributedFileSystem dfs)
+      throws Exception {
+    FsServerDefaults sds = null;
+    if (fs instanceof HttpFSFileSystem) {
+      HttpFSFileSystem httpFS = (HttpFSFileSystem) fs;
+      sds = httpFS.getServerDefaults();
+    } else if (fs instanceof WebHdfsFileSystem) {
+      WebHdfsFileSystem webHdfsFileSystem = (WebHdfsFileSystem) fs;
+      sds = webHdfsFileSystem.getServerDefaults();
+    } else {
+      Assert.fail(
+          fs.getClass().getSimpleName() + " doesn't support getServerDefaults");
+    }
+    // Verify result with DFS
+    FsServerDefaults dfssds = dfs.getServerDefaults();
+    Assert.assertEquals(JsonUtil.toJsonString(sds),
+        JsonUtil.toJsonString(dfssds));
+  }
+
+  private void testGetServerDefaults() throws Exception {
+    if (!this.isLocalFS()) {
+      FileSystem fs = this.getHttpFSFileSystem();
+      Path path1 = new Path("/");
+      DistributedFileSystem dfs = (DistributedFileSystem) FileSystem
+          .get(path1.toUri(), this.getProxiedFSConf());
+      verifyGetServerDefaults(fs, dfs);
+    }
+  }
 }
@@ -54,6 +54,7 @@
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.XAttrCodec;
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.DataParam;
@@ -1636,4 +1637,34 @@ public void testNoRedirect() throws Exception {
     Assert.assertEquals(28L, checksum.get("length"));
     Assert.assertEquals("MD5-of-0MD5-of-512CRC32C", checksum.get("algorithm"));
   }
+
+  private void verifyGetServerDefaults(DistributedFileSystem dfs)
+      throws Exception {
+    // Send a request
+    HttpURLConnection conn =
+        sendRequestToHttpFSServer("/", "GETSERVERDEFAULTS", "");
+    // Should return HTTP_OK
+    Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
+    // Verify the response
+    BufferedReader reader =
+        new BufferedReader(new InputStreamReader(conn.getInputStream()));
+    // The response should be a one-line JSON string.
+    String dirLst = reader.readLine();
+    FsServerDefaults dfsDirLst = dfs.getServerDefaults();
+    Assert.assertNotNull(dfsDirLst);
+    Assert.assertEquals(dirLst, JsonUtil.toJsonString(dfsDirLst));
+  }
+
+  @Test
+  @TestDir
+  @TestJetty
+  @TestHdfs
+  public void testGetServerDefaults() throws Exception {
+    createHttpFSServer(false, false);
+    String pathStr1 = "/";
+    Path path1 = new Path(pathStr1);
+    DistributedFileSystem dfs = (DistributedFileSystem) FileSystem
+        .get(path1.toUri(), TestHdfsHelper.getHdfsConf());
+    verifyGetServerDefaults(dfs);
+  }
 }