HDFS-6404. HttpFS should use a 000 umask for mkdir and create operations. (yoderme via tucu)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1598668 13f79535-47bb-0310-9956-ffa450edef68
parent 66598697a6
commit e4c06854b4
@@ -23,6 +23,7 @@
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.lib.server.BaseService;
 import org.apache.hadoop.lib.server.ServiceException;
 import org.apache.hadoop.lib.service.FileSystemAccess;
@@ -395,6 +396,10 @@ public Configuration getFileSystemConfiguration() {
     Configuration conf = new Configuration(true);
     ConfigurationUtils.copy(serviceHadoopConf, conf);
     conf.setBoolean(FILE_SYSTEM_SERVICE_CREATED, true);
+
+    // Force-clear server-side umask to make HttpFS match WebHDFS behavior
+    conf.set(FsPermission.UMASK_LABEL, "000");
+
     return conf;
   }
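For context on the hunk above: Hadoop applies the configured umask to whatever permission the client requests, so a non-zero umask on the HttpFS server would silently narrow permissions supplied through the REST API. Below is a minimal, standalone sketch (not part of this commit; the UmaskDemo class name is illustrative) showing, via the standard FsPermission API, how a 022 umask narrows a requested 777 while the forced 000 umask leaves it untouched, which is the WebHDFS behavior this change matches.

// Minimal sketch (not part of this commit): how Hadoop's client-side umask
// masks a requested permission. Class name UmaskDemo is illustrative only.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.FsPermission;

public class UmaskDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration(false);
    FsPermission requested = new FsPermission((short) 0777);

    // Default-style 022 umask: a requested 777 is narrowed to 755.
    conf.set(FsPermission.UMASK_LABEL, "022");
    System.out.println(requested.applyUMask(FsPermission.getUMask(conf))); // rwxr-xr-x

    // Forced 000 umask (as in the hunk above): requested bits are kept as-is.
    conf.set(FsPermission.UMASK_LABEL, "000");
    System.out.println(requested.applyUMask(FsPermission.getUMask(conf))); // rwxrwxrwx
  }
}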
@@ -231,6 +231,105 @@ public void testGlobFilter() throws Exception {
     reader.close();
   }
 
+  /**
+   * Talks to the http interface to create a file.
+   *
+   * @param filename The file to create
+   * @param perms The permission field, if any (may be null)
+   * @throws Exception
+   */
+  private void createWithHttp ( String filename, String perms )
+          throws Exception {
+    String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
+    String pathOps;
+    if ( perms == null ) {
+      pathOps = MessageFormat.format(
+              "/webhdfs/v1/{0}?user.name={1}&op=CREATE",
+              filename, user);
+    } else {
+      pathOps = MessageFormat.format(
+              "/webhdfs/v1/{0}?user.name={1}&permission={2}&op=CREATE",
+              filename, user, perms);
+    }
+    URL url = new URL(TestJettyHelper.getJettyURL(), pathOps);
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    conn.addRequestProperty("Content-Type", "application/octet-stream");
+    conn.setRequestMethod("PUT");
+    conn.connect();
+    Assert.assertEquals(HttpURLConnection.HTTP_CREATED, conn.getResponseCode());
+  }
+
+  /**
+   * Talks to the http interface to get the json output of the GETFILESTATUS
+   * command on the given file.
+   *
+   * @param filename The file to query.
+   * @return A string containing the JSON output describing the file.
+   * @throws Exception
+   */
+  private String getFileStatus ( String filename ) throws Exception {
+    String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
+    String pathOps = MessageFormat.format(
+            "/webhdfs/v1/{0}?user.name={1}&op=GETFILESTATUS",
+            filename, user);
+    URL url = new URL(TestJettyHelper.getJettyURL(), pathOps);
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    conn.connect();
+    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+
+    BufferedReader reader =
+            new BufferedReader(new InputStreamReader(conn.getInputStream()));
+
+    return reader.readLine();
+  }
+
+  /**
+   * Given the JSON output from the GETFILESTATUS call, return the
+   * 'permission' value.
+   *
+   * @param statusJson JSON from GETFILESTATUS
+   * @return The value of 'permission' in statusJson
+   * @throws Exception
+   */
+  private String getPerms ( String statusJson ) throws Exception {
+    JSONParser parser = new JSONParser();
+    JSONObject jsonObject = (JSONObject) parser.parse(statusJson);
+    JSONObject details = (JSONObject) jsonObject.get("FileStatus");
+    return (String) details.get("permission");
+  }
+
+  /**
+   * Validate that files are created with 755 permissions when no
+   * 'permissions' attribute is specified, and when 'permissions'
+   * is specified, that value is honored.
+   */
+  @Test
+  @TestDir
+  @TestJetty
+  @TestHdfs
+  public void testPerms() throws Exception {
+    createHttpFSServer(false);
+
+    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    fs.mkdirs(new Path("/perm"));
+
+    createWithHttp("/perm/none", null);
+    String statusJson = getFileStatus("/perm/none");
+    Assert.assertTrue("755".equals(getPerms(statusJson)));
+
+    createWithHttp("/perm/p-777", "777");
+    statusJson = getFileStatus("/perm/p-777");
+    Assert.assertTrue("777".equals(getPerms(statusJson)));
+
+    createWithHttp("/perm/p-654", "654");
+    statusJson = getFileStatus("/perm/p-654");
+    Assert.assertTrue("654".equals(getPerms(statusJson)));
+
+    createWithHttp("/perm/p-321", "321");
+    statusJson = getFileStatus("/perm/p-321");
+    Assert.assertTrue("321".equals(getPerms(statusJson)));
+  }
+
   @Test
   @TestDir
   @TestJetty
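For reference, the getPerms() helper in the hunk above assumes the JSON shape that WebHDFS and HttpFS return for GETFILESTATUS: a top-level "FileStatus" object containing a "permission" string. The sketch below (not part of this commit; the sample payload and its field values are illustrative, abridged stand-ins) parses such a response with the same json-simple API the test uses.

// Sketch of the GETFILESTATUS JSON shape that getPerms() navigates.
// The sample payload below is abridged and its values are illustrative.
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;

public class FileStatusJsonDemo {
  public static void main(String[] args) throws Exception {
    String sample = "{\"FileStatus\":{\"type\":\"FILE\",\"permission\":\"654\","
        + "\"owner\":\"hdfs\",\"group\":\"supergroup\",\"length\":0}}";
    JSONObject status =
        (JSONObject) ((JSONObject) new JSONParser().parse(sample)).get("FileStatus");
    System.out.println(status.get("permission"));  // prints 654
  }
}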
@@ -620,6 +620,9 @@ Release 2.5.0 - UNRELEASED
 
     HDFS-6462. NFS: fsstat request fails with the secure hdfs (brandonli)
 
+    HDFS-6404. HttpFS should use a 000 umask for mkdir and create
+    operations. (yoderme via tucu)
+
 Release 2.4.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES