HDFS-6269. NameNode Audit Log should differentiate between webHDFS open and HDFS open. (Eric Payne via jeagles)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1591117 13f79535-47bb-0310-9956-ffa450edef68
Jonathan Turner Eagles 2014-04-29 21:51:14 +00:00
parent 693025a3d4
commit 0532b8bad1
4 changed files with 60 additions and 2 deletions

CHANGES.txt (hadoop-hdfs)

@@ -329,6 +329,9 @@ Release 2.5.0 - UNRELEASED
HDFS-6210. Support GETACLSTATUS operation in WebImageViewer.
(Akira Ajisaka via wheat9)
HDFS-6269. NameNode Audit Log should differentiate between webHDFS open and
HDFS open. (Eric Payne via jeagles)
OPTIMIZATIONS
HDFS-6214. Webhdfs has poor throughput for files >2GB (daryn)

FSNamesystem.java

@@ -7760,6 +7760,8 @@ public void logAuditEvent(boolean succeeded, String userName,
}
sb.append(trackingId);
}
sb.append("\t").append("proto=");
sb.append(NamenodeWebHdfsMethods.isWebHdfsInvocation() ? "webhdfs" : "rpc");
logAuditMessage(sb.toString());
}
}
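The appended field is a tab-separated proto= entry whose value is webhdfs when the request arrived through the webHDFS REST interface and rpc otherwise. As a minimal, purely illustrative sketch (the class and method below are assumptions, not part of this patch), a log consumer could key off the new field with the same kind of pattern the new test adds:

import java.util.regex.Pattern;

// Illustrative helper, not part of this patch: tells webHDFS opens apart
// from RPC opens in NameNode audit log lines using the new proto field.
public class AuditProtoCheck {
  // Same shape as the test's webOpenPattern below.
  private static final Pattern WEB_OPEN =
      Pattern.compile(".*cmd=open.*proto=webhdfs.*");

  static boolean isWebHdfsOpen(String auditLine) {
    return WEB_OPEN.matcher(auditLine).matches();
  }
}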

TestAuditLogs.java

@@ -91,6 +91,9 @@ public TestAuditLogs(boolean useAsyncLog) {
"perm=.*?");
static final Pattern successPattern = Pattern.compile(
".*allowed=true.*");
static final Pattern webOpenPattern = Pattern.compile(
".*cmd=open.*proto=webhdfs.*");
static final String username = "bob";
static final String[] groups = { "group1" };
static final String fileName = "/srcdat";
@@ -240,6 +243,22 @@ public void testAuditWebHdfsDenied() throws Exception {
verifyAuditLogsRepeat(false, 2);
}
/** test that open via webhdfs puts proper entry in audit log */
@Test
public void testAuditWebHdfsOpen() throws Exception {
final Path file = new Path(fnames[0]);
fs.setPermission(file, new FsPermission((short)0644));
fs.setOwner(file, "root", null);
setupAuditLogs();
WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
webfs.open(file);
verifyAuditLogsCheckPattern(true, 3, webOpenPattern);
}
/** Sets up log4j logger for auditlogs */
private void setupAuditLogs() throws IOException {
Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
@@ -303,4 +322,38 @@ private void verifyAuditLogsRepeat(boolean expectSuccess, int ndupe)
reader.close();
}
}
// Ensure audit log has exactly N entries
private void verifyAuditLogsCheckPattern(boolean expectSuccess, int ndupe, Pattern pattern)
throws IOException {
// Turn off the logs
Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
logger.setLevel(Level.OFF);
// Close the appenders and force all logs to be flushed
Enumeration<?> appenders = logger.getAllAppenders();
while (appenders.hasMoreElements()) {
Appender appender = (Appender)appenders.nextElement();
appender.close();
}
BufferedReader reader = new BufferedReader(new FileReader(auditLogFile));
String line = null;
boolean ret = true;
boolean patternMatches = false;
try {
for (int i = 0; i < ndupe; i++) {
line = reader.readLine();
assertNotNull(line);
patternMatches |= pattern.matcher(line).matches();
ret &= successPattern.matcher(line).matches();
}
assertNull("Unexpected event in audit log", reader.readLine());
assertTrue("Expected audit event not found in audit log", patternMatches);
assertTrue("Expected success=" + expectSuccess, ret == expectSuccess);
} finally {
reader.close();
}
}
}

TestFsck.java

@@ -99,13 +99,13 @@ public class TestFsck {
"ugi=.*?\\s" +
"ip=/\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\s" +
"cmd=fsck\\ssrc=\\/\\sdst=null\\s" +
"perm=null\\s" + "proto=.*");
static final Pattern getfileinfoPattern = Pattern.compile(
"allowed=.*?\\s" +
"ugi=.*?\\s" +
"ip=/\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\s" +
"cmd=getfileinfo\\ssrc=\\/\\sdst=null\\s" +
"perm=null\\s" + "proto=.*");
static final Pattern numCorruptBlocksPattern = Pattern.compile(
".*Corrupt blocks:\t\t([0123456789]*).*");