HDFS-15902. Improve the log for HTTPFS server operation. Contributed by Bhavik Patel.

(cherry picked from commit b4d97a8dc7)
This commit is contained in:
Takanobu Asanuma 2021-03-24 17:51:35 +09:00
parent 2d83369f6f
commit 9e1e89b1b9
3 changed files with 12 additions and 15 deletions

View File

@@ -295,10 +295,8 @@ public HttpURLConnection openConnection(URL url, Token token, String doAs)
// delegation token
Credentials creds = UserGroupInformation.getCurrentUser().
getCredentials();
if (LOG.isDebugEnabled()) {
LOG.debug("Token not set, looking for delegation token. Creds:{},"
+ " size:{}", creds.getAllTokens(), creds.numberOfTokens());
}
if (!creds.getAllTokens().isEmpty()) {
dToken = selectDelegationToken(url, creds);
if (dToken != null) {

View File

@@ -284,7 +284,7 @@ public InputStream run() throws Exception {
}
});
} catch (InterruptedException ie) {
LOG.info("Open interrupted.", ie);
LOG.warn("Open interrupted.", ie);
Thread.currentThread().interrupt();
}
Long offset = params.get(OffsetParam.NAME, OffsetParam.class);
@@ -317,7 +317,7 @@ public InputStream run() throws Exception {
enforceRootPath(op.value(), path);
FSOperations.FSHomeDir command = new FSOperations.FSHomeDir();
JSONObject json = fsExecute(user, command);
AUDIT_LOG.info("");
AUDIT_LOG.info("Home Directory for [{}]", user);
response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
break;
}
@@ -339,7 +339,7 @@ public InputStream run() throws Exception {
FSOperations.FSContentSummary command =
new FSOperations.FSContentSummary(path);
Map json = fsExecute(user, command);
AUDIT_LOG.info("[{}]", path);
AUDIT_LOG.info("Content summary for [{}]", path);
response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
break;
}
@@ -347,7 +347,7 @@ public InputStream run() throws Exception {
FSOperations.FSQuotaUsage command =
new FSOperations.FSQuotaUsage(path);
Map json = fsExecute(user, command);
AUDIT_LOG.info("[{}]", path);
AUDIT_LOG.info("Quota Usage for [{}]", path);
response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
break;
}
@@ -650,14 +650,11 @@ public Response post(InputStream is,
break;
}
case CONCAT: {
System.out.println("HTTPFS SERVER CONCAT");
String sources = params.get(SourcesParam.NAME, SourcesParam.class);
FSOperations.FSConcat command =
new FSOperations.FSConcat(path, sources.split(","));
fsExecute(user, command);
AUDIT_LOG.info("[{}]", path);
System.out.println("SENT RESPONSE");
response = Response.ok().build();
break;
}

View File

@@ -194,10 +194,12 @@ protected void init() throws ServiceException {
throw new ServiceException(FileSystemAccessException.ERROR.H11, ex.toString(), ex);
}
if (LOG.isDebugEnabled()) {
LOG.debug("FileSystemAccess FileSystem configuration:");
for (Map.Entry entry : serviceHadoopConf) {
LOG.debug(" {} = {}", entry.getKey(), entry.getValue());
}
}
setRequiredServiceHadoopConf(serviceHadoopConf);
nameNodeWhitelist = toLowerCase(getServiceConfig().getTrimmedStringCollection(NAME_NODE_WHITELIST));
@@ -262,7 +264,7 @@ public void run() {
LOG.warn("Error while purging filesystem, " + ex.toString(), ex);
}
}
LOG.debug("Purged [{}} filesystem instances", count);
LOG.debug("Purged [{}] filesystem instances", count);
}
}