YARN-6916. Moving logging APIs over to slf4j in hadoop-yarn-server-common. Contributed by Bibin A Chundatt and Akira Ajisaka.

Akira Ajisaka 2017-10-04 06:06:36 +09:00
parent 107c177782
commit 4a87773718
10 changed files with 60 additions and 75 deletions
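Every file in the diff follows the same pattern: the commons-logging Log/LogFactory field is replaced by an slf4j Logger obtained from LoggerFactory, and messages built with string concatenation become parameterized {} messages. A minimal sketch of the target form, using a placeholder class name that is not part of this commit:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ExampleService {
  // slf4j logger keyed by the class, mirroring the declarations below
  private static final Logger LOG =
      LoggerFactory.getLogger(ExampleService.class);

  void start(int port) {
    // The {} placeholder is substituted only if INFO is enabled
    LOG.info("Starting on port {}", port);
  }
}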

ServerRMProxy.java

@ -21,17 +21,18 @@
import java.io.IOException;
import java.net.InetSocketAddress;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.client.RMProxy;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import com.google.common.base.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ServerRMProxy<T> extends RMProxy<T> {
private static final Log LOG = LogFactory.getLog(ServerRMProxy.class);
private static final Logger LOG =
LoggerFactory.getLogger(ServerRMProxy.class);
private ServerRMProxy() {
super();

OpportunisticContainerAllocator.java

@ -18,8 +18,6 @@
package org.apache.hadoop.yarn.server.scheduler;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
@ -42,6 +40,8 @@
import org.apache.hadoop.yarn.util.resource.DominantResourceCalculator;
import org.apache.hadoop.yarn.util.resource.ResourceCalculator;
import org.apache.hadoop.yarn.util.resource.Resources;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.InetSocketAddress;
import java.util.ArrayList;
@ -180,8 +180,8 @@ public List<ResourceRequest> getOpportunistic() {
}
}
private static final Log LOG =
LogFactory.getLog(OpportunisticContainerAllocator.class);
private static final Logger LOG =
LoggerFactory.getLogger(OpportunisticContainerAllocator.class);
private static final ResourceCalculator RESOURCE_CALCULATOR =
new DominantResourceCalculator();
@ -255,12 +255,11 @@ private Map<Resource, List<Container>> allocate(long rmIdentifier,
appContext.getContainerIdGenerator(), appContext.getBlacklist(),
appAttId, appContext.getNodeMap(), userName, containers, anyAsk);
if (!containers.isEmpty()) {
LOG.info("Opportunistic allocation requested for ["
+ "priority=" + anyAsk.getPriority()
+ ", allocationRequestId=" + anyAsk.getAllocationRequestId()
+ ", num_containers=" + anyAsk.getNumContainers()
+ ", capability=" + anyAsk.getCapability() + "]"
+ " allocated = " + containers.keySet());
LOG.info("Opportunistic allocation requested for [priority={}, "
+ "allocationRequestId={}, num_containers={}, capability={}] "
+ "allocated = {}", anyAsk.getPriority(),
anyAsk.getAllocationRequestId(), anyAsk.getNumContainers(),
anyAsk.getCapability(), containers.keySet());
}
}
return containers;
@ -286,8 +285,7 @@ private void allocateContainersInternal(long rmIdentifier,
}
if (nodesForScheduling.isEmpty()) {
LOG.warn("No nodes available for allocating opportunistic containers. [" +
"allNodes=" + allNodes + ", " +
"blacklist=" + blacklist + "]");
"allNodes={}, blacklist={}]", allNodes, blacklist);
return;
}
int numAllocated = 0;
@ -305,9 +303,9 @@ private void allocateContainersInternal(long rmIdentifier,
}
cList.add(container);
numAllocated++;
LOG.info("Allocated [" + container.getId() + "] as opportunistic.");
LOG.info("Allocated [{}] as opportunistic.", container.getId());
}
LOG.info("Allocated " + numAllocated + " opportunistic containers.");
LOG.info("Allocated {} opportunistic containers.", numAllocated);
}
private Container buildContainer(long rmIdentifier,

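The allocator's INFO calls above show the concatenation-to-placeholder rewrite with several arguments; slf4j's info(String, Object...) varargs overload matches them to the {} placeholders left to right and defers formatting until the level check passes. A small sketch of the same idiom, with hypothetical parameter names standing in for the allocator's locals:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class PlaceholderExample {
  private static final Logger LOG =
      LoggerFactory.getLogger(PlaceholderExample.class);

  void report(String priority, long allocationRequestId, int numContainers) {
    // Arguments are substituted into the {} placeholders in order.
    LOG.info("Opportunistic allocation requested for [priority={}, "
        + "allocationRequestId={}, num_containers={}]",
        priority, allocationRequestId, numContainers);
  }
}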
BaseContainerTokenSecretManager.java

@ -23,14 +23,14 @@
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
import org.apache.hadoop.yarn.server.api.records.MasterKey;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* SecretManager for ContainerTokens. Extended by both RM and NM and hence is
@ -40,8 +40,8 @@
public class BaseContainerTokenSecretManager extends
SecretManager<ContainerTokenIdentifier> {
private static Log LOG = LogFactory
.getLog(BaseContainerTokenSecretManager.class);
private static final Logger LOG =
LoggerFactory.getLogger(BaseContainerTokenSecretManager.class);
protected int serialNo = new SecureRandom().nextInt();
@ -86,11 +86,9 @@ public MasterKey getCurrentKey() {
@Override
public byte[] createPassword(ContainerTokenIdentifier identifier) {
if (LOG.isDebugEnabled()) {
LOG.debug("Creating password for " + identifier.getContainerID()
+ " for user " + identifier.getUser() + " to be run on NM "
+ identifier.getNmHostAddress());
}
LOG.debug("Creating password for {} for user {} to be run on NM {}",
identifier.getContainerID(), identifier.getUser(),
identifier.getNmHostAddress());
this.readLock.lock();
try {
return createPassword(identifier.getBytes(),
@ -114,11 +112,9 @@ public byte[] retrievePassword(ContainerTokenIdentifier identifier)
protected byte[] retrievePasswordInternal(ContainerTokenIdentifier identifier,
MasterKeyData masterKey)
throws org.apache.hadoop.security.token.SecretManager.InvalidToken {
if (LOG.isDebugEnabled()) {
LOG.debug("Retrieving password for " + identifier.getContainerID()
+ " for user " + identifier.getUser() + " to be run on NM "
+ identifier.getNmHostAddress());
}
LOG.debug("Retrieving password for {} for user {} to be run on NM {}",
identifier.getContainerID(), identifier.getUser(),
identifier.getNmHostAddress());
return createPassword(identifier.getBytes(), masterKey.getSecretKey());
}

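This secret manager (and BaseNMTokenSecretManager below) also drops the explicit isDebugEnabled() guard: with a parameterized call, slf4j renders the format string and the arguments' toString() only when DEBUG is enabled, so the guard is only worth keeping when computing an argument is itself expensive. A minimal sketch of the two cases, with a hypothetical expensiveSummary() standing in for the costly-argument case:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class DebugGuardExample {
  private static final Logger LOG =
      LoggerFactory.getLogger(DebugGuardExample.class);

  void example(Object id, Object user) {
    // No guard needed: the message is only rendered if DEBUG is on.
    LOG.debug("Creating password for {} for user {}", id, user);

    // A guard still helps when an argument is costly to compute up front.
    if (LOG.isDebugEnabled()) {
      LOG.debug("State: {}", expensiveSummary());
    }
  }

  // Hypothetical placeholder for an expensive computation.
  private String expensiveSummary() {
    return "...";
  }
}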
BaseNMTokenSecretManager.java

@ -24,8 +24,6 @@
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.SecurityUtil;
@ -35,12 +33,14 @@
import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.security.NMTokenIdentifier;
import org.apache.hadoop.yarn.server.api.records.MasterKey;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class BaseNMTokenSecretManager extends
SecretManager<NMTokenIdentifier> {
private static Log LOG = LogFactory
.getLog(BaseNMTokenSecretManager.class);
private static final Logger LOG =
LoggerFactory.getLogger(BaseNMTokenSecretManager.class);
protected int serialNo = new SecureRandom().nextInt();
@ -71,12 +71,9 @@ public MasterKey getCurrentKey() {
@Override
protected byte[] createPassword(NMTokenIdentifier identifier) {
if (LOG.isDebugEnabled()) {
LOG.debug("creating password for "
+ identifier.getApplicationAttemptId() + " for user "
+ identifier.getApplicationSubmitter() + " to run on NM "
+ identifier.getNodeId());
}
LOG.debug("creating password for {} for user {} to run on NM {}",
identifier.getApplicationAttemptId(),
identifier.getApplicationSubmitter(), identifier.getNodeId());
readLock.lock();
try {
return createPassword(identifier.getBytes(),
@ -99,12 +96,9 @@ public byte[] retrievePassword(NMTokenIdentifier identifier)
protected byte[] retrivePasswordInternal(NMTokenIdentifier identifier,
MasterKeyData masterKey) {
if (LOG.isDebugEnabled()) {
LOG.debug("creating password for "
+ identifier.getApplicationAttemptId() + " for user "
+ identifier.getApplicationSubmitter() + " to run on NM "
+ identifier.getNodeId());
}
LOG.debug("retriving password for {} for user {} to run on NM {}",
identifier.getApplicationAttemptId(),
identifier.getApplicationSubmitter(), identifier.getNodeId());
return createPassword(identifier.getBytes(), masterKey.getSecretKey());
}
/**

SharedCacheUtil.java

@ -18,13 +18,13 @@
package org.apache.hadoop.yarn.server.sharedcache;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A utility class that contains helper methods for dealing with the internal
@ -34,7 +34,8 @@
@Unstable
public class SharedCacheUtil {
private static final Log LOG = LogFactory.getLog(SharedCacheUtil.class);
private static final Logger LOG =
LoggerFactory.getLogger(SharedCacheUtil.class);
@Private
public static int getCacheDepth(Configuration conf) {
@ -44,9 +45,8 @@ public static int getCacheDepth(Configuration conf) {
if (cacheDepth <= 0) {
LOG.warn("Specified cache depth was less than or equal to zero."
+ " Using default value instead. Default: "
+ YarnConfiguration.DEFAULT_SHARED_CACHE_NESTED_LEVEL
+ ", Specified: " + cacheDepth);
+ " Using default value instead. Default: {}, Specified: {}",
YarnConfiguration.DEFAULT_SHARED_CACHE_NESTED_LEVEL, cacheDepth);
cacheDepth = YarnConfiguration.DEFAULT_SHARED_CACHE_NESTED_LEVEL;
}

AppAttemptBlock.java

@ -26,8 +26,6 @@
import java.util.List;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
@ -46,10 +44,13 @@
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
import org.apache.hadoop.yarn.webapp.view.InfoBlock;
import com.google.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AppAttemptBlock extends HtmlBlock {
private static final Log LOG = LogFactory.getLog(AppAttemptBlock.class);
private static final Logger LOG =
LoggerFactory.getLogger(AppAttemptBlock.class);
protected ApplicationBaseProtocol appBaseProt;
protected ApplicationAttemptId appAttemptId = null;

AppBlock.java

@ -29,8 +29,6 @@
import java.util.Map;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.security.UserGroupInformation;
@ -67,10 +65,12 @@
import org.apache.hadoop.yarn.webapp.view.InfoBlock;
import com.google.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AppBlock extends HtmlBlock {
private static final Log LOG = LogFactory.getLog(AppBlock.class);
private static final Logger LOG = LoggerFactory.getLogger(AppBlock.class);
protected ApplicationBaseProtocol appBaseProt;
protected Configuration conf;
protected ApplicationId appID = null;

AppsBlock.java

@ -34,8 +34,6 @@
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang.math.LongRange;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
@ -51,10 +49,12 @@
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
import com.google.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AppsBlock extends HtmlBlock {
private static final Log LOG = LogFactory.getLog(AppsBlock.class);
private static final Logger LOG = LoggerFactory.getLogger(AppsBlock.class);
protected ApplicationBaseProtocol appBaseProt;
protected EnumSet<YarnApplicationState> reqAppStates;
protected UserGroupInformation callerUGI;

ContainerBlock.java

@ -23,8 +23,6 @@
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
@ -38,10 +36,13 @@
import org.apache.hadoop.yarn.webapp.view.InfoBlock;
import com.google.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ContainerBlock extends HtmlBlock {
private static final Log LOG = LogFactory.getLog(ContainerBlock.class);
private static final Logger LOG =
LoggerFactory.getLogger(ContainerBlock.class);
protected ApplicationBaseProtocol appBaseProt;
@Inject

ErrorsAndWarningsBlock.java

@ -19,11 +19,9 @@
package org.apache.hadoop.yarn.server.webapp;
import com.google.inject.Inject;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.GenericsUtil;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.yarn.security.AdminACLsManager;
import org.apache.hadoop.yarn.util.Log4jWarningErrorMetricsAppender;
@ -60,8 +58,6 @@ public class ErrorsAndWarningsBlock extends HtmlBlock {
@Override
protected void render(Block html) {
Log log = LogFactory.getLog(ErrorsAndWarningsBlock.class);
boolean isAdmin = false;
UserGroupInformation callerUGI = this.getCallerUGI();
@ -78,7 +74,7 @@ protected void render(Block html) {
return;
}
if (log instanceof Log4JLogger) {
if (GenericsUtil.isLog4jLogger(ErrorsAndWarningsBlock.class)) {
html.__(ErrorMetrics.class);
html.__(WarningMetrics.class);
html.div().button().$onclick("reloadPage()").b("View data for the last ")
@ -180,8 +176,7 @@ public static class MetricsBase extends HtmlBlock {
cutoffs.add((now - 43200 * 1000) / 1000);
cutoffs.add((now - 84600 * 1000) / 1000);
Log log = LogFactory.getLog(ErrorsAndWarningsBlock.class);
if (log instanceof Log4JLogger) {
if (GenericsUtil.isLog4jLogger(ErrorsAndWarningsBlock.class)) {
appender =
Log4jWarningErrorMetricsAppender.findAppender();
}
@ -193,8 +188,7 @@ List<Long> getCutoffs() {
@Override
protected void render(Block html) {
Log log = LogFactory.getLog(ErrorsAndWarningsBlock.class);
if (log instanceof Log4JLogger) {
if (GenericsUtil.isLog4jLogger(ErrorsAndWarningsBlock.class)) {
Hamlet.DIV<Hamlet> div =
html.div().$class("metrics").$style("padding-bottom: 20px");
div.h3(tableHeading).table("#metricsoverview").thead()
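ErrorsAndWarningsBlock previously fetched a commons-logging Log and tested it with instanceof Log4JLogger to decide whether the log4j-based metrics appender can be used; with the slf4j facade that test is no longer possible, so the commit switches to GenericsUtil.isLog4jLogger(Class), which reports whether the class's slf4j logger is backed by log4j. A hedged sketch of that lookup (the wrapper class is illustrative only; GenericsUtil and the appender come from the imports shown in the hunk above):

import org.apache.hadoop.util.GenericsUtil;
import org.apache.hadoop.yarn.util.Log4jWarningErrorMetricsAppender;

class AppenderLookupExample {
  // Returns the warning/error metrics appender only when the underlying
  // logging implementation is log4j; otherwise the metrics blocks are skipped.
  static Log4jWarningErrorMetricsAppender lookup() {
    if (GenericsUtil.isLog4jLogger(AppenderLookupExample.class)) {
      return Log4jWarningErrorMetricsAppender.findAppender();
    }
    return null;
  }
}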