YARN-7141. Move logging APIs to slf4j in timelineservice after ATSv2 merge. Contributed by Varun Saxena

This commit is contained in:
bibinchundatt 2017-09-01 11:29:16 +05:30
parent 1fbb662c70
commit dcd0bedcc8
6 changed files with 23 additions and 34 deletions

View File

@@ -18,8 +18,6 @@
package org.apache.hadoop.yarn.server.resourcemanager;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.api.records.AppCollectorData;
@@ -40,8 +38,6 @@
* transition.
*/
public class TestRMHATimelineCollectors extends RMHATestBase {
public static final Log LOG = LogFactory
.getLog(TestSubmitApplicationWithRMHA.class);
@Before
@Override

View File

@@ -18,8 +18,6 @@
package org.apache.hadoop.yarn.server.timelineservice.storage.reader;
import com.google.common.base.Preconditions;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Result;
@@ -36,6 +34,8 @@
import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKey;
import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKeyPrefix;
import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Arrays;
@@ -49,7 +49,8 @@
*/
public final class EntityTypeReader extends AbstractTimelineStorageReader {
private static final Log LOG = LogFactory.getLog(EntityTypeReader.class);
private static final Logger LOG =
LoggerFactory.getLogger(EntityTypeReader.class);
private static final EntityTable ENTITY_TABLE = new EntityTable();
public EntityTypeReader(TimelineReaderContext context) {
@@ -85,9 +86,7 @@ public Set<String> readEntityTypes(Configuration hbaseConf,
typeFilterList.addFilter(new FirstKeyOnlyFilter());
typeFilterList.addFilter(new KeyOnlyFilter());
typeFilterList.addFilter(new PageFilter(1));
if (LOG.isDebugEnabled()) {
LOG.debug("FilterList created for scan is - " + typeFilterList);
}
LOG.debug("FilterList created for scan is - {}", typeFilterList);
int counter = 0;
while (true) {
@@ -110,10 +109,7 @@ public Set<String> readEntityTypes(Configuration hbaseConf,
currRowKey = getNextRowKey(prefix.getRowKeyPrefix(), currType);
}
}
if (LOG.isDebugEnabled()) {
LOG.debug("Scanned " + counter + "records for "
+ types.size() + "types");
}
LOG.debug("Scanned {} records for {} types", counter, types.size());
return types;
}

View File

@@ -19,8 +19,6 @@
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -30,6 +28,8 @@
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineHBaseSchemaConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The sub application table has column families:
@@ -103,7 +103,7 @@ public class SubApplicationTable extends BaseTable<SubApplicationTable> {
/** default max number of versions. */
private static final int DEFAULT_METRICS_MAX_VERSIONS = 10000;
private static final Log LOG = LogFactory.getLog(
private static final Logger LOG = LoggerFactory.getLogger(
SubApplicationTable.class);
public SubApplicationTable() {

View File

@@ -19,14 +19,14 @@
package org.apache.hadoop.yarn.server.timelineservice.collector;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import java.util.HashSet;
@@ -46,7 +46,8 @@
@Unstable
public class AppLevelTimelineCollectorWithAgg
extends AppLevelTimelineCollector {
private static final Log LOG = LogFactory.getLog(TimelineCollector.class);
private static final Logger LOG =
LoggerFactory.getLogger(TimelineCollector.class);
private final static int AGGREGATION_EXECUTOR_NUM_THREADS = 1;
private final static int AGGREGATION_EXECUTOR_EXEC_INTERVAL_SECS = 15;
@@ -111,9 +112,7 @@ protected Set<String> getEntityTypesSkipAggregation() {
private class AppLevelAggregator implements Runnable {
private void aggregate() {
if (LOG.isDebugEnabled()) {
LOG.debug("App-level real-time aggregating");
}
LOG.debug("App-level real-time aggregating");
if (!isReadyToAggregate()) {
LOG.warn("App-level collector is not ready, skip aggregation. ");
return;
@@ -136,9 +135,7 @@ private void aggregate() {
} catch (Exception e) {
LOG.error("Error aggregating timeline metrics", e);
}
if (LOG.isDebugEnabled()) {
LOG.debug("App-level real-time aggregation complete");
}
LOG.debug("App-level real-time aggregation complete");
}
@Override

View File

@@ -31,13 +31,13 @@
import javax.ws.rs.core.Response.Status;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.webapp.ForbiddenException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderWebServicesUtils;
/**
@@ -48,8 +48,8 @@ public class TimelineReaderWhitelistAuthorizationFilter implements Filter {
public static final String EMPTY_STRING = "";
private static final Log LOG =
LogFactory.getLog(TimelineReaderWhitelistAuthorizationFilter.class);
private static final Logger LOG =
LoggerFactory.getLogger(TimelineReaderWhitelistAuthorizationFilter.class);
private boolean isWhitelistReadAuthEnabled = false;

View File

@@ -20,8 +20,6 @@
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.io.Text;
@@ -30,6 +28,8 @@
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier;
import org.apache.hadoop.yarn.server.timeline.security.TimelineDelgationTokenSecretManagerService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The service wrapper of {@link TimelineV2DelegationTokenSecretManager}.
@@ -75,8 +75,8 @@ public void cancelToken(Token<TimelineDelegationTokenIdentifier> token,
public static class TimelineV2DelegationTokenSecretManager extends
AbstractDelegationTokenSecretManager<TimelineDelegationTokenIdentifier> {
private static final Log LOG =
LogFactory.getLog(TimelineV2DelegationTokenSecretManager.class);
private static final Logger LOG =
LoggerFactory.getLogger(TimelineV2DelegationTokenSecretManager.class);
/**
* Create a timeline v2 secret manager.