HDFS-12804. Use slf4j instead of log4j in FSEditLog. Contributed by Mukul Kumar Singh.

This commit is contained in:
Chen Liang 2017-11-20 12:49:53 -08:00
parent 0d781dd03b
commit 60fc2a1388
5 changed files with 24 additions and 25 deletions

View File

@@ -29,8 +29,6 @@
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
@@ -114,6 +112,8 @@
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* FSEditLog maintains a log of the namespace modifications.
@@ -122,9 +122,7 @@
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class FSEditLog implements LogsPurgeable {
public static final Log LOG = LogFactory.getLog(FSEditLog.class);
public static final Logger LOG = LoggerFactory.getLogger(FSEditLog.class);
/**
* State machine for edit log.
*
@@ -329,7 +327,8 @@ synchronized void openForWrite(int layoutVersion) throws IOException {
String error = String.format("Cannot start writing at txid %s " +
"when there is a stream available for read: %s",
segmentTxId, streams.get(0));
IOUtils.cleanup(LOG, streams.toArray(new EditLogInputStream[0]));
IOUtils.cleanupWithLogger(LOG,
streams.toArray(new EditLogInputStream[0]));
throw new IllegalStateException(error);
}
@@ -689,9 +688,9 @@ protected void logSync(long mytxid) {
"Could not sync enough journals to persistent storage " +
"due to " + e.getMessage() + ". " +
"Unsynced transactions: " + (txid - synctxid);
LOG.fatal(msg, new Exception());
LOG.error(msg, new Exception());
synchronized(journalSetLock) {
IOUtils.cleanup(LOG, journalSet);
IOUtils.cleanupWithLogger(LOG, journalSet);
}
terminate(1, msg);
}
@@ -715,9 +714,9 @@ protected void logSync(long mytxid) {
final String msg =
"Could not sync enough journals to persistent storage. "
+ "Unsynced transactions: " + (txid - synctxid);
LOG.fatal(msg, new Exception());
LOG.error(msg, new Exception());
synchronized(journalSetLock) {
IOUtils.cleanup(LOG, journalSet);
IOUtils.cleanupWithLogger(LOG, journalSet);
}
terminate(1, msg);
}
@@ -772,7 +771,7 @@ private void printStatistics(boolean force) {
buf.append(editLogStream.getNumSync());
buf.append(" SyncTimes(ms): ");
buf.append(journalSet.getSyncTimes());
LOG.info(buf);
LOG.info(buf.toString());
}
/** Record the RPC IDs if necessary */
@@ -1711,7 +1710,7 @@ public Collection<EditLogInputStream> selectInputStreams(long fromTxId,
if (recovery != null) {
// If recovery mode is enabled, continue loading even if we know we
// can't load up to toAtLeastTxId.
LOG.error(e);
LOG.error("Exception while selecting input streams", e);
} else {
closeAllStreams(streams);
throw e;

View File

@@ -130,7 +130,7 @@ public static Configuration getConf() {
/**
* A garbage mkdir op which is used for testing
* {@link EditLogFileInputStream#scanEditLog(File)}
* {@link EditLogFileInputStream#scanEditLog(File, long, boolean)}
*/
public static class GarbageMkdirOp extends FSEditLogOp {
public GarbageMkdirOp() {
@@ -1141,7 +1141,7 @@ static class AbortSpec {
/**
* Construct the failure specification.
* @param roll number to fail after. e.g. 1 to fail after the first roll
* @param loginfo index of journal to fail.
* @param logindex index of journal to fail.
*/
AbortSpec(int roll, int logindex) {
this.roll = roll;

View File

@@ -27,8 +27,6 @@
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_EDIT_LOG_AUTOROLL_CHECK_INTERVAL_MS;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_EDIT_LOG_AUTOROLL_MULTIPLIER_THRESHOLD;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -38,7 +36,9 @@
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem.NameNodeEditLogRoller;
import org.apache.hadoop.hdfs.server.namenode.ha.HATestUtil;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.log4j.Level;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.event.Level;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@@ -51,7 +51,7 @@
@RunWith(Parameterized.class)
public class TestEditLogAutoroll {
static {
GenericTestUtils.setLogLevel(FSEditLog.LOG, Level.ALL);
GenericTestUtils.setLogLevel(FSEditLog.LOG, Level.DEBUG);
}
@Parameters
@@ -74,7 +74,7 @@ public TestEditLogAutoroll(Boolean async) {
private FSEditLog editLog;
private final Random random = new Random();
private static final Log LOG = LogFactory.getLog(TestEditLog.class);
public static final Logger LOG = LoggerFactory.getLogger(FSEditLog.class);
@Before
public void setUp() throws Exception {

View File

@@ -52,7 +52,7 @@
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Time;
import org.apache.log4j.Level;
import org.slf4j.event.Level;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@@ -67,7 +67,7 @@
@RunWith(Parameterized.class)
public class TestEditLogRace {
static {
GenericTestUtils.setLogLevel(FSEditLog.LOG, Level.ALL);
GenericTestUtils.setLogLevel(FSEditLog.LOG, Level.DEBUG);
}
@Parameters

View File

@@ -60,7 +60,7 @@
@RunWith(Parameterized.class)
public class TestEditLogTailer {
static {
GenericTestUtils.setLogLevel(FSEditLog.LOG, Level.ALL);
GenericTestUtils.setLogLevel(FSEditLog.LOG, org.slf4j.event.Level.DEBUG);
}
@Parameters
@@ -82,9 +82,9 @@ public TestEditLogTailer(Boolean async) {
static final long NN_LAG_TIMEOUT = 10 * 1000;
static {
GenericTestUtils.setLogLevel(FSImage.LOG, Level.ALL);
GenericTestUtils.setLogLevel(FSEditLog.LOG, Level.ALL);
GenericTestUtils.setLogLevel(EditLogTailer.LOG, Level.ALL);
GenericTestUtils.setLogLevel(FSImage.LOG, Level.DEBUG);
GenericTestUtils.setLogLevel(FSEditLog.LOG, org.slf4j.event.Level.DEBUG);
GenericTestUtils.setLogLevel(EditLogTailer.LOG, Level.DEBUG);
}
private static Configuration getConf() {