diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
index f0a9b0b695..7d792f8dc7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
@@ -32,7 +32,6 @@
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
@@ -246,30 +245,6 @@ public static void skipFully(InputStream in, long len) throws IOException {
}
}
- /**
- * Close the Closeable objects and ignore any {@link Throwable} or
- * null pointers. Must only be used for cleanup in exception handlers.
- *
- * @param log the log to record problems to at debug level. Can be null.
- * @param closeables the objects to close
- * @deprecated use {@link #cleanupWithLogger(Logger, java.io.Closeable...)}
- * instead
- */
- @Deprecated
- public static void cleanup(Log log, java.io.Closeable... closeables) {
- for (java.io.Closeable c : closeables) {
- if (c != null) {
- try {
- c.close();
- } catch(Throwable e) {
- if (log != null && log.isDebugEnabled()) {
- log.debug("Exception in closing " + c, e);
- }
- }
- }
- }
- }
-
/**
* Close the Closeable objects and ignore any {@link Throwable} or
* null pointers. Must only be used for cleanup in exception handlers.
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java
index e2ad16fce2..6785e2f672 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java
@@ -34,10 +34,6 @@
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Jdk14Logger;
-import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -51,6 +47,8 @@
import org.apache.hadoop.util.ServletUtil;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
/**
* Change log level in runtime.
@@ -340,22 +338,14 @@ public void doGet(HttpServletRequest request, HttpServletResponse response
out.println(MARKER
+ "Submitted Class Name: " + logName + "
");
- Log log = LogFactory.getLog(logName);
+ Logger log = Logger.getLogger(logName);
out.println(MARKER
+ "Log Class: " + log.getClass().getName() +"
");
if (level != null) {
out.println(MARKER + "Submitted Level: " + level + "
");
}
- if (log instanceof Log4JLogger) {
- process(((Log4JLogger)log).getLogger(), level, out);
- }
- else if (log instanceof Jdk14Logger) {
- process(((Jdk14Logger)log).getLogger(), level, out);
- }
- else {
- out.println("Sorry, " + log.getClass() + " not supported.
");
- }
+ process(log, level, out);
}
out.println(FORMS);
@@ -371,14 +361,14 @@ else if (log instanceof Jdk14Logger) {
+ ""
+ "";
- private static void process(org.apache.log4j.Logger log, String level,
+ private static void process(Logger log, String level,
PrintWriter out) throws IOException {
if (level != null) {
- if (!level.equalsIgnoreCase(org.apache.log4j.Level.toLevel(level)
+ if (!level.equalsIgnoreCase(Level.toLevel(level)
.toString())) {
out.println(MARKER + "Bad Level : " + level + "
");
} else {
- log.setLevel(org.apache.log4j.Level.toLevel(level));
+ log.setLevel(Level.toLevel(level));
out.println(MARKER + "Setting Level to " + level + " ...
");
}
}
@@ -386,21 +376,5 @@ private static void process(org.apache.log4j.Logger log, String level,
+ "Effective Level: " + log.getEffectiveLevel() + "
");
}
- private static void process(java.util.logging.Logger log, String level,
- PrintWriter out) throws IOException {
- if (level != null) {
- String levelToUpperCase = level.toUpperCase();
- try {
- log.setLevel(java.util.logging.Level.parse(levelToUpperCase));
- } catch (IllegalArgumentException e) {
- out.println(MARKER + "Bad Level : " + level + "
");
- }
- out.println(MARKER + "Setting Level to " + level + " ...
");
- }
-
- java.util.logging.Level lev;
- for(; (lev = log.getLevel()) == null; log = log.getParent());
- out.println(MARKER + "Effective Level: " + lev + "
");
- }
}
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java
index 726a83da25..57f91886f4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java
@@ -21,7 +21,6 @@
import java.util.ArrayList;
import java.util.List;
-import org.apache.commons.logging.Log;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.slf4j.Logger;
@@ -75,9 +74,10 @@ public static Exception stopQuietly(Service service) {
* @param log the log to warn at
* @param service a service; may be null
* @return any exception that was caught; null if none was.
- * @see ServiceOperations#stopQuietly(Service)
+ * @deprecated to be removed with 3.4.0. Use {@link #stopQuietly(Logger, Service)} instead.
*/
- public static Exception stopQuietly(Log log, Service service) {
+ @Deprecated
+ public static Exception stopQuietly(org.apache.commons.logging.Log log, Service service) {
try {
stop(service);
} catch (Exception e) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LogAdapter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LogAdapter.java
deleted file mode 100644
index b2bcbf57ef..0000000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/LogAdapter.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.util;
-
-import org.apache.commons.logging.Log;
-import org.slf4j.Logger;
-
-class LogAdapter {
- private Log LOG;
- private Logger LOGGER;
-
- private LogAdapter(Log LOG) {
- this.LOG = LOG;
- }
-
- private LogAdapter(Logger LOGGER) {
- this.LOGGER = LOGGER;
- }
-
- /**
- * @deprecated use {@link #create(Logger)} instead
- */
- @Deprecated
- public static LogAdapter create(Log LOG) {
- return new LogAdapter(LOG);
- }
-
- public static LogAdapter create(Logger LOGGER) {
- return new LogAdapter(LOGGER);
- }
-
- public void info(String msg) {
- if (LOG != null) {
- LOG.info(msg);
- } else if (LOGGER != null) {
- LOGGER.info(msg);
- }
- }
-
- public void warn(String msg, Throwable t) {
- if (LOG != null) {
- LOG.warn(msg, t);
- } else if (LOGGER != null) {
- LOGGER.warn(msg, t);
- }
- }
-
- public void debug(Throwable t) {
- if (LOG != null) {
- LOG.debug(t);
- } else if (LOGGER != null) {
- LOGGER.debug("", t);
- }
- }
-
- public void error(String msg) {
- if (LOG != null) {
- LOG.error(msg);
- } else if (LOGGER != null) {
- LOGGER.error(msg);
- }
- }
-}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java
index 155c4f9c5f..26bcd4a41c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java
@@ -36,7 +36,6 @@
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
-import org.apache.commons.logging.Log;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
@@ -222,16 +221,18 @@ public synchronized static void printThreadInfo(PrintStream stream,
}
private static long previousLogTime = 0;
-
+
/**
* Log the current thread stacks at INFO level.
* @param log the logger that logs the stack trace
* @param title a descriptive title for the call stacks
- * @param minInterval the minimum time from the last
+ * @param minInterval the minimum time from the last
+ * @deprecated to be removed with 3.4.0. Use {@link #logThreadInfo(Logger, String, long)} instead.
*/
- public static void logThreadInfo(Log log,
- String title,
- long minInterval) {
+ @Deprecated
+ public static void logThreadInfo(org.apache.commons.logging.Log log,
+ String title,
+ long minInterval) {
boolean dumpStack = false;
if (log.isInfoEnabled()) {
synchronized (ReflectionUtils.class) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SignalLogger.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SignalLogger.java
index 605352443e..9f112906b2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SignalLogger.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SignalLogger.java
@@ -18,10 +18,10 @@
package org.apache.hadoop.util;
+import org.slf4j.Logger;
import sun.misc.Signal;
import sun.misc.SignalHandler;
-import org.apache.commons.logging.Log;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -42,11 +42,11 @@ public enum SignalLogger {
* Our signal handler.
*/
private static class Handler implements SignalHandler {
- final private LogAdapter LOG;
+ final private Logger log;
final private SignalHandler prevHandler;
- Handler(String name, LogAdapter LOG) {
- this.LOG = LOG;
+ Handler(String name, Logger log) {
+ this.log = log;
prevHandler = Signal.handle(new Signal(name), this);
}
@@ -57,7 +57,7 @@ private static class Handler implements SignalHandler {
*/
@Override
public void handle(Signal signal) {
- LOG.error("RECEIVED SIGNAL " + signal.getNumber() +
+ log.error("RECEIVED SIGNAL " + signal.getNumber() +
": SIG" + signal.getName());
prevHandler.handle(signal);
}
@@ -66,13 +66,9 @@ public void handle(Signal signal) {
/**
* Register some signal handlers.
*
- * @param LOG The log4j logfile to use in the signal handlers.
+ * @param log The log4j logfile to use in the signal handlers.
*/
- public void register(final Log LOG) {
- register(LogAdapter.create(LOG));
- }
-
- void register(final LogAdapter LOG) {
+ public void register(final Logger log) {
if (registered) {
throw new IllegalStateException("Can't re-install the signal handlers.");
}
@@ -83,15 +79,15 @@ void register(final LogAdapter LOG) {
String separator = "";
for (String signalName : SIGNALS) {
try {
- new Handler(signalName, LOG);
+ new Handler(signalName, log);
bld.append(separator)
.append(signalName);
separator = ", ";
} catch (Exception e) {
- LOG.debug(e);
+ log.debug("Error: ", e);
}
}
bld.append("]");
- LOG.info(bld.toString());
+ log.info(bld.toString());
}
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
index b620ba7322..3debd36da7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
@@ -740,42 +740,26 @@ public static String toStartupShutdownString(String prefix, String[] msg) {
* Print a log message for starting up and shutting down
* @param clazz the class of the server
* @param args arguments
- * @param LOG the target log object
+ * @param log the target log object
*/
public static void startupShutdownMessage(Class<?> clazz, String[] args,
- final org.apache.commons.logging.Log LOG) {
- startupShutdownMessage(clazz, args, LogAdapter.create(LOG));
- }
-
- /**
- * Print a log message for starting up and shutting down
- * @param clazz the class of the server
- * @param args arguments
- * @param LOG the target log object
- */
- public static void startupShutdownMessage(Class<?> clazz, String[] args,
- final org.slf4j.Logger LOG) {
- startupShutdownMessage(clazz, args, LogAdapter.create(LOG));
- }
-
- static void startupShutdownMessage(Class<?> clazz, String[] args,
- final LogAdapter LOG) {
+ final org.slf4j.Logger log) {
final String hostname = NetUtils.getHostname();
final String classname = clazz.getSimpleName();
- LOG.info(createStartupShutdownMessage(classname, hostname, args));
+ log.info(createStartupShutdownMessage(classname, hostname, args));
if (SystemUtils.IS_OS_UNIX) {
try {
- SignalLogger.INSTANCE.register(LOG);
+ SignalLogger.INSTANCE.register(log);
} catch (Throwable t) {
- LOG.warn("failed to register any UNIX signal loggers: ", t);
+ log.warn("failed to register any UNIX signal loggers: ", t);
}
}
ShutdownHookManager.get().addShutdownHook(
new Runnable() {
@Override
public void run() {
- LOG.info(toStartupShutdownString("SHUTDOWN_MSG: ", new String[]{
+ log.info(toStartupShutdownString("SHUTDOWN_MSG: ", new String[]{
"Shutting down " + classname + " at " + hostname}));
LogManager.shutdown();
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemLocalFileSystem.java
index adc5db87e7..d88730b005 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemLocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemLocalFileSystem.java
@@ -25,8 +25,6 @@
import java.io.IOException;
import java.net.URI;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -39,7 +37,8 @@
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
*
@@ -51,8 +50,8 @@
*/
public class TestViewFileSystemLocalFileSystem extends ViewFileSystemBaseTest {
- private static final Log LOG =
- LogFactory.getLog(TestViewFileSystemLocalFileSystem.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestViewFileSystemLocalFileSystem.class);
@Override
@Before
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemOverloadSchemeLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemOverloadSchemeLocalFileSystem.java
index ac7a1a6899..1e86a91c14 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemOverloadSchemeLocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFileSystemOverloadSchemeLocalFileSystem.java
@@ -21,8 +21,6 @@
import java.net.URI;
import java.net.URISyntaxException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -35,6 +33,8 @@
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
*
@@ -43,8 +43,8 @@
*/
public class TestViewFileSystemOverloadSchemeLocalFileSystem {
private static final String FILE = "file";
- private static final Log LOG =
- LogFactory.getLog(TestViewFileSystemOverloadSchemeLocalFileSystem.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(TestViewFileSystemOverloadSchemeLocalFileSystem.class);
private FileSystem fsTarget;
private Configuration conf;
private Path targetTestRoot;
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpnego.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpnego.java
index ea7c8cd4e6..dfcd98801d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpnego.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpnego.java
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.http;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.minikdc.MiniKdc;
@@ -53,8 +51,6 @@
*/
public class TestHttpServerWithSpnego {
- static final Log LOG = LogFactory.getLog(TestHttpServerWithSpnego.class);
-
private static final String SECRET_STR = "secret";
private static final String HTTP_USER = "HTTP";
private static final String PREFIX = "hadoop.http.authentication.";
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLog4Json.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLog4Json.java
index d41a58782d..519f14b7fd 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLog4Json.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLog4Json.java
@@ -22,8 +22,6 @@
import com.fasterxml.jackson.databind.node.ContainerNode;
import org.junit.Test;
import static org.junit.Assert.*;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.Time;
import org.apache.log4j.Appender;
import org.apache.log4j.Category;
@@ -44,8 +42,6 @@
public class TestLog4Json {
- private static final Log LOG = LogFactory.getLog(TestLog4Json.class);
-
@Test
public void testConstruction() throws Throwable {
Log4Json l4j = new Log4Json();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogLevel.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogLevel.java
index 3af70e9554..636c03a16d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogLevel.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/log/TestLogLevel.java
@@ -22,9 +22,6 @@
import java.net.URI;
import java.util.concurrent.Callable;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -70,8 +67,7 @@ public class TestLogLevel extends KerberosSecurityTestcase {
private final String logName = TestLogLevel.class.getName();
private String clientPrincipal;
private String serverPrincipal;
- private final Log testlog = LogFactory.getLog(logName);
- private final Logger log = ((Log4JLogger)testlog).getLogger();
+ private final Logger log = Logger.getLogger(logName);
private final static String PRINCIPAL = "loglevel.principal";
private final static String KEYTAB = "loglevel.keytab";
private static final String PREFIX = "hadoop.http.authentication.";
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java
index 61d5938494..e54971e491 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java
@@ -49,8 +49,6 @@
import java.util.regex.Pattern;
import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
@@ -117,29 +115,11 @@ public abstract class GenericTestUtils {
public static final String ERROR_INVALID_ARGUMENT =
"Total wait time should be greater than check interval time";
- /**
- * @deprecated use {@link #disableLog(org.slf4j.Logger)} instead
- */
- @Deprecated
- @SuppressWarnings("unchecked")
- public static void disableLog(Log log) {
- // We expect that commons-logging is a wrapper around Log4j.
- disableLog((Log4JLogger) log);
- }
-
@Deprecated
public static Logger toLog4j(org.slf4j.Logger logger) {
return LogManager.getLogger(logger.getName());
}
- /**
- * @deprecated use {@link #disableLog(org.slf4j.Logger)} instead
- */
- @Deprecated
- public static void disableLog(Log4JLogger log) {
- log.getLogger().setLevel(Level.OFF);
- }
-
/**
* @deprecated use {@link #disableLog(org.slf4j.Logger)} instead
*/
@@ -152,45 +132,6 @@ public static void disableLog(org.slf4j.Logger logger) {
disableLog(toLog4j(logger));
}
- /**
- * @deprecated
- * use {@link #setLogLevel(org.slf4j.Logger, org.slf4j.event.Level)} instead
- */
- @Deprecated
- @SuppressWarnings("unchecked")
- public static void setLogLevel(Log log, Level level) {
- // We expect that commons-logging is a wrapper around Log4j.
- setLogLevel((Log4JLogger) log, level);
- }
-
- /**
- * A helper used in log4j2 migration to accept legacy
- * org.apache.commons.logging apis.
- *
- * And will be removed after migration.
- *
- * @param log a log
- * @param level level to be set
- */
- @Deprecated
- public static void setLogLevel(Log log, org.slf4j.event.Level level) {
- setLogLevel(log, Level.toLevel(level.toString()));
- }
-
- /**
- * @deprecated
- * use {@link #setLogLevel(org.slf4j.Logger, org.slf4j.event.Level)} instead
- */
- @Deprecated
- public static void setLogLevel(Log4JLogger log, Level level) {
- log.getLogger().setLevel(level);
- }
-
- /**
- * @deprecated
- * use {@link #setLogLevel(org.slf4j.Logger, org.slf4j.event.Level)} instead
- */
- @Deprecated
public static void setLogLevel(Logger logger, Level level) {
logger.setLevel(level);
}
@@ -535,13 +476,15 @@ public static class LogCapturer {
private WriterAppender appender;
private Logger logger;
- public static LogCapturer captureLogs(Log l) {
- Logger logger = ((Log4JLogger)l).getLogger();
- return new LogCapturer(logger);
+ public static LogCapturer captureLogs(org.slf4j.Logger logger) {
+ if (logger.getName().equals("root")) {
+ return new LogCapturer(org.apache.log4j.Logger.getRootLogger());
+ }
+ return new LogCapturer(toLog4j(logger));
}
- public static LogCapturer captureLogs(org.slf4j.Logger logger) {
- return new LogCapturer(toLog4j(logger));
+ public static LogCapturer captureLogs(Logger logger) {
+ return new LogCapturer(logger);
}
private LogCapturer(Logger logger) {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestJarFinder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestJarFinder.java
index e58fb3bffd..109cb191b4 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestJarFinder.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestJarFinder.java
@@ -18,10 +18,10 @@
package org.apache.hadoop.util;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Assert;
import org.junit.Test;
+import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
@@ -43,7 +43,7 @@ public class TestJarFinder {
public void testJar() throws Exception {
//picking a class that is for sure in a JAR in the classpath
- String jar = JarFinder.getJar(LogFactory.class);
+ String jar = JarFinder.getJar(LoggerFactory.class);
Assert.assertTrue(new File(jar).exists());
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSignalLogger.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSignalLogger.java
index b61cebc0a6..f6b272e1c6 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSignalLogger.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSignalLogger.java
@@ -32,9 +32,9 @@ public class TestSignalLogger {
@Test(timeout=60000)
public void testInstall() throws Exception {
Assume.assumeTrue(SystemUtils.IS_OS_UNIX);
- SignalLogger.INSTANCE.register(LogAdapter.create(LOG));
+ SignalLogger.INSTANCE.register(LOG);
try {
- SignalLogger.INSTANCE.register(LogAdapter.create(LOG));
+ SignalLogger.INSTANCE.register(LOG);
Assert.fail("expected IllegalStateException from double registration");
} catch (IllegalStateException e) {
// fall through
diff --git a/hadoop-common-project/hadoop-nfs/pom.xml b/hadoop-common-project/hadoop-nfs/pom.xml
index 33d8b3710c..1da5a25ad1 100644
--- a/hadoop-common-project/hadoop-nfs/pom.xml
+++ b/hadoop-common-project/hadoop-nfs/pom.xml
@@ -63,11 +63,6 @@
<artifactId>mockito-core</artifactId>
<scope>test</scope>
- <dependency>